From baa55ab6ae14ae23864f650cec2bb180b36fdabe Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Sat, 25 May 2019 04:38:01 +0300 Subject: [PATCH 01/49] Feature: do dev environment via makefile (#17136) Simplifies dev environment creation. I also planing to utilize this logic for the LDAP benchmarking --- Makefile | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 8cfc7ce26812..9f56e3da8f1a 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ -include local/Makefile -.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go test-go test-js test run clean gosec revive +.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go test-go test-js test run clean gosec revive devenv devenv-down GO := GO111MODULE=on go GO_FILES := ./pkg/... @@ -84,6 +84,19 @@ revive: scripts/go/bin/revive -config ./scripts/go/configs/revive.toml \ $(GO_FILES) +# create docker-compose file with provided sources and start them +# example: make devenv sources=postgres,openldap +devenv: devenv-down + $(eval targets := $(shell echo '$(sources)' | tr "," " ")) + + @cd devenv; \ + ./create_docker_compose.sh $(targets); \ + docker-compose up -d + +# drop down the envs +devenv-down: + @cd devenv; docker-compose down; + # TODO recheck the rules and leave only necessary exclusions gosec: scripts/go/bin/gosec @scripts/go/bin/gosec -quiet \ From 07d37b4a929cb0d35376d0294cb268d88cb1da73 Mon Sep 17 00:00:00 2001 From: olcbean Date: Sat, 25 May 2019 20:41:25 +0200 Subject: [PATCH 02/49] fix typo in basic_concepts.md (#17285) --- docs/sources/guides/basic_concepts.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/sources/guides/basic_concepts.md b/docs/sources/guides/basic_concepts.md index d3f8dd0ba633..3c610e21ce23 100644 --- a/docs/sources/guides/basic_concepts.md +++ 
b/docs/sources/guides/basic_concepts.md @@ -66,7 +66,7 @@ There are a wide variety of styling and formatting options that each Panel expos Panels can be dragged and dropped and rearranged on the Dashboard. They can also be resized. -There are currently four Panel types: [Graph](/reference/graph/), [Singlestat](/reference/singlestat/), [Dashlist](/reference/dashlist/), [Table](/reference/table_panel/),and [Text](/reference/text/). +There are currently five Panel types: [Graph](/reference/graph/), [Singlestat](/reference/singlestat/), [Dashlist](/reference/dashlist/), [Table](/reference/table_panel/), and [Text](/reference/text/). Panels like the [Graph](/reference/graph/) panel allow you to graph as many metrics and series as you want. Other panels like [Singlestat](/reference/singlestat/) require a reduction of a single query into a single number. [Dashlist](/reference/dashlist/) and [Text](/reference/text/) are special panels that do not connect to any Data Source. From df6a4914c4eca3ae87068df3ca29ad9eb101f03c Mon Sep 17 00:00:00 2001 From: Tim Butler Date: Mon, 27 May 2019 15:11:30 +1000 Subject: [PATCH 03/49] Tech: Update jQuery to 3.4.1 (#17290) Fixes #17289 Special notes for your reviewer: Updates jQuery to 3.4.1 (from 3.4.0) to fix the jQuery bug: https://blog.jquery.com/2019/05/01/jquery-3-4-1-triggering-focus-events-in-ie-and-finding-root-elements-in-ios-10/ --- package.json | 2 +- yarn.lock | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package.json b/package.json index e2153dcf3419..e084f705b024 100644 --- a/package.json +++ b/package.json @@ -206,7 +206,7 @@ "fast-text-encoding": "^1.0.0", "file-saver": "1.3.8", "immutable": "3.8.2", - "jquery": "3.4.0", + "jquery": "3.4.1", "lodash": "4.17.11", "moment": "2.24.0", "mousetrap": "1.6.3", diff --git a/yarn.lock b/yarn.lock index e092d5045691..f48cba0ec948 100644 --- a/yarn.lock +++ b/yarn.lock @@ -10138,10 +10138,10 @@ jest@24.8.0: import-local "^2.0.0" jest-cli "^24.8.0" 
-jquery@3.4.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.0.tgz#8de513fa0fa4b2c7d2e48a530e26f0596936efdf" - integrity sha512-ggRCXln9zEqv6OqAGXFEcshF5dSBvCkzj6Gm2gzuR5fWawaX8t7cxKVkkygKODrDAzKdoYw3l/e3pm3vlT4IbQ== +jquery@3.4.1: + version "3.4.1" + resolved "https://registry.yarnpkg.com/jquery/-/jquery-3.4.1.tgz#714f1f8d9dde4bdfa55764ba37ef214630d80ef2" + integrity sha512-36+AdBzCL+y6qjw5Tx7HgzeGCzC81MDDgaUP8ld2zhx58HdqXGoBd+tHdrBMiyjGQs0Hxs/MLZTu/eHNJJuWPw== js-base64@^2.1.8, js-base64@^2.1.9: version "2.5.1" From 5884e235fcf8cdbb4c42a94bdafe19881832bc54 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 27 May 2019 09:24:15 +0200 Subject: [PATCH 04/49] database: retry transaction if sqlite returns database is locked error (#17276) Adds an additional sqlite error code 5 (SQLITE_BUSY) to the transaction retry handler to add retries when sqlite returns database is locked error. More info: https://www.sqlite.org/rescode.html#busy Ref #17247 #16638 --- pkg/services/sqlstore/transactions.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/services/sqlstore/transactions.go b/pkg/services/sqlstore/transactions.go index a0f648043399..9b744fd32884 100644 --- a/pkg/services/sqlstore/transactions.go +++ b/pkg/services/sqlstore/transactions.go @@ -40,12 +40,12 @@ func inTransactionWithRetryCtx(ctx context.Context, engine *xorm.Engine, callbac err = callback(sess) - // special handling of database locked errors for sqlite, then we can retry 3 times + // special handling of database locked errors for sqlite, then we can retry 5 times if sqlError, ok := err.(sqlite3.Error); ok && retry < 5 { - if sqlError.Code == sqlite3.ErrLocked { + if sqlError.Code == sqlite3.ErrLocked || sqlError.Code == sqlite3.ErrBusy { sess.Rollback() time.Sleep(time.Millisecond * time.Duration(10)) - sqlog.Info("Database table locked, sleeping then retrying", "retry", retry) + sqlog.Info("Database locked, sleeping then retrying", 
"error", err, "retry", retry) return inTransactionWithRetry(callback, retry+1) } } From de92c360a1ad2882f8cc7acac0d2ebbe7847c257 Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Mon, 27 May 2019 10:36:49 +0300 Subject: [PATCH 05/49] LDAP: reduce API and allow its extension (#17209) * Removes Add/Remove methods * Publicise necessary fields and methods so we could extend it * Publicise mock API * More comments and additional simplifications * Sync with master Still having low coverage :/ - should be addressed in #17208 --- pkg/services/ldap/ldap.go | 159 ++++++++------------- pkg/services/ldap/ldap_helpers_test.go | 91 ++---------- pkg/services/ldap/ldap_login_test.go | 74 +++++----- pkg/services/ldap/ldap_test.go | 183 ++++++++++--------------- pkg/services/ldap/test.go | 58 ++++---- pkg/services/multildap/multildap.go | 52 ------- 6 files changed, 209 insertions(+), 408 deletions(-) diff --git a/pkg/services/ldap/ldap.go b/pkg/services/ldap/ldap.go index 9fa680d1e19b..c64533d34922 100644 --- a/pkg/services/ldap/ldap.go +++ b/pkg/services/ldap/ldap.go @@ -29,18 +29,17 @@ type IConnection interface { // IServer is interface for LDAP authorization type IServer interface { Login(*models.LoginUserQuery) (*models.ExternalUserInfo, error) - Add(string, map[string][]string) error - Remove(string) error Users([]string) ([]*models.ExternalUserInfo, error) ExtractGrafanaUser(*UserInfo) (*models.ExternalUserInfo, error) + InitialBind(string, string) error Dial() error Close() } // Server is basic struct of LDAP authorization type Server struct { - config *ServerConfig - connection IConnection + Config *ServerConfig + Connection IConnection requireSecondBind bool log log.Logger } @@ -49,7 +48,6 @@ var ( // ErrInvalidCredentials is returned if username and password do not match ErrInvalidCredentials = errors.New("Invalid Username or Password") - ErrLDAPUserNotFound = errors.New("LDAP user not found") ) var dial = func(network, addr string) (IConnection, error) { @@ -59,7 
+57,7 @@ var dial = func(network, addr string) (IConnection, error) { // New creates the new LDAP auth func New(config *ServerConfig) IServer { return &Server{ - config: config, + Config: config, log: log.New("ldap"), } } @@ -68,9 +66,9 @@ func New(config *ServerConfig) IServer { func (server *Server) Dial() error { var err error var certPool *x509.CertPool - if server.config.RootCACert != "" { + if server.Config.RootCACert != "" { certPool = x509.NewCertPool() - for _, caCertFile := range strings.Split(server.config.RootCACert, " ") { + for _, caCertFile := range strings.Split(server.Config.RootCACert, " ") { pem, err := ioutil.ReadFile(caCertFile) if err != nil { return err @@ -81,35 +79,35 @@ func (server *Server) Dial() error { } } var clientCert tls.Certificate - if server.config.ClientCert != "" && server.config.ClientKey != "" { - clientCert, err = tls.LoadX509KeyPair(server.config.ClientCert, server.config.ClientKey) + if server.Config.ClientCert != "" && server.Config.ClientKey != "" { + clientCert, err = tls.LoadX509KeyPair(server.Config.ClientCert, server.Config.ClientKey) if err != nil { return err } } - for _, host := range strings.Split(server.config.Host, " ") { - address := fmt.Sprintf("%s:%d", host, server.config.Port) - if server.config.UseSSL { + for _, host := range strings.Split(server.Config.Host, " ") { + address := fmt.Sprintf("%s:%d", host, server.Config.Port) + if server.Config.UseSSL { tlsCfg := &tls.Config{ - InsecureSkipVerify: server.config.SkipVerifySSL, + InsecureSkipVerify: server.Config.SkipVerifySSL, ServerName: host, RootCAs: certPool, } if len(clientCert.Certificate) > 0 { tlsCfg.Certificates = append(tlsCfg.Certificates, clientCert) } - if server.config.StartTLS { - server.connection, err = dial("tcp", address) + if server.Config.StartTLS { + server.Connection, err = dial("tcp", address) if err == nil { - if err = server.connection.StartTLS(tlsCfg); err == nil { + if err = server.Connection.StartTLS(tlsCfg); err == nil { return 
nil } } } else { - server.connection, err = ldap.DialTLS("tcp", address, tlsCfg) + server.Connection, err = ldap.DialTLS("tcp", address, tlsCfg) } } else { - server.connection, err = dial("tcp", address) + server.Connection, err = dial("tcp", address) } if err == nil { @@ -121,16 +119,16 @@ func (server *Server) Dial() error { // Close closes the LDAP connection func (server *Server) Close() { - server.connection.Close() + server.Connection.Close() } -// Log in user by searching and serializing it +// Login user by searching and serializing it func (server *Server) Login(query *models.LoginUserQuery) ( *models.ExternalUserInfo, error, ) { // Perform initial authentication - err := server.initialBind(query.Username, query.Password) + err := server.InitialBind(query.Username, query.Password) if err != nil { return nil, err } @@ -160,56 +158,6 @@ func (server *Server) Login(query *models.LoginUserQuery) ( return user, nil } -// Add adds stuff to LDAP -func (server *Server) Add(dn string, values map[string][]string) error { - err := server.initialBind( - server.config.BindDN, - server.config.BindPassword, - ) - if err != nil { - return err - } - - attributes := make([]ldap.Attribute, 0) - for key, value := range values { - attributes = append(attributes, ldap.Attribute{ - Type: key, - Vals: value, - }) - } - - request := &ldap.AddRequest{ - DN: dn, - Attributes: attributes, - } - - err = server.connection.Add(request) - if err != nil { - return err - } - - return nil -} - -// Remove removes stuff from LDAP -func (server *Server) Remove(dn string) error { - err := server.initialBind( - server.config.BindDN, - server.config.BindPassword, - ) - if err != nil { - return err - } - - request := ldap.NewDelRequest(dn, nil) - err = server.connection.Del(request) - if err != nil { - return err - } - - return nil -} - // Users gets LDAP users func (server *Server) Users(logins []string) ( []*models.ExternalUserInfo, @@ -217,10 +165,10 @@ func (server *Server) Users(logins 
[]string) ( ) { var result *ldap.SearchResult var err error - var config = server.config + var Config = server.Config - for _, base := range config.SearchBaseDNs { - result, err = server.connection.Search( + for _, base := range Config.SearchBaseDNs { + result, err = server.Connection.Search( server.getSearchRequest(base, logins), ) if err != nil { @@ -254,7 +202,7 @@ func (server *Server) ExtractGrafanaUser(user *UserInfo) (*models.ExternalUserIn // If there are no ldap group mappings access is true // otherwise a single group must match func (server *Server) validateGrafanaUser(user *models.ExternalUserInfo) error { - if len(server.config.Groups) > 0 && len(user.OrgRoles) < 1 { + if len(server.Config.Groups) > 0 && len(user.OrgRoles) < 1 { server.log.Error( "user does not belong in any of the specified LDAP groups", "username", user.Login, @@ -301,7 +249,7 @@ func (server *Server) getSearchRequest( ) *ldap.SearchRequest { attributes := []string{} - inputs := server.config.Attr + inputs := server.Config.Attr attributes = appendIfNotEmpty( attributes, inputs.Username, @@ -314,7 +262,7 @@ func (server *Server) getSearchRequest( search := "" for _, login := range logins { query := strings.Replace( - server.config.SearchFilter, + server.Config.SearchFilter, "%s", ldap.EscapeFilter(login), -1, ) @@ -347,7 +295,7 @@ func (server *Server) buildGrafanaUser(user *UserInfo) *models.ExternalUserInfo OrgRoles: map[int64]models.RoleType{}, } - for _, group := range server.config.Groups { + for _, group := range server.Config.Groups { // only use the first match for each org if extUser.OrgRoles[group.OrgId] != "" { continue @@ -366,15 +314,15 @@ func (server *Server) buildGrafanaUser(user *UserInfo) *models.ExternalUserInfo func (server *Server) serverBind() error { bindFn := func() error { - return server.connection.Bind( - server.config.BindDN, - server.config.BindPassword, + return server.Connection.Bind( + server.Config.BindDN, + server.Config.BindPassword, ) } - if 
server.config.BindPassword == "" { + if server.Config.BindPassword == "" { bindFn = func() error { - return server.connection.UnauthenticatedBind(server.config.BindDN) + return server.Connection.UnauthenticatedBind(server.Config.BindDN) } } @@ -397,7 +345,7 @@ func (server *Server) secondBind( user *models.ExternalUserInfo, userPassword string, ) error { - err := server.connection.Bind(user.AuthId, userPassword) + err := server.Connection.Bind(user.AuthId, userPassword) if err != nil { server.log.Info("Second bind failed", "error", err) @@ -412,24 +360,25 @@ func (server *Server) secondBind( return nil } -func (server *Server) initialBind(username, userPassword string) error { - if server.config.BindPassword != "" || server.config.BindDN == "" { - userPassword = server.config.BindPassword +// InitialBind intiates first bind to LDAP server +func (server *Server) InitialBind(username, userPassword string) error { + if server.Config.BindPassword != "" || server.Config.BindDN == "" { + userPassword = server.Config.BindPassword server.requireSecondBind = true } - bindPath := server.config.BindDN + bindPath := server.Config.BindDN if strings.Contains(bindPath, "%s") { - bindPath = fmt.Sprintf(server.config.BindDN, username) + bindPath = fmt.Sprintf(server.Config.BindDN, username) } bindFn := func() error { - return server.connection.Bind(bindPath, userPassword) + return server.Connection.Bind(bindPath, userPassword) } if userPassword == "" { bindFn = func() error { - return server.connection.UnauthenticatedBind(bindPath) + return server.Connection.UnauthenticatedBind(bindPath) } } @@ -451,16 +400,16 @@ func (server *Server) initialBind(username, userPassword string) error { func (server *Server) requestMemberOf(searchResult *ldap.SearchResult) ([]string, error) { var memberOf []string - for _, groupSearchBase := range server.config.GroupSearchBaseDNs { + for _, groupSearchBase := range server.Config.GroupSearchBaseDNs { var filterReplace string - if 
server.config.GroupSearchFilterUserAttribute == "" { - filterReplace = getLDAPAttr(server.config.Attr.Username, searchResult) + if server.Config.GroupSearchFilterUserAttribute == "" { + filterReplace = getLDAPAttr(server.Config.Attr.Username, searchResult) } else { - filterReplace = getLDAPAttr(server.config.GroupSearchFilterUserAttribute, searchResult) + filterReplace = getLDAPAttr(server.Config.GroupSearchFilterUserAttribute, searchResult) } filter := strings.Replace( - server.config.GroupSearchFilter, "%s", + server.Config.GroupSearchFilter, "%s", ldap.EscapeFilter(filterReplace), -1, ) @@ -468,7 +417,7 @@ func (server *Server) requestMemberOf(searchResult *ldap.SearchResult) ([]string server.log.Info("Searching for user's groups", "filter", filter) // support old way of reading settings - groupIDAttribute := server.config.Attr.MemberOf + groupIDAttribute := server.Config.Attr.MemberOf // but prefer dn attribute if default settings are used if groupIDAttribute == "" || groupIDAttribute == "memberOf" { groupIDAttribute = "dn" @@ -482,7 +431,7 @@ func (server *Server) requestMemberOf(searchResult *ldap.SearchResult) ([]string Filter: filter, } - groupSearchResult, err := server.connection.Search(&groupSearchReq) + groupSearchResult, err := server.Connection.Search(&groupSearchReq) if err != nil { return nil, err } @@ -518,22 +467,22 @@ func (server *Server) serializeUsers( index, ), LastName: getLDAPAttrN( - server.config.Attr.Surname, + server.Config.Attr.Surname, users, index, ), FirstName: getLDAPAttrN( - server.config.Attr.Name, + server.Config.Attr.Name, users, index, ), Username: getLDAPAttrN( - server.config.Attr.Username, + server.Config.Attr.Username, users, index, ), Email: getLDAPAttrN( - server.config.Attr.Email, + server.Config.Attr.Email, users, index, ), @@ -553,8 +502,8 @@ func (server *Server) serializeUsers( func (server *Server) getMemberOf(search *ldap.SearchResult) ( []string, error, ) { - if server.config.GroupSearchFilter == "" { - memberOf 
:= getLDAPAttrArray(server.config.Attr.MemberOf, search) + if server.Config.GroupSearchFilter == "" { + memberOf := getLDAPAttrArray(server.Config.Attr.MemberOf, search) return memberOf, nil } diff --git a/pkg/services/ldap/ldap_helpers_test.go b/pkg/services/ldap/ldap_helpers_test.go index 3f25633460c6..48e6bce8b5ba 100644 --- a/pkg/services/ldap/ldap_helpers_test.go +++ b/pkg/services/ldap/ldap_helpers_test.go @@ -13,7 +13,7 @@ func TestLDAPHelpers(t *testing.T) { Convey("serializeUsers()", t, func() { Convey("simple case", func() { server := &Server{ - config: &ServerConfig{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -22,7 +22,7 @@ func TestLDAPHelpers(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: &mockConnection{}, + Connection: &MockConnection{}, log: log.New("test-logger"), } @@ -46,7 +46,7 @@ func TestLDAPHelpers(t *testing.T) { Convey("without lastname", func() { server := &Server{ - config: &ServerConfig{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -55,7 +55,7 @@ func TestLDAPHelpers(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: &mockConnection{}, + Connection: &MockConnection{}, log: log.New("test-logger"), } @@ -75,74 +75,9 @@ func TestLDAPHelpers(t *testing.T) { }) }) - Convey("initialBind", t, func() { - Convey("Given bind dn and password configured", func() { - connection := &mockConnection{} - var actualUsername, actualPassword string - connection.bindProvider = func(username, password string) error { - actualUsername = username - actualPassword = password - return nil - } - server := &Server{ - connection: connection, - config: &ServerConfig{ - BindDN: "cn=%s,o=users,dc=grafana,dc=org", - BindPassword: "bindpwd", - }, - } - err := server.initialBind("user", "pwd") - So(err, ShouldBeNil) - So(server.requireSecondBind, ShouldBeTrue) - So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") - 
So(actualPassword, ShouldEqual, "bindpwd") - }) - - Convey("Given bind dn configured", func() { - connection := &mockConnection{} - var actualUsername, actualPassword string - connection.bindProvider = func(username, password string) error { - actualUsername = username - actualPassword = password - return nil - } - server := &Server{ - connection: connection, - config: &ServerConfig{ - BindDN: "cn=%s,o=users,dc=grafana,dc=org", - }, - } - err := server.initialBind("user", "pwd") - So(err, ShouldBeNil) - So(server.requireSecondBind, ShouldBeFalse) - So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") - So(actualPassword, ShouldEqual, "pwd") - }) - - Convey("Given empty bind dn and password", func() { - connection := &mockConnection{} - unauthenticatedBindWasCalled := false - var actualUsername string - connection.unauthenticatedBindProvider = func(username string) error { - unauthenticatedBindWasCalled = true - actualUsername = username - return nil - } - server := &Server{ - connection: connection, - config: &ServerConfig{}, - } - err := server.initialBind("user", "pwd") - So(err, ShouldBeNil) - So(server.requireSecondBind, ShouldBeTrue) - So(unauthenticatedBindWasCalled, ShouldBeTrue) - So(actualUsername, ShouldBeEmpty) - }) - }) - Convey("serverBind()", t, func() { Convey("Given bind dn and password configured", func() { - connection := &mockConnection{} + connection := &MockConnection{} var actualUsername, actualPassword string connection.bindProvider = func(username, password string) error { actualUsername = username @@ -150,8 +85,8 @@ func TestLDAPHelpers(t *testing.T) { return nil } server := &Server{ - connection: connection, - config: &ServerConfig{ + Connection: connection, + Config: &ServerConfig{ BindDN: "o=users,dc=grafana,dc=org", BindPassword: "bindpwd", }, @@ -163,7 +98,7 @@ func TestLDAPHelpers(t *testing.T) { }) Convey("Given bind dn configured", func() { - connection := &mockConnection{} + connection := &MockConnection{} 
unauthenticatedBindWasCalled := false var actualUsername string connection.unauthenticatedBindProvider = func(username string) error { @@ -172,8 +107,8 @@ func TestLDAPHelpers(t *testing.T) { return nil } server := &Server{ - connection: connection, - config: &ServerConfig{ + Connection: connection, + Config: &ServerConfig{ BindDN: "o=users,dc=grafana,dc=org", }, } @@ -184,7 +119,7 @@ func TestLDAPHelpers(t *testing.T) { }) Convey("Given empty bind dn and password", func() { - connection := &mockConnection{} + connection := &MockConnection{} unauthenticatedBindWasCalled := false var actualUsername string connection.unauthenticatedBindProvider = func(username string) error { @@ -193,8 +128,8 @@ func TestLDAPHelpers(t *testing.T) { return nil } server := &Server{ - connection: connection, - config: &ServerConfig{}, + Connection: connection, + Config: &ServerConfig{}, } err := server.serverBind() So(err, ShouldBeNil) diff --git a/pkg/services/ldap/ldap_login_test.go b/pkg/services/ldap/ldap_login_test.go index 5bd0edc79cb1..573a9a560e84 100644 --- a/pkg/services/ldap/ldap_login_test.go +++ b/pkg/services/ldap/ldap_login_test.go @@ -13,12 +13,12 @@ import ( func TestLDAPLogin(t *testing.T) { Convey("Login()", t, func() { - authScenario("When user is log in and updated", func(sc *scenarioContext) { + serverScenario("When user is log in and updated", func(sc *scenarioContext) { // arrange - mockConnection := &mockConnection{} + mockConnection := &MockConnection{} - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ Host: "", RootCACert: "", Groups: []*GroupToOrgRole{ @@ -33,7 +33,7 @@ func TestLDAPLogin(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: mockConnection, + Connection: mockConnection, log: log.New("test-logger"), } @@ -61,7 +61,7 @@ func TestLDAPLogin(t *testing.T) { sc.userOrgsQueryReturns([]*models.UserOrgDTO{}) // act - extUser, _ := auth.Login(query) + extUser, _ := server.Login(query) 
userInfo, err := user.Upsert(&user.UpsertArgs{ SignupAllowed: true, ExternalUser: extUser, @@ -73,7 +73,7 @@ func TestLDAPLogin(t *testing.T) { So(err, ShouldBeNil) // User should be searched in ldap - So(mockConnection.searchCalled, ShouldBeTrue) + So(mockConnection.SearchCalled, ShouldBeTrue) // Info should be updated (email differs) So(userInfo.Email, ShouldEqual, "roel@test.com") @@ -82,8 +82,8 @@ func TestLDAPLogin(t *testing.T) { So(sc.addOrgUserCmd.Role, ShouldEqual, "Admin") }) - authScenario("When login with invalid credentials", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When login with invalid credentials", func(scenario *scenarioContext) { + connection := &MockConnection{} entry := ldap.Entry{} result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} connection.setSearchResult(&result) @@ -93,8 +93,8 @@ func TestLDAPLogin(t *testing.T) { ResultCode: 49, } } - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -102,19 +102,19 @@ func TestLDAPLogin(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - _, err := auth.Login(scenario.loginUserQuery) + _, err := server.Login(scenario.loginUserQuery) Convey("it should return invalid credentials error", func() { So(err, ShouldEqual, ErrInvalidCredentials) }) }) - authScenario("When login with valid credentials", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When login with valid credentials", func(scenario *scenarioContext) { + connection := &MockConnection{} entry := ldap.Entry{ DN: "dn", Attributes: []*ldap.EntryAttribute{ {Name: "username", Values: []string{"markelog"}}, @@ -130,8 +130,8 @@ func TestLDAPLogin(t *testing.T) { connection.bindProvider = func(username, password string) error { return nil } - auth := &Server{ - config: 
&ServerConfig{ + server := &Server{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -139,18 +139,18 @@ func TestLDAPLogin(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - resp, err := auth.Login(scenario.loginUserQuery) + resp, err := server.Login(scenario.loginUserQuery) So(err, ShouldBeNil) So(resp.Login, ShouldEqual, "markelog") }) - authScenario("When user not found in LDAP, but exist in Grafana", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When user not found in LDAP, but exist in Grafana", func(scenario *scenarioContext) { + connection := &MockConnection{} result := ldap.SearchResult{Entries: []*ldap.Entry{}} connection.setSearchResult(&result) @@ -160,15 +160,15 @@ func TestLDAPLogin(t *testing.T) { connection.bindProvider = func(username, password string) error { return nil } - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - _, err := auth.Login(scenario.loginUserQuery) + _, err := server.Login(scenario.loginUserQuery) Convey("it should disable user", func() { So(scenario.disableExternalUserCalled, ShouldBeTrue) @@ -181,8 +181,8 @@ func TestLDAPLogin(t *testing.T) { }) }) - authScenario("When user not found in LDAP, and disabled in Grafana already", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When user not found in LDAP, and disabled in Grafana already", func(scenario *scenarioContext) { + connection := &MockConnection{} result := ldap.SearchResult{Entries: []*ldap.Entry{}} connection.setSearchResult(&result) @@ -192,15 +192,15 @@ func TestLDAPLogin(t *testing.T) { connection.bindProvider = func(username, password string) error { return nil } - auth := &Server{ - config: 
&ServerConfig{ + server := &Server{ + Config: &ServerConfig{ SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - _, err := auth.Login(scenario.loginUserQuery) + _, err := server.Login(scenario.loginUserQuery) Convey("it should't call disable function", func() { So(scenario.disableExternalUserCalled, ShouldBeFalse) @@ -211,8 +211,8 @@ func TestLDAPLogin(t *testing.T) { }) }) - authScenario("When user found in LDAP, and disabled in Grafana", func(scenario *scenarioContext) { - connection := &mockConnection{} + serverScenario("When user found in LDAP, and disabled in Grafana", func(scenario *scenarioContext) { + connection := &MockConnection{} entry := ldap.Entry{} result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} connection.setSearchResult(&result) @@ -221,15 +221,15 @@ func TestLDAPLogin(t *testing.T) { connection.bindProvider = func(username, password string) error { return nil } - auth := &Server{ - config: &ServerConfig{ + server := &Server{ + Config: &ServerConfig{ SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: connection, + Connection: connection, log: log.New("test-logger"), } - extUser, _ := auth.Login(scenario.loginUserQuery) + extUser, _ := server.Login(scenario.loginUserQuery) _, err := user.Upsert(&user.UpsertArgs{ SignupAllowed: true, ExternalUser: extUser, diff --git a/pkg/services/ldap/ldap_test.go b/pkg/services/ldap/ldap_test.go index 266fe22a4fc7..98b15ec44576 100644 --- a/pkg/services/ldap/ldap_test.go +++ b/pkg/services/ldap/ldap_test.go @@ -9,114 +9,10 @@ import ( "github.com/grafana/grafana/pkg/infra/log" ) -func TestAuth(t *testing.T) { - Convey("Add()", t, func() { - connection := &mockConnection{} - - auth := &Server{ - config: &ServerConfig{ - SearchBaseDNs: []string{"BaseDNHere"}, - }, - connection: connection, - log: log.New("test-logger"), - } - - Convey("Adds user", func() { - err := auth.Add( - "cn=ldap-tuz,ou=users,dc=grafana,dc=org", - 
map[string][]string{ - "mail": {"ldap-viewer@grafana.com"}, - "userPassword": {"grafana"}, - "objectClass": { - "person", - "top", - "inetOrgPerson", - "organizationalPerson", - }, - "sn": {"ldap-tuz"}, - "cn": {"ldap-tuz"}, - }, - ) - - hasMail := false - hasUserPassword := false - hasObjectClass := false - hasSN := false - hasCN := false - - So(err, ShouldBeNil) - So(connection.addParams.Controls, ShouldBeNil) - So(connection.addCalled, ShouldBeTrue) - So( - connection.addParams.DN, - ShouldEqual, - "cn=ldap-tuz,ou=users,dc=grafana,dc=org", - ) - - attrs := connection.addParams.Attributes - for _, value := range attrs { - if value.Type == "mail" { - So(value.Vals, ShouldContain, "ldap-viewer@grafana.com") - hasMail = true - } - - if value.Type == "userPassword" { - hasUserPassword = true - So(value.Vals, ShouldContain, "grafana") - } - - if value.Type == "objectClass" { - hasObjectClass = true - So(value.Vals, ShouldContain, "person") - So(value.Vals, ShouldContain, "top") - So(value.Vals, ShouldContain, "inetOrgPerson") - So(value.Vals, ShouldContain, "organizationalPerson") - } - - if value.Type == "sn" { - hasSN = true - So(value.Vals, ShouldContain, "ldap-tuz") - } - - if value.Type == "cn" { - hasCN = true - So(value.Vals, ShouldContain, "ldap-tuz") - } - } - - So(hasMail, ShouldBeTrue) - So(hasUserPassword, ShouldBeTrue) - So(hasObjectClass, ShouldBeTrue) - So(hasSN, ShouldBeTrue) - So(hasCN, ShouldBeTrue) - }) - }) - - Convey("Remove()", t, func() { - connection := &mockConnection{} - - auth := &Server{ - config: &ServerConfig{ - SearchBaseDNs: []string{"BaseDNHere"}, - }, - connection: connection, - log: log.New("test-logger"), - } - - Convey("Removes the user", func() { - dn := "cn=ldap-tuz,ou=users,dc=grafana,dc=org" - err := auth.Remove(dn) - - So(err, ShouldBeNil) - So(connection.delCalled, ShouldBeTrue) - So(connection.delParams.Controls, ShouldBeNil) - So(connection.delParams.DN, ShouldEqual, dn) - }) - }) - +func TestPublicAPI(t *testing.T) { 
Convey("Users()", t, func() { Convey("find one user", func() { - mockConnection := &mockConnection{} + MockConnection := &MockConnection{} entry := ldap.Entry{ DN: "dn", Attributes: []*ldap.EntryAttribute{ {Name: "username", Values: []string{"roelgerrits"}}, @@ -126,11 +22,11 @@ func TestAuth(t *testing.T) { {Name: "memberof", Values: []string{"admins"}}, }} result := ldap.SearchResult{Entries: []*ldap.Entry{&entry}} - mockConnection.setSearchResult(&result) + MockConnection.setSearchResult(&result) // Set up attribute map without surname and email server := &Server{ - config: &ServerConfig{ + Config: &ServerConfig{ Attr: AttributeMap{ Username: "username", Name: "name", @@ -138,7 +34,7 @@ func TestAuth(t *testing.T) { }, SearchBaseDNs: []string{"BaseDNHere"}, }, - connection: mockConnection, + Connection: MockConnection, log: log.New("test-logger"), } @@ -148,10 +44,75 @@ func TestAuth(t *testing.T) { So(searchResult, ShouldNotBeNil) // User should be searched in ldap - So(mockConnection.searchCalled, ShouldBeTrue) + So(MockConnection.SearchCalled, ShouldBeTrue) // No empty attributes should be added to the search request - So(len(mockConnection.searchAttributes), ShouldEqual, 3) + So(len(MockConnection.SearchAttributes), ShouldEqual, 3) + }) + }) + + Convey("InitialBind", t, func() { + Convey("Given bind dn and password configured", func() { + connection := &MockConnection{} + var actualUsername, actualPassword string + connection.bindProvider = func(username, password string) error { + actualUsername = username + actualPassword = password + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{ + BindDN: "cn=%s,o=users,dc=grafana,dc=org", + BindPassword: "bindpwd", + }, + } + err := server.InitialBind("user", "pwd") + So(err, ShouldBeNil) + So(server.requireSecondBind, ShouldBeTrue) + So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") + So(actualPassword, ShouldEqual, "bindpwd") + }) + + Convey("Given bind dn 
configured", func() { + connection := &MockConnection{} + var actualUsername, actualPassword string + connection.bindProvider = func(username, password string) error { + actualUsername = username + actualPassword = password + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{ + BindDN: "cn=%s,o=users,dc=grafana,dc=org", + }, + } + err := server.InitialBind("user", "pwd") + So(err, ShouldBeNil) + So(server.requireSecondBind, ShouldBeFalse) + So(actualUsername, ShouldEqual, "cn=user,o=users,dc=grafana,dc=org") + So(actualPassword, ShouldEqual, "pwd") + }) + + Convey("Given empty bind dn and password", func() { + connection := &MockConnection{} + unauthenticatedBindWasCalled := false + var actualUsername string + connection.unauthenticatedBindProvider = func(username string) error { + unauthenticatedBindWasCalled = true + actualUsername = username + return nil + } + server := &Server{ + Connection: connection, + Config: &ServerConfig{}, + } + err := server.InitialBind("user", "pwd") + So(err, ShouldBeNil) + So(server.requireSecondBind, ShouldBeTrue) + So(unauthenticatedBindWasCalled, ShouldBeTrue) + So(actualUsername, ShouldBeEmpty) }) }) } diff --git a/pkg/services/ldap/test.go b/pkg/services/ldap/test.go index daa6a3216641..6319cddd2807 100644 --- a/pkg/services/ldap/test.go +++ b/pkg/services/ldap/test.go @@ -12,22 +12,24 @@ import ( "github.com/grafana/grafana/pkg/services/login" ) -type mockConnection struct { - searchResult *ldap.SearchResult - searchCalled bool - searchAttributes []string +// MockConnection struct for testing +type MockConnection struct { + SearchResult *ldap.SearchResult + SearchCalled bool + SearchAttributes []string - addParams *ldap.AddRequest - addCalled bool + AddParams *ldap.AddRequest + AddCalled bool - delParams *ldap.DelRequest - delCalled bool + DelParams *ldap.DelRequest + DelCalled bool bindProvider func(username, password string) error unauthenticatedBindProvider func(username string) error } 
-func (c *mockConnection) Bind(username, password string) error { +// Bind mocks Bind connection function +func (c *MockConnection) Bind(username, password string) error { if c.bindProvider != nil { return c.bindProvider(username, password) } @@ -35,7 +37,8 @@ func (c *mockConnection) Bind(username, password string) error { return nil } -func (c *mockConnection) UnauthenticatedBind(username string) error { +// UnauthenticatedBind mocks UnauthenticatedBind connection function +func (c *MockConnection) UnauthenticatedBind(username string) error { if c.unauthenticatedBindProvider != nil { return c.unauthenticatedBindProvider(username) } @@ -43,35 +46,40 @@ func (c *mockConnection) UnauthenticatedBind(username string) error { return nil } -func (c *mockConnection) Close() {} +// Close mocks Close connection function +func (c *MockConnection) Close() {} -func (c *mockConnection) setSearchResult(result *ldap.SearchResult) { - c.searchResult = result +func (c *MockConnection) setSearchResult(result *ldap.SearchResult) { + c.SearchResult = result } -func (c *mockConnection) Search(sr *ldap.SearchRequest) (*ldap.SearchResult, error) { - c.searchCalled = true - c.searchAttributes = sr.Attributes - return c.searchResult, nil +// Search mocks Search connection function +func (c *MockConnection) Search(sr *ldap.SearchRequest) (*ldap.SearchResult, error) { + c.SearchCalled = true + c.SearchAttributes = sr.Attributes + return c.SearchResult, nil } -func (c *mockConnection) Add(request *ldap.AddRequest) error { - c.addCalled = true - c.addParams = request +// Add mocks Add connection function +func (c *MockConnection) Add(request *ldap.AddRequest) error { + c.AddCalled = true + c.AddParams = request return nil } -func (c *mockConnection) Del(request *ldap.DelRequest) error { - c.delCalled = true - c.delParams = request +// Del mocks Del connection function +func (c *MockConnection) Del(request *ldap.DelRequest) error { + c.DelCalled = true + c.DelParams = request return nil } 
-func (c *mockConnection) StartTLS(*tls.Config) error { +// StartTLS mocks StartTLS connection function +func (c *MockConnection) StartTLS(*tls.Config) error { return nil } -func authScenario(desc string, fn scenarioFunc) { +func serverScenario(desc string, fn scenarioFunc) { Convey(desc, func() { defer bus.ClearBusHandlers() diff --git a/pkg/services/multildap/multildap.go b/pkg/services/multildap/multildap.go index 1b309c646e17..6c2baf1671af 100644 --- a/pkg/services/multildap/multildap.go +++ b/pkg/services/multildap/multildap.go @@ -35,9 +35,6 @@ type IMultiLDAP interface { User(login string) ( *models.ExternalUserInfo, error, ) - - Add(dn string, values map[string][]string) error - Remove(dn string) error } // MultiLDAP is basic struct of LDAP authorization @@ -52,55 +49,6 @@ func New(configs []*ldap.ServerConfig) IMultiLDAP { } } -// Add adds user to the *first* defined LDAP -func (multiples *MultiLDAP) Add( - dn string, - values map[string][]string, -) error { - if len(multiples.configs) == 0 { - return ErrNoLDAPServers - } - - config := multiples.configs[0] - ldap := ldap.New(config) - - if err := ldap.Dial(); err != nil { - return err - } - - defer ldap.Close() - - err := ldap.Add(dn, values) - if err != nil { - return err - } - - return nil -} - -// Remove removes user from the *first* defined LDAP -func (multiples *MultiLDAP) Remove(dn string) error { - if len(multiples.configs) == 0 { - return ErrNoLDAPServers - } - - config := multiples.configs[0] - ldap := ldap.New(config) - - if err := ldap.Dial(); err != nil { - return err - } - - defer ldap.Close() - - err := ldap.Remove(dn) - if err != nil { - return err - } - - return nil -} - // Login tries to log in the user in multiples LDAP func (multiples *MultiLDAP) Login(query *models.LoginUserQuery) ( *models.ExternalUserInfo, error, From 1a2841e24418df89b39ffaa308f158197c13de4f Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 27 May 2019 09:55:58 +0200 Subject: [PATCH 06/49] devenv: metricbeat 
and kibana for elasticsearch 7 block (#17262) --- devenv/datasources.yaml | 11 ++++++ .../blocks/elastic7/docker-compose.yaml | 16 ++++++++ devenv/docker/blocks/elastic7/metricbeat.yml | 38 +++++++++++++++++++ 3 files changed, 65 insertions(+) create mode 100644 devenv/docker/blocks/elastic7/metricbeat.yml diff --git a/devenv/datasources.yaml b/devenv/datasources.yaml index d5664d7136c0..33dde611bdcd 100644 --- a/devenv/datasources.yaml +++ b/devenv/datasources.yaml @@ -143,6 +143,17 @@ datasources: timeField: "@timestamp" esVersion: 70 + - name: gdev-elasticsearch-v7-metricbeat + type: elasticsearch + access: proxy + database: "[metricbeat-]YYYY.MM.DD" + url: http://localhost:12200 + jsonData: + interval: Daily + timeField: "@timestamp" + esVersion: 70 + timeInterval: "10s" + - name: gdev-mysql type: mysql url: localhost:3306 diff --git a/devenv/docker/blocks/elastic7/docker-compose.yaml b/devenv/docker/blocks/elastic7/docker-compose.yaml index 3ef922c890c3..45e2836f870e 100644 --- a/devenv/docker/blocks/elastic7/docker-compose.yaml +++ b/devenv/docker/blocks/elastic7/docker-compose.yaml @@ -21,3 +21,19 @@ - ./docker/blocks/elastic7/filebeat.yml:/usr/share/filebeat/filebeat.yml:ro - /var/log:/var/log:ro - ../data/log:/var/log/grafana:ro + + metricbeat7: + image: docker.elastic.co/beats/metricbeat-oss:7.0.0 + network_mode: host + command: metricbeat -e -strict.perms=false + user: root + volumes: + - ./docker/blocks/elastic7/metricbeat.yml:/usr/share/metricbeat/metricbeat.yml:ro + - /var/run/docker.sock:/var/run/docker.sock:ro + + kibana7: + image: docker.elastic.co/kibana/kibana-oss:7.0.0 + ports: + - "5601:5601" + environment: + ELASTICSEARCH_HOSTS: http://elasticsearch7:9200 diff --git a/devenv/docker/blocks/elastic7/metricbeat.yml b/devenv/docker/blocks/elastic7/metricbeat.yml new file mode 100644 index 000000000000..4788c0cdd9a5 --- /dev/null +++ b/devenv/docker/blocks/elastic7/metricbeat.yml @@ -0,0 +1,38 @@ +metricbeat.config: + modules: + path: 
${path.config}/modules.d/*.yml + # Reload module configs as they change: + reload.enabled: false + +metricbeat.autodiscover: + providers: + - type: docker + hints.enabled: true + +metricbeat.modules: +- module: docker + metricsets: + - "container" + - "cpu" + - "diskio" + - "healthcheck" + - "info" + #- "image" + - "memory" + - "network" + hosts: ["unix:///var/run/docker.sock"] + period: 10s + enabled: true + +processors: + - add_cloud_metadata: ~ + +output.elasticsearch: + hosts: ["localhost:12200"] + index: "metricbeat-%{+yyyy.MM.dd}" + +setup.template.name: "metricbeat" +setup.template.pattern: "metricbeat-*" +setup.template.settings: + index.number_of_shards: 1 + index.number_of_replicas: 1 \ No newline at end of file From b9181df21285dff20f25397ede06198f0d024d8d Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 27 May 2019 10:38:17 +0200 Subject: [PATCH 07/49] Auth Proxy: Log any error in middleware (#17275) Fixes so that errors happening in auth proxy middleware is logged. 
Ref #17247 --- pkg/middleware/auth_proxy.go | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/pkg/middleware/auth_proxy.go b/pkg/middleware/auth_proxy.go index 890fd5e4f24b..9ec5852b73dc 100644 --- a/pkg/middleware/auth_proxy.go +++ b/pkg/middleware/auth_proxy.go @@ -31,6 +31,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Check if allowed to continue with this IP if result, err := auth.IsAllowedIP(); !result { + ctx.Logger.Error("auth proxy: failed to check whitelisted ip addresses", "message", err.Error(), "error", err.DetailsError) ctx.Handle(407, err.Error(), err.DetailsError) return true } @@ -38,6 +39,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Try to log in user from various providers id, err := auth.Login() if err != nil { + ctx.Logger.Error("auth proxy: failed to login", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } @@ -45,6 +47,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Get full user info user, err := auth.GetSignedUser(id) if err != nil { + ctx.Logger.Error("auth proxy: failed to get signed in user", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } @@ -55,6 +58,7 @@ func initContextWithAuthProxy(store *remotecache.RemoteCache, ctx *m.ReqContext, // Remember user data it in cache if err := auth.Remember(id); err != nil { + ctx.Logger.Error("auth proxy: failed to store user in cache", "message", err.Error(), "error", err.DetailsError) ctx.Handle(500, err.Error(), err.DetailsError) return true } From 151b24b95fb52a777533c9fd76db48ae8967a74e Mon Sep 17 00:00:00 2001 From: Andrej Ocenas Date: Mon, 27 May 2019 10:47:21 +0200 Subject: [PATCH 08/49] CLI: Add command to migrate all datasources to use encrypted password fields (#17118) closes: #17107 --- pkg/cmd/grafana-cli/commands/commands.go | 23 +++- 
.../encrypt_datasource_passwords.go | 126 ++++++++++++++++++ .../encrypt_datasource_passwords_test.go | 67 ++++++++++ .../grafana-cli/commands/install_command.go | 7 +- .../commands/listremote_command.go | 3 +- .../commands/listversions_command.go | 5 +- pkg/cmd/grafana-cli/commands/ls_command.go | 3 +- .../grafana-cli/commands/remove_command.go | 5 +- .../commands/reset_password_command.go | 4 +- .../commands/upgrade_all_command.go | 3 +- .../grafana-cli/commands/upgrade_command.go | 3 +- .../{commands => utils}/command_line.go | 16 +-- pkg/util/strings.go | 17 +++ pkg/util/strings_test.go | 9 ++ 14 files changed, 266 insertions(+), 25 deletions(-) create mode 100644 pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go create mode 100644 pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go rename pkg/cmd/grafana-cli/{commands => utils}/command_line.go (64%) diff --git a/pkg/cmd/grafana-cli/commands/commands.go b/pkg/cmd/grafana-cli/commands/commands.go index d5add2b71684..ebaee5573482 100644 --- a/pkg/cmd/grafana-cli/commands/commands.go +++ b/pkg/cmd/grafana-cli/commands/commands.go @@ -7,14 +7,16 @@ import ( "github.com/codegangsta/cli" "github.com/fatih/color" "github.com/grafana/grafana/pkg/bus" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/commands/datamigrations" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/services/sqlstore" "github.com/grafana/grafana/pkg/setting" ) -func runDbCommand(command func(commandLine CommandLine) error) func(context *cli.Context) { +func runDbCommand(command func(commandLine utils.CommandLine, sqlStore *sqlstore.SqlStore) error) func(context *cli.Context) { return func(context *cli.Context) { - cmd := &contextCommandLine{context} + cmd := &utils.ContextCommandLine{Context: context} cfg := setting.NewCfg() cfg.Load(&setting.CommandLineArgs{ @@ -28,7 +30,7 @@ func 
runDbCommand(command func(commandLine CommandLine) error) func(context *cli engine.Bus = bus.GetBus() engine.Init() - if err := command(cmd); err != nil { + if err := command(cmd, engine); err != nil { logger.Errorf("\n%s: ", color.RedString("Error")) logger.Errorf("%s\n\n", err) @@ -40,10 +42,10 @@ func runDbCommand(command func(commandLine CommandLine) error) func(context *cli } } -func runPluginCommand(command func(commandLine CommandLine) error) func(context *cli.Context) { +func runPluginCommand(command func(commandLine utils.CommandLine) error) func(context *cli.Context) { return func(context *cli.Context) { - cmd := &contextCommandLine{context} + cmd := &utils.ContextCommandLine{Context: context} if err := command(cmd); err != nil { logger.Errorf("\n%s: ", color.RedString("Error")) logger.Errorf("%s %s\n\n", color.RedString("✗"), err) @@ -107,6 +109,17 @@ var adminCommands = []cli.Command{ }, }, }, + { + Name: "data-migration", + Usage: "Runs a script that migrates or cleanups data in your db", + Subcommands: []cli.Command{ + { + Name: "encrypt-datasource-passwords", + Usage: "Migrates passwords from unsecured fields to secure_json_data field. Return ok unless there is an error. 
Safe to execute multiple times.", + Action: runDbCommand(datamigrations.EncryptDatasourcePaswords), + }, + }, + }, } var Commands = []cli.Command{ diff --git a/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go new file mode 100644 index 000000000000..e55fa2d70b88 --- /dev/null +++ b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords.go @@ -0,0 +1,126 @@ +package datamigrations + +import ( + "context" + "encoding/json" + + "github.com/fatih/color" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/grafana/grafana/pkg/setting" + "github.com/grafana/grafana/pkg/util" + "github.com/grafana/grafana/pkg/util/errutil" +) + +var ( + datasourceTypes = []string{ + "mysql", + "influxdb", + "elasticsearch", + "graphite", + "prometheus", + "opentsdb", + } +) + +// EncryptDatasourcePaswords migrates un-encrypted secrets on datasources +// to the secureJson Column. 
+func EncryptDatasourcePaswords(c utils.CommandLine, sqlStore *sqlstore.SqlStore) error { + return sqlStore.WithDbSession(context.Background(), func(session *sqlstore.DBSession) error { + passwordsUpdated, err := migrateColumn(session, "password") + if err != nil { + return err + } + + basicAuthUpdated, err := migrateColumn(session, "basic_auth_password") + if err != nil { + return err + } + + logger.Info("\n") + if passwordsUpdated > 0 { + logger.Infof("%s Encrypted password field for %d datasources \n", color.GreenString("✔"), passwordsUpdated) + } + + if basicAuthUpdated > 0 { + logger.Infof("%s Encrypted basic_auth_password field for %d datasources \n", color.GreenString("✔"), basicAuthUpdated) + } + + if passwordsUpdated == 0 && basicAuthUpdated == 0 { + logger.Infof("%s All datasources secrets are allready encrypted\n", color.GreenString("✔")) + } + + logger.Info("\n") + + logger.Warn("Warning: Datasource provisioning files need to be manually changed to prevent overwriting of " + + "the data during provisioning. 
See https://grafana.com/docs/installation/upgrading/#upgrading-to-v6-2 for " + + "details") + return nil + }) +} + +func migrateColumn(session *sqlstore.DBSession, column string) (int, error) { + var rows []map[string]string + + session.Cols("id", column, "secure_json_data") + session.Table("data_source") + session.In("type", datasourceTypes) + session.Where(column + " IS NOT NULL AND " + column + " != ''") + err := session.Find(&rows) + + if err != nil { + return 0, errutil.Wrapf(err, "failed to select column: %s", column) + } + + rowsUpdated, err := updateRows(session, rows, column) + return rowsUpdated, errutil.Wrapf(err, "failed to update column: %s", column) +} + +func updateRows(session *sqlstore.DBSession, rows []map[string]string, passwordFieldName string) (int, error) { + var rowsUpdated int + + for _, row := range rows { + newSecureJSONData, err := getUpdatedSecureJSONData(row, passwordFieldName) + if err != nil { + return 0, err + } + + data, err := json.Marshal(newSecureJSONData) + if err != nil { + return 0, errutil.Wrap("marshaling newSecureJsonData failed", err) + } + + newRow := map[string]interface{}{"secure_json_data": data, passwordFieldName: ""} + session.Table("data_source") + session.Where("id = ?", row["id"]) + // Setting both columns while having value only for secure_json_data should clear the [passwordFieldName] column + session.Cols("secure_json_data", passwordFieldName) + + _, err = session.Update(newRow) + if err != nil { + return 0, err + } + + rowsUpdated++ + } + return rowsUpdated, nil +} + +func getUpdatedSecureJSONData(row map[string]string, passwordFieldName string) (map[string]interface{}, error) { + encryptedPassword, err := util.Encrypt([]byte(row[passwordFieldName]), setting.SecretKey) + if err != nil { + return nil, err + } + + var secureJSONData map[string]interface{} + + if err := json.Unmarshal([]byte(row["secure_json_data"]), &secureJSONData); err != nil { + return nil, err + } + + jsonFieldName := 
util.ToCamelCase(passwordFieldName) + secureJSONData[jsonFieldName] = encryptedPassword + return secureJSONData, nil +} diff --git a/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go new file mode 100644 index 000000000000..64987423decd --- /dev/null +++ b/pkg/cmd/grafana-cli/commands/datamigrations/encrypt_datasource_passwords_test.go @@ -0,0 +1,67 @@ +package datamigrations + +import ( + "testing" + "time" + + "github.com/grafana/grafana/pkg/cmd/grafana-cli/commands/commandstest" + "github.com/grafana/grafana/pkg/components/securejsondata" + "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/sqlstore" + "github.com/stretchr/testify/assert" +) + +func TestPasswordMigrationCommand(t *testing.T) { + //setup datasources with password, basic_auth and none + sqlstore := sqlstore.InitTestDB(t) + session := sqlstore.NewSession() + defer session.Close() + + datasources := []*models.DataSource{ + {Type: "influxdb", Name: "influxdb", Password: "foobar"}, + {Type: "graphite", Name: "graphite", BasicAuthPassword: "foobar"}, + {Type: "prometheus", Name: "prometheus", SecureJsonData: securejsondata.GetEncryptedJsonData(map[string]string{})}, + } + + // set required default values + for _, ds := range datasources { + ds.Created = time.Now() + ds.Updated = time.Now() + ds.SecureJsonData = securejsondata.GetEncryptedJsonData(map[string]string{}) + } + + _, err := session.Insert(&datasources) + assert.Nil(t, err) + + //run migration + err = EncryptDatasourcePaswords(&commandstest.FakeCommandLine{}, sqlstore) + assert.Nil(t, err) + + //verify that no datasources still have password or basic_auth + var dss []*models.DataSource + err = session.SQL("select * from data_source").Find(&dss) + assert.Nil(t, err) + assert.Equal(t, len(dss), 3) + + for _, ds := range dss { + sj := ds.SecureJsonData.Decrypt() + + if ds.Name == "influxdb" { + 
assert.Equal(t, ds.Password, "") + v, exist := sj["password"] + assert.True(t, exist) + assert.Equal(t, v, "foobar", "expected password to be moved to securejson") + } + + if ds.Name == "graphite" { + assert.Equal(t, ds.BasicAuthPassword, "") + v, exist := sj["basicAuthPassword"] + assert.True(t, exist) + assert.Equal(t, v, "foobar", "expected basic_auth_password to be moved to securejson") + } + + if ds.Name == "prometheus" { + assert.Equal(t, len(sj), 0) + } + } +} diff --git a/pkg/cmd/grafana-cli/commands/install_command.go b/pkg/cmd/grafana-cli/commands/install_command.go index 99cef15e50e3..db3907682638 100644 --- a/pkg/cmd/grafana-cli/commands/install_command.go +++ b/pkg/cmd/grafana-cli/commands/install_command.go @@ -14,13 +14,14 @@ import ( "strings" "github.com/fatih/color" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" ) -func validateInput(c CommandLine, pluginFolder string) error { +func validateInput(c utils.CommandLine, pluginFolder string) error { arg := c.Args().First() if arg == "" { return errors.New("please specify plugin to install") @@ -46,7 +47,7 @@ func validateInput(c CommandLine, pluginFolder string) error { return nil } -func installCommand(c CommandLine) error { +func installCommand(c utils.CommandLine) error { pluginFolder := c.PluginDirectory() if err := validateInput(c, pluginFolder); err != nil { return err @@ -60,7 +61,7 @@ func installCommand(c CommandLine) error { // InstallPlugin downloads the plugin code as a zip file from the Grafana.com API // and then extracts the zip into the plugins directory. 
-func InstallPlugin(pluginName, version string, c CommandLine) error { +func InstallPlugin(pluginName, version string, c utils.CommandLine) error { pluginFolder := c.PluginDirectory() downloadURL := c.PluginURL() if downloadURL == "" { diff --git a/pkg/cmd/grafana-cli/commands/listremote_command.go b/pkg/cmd/grafana-cli/commands/listremote_command.go index 4798369def11..7351ee58a371 100644 --- a/pkg/cmd/grafana-cli/commands/listremote_command.go +++ b/pkg/cmd/grafana-cli/commands/listremote_command.go @@ -3,9 +3,10 @@ package commands import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func listremoteCommand(c CommandLine) error { +func listremoteCommand(c utils.CommandLine) error { plugin, err := s.ListAllPlugins(c.RepoDirectory()) if err != nil { diff --git a/pkg/cmd/grafana-cli/commands/listversions_command.go b/pkg/cmd/grafana-cli/commands/listversions_command.go index 95c536e94f0a..78d681c06a3a 100644 --- a/pkg/cmd/grafana-cli/commands/listversions_command.go +++ b/pkg/cmd/grafana-cli/commands/listversions_command.go @@ -5,9 +5,10 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func validateVersionInput(c CommandLine) error { +func validateVersionInput(c utils.CommandLine) error { arg := c.Args().First() if arg == "" { return errors.New("please specify plugin to list versions for") @@ -16,7 +17,7 @@ func validateVersionInput(c CommandLine) error { return nil } -func listversionsCommand(c CommandLine) error { +func listversionsCommand(c utils.CommandLine) error { if err := validateVersionInput(c); err != nil { return err } diff --git a/pkg/cmd/grafana-cli/commands/ls_command.go b/pkg/cmd/grafana-cli/commands/ls_command.go index 30745ce3172d..63492d732e98 100644 --- 
a/pkg/cmd/grafana-cli/commands/ls_command.go +++ b/pkg/cmd/grafana-cli/commands/ls_command.go @@ -8,6 +8,7 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) var ls_getPlugins func(path string) []m.InstalledPlugin = s.GetLocalPlugins @@ -31,7 +32,7 @@ var validateLsCommand = func(pluginDir string) error { return nil } -func lsCommand(c CommandLine) error { +func lsCommand(c utils.CommandLine) error { pluginDir := c.PluginDirectory() if err := validateLsCommand(pluginDir); err != nil { return err diff --git a/pkg/cmd/grafana-cli/commands/remove_command.go b/pkg/cmd/grafana-cli/commands/remove_command.go index e51929dc95cb..eb536d7b8c7a 100644 --- a/pkg/cmd/grafana-cli/commands/remove_command.go +++ b/pkg/cmd/grafana-cli/commands/remove_command.go @@ -5,12 +5,13 @@ import ( "fmt" "strings" - services "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) var removePlugin func(pluginPath, id string) error = services.RemoveInstalledPlugin -func removeCommand(c CommandLine) error { +func removeCommand(c utils.CommandLine) error { pluginPath := c.PluginDirectory() plugin := c.Args().First() diff --git a/pkg/cmd/grafana-cli/commands/reset_password_command.go b/pkg/cmd/grafana-cli/commands/reset_password_command.go index af2b8b3f89ae..4a6a4b674f2e 100644 --- a/pkg/cmd/grafana-cli/commands/reset_password_command.go +++ b/pkg/cmd/grafana-cli/commands/reset_password_command.go @@ -6,13 +6,15 @@ import ( "github.com/fatih/color" "github.com/grafana/grafana/pkg/bus" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/grafana/grafana/pkg/models" + "github.com/grafana/grafana/pkg/services/sqlstore" 
"github.com/grafana/grafana/pkg/util" ) const AdminUserId = 1 -func resetPasswordCommand(c CommandLine) error { +func resetPasswordCommand(c utils.CommandLine, sqlStore *sqlstore.SqlStore) error { newPassword := c.Args().First() password := models.Password(newPassword) diff --git a/pkg/cmd/grafana-cli/commands/upgrade_all_command.go b/pkg/cmd/grafana-cli/commands/upgrade_all_command.go index e01df2dab602..a5aadbbb0c23 100644 --- a/pkg/cmd/grafana-cli/commands/upgrade_all_command.go +++ b/pkg/cmd/grafana-cli/commands/upgrade_all_command.go @@ -4,6 +4,7 @@ import ( "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" m "github.com/grafana/grafana/pkg/cmd/grafana-cli/models" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" "github.com/hashicorp/go-version" ) @@ -27,7 +28,7 @@ func ShouldUpgrade(installed string, remote m.Plugin) bool { return false } -func upgradeAllCommand(c CommandLine) error { +func upgradeAllCommand(c utils.CommandLine) error { pluginsDir := c.PluginDirectory() localPlugins := s.GetLocalPlugins(pluginsDir) diff --git a/pkg/cmd/grafana-cli/commands/upgrade_command.go b/pkg/cmd/grafana-cli/commands/upgrade_command.go index 396371d35772..f32961ce5895 100644 --- a/pkg/cmd/grafana-cli/commands/upgrade_command.go +++ b/pkg/cmd/grafana-cli/commands/upgrade_command.go @@ -4,9 +4,10 @@ import ( "github.com/fatih/color" "github.com/grafana/grafana/pkg/cmd/grafana-cli/logger" s "github.com/grafana/grafana/pkg/cmd/grafana-cli/services" + "github.com/grafana/grafana/pkg/cmd/grafana-cli/utils" ) -func upgradeCommand(c CommandLine) error { +func upgradeCommand(c utils.CommandLine) error { pluginsDir := c.PluginDirectory() pluginName := c.Args().First() diff --git a/pkg/cmd/grafana-cli/commands/command_line.go b/pkg/cmd/grafana-cli/utils/command_line.go similarity index 64% rename from pkg/cmd/grafana-cli/commands/command_line.go rename to pkg/cmd/grafana-cli/utils/command_line.go index 
d487aff8aaaa..d3142d0f195e 100644 --- a/pkg/cmd/grafana-cli/commands/command_line.go +++ b/pkg/cmd/grafana-cli/utils/command_line.go @@ -1,4 +1,4 @@ -package commands +package utils import ( "github.com/codegangsta/cli" @@ -22,30 +22,30 @@ type CommandLine interface { PluginURL() string } -type contextCommandLine struct { +type ContextCommandLine struct { *cli.Context } -func (c *contextCommandLine) ShowHelp() { +func (c *ContextCommandLine) ShowHelp() { cli.ShowCommandHelp(c.Context, c.Command.Name) } -func (c *contextCommandLine) ShowVersion() { +func (c *ContextCommandLine) ShowVersion() { cli.ShowVersion(c.Context) } -func (c *contextCommandLine) Application() *cli.App { +func (c *ContextCommandLine) Application() *cli.App { return c.App } -func (c *contextCommandLine) PluginDirectory() string { +func (c *ContextCommandLine) PluginDirectory() string { return c.GlobalString("pluginsDir") } -func (c *contextCommandLine) RepoDirectory() string { +func (c *ContextCommandLine) RepoDirectory() string { return c.GlobalString("repo") } -func (c *contextCommandLine) PluginURL() string { +func (c *ContextCommandLine) PluginURL() string { return c.GlobalString("pluginUrl") } diff --git a/pkg/util/strings.go b/pkg/util/strings.go index 9eaa141edbfb..9ce5d03e126c 100644 --- a/pkg/util/strings.go +++ b/pkg/util/strings.go @@ -4,6 +4,7 @@ import ( "fmt" "math" "regexp" + "strings" "time" ) @@ -66,3 +67,19 @@ func GetAgeString(t time.Time) string { return "< 1m" } + +// ToCamelCase changes kebab case, snake case or mixed strings to camel case. See unit test for examples. +func ToCamelCase(str string) string { + var finalParts []string + parts := strings.Split(str, "_") + + for _, part := range parts { + finalParts = append(finalParts, strings.Split(part, "-")...) 
+ } + + for index, part := range finalParts[1:] { + finalParts[index+1] = strings.Title(part) + } + + return strings.Join(finalParts, "") +} diff --git a/pkg/util/strings_test.go b/pkg/util/strings_test.go index 0cc1905baff8..4bc52ee75217 100644 --- a/pkg/util/strings_test.go +++ b/pkg/util/strings_test.go @@ -37,3 +37,12 @@ func TestDateAge(t *testing.T) { So(GetAgeString(time.Now().Add(-time.Hour*24*409)), ShouldEqual, "1y") }) } + +func TestToCamelCase(t *testing.T) { + Convey("ToCamelCase", t, func() { + So(ToCamelCase("kebab-case-string"), ShouldEqual, "kebabCaseString") + So(ToCamelCase("snake_case_string"), ShouldEqual, "snakeCaseString") + So(ToCamelCase("mixed-case_string"), ShouldEqual, "mixedCaseString") + So(ToCamelCase("alreadyCamelCase"), ShouldEqual, "alreadyCamelCase") + }) +} From 3dda812f12653479084c567cbf63fa83be2a0659 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 27 May 2019 11:48:17 +0200 Subject: [PATCH 09/49] Chore: Update jquery to 3.4.1 in grafana ui (#17295) --- packages/grafana-ui/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/grafana-ui/package.json b/packages/grafana-ui/package.json index 46b402c92f97..1e672eb46ebf 100644 --- a/packages/grafana-ui/package.json +++ b/packages/grafana-ui/package.json @@ -23,7 +23,7 @@ "@types/react-color": "2.17.0", "classnames": "2.2.6", "d3": "5.9.1", - "jquery": "3.4.0", + "jquery": "3.4.1", "lodash": "4.17.11", "moment": "2.24.0", "papaparse": "4.6.3", From 5358c5fe6b0a966dcfe0495b47b06d18155ed647 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Mon, 27 May 2019 12:13:08 +0200 Subject: [PATCH 10/49] Gauge/BarGauge: font size improvements (#17292) --- .../src/components/BarGauge/BarGauge.tsx | 24 +++++++++++++------ .../__snapshots__/BarGauge.test.tsx.snap | 3 ++- .../grafana-ui/src/components/Gauge/Gauge.tsx | 5 ++-- 3 files changed, 22 insertions(+), 10 deletions(-) diff --git 
a/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx b/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx index 4351b6671d6e..cb08d1b15aba 100644 --- a/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx +++ b/packages/grafana-ui/src/components/BarGauge/BarGauge.tsx @@ -11,8 +11,9 @@ import { DisplayValue, Themeable, TimeSeriesValue, Threshold, VizOrientation } f const MIN_VALUE_HEIGHT = 18; const MAX_VALUE_HEIGHT = 50; const MIN_VALUE_WIDTH = 50; -const MAX_VALUE_WIDTH = 100; -const LINE_HEIGHT = 1.5; +const MAX_VALUE_WIDTH = 150; +const TITLE_LINE_HEIGHT = 1.5; +const VALUE_LINE_HEIGHT = 1; export interface Props extends Themeable { height: number; @@ -227,7 +228,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { return { fontSize: 14, width: width, - height: 14 * LINE_HEIGHT, + height: 14 * TITLE_LINE_HEIGHT, placement: 'below', }; } @@ -238,7 +239,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { const titleHeight = Math.max(Math.min(height * maxTitleHeightRatio, MAX_VALUE_HEIGHT), 17); return { - fontSize: titleHeight / LINE_HEIGHT, + fontSize: titleHeight / TITLE_LINE_HEIGHT, width: 0, height: titleHeight, placement: 'above', @@ -251,7 +252,7 @@ function calculateTitleDimensions(props: Props): TitleDimensions { const titleHeight = Math.max(height * maxTitleHeightRatio, MIN_VALUE_HEIGHT); return { - fontSize: titleHeight / LINE_HEIGHT, + fontSize: titleHeight / TITLE_LINE_HEIGHT, height: 0, width: Math.min(Math.max(width * maxTitleWidthRatio, 50), 200), placement: 'left', @@ -485,7 +486,7 @@ export function getValueColor(props: Props): string { * Only exported to for unit test */ function getValueStyles(value: string, color: string, width: number, height: number): CSSProperties { - const heightFont = height / LINE_HEIGHT; + const heightFont = height / VALUE_LINE_HEIGHT; const guess = width / (value.length * 1.1); const fontSize = Math.min(Math.max(guess, 14), heightFont); @@ -495,6 +496,15 @@ function 
getValueStyles(value: string, color: string, width: number, height: num width: `${width}px`, display: 'flex', alignItems: 'center', - fontSize: fontSize.toFixed(2) + 'px', + lineHeight: VALUE_LINE_HEIGHT, + fontSize: fontSize.toFixed(4) + 'px', }; } + +// function getTextWidth(text: string): number { +// const canvas = getTextWidth.canvas || (getTextWidth.canvas = document.createElement("canvas")); +// var context = canvas.getContext("2d"); +// context.font = "'Roboto', 'Helvetica Neue', Arial, sans-serif"; +// var metrics = context.measureText(text); +// return metrics.width; +// } diff --git a/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap b/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap index 4bb9395dd96e..1d341a9b0d4c 100644 --- a/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap +++ b/packages/grafana-ui/src/components/BarGauge/__snapshots__/BarGauge.test.tsx.snap @@ -18,8 +18,9 @@ exports[`BarGauge Render with basic options should render 1`] = ` "alignItems": "center", "color": "#73BF69", "display": "flex", - "fontSize": "27.27px", + "fontSize": "27.2727px", "height": "300px", + "lineHeight": 1, "paddingLeft": "10px", "width": "60px", } diff --git a/packages/grafana-ui/src/components/Gauge/Gauge.tsx b/packages/grafana-ui/src/components/Gauge/Gauge.tsx index eb49891d298e..0a0495c4848d 100644 --- a/packages/grafana-ui/src/components/Gauge/Gauge.tsx +++ b/packages/grafana-ui/src/components/Gauge/Gauge.tsx @@ -58,7 +58,7 @@ export class Gauge extends PureComponent { if (length > 12) { return FONT_SCALE - (length * 5) / 110; } - return FONT_SCALE - (length * 5) / 100; + return FONT_SCALE - (length * 5) / 101; } draw() { @@ -78,7 +78,8 @@ export class Gauge extends PureComponent { const gaugeWidthReduceRatio = showThresholdLabels ? 
1.5 : 1; const gaugeWidth = Math.min(dimension / 5.5, 40) / gaugeWidthReduceRatio; const thresholdMarkersWidth = gaugeWidth / 5; - const fontSize = Math.min(dimension / 5.5, 100) * (value.text !== null ? this.getFontScale(value.text.length) : 1); + const fontSize = Math.min(dimension / 4, 100) * (value.text !== null ? this.getFontScale(value.text.length) : 1); + const thresholdLabelFontSize = fontSize / 2.5; const options: any = { From db32c7dcf51e1b508933e6e2c2ce97ca4daa9c61 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20H=C3=A4ggmark?= Date: Mon, 27 May 2019 12:22:43 +0200 Subject: [PATCH 11/49] Build: Enables end-to-end tests in build-master workflow (#17268) * Fix: Adds back necessary aria-labels for e2e tests * Build: Adds end-to-end-tests job to build-master workflow * Build: Changes grafana image used --- .circleci/config.yml | 9 ++++++--- .../datasources/settings/DataSourceSettingsPage.tsx | 6 ++++-- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 91ace22d33b9..e90af0509225 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -73,7 +73,7 @@ jobs: end-to-end-test: docker: - image: circleci/node:8-browsers - - image: grafana/grafana:master + - image: grafana/grafana-dev:master-$CIRCLE_SHA1 steps: - run: dockerize -wait tcp://127.0.0.1:3000 -timeout 120s - checkout @@ -629,7 +629,7 @@ workflows: - mysql-integration-test - postgres-integration-test - build-oss-msi - filters: *filter-only-master + filters: *filter-only-master - grafana-docker-master: requires: - build-all @@ -662,7 +662,10 @@ workflows: - mysql-integration-test - postgres-integration-test filters: *filter-only-master - + - end-to-end-test: + requires: + - grafana-docker-master + filters: *filter-only-master release: jobs: - build-all: diff --git a/public/app/features/datasources/settings/DataSourceSettingsPage.tsx b/public/app/features/datasources/settings/DataSourceSettingsPage.tsx index 5c31b946149c..30d4d6ea38ca 
100644 --- a/public/app/features/datasources/settings/DataSourceSettingsPage.tsx +++ b/public/app/features/datasources/settings/DataSourceSettingsPage.tsx @@ -276,7 +276,7 @@ export class DataSourceSettingsPage extends PureComponent {
{testingMessage && ( -
+
{testingStatus === 'error' ? ( @@ -285,7 +285,9 @@ export class DataSourceSettingsPage extends PureComponent { )}
-
{testingMessage}
+
+ {testingMessage} +
)} From 2146f837cfe07931fa0943036292a83a4258bc3c Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Mon, 27 May 2019 14:05:32 +0300 Subject: [PATCH 12/49] Build(devenv): correct the context issue (#17291) With the previous configuration `docker-compose build` was always failing. This moves the dockerfiles in the parent dir and changes paths as a result. Ref moby/moby#2745 --- .../Dockerfile => admins-ldap-server.Dockerfile} | 8 ++++---- .../docker/blocks/multiple-openldap/docker-compose.yaml | 8 ++++++-- .../Dockerfile => ldap-server.Dockerfile} | 8 ++++---- 3 files changed, 14 insertions(+), 10 deletions(-) rename devenv/docker/blocks/multiple-openldap/{ldap-server/Dockerfile => admins-ldap-server.Dockerfile} (74%) rename devenv/docker/blocks/multiple-openldap/{admins-ldap-server/Dockerfile => ldap-server.Dockerfile} (75%) diff --git a/devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile b/devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile similarity index 74% rename from devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile rename to devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile index 979d01c7dad4..29e581d2b137 100644 --- a/devenv/docker/blocks/multiple-openldap/ldap-server/Dockerfile +++ b/devenv/docker/blocks/multiple-openldap/admins-ldap-server.Dockerfile @@ -19,11 +19,11 @@ EXPOSE 389 VOLUME ["/etc/ldap", "/var/lib/ldap"] -COPY modules/ /etc/ldap.dist/modules -COPY prepopulate/ /etc/ldap.dist/prepopulate +COPY admins-ldap-server/modules/ /etc/ldap.dist/modules +COPY admins-ldap-server/prepopulate/ /etc/ldap.dist/prepopulate -COPY ../entrypoint.sh /entrypoint.sh -COPY ../prepopulate.sh /prepopulate.sh +COPY ./entrypoint.sh /entrypoint.sh +COPY ./prepopulate.sh /prepopulate.sh ENTRYPOINT ["/entrypoint.sh"] diff --git a/devenv/docker/blocks/multiple-openldap/docker-compose.yaml b/devenv/docker/blocks/multiple-openldap/docker-compose.yaml index 74f5d29a90ff..7ed0ca2e840a 100644 --- 
a/devenv/docker/blocks/multiple-openldap/docker-compose.yaml +++ b/devenv/docker/blocks/multiple-openldap/docker-compose.yaml @@ -1,5 +1,7 @@ admins-openldap: - build: docker/blocks/multiple-openldap/admins-ldap-server + build: + context: docker/blocks/multiple-openldap + dockerfile: ./admins-ldap-server.Dockerfile environment: SLAPD_PASSWORD: grafana SLAPD_DOMAIN: grafana.org @@ -8,7 +10,9 @@ - "389:389" openldap: - build: docker/blocks/multiple-openldap/ldap-server + build: + context: docker/blocks/multiple-openldap + dockerfile: ./ldap-server.Dockerfile environment: SLAPD_PASSWORD: grafana SLAPD_DOMAIN: grafana.org diff --git a/devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile b/devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile similarity index 75% rename from devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile rename to devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile index 979d01c7dad4..7604d1118a3f 100644 --- a/devenv/docker/blocks/multiple-openldap/admins-ldap-server/Dockerfile +++ b/devenv/docker/blocks/multiple-openldap/ldap-server.Dockerfile @@ -19,11 +19,11 @@ EXPOSE 389 VOLUME ["/etc/ldap", "/var/lib/ldap"] -COPY modules/ /etc/ldap.dist/modules -COPY prepopulate/ /etc/ldap.dist/prepopulate +COPY ldap-server/modules/ /etc/ldap.dist/modules +COPY ldap-server/prepopulate/ /etc/ldap.dist/prepopulate -COPY ../entrypoint.sh /entrypoint.sh -COPY ../prepopulate.sh /prepopulate.sh +COPY ./entrypoint.sh /entrypoint.sh +COPY ./prepopulate.sh /prepopulate.sh ENTRYPOINT ["/entrypoint.sh"] From 4965e10db1941e061adf41fb06b7f8c2d16543ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20H=C3=A4ggmark?= Date: Mon, 27 May 2019 13:16:33 +0200 Subject: [PATCH 13/49] Build: Removes e2e-tests from Grafana master workflow (#17301) --- .circleci/config.yml | 4 ---- 1 file changed, 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index e90af0509225..42a4d1bb2b91 100644 --- a/.circleci/config.yml 
+++ b/.circleci/config.yml @@ -662,10 +662,6 @@ workflows: - mysql-integration-test - postgres-integration-test filters: *filter-only-master - - end-to-end-test: - requires: - - grafana-docker-master - filters: *filter-only-master release: jobs: - build-all: From 0b3768e551c771bc05d155184ea8cf8d5f63fd75 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 27 May 2019 14:59:42 +0200 Subject: [PATCH 14/49] release: v6.2.1 changelog update (#17303) --- CHANGELOG.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9224bb241aeb..a649bf0cc229 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,18 @@ +# 6.3.0 (unreleased) + +# 6.2.1 (2019-05-27) + +### Features / Enhancements + * **CLI**: Add command to migrate all datasources to use encrypted password fields . [#17118](https://github.com/grafana/grafana/pull/17118), [@aocenas](https://github.com/aocenas) + * **Gauge/BarGauge**: Improvements to auto value font size . [#17292](https://github.com/grafana/grafana/pull/17292), [@torkelo](https://github.com/torkelo) + +### Bug Fixes + * **Auth Proxy**: Resolve database is locked errors. [#17274](https://github.com/grafana/grafana/pull/17274), [@marefr](https://github.com/marefr) + * **Database**: Retry transaction if sqlite returns database is locked error. [#17276](https://github.com/grafana/grafana/pull/17276), [@marefr](https://github.com/marefr) + * **Explore**: Fixes so clicking in a Prometheus Table the query is filtered by clicked value. [#17083](https://github.com/grafana/grafana/pull/17083), [@hugohaggmark](https://github.com/hugohaggmark) + * **Singlestat**: Fixes issue with value placement and line wraps. [#17249](https://github.com/grafana/grafana/pull/17249), [@torkelo](https://github.com/torkelo) + * **Tech**: Update jQuery to 3.4.1 to fix issue on iOS 10 based browers as well as Chrome 53.x . 
[#17290](https://github.com/grafana/grafana/pull/17290), [@timbutler](https://github.com/timbutler) + # 6.2.0 (2019-05-22) ### Bug Fixes From b547a0cb34615d9eb3d280a9cf8dd50eb3ae6ccb Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 27 May 2019 16:15:25 +0200 Subject: [PATCH 15/49] update latest.json to latest stable version (#17306) --- latest.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/latest.json b/latest.json index 18770c57cce2..97724801882c 100644 --- a/latest.json +++ b/latest.json @@ -1,4 +1,4 @@ { - "stable": "6.2.0", - "testing": "6.2.0" + "stable": "6.2.1", + "testing": "6.2.1" } From 04d473b3e533db0cc1b9abed28b9953941da499e Mon Sep 17 00:00:00 2001 From: Abhilash Gnan Date: Mon, 27 May 2019 17:47:29 +0200 Subject: [PATCH 16/49] HTTP Server: Serve Grafana with a custom URL path prefix (#17048) Adds a new [server] setting `serve_from_sub_path`. By enabling this setting and using a subpath in `root_url` setting, e.g. `root_url = http://localhost:3000/grafana`, Grafana will be accessible on `http://localhost:3000/grafana`. By default it is set to `false` for compatibility reasons. Closes #16623 --- conf/defaults.ini | 3 +++ conf/sample.ini | 3 +++ docs/sources/installation/configuration.md | 9 +++++++++ pkg/api/http_server.go | 6 +++++- pkg/setting/setting.go | 17 +++++++++++------ 5 files changed, 31 insertions(+), 7 deletions(-) diff --git a/conf/defaults.ini b/conf/defaults.ini index ca49f1212698..ec83a5ea1a5a 100644 --- a/conf/defaults.ini +++ b/conf/defaults.ini @@ -47,6 +47,9 @@ enforce_domain = false # The full public facing url root_url = %(protocol)s://%(domain)s:%(http_port)s/ +# Serve Grafana from subpath specified in `root_url` setting. By default it is set to `false` for compatibility reasons. 
+serve_from_sub_path = false + # Log web requests router_logging = false diff --git a/conf/sample.ini b/conf/sample.ini index de684bc98f67..dc7d5bc54677 100644 --- a/conf/sample.ini +++ b/conf/sample.ini @@ -48,6 +48,9 @@ # If you use reverse proxy and sub path specify full url (with sub path) ;root_url = http://localhost:3000 +# Serve Grafana from subpath specified in `root_url` setting. By default it is set to `false` for compatibility reasons. +;serve_from_sub_path = false + # Log web requests ;router_logging = false diff --git a/docs/sources/installation/configuration.md b/docs/sources/installation/configuration.md index a865234ebeba..af0261032d4b 100644 --- a/docs/sources/installation/configuration.md +++ b/docs/sources/installation/configuration.md @@ -154,6 +154,15 @@ callback URL to be correct). > in front of Grafana that exposes it through a subpath. In that > case add the subpath to the end of this URL setting. +### serve_from_sub_path + +Serve Grafana from subpath specified in `root_url` setting. By +default it is set to `false` for compatibility reasons. + +By enabling this setting and using a subpath in `root_url` above, e.g. +`root_url = http://localhost:3000/grafana`, Grafana will be accessible on +`http://localhost:3000/grafana`. 
+ ### static_root_path The path to the directory where the front end files (HTML, JS, and CSS diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go index 7ec4fbaa3b3c..d2094b33cb1a 100644 --- a/pkg/api/http_server.go +++ b/pkg/api/http_server.go @@ -30,7 +30,7 @@ import ( "github.com/grafana/grafana/pkg/setting" "github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus/promhttp" - "gopkg.in/macaron.v1" + macaron "gopkg.in/macaron.v1" ) func init() { @@ -227,6 +227,10 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() { m.Use(middleware.AddDefaultResponseHeaders()) + if setting.ServeFromSubPath && setting.AppSubUrl != "" { + m.SetURLPrefix(setting.AppSubUrl) + } + m.Use(macaron.Renderer(macaron.RenderOptions{ Directory: path.Join(setting.StaticRootPath, "views"), IndentJSON: macaron.Env != macaron.PROD, diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go index 65e4e0e25021..a6c07d232e10 100644 --- a/pkg/setting/setting.go +++ b/pkg/setting/setting.go @@ -47,10 +47,11 @@ var ( var ( // App settings. 
- Env = DEV - AppUrl string - AppSubUrl string - InstanceName string + Env = DEV + AppUrl string + AppSubUrl string + ServeFromSubPath bool + InstanceName string // build BuildVersion string @@ -205,8 +206,9 @@ type Cfg struct { Logger log.Logger // HTTP Server Settings - AppUrl string - AppSubUrl string + AppUrl string + AppSubUrl string + ServeFromSubPath bool // Paths ProvisioningPath string @@ -610,8 +612,11 @@ func (cfg *Cfg) Load(args *CommandLineArgs) error { if err != nil { return err } + ServeFromSubPath = server.Key("serve_from_sub_path").MustBool(false) + cfg.AppUrl = AppUrl cfg.AppSubUrl = AppSubUrl + cfg.ServeFromSubPath = ServeFromSubPath Protocol = HTTP protocolStr, err := valueAsString(server, "protocol", "http") From a07296bf24856bb66b2103929af179ee627f8985 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 27 May 2019 18:43:04 +0200 Subject: [PATCH 17/49] explore: don't parse log levels if provided by field or label (#17180) If a field or a label named level is returned from datasource that is used as log level for the logs result instead of parsing the log level from the message. 
Closes #17122 --- packages/grafana-ui/src/utils/logs.ts | 9 +++++++++ public/app/core/logs_model.ts | 13 ++++++++++++- public/app/core/specs/logs_model.test.ts | 18 ++++++++++++------ 3 files changed, 33 insertions(+), 7 deletions(-) diff --git a/packages/grafana-ui/src/utils/logs.ts b/packages/grafana-ui/src/utils/logs.ts index fb8c7977e2ad..b5c45b635daf 100644 --- a/packages/grafana-ui/src/utils/logs.ts +++ b/packages/grafana-ui/src/utils/logs.ts @@ -23,6 +23,15 @@ export function getLogLevel(line: string): LogLevel { return LogLevel.unknown; } +export function getLogLevelFromKey(key: string): LogLevel { + const level = (LogLevel as any)[key]; + if (level) { + return level; + } + + return LogLevel.unknown; +} + export function addLogLevelToSeries(series: SeriesData, lineIndex: number): SeriesData { return { ...series, // Keeps Tags, RefID etc diff --git a/public/app/core/logs_model.ts b/public/app/core/logs_model.ts index d2a4780b62a7..5fe95a182d07 100644 --- a/public/app/core/logs_model.ts +++ b/public/app/core/logs_model.ts @@ -13,6 +13,7 @@ import { toLegacyResponseData, FieldCache, FieldType, + getLogLevelFromKey, LogRowModel, LogsModel, LogsMetaItem, @@ -368,7 +369,17 @@ export function processLogSeriesRow( const timeEpochMs = time.valueOf(); const timeFromNow = time.fromNow(); const timeLocal = time.format('YYYY-MM-DD HH:mm:ss'); - const logLevel = getLogLevel(message); + + let logLevel = LogLevel.unknown; + const logLevelField = fieldCache.getFieldByName('level'); + + if (logLevelField) { + logLevel = getLogLevelFromKey(row[logLevelField.index]); + } else if (series.labels && Object.keys(series.labels).indexOf('level') !== -1) { + logLevel = getLogLevelFromKey(series.labels['level']); + } else { + logLevel = getLogLevel(message); + } const hasAnsi = hasAnsiCodes(message); const searchWords = series.meta && series.meta.searchWords ? 
series.meta.searchWords : []; diff --git a/public/app/core/specs/logs_model.test.ts b/public/app/core/specs/logs_model.test.ts index a2d47412bd06..c83f0ce6c1c0 100644 --- a/public/app/core/specs/logs_model.test.ts +++ b/public/app/core/specs/logs_model.test.ts @@ -1,4 +1,4 @@ -import { SeriesData, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy } from '@grafana/ui'; +import { SeriesData, FieldType, LogsModel, LogsMetaKind, LogsDedupStrategy, LogLevel } from '@grafana/ui'; import { dedupLogRows, calculateFieldStats, @@ -460,8 +460,12 @@ describe('seriesDataToLogsModel', () => { name: 'message', type: FieldType.string, }, + { + name: 'level', + type: FieldType.string, + }, ], - rows: [['1970-01-01T00:00:01Z', 'WARN boooo']], + rows: [['1970-01-01T00:00:01Z', 'WARN boooo', 'dbug']], }, ]; const logsModel = seriesDataToLogsModel(series, 0); @@ -470,7 +474,7 @@ describe('seriesDataToLogsModel', () => { { entry: 'WARN boooo', labels: undefined, - logLevel: 'warning', + logLevel: LogLevel.debug, uniqueLabels: {}, }, ]); @@ -482,6 +486,7 @@ describe('seriesDataToLogsModel', () => { labels: { foo: 'bar', baz: '1', + level: 'dbug', }, fields: [ { @@ -500,6 +505,7 @@ describe('seriesDataToLogsModel', () => { labels: { foo: 'bar', baz: '2', + level: 'err', }, fields: [ { @@ -521,19 +527,19 @@ describe('seriesDataToLogsModel', () => { { entry: 'INFO 2', labels: { foo: 'bar', baz: '2' }, - logLevel: 'info', + logLevel: LogLevel.error, uniqueLabels: { baz: '2' }, }, { entry: 'WARN boooo', labels: { foo: 'bar', baz: '1' }, - logLevel: 'warning', + logLevel: LogLevel.debug, uniqueLabels: { baz: '1' }, }, { entry: 'INFO 1', labels: { foo: 'bar', baz: '2' }, - logLevel: 'info', + logLevel: LogLevel.error, uniqueLabels: { baz: '2' }, }, ]); From 83187fd8eae9b8bb54121083761c849d0adbdcde Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 28 May 2019 09:06:30 +0200 Subject: [PATCH 18/49] update v6.2-beta1 changelog with missing pr (#17308) --- CHANGELOG.md | 1 + 1 file 
changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a649bf0cc229..64d8d41d5f5b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -85,6 +85,7 @@ repo on July 1st. Make sure you have switched to the new repo by then. The new r * **Provisioning**: Add API endpoint to reload provisioning configs. [#16579](https://github.com/grafana/grafana/pull/16579), [@aocenas](https://github.com/aocenas) * **Provisioning**: Do not allow deletion of provisioned dashboards. [#16211](https://github.com/grafana/grafana/pull/16211), [@aocenas](https://github.com/aocenas) * **Provisioning**: Interpolate env vars in provisioning files. [#16499](https://github.com/grafana/grafana/pull/16499), [@aocenas](https://github.com/aocenas) + * **Provisioning**: Support FolderUid in Dashboard Provisioning Config. [#16559](https://github.com/grafana/grafana/pull/16559), [@swtch1](https://github.com/swtch1) * **Security**: Add new setting allow_embedding. [#16853](https://github.com/grafana/grafana/pull/16853), [@marefr](https://github.com/marefr) * **Security**: Store datasource passwords encrypted in secureJsonData. [#16175](https://github.com/grafana/grafana/pull/16175), [@aocenas](https://github.com/aocenas) * **UX**: Improve Grafana usage for smaller screens. 
[#16783](https://github.com/grafana/grafana/pull/16783), [@torkelo](https://github.com/torkelo) From 83af1bdff3322331e89d6ba7f10fb483e524922e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0imon=20Podlipsk=C3=BD?= Date: Tue, 28 May 2019 10:13:49 +0200 Subject: [PATCH 19/49] Frontend/utils: Add missing type (#17312) --- public/app/core/utils/kbn.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index d747fa37f579..47b753f91233 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -133,7 +133,7 @@ kbn.secondsToHms = seconds => { }; kbn.secondsToHhmmss = seconds => { - const strings = []; + const strings: string[] = []; const numhours = Math.floor(seconds / 3600); const numminutes = Math.floor((seconds % 3600) / 60); const numseconds = Math.floor((seconds % 3600) % 60); From 4b0ad174ff43ba8b629a1c6b3c75051ad7f7c6c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=A0imon=20Podlipsk=C3=BD?= Date: Tue, 28 May 2019 10:17:03 +0200 Subject: [PATCH 20/49] Frontend/utils: Import has only from lodash (#17311) --- public/app/core/utils/kbn.ts | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/public/app/core/utils/kbn.ts b/public/app/core/utils/kbn.ts index 47b753f91233..1a1cf6f56ba1 100644 --- a/public/app/core/utils/kbn.ts +++ b/public/app/core/utils/kbn.ts @@ -1,4 +1,4 @@ -import _ from 'lodash'; +import { has } from 'lodash'; import { getValueFormat, getValueFormatterIndex, getValueFormats, stringToJsRegex } from '@grafana/ui'; import deprecationWarning from '@grafana/ui/src/utils/deprecationWarning'; @@ -193,7 +193,7 @@ kbn.calculateInterval = (range, resolution, lowLimitInterval) => { kbn.describe_interval = str => { const matches = str.match(kbn.interval_regex); - if (!matches || !_.has(kbn.intervals_in_seconds, matches[2])) { + if (!matches || !has(kbn.intervals_in_seconds, matches[2])) { throw new Error('Invalid interval string, expecting a number 
followed by one of "Mwdhmsy"'); } else { return { From 382ebd6362c5001a4c59e30e624fbb36325d4539 Mon Sep 17 00:00:00 2001 From: Ryan McKinley Date: Tue, 28 May 2019 01:18:09 -0700 Subject: [PATCH 21/49] Frontend/SeriesData: Fix for convert SeriesData to Table format (#17314) --- .../src/utils/processSeriesData.test.ts | 23 ++++++++++++++++++- .../grafana-ui/src/utils/processSeriesData.ts | 14 +++++------ 2 files changed, 28 insertions(+), 9 deletions(-) diff --git a/packages/grafana-ui/src/utils/processSeriesData.test.ts b/packages/grafana-ui/src/utils/processSeriesData.test.ts index 96afa79aa8c0..ea582e89b3a8 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.test.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.test.ts @@ -6,7 +6,7 @@ import { guessFieldTypes, guessFieldTypeFromValue, } from './processSeriesData'; -import { FieldType, TimeSeries } from '../types/data'; +import { FieldType, TimeSeries, SeriesData, TableData } from '../types/data'; import { dateTime } from './moment_wrapper'; describe('toSeriesData', () => { @@ -99,4 +99,25 @@ describe('SerisData backwards compatibility', () => { expect(isTableData(roundtrip)).toBeTruthy(); expect(roundtrip).toMatchObject(table); }); + + it('converts SeriesData to TableData to series and back again', () => { + const series: SeriesData = { + refId: 'Z', + meta: { + somethign: 8, + }, + fields: [ + { name: 'T', type: FieldType.time }, // first + { name: 'N', type: FieldType.number, filterable: true }, + { name: 'S', type: FieldType.string, filterable: true }, + ], + rows: [[1, 100, '1'], [2, 200, '2'], [3, 300, '3']], + }; + const table = toLegacyResponseData(series) as TableData; + expect(table.meta).toBe(series.meta); + expect(table.refId).toBe(series.refId); + + const names = table.columns.map(c => c.text); + expect(names).toEqual(['T', 'N', 'S']); + }); }); diff --git a/packages/grafana-ui/src/utils/processSeriesData.ts b/packages/grafana-ui/src/utils/processSeriesData.ts index 
1ba31e3bbdee..84aadcc9f655 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.ts @@ -4,7 +4,7 @@ import isString from 'lodash/isString'; import isBoolean from 'lodash/isBoolean'; // Types -import { SeriesData, Field, TimeSeries, FieldType, TableData } from '../types/index'; +import { SeriesData, Field, TimeSeries, FieldType, TableData, Column } from '../types/index'; import { isDateTime } from './moment_wrapper'; function convertTableToSeriesData(table: TableData): SeriesData { @@ -171,14 +171,12 @@ export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData return { columns: fields.map(f => { - return { - text: f.name, - filterable: f.filterable, - unit: f.unit, - refId: series.refId, - meta: series.meta, - }; + const { name, ...column } = f; + (column as Column).text = name; + return column as Column; }), + refId: series.refId, + meta: series.meta, rows, }; }; From 27874a18810ca276a562556ffbac7e9c4de062cf Mon Sep 17 00:00:00 2001 From: Shavonn Brown Date: Tue, 28 May 2019 09:04:42 -0400 Subject: [PATCH 22/49] 16365 change clashing variable names (#17140) * Fix: change and to and so not clashing with grafana vars (#16365) * Fix: change and to and so not clashing with grafana vars (#16365) * Fix: update now to datetime (#16365) * Fix: test should look for datetime instead of now (#16365) * Fix: _az suffix to datasource convention (#16365) * Fix: convert vars to macro, update doc (#16365) * Fix: convert vars to macro, update doc (#16365) * Fix: remove support for time vars (#16365) * Fix: confilct from master * add migration on editor open * fix migration var name --- docs/sources/features/datasources/azuremonitor.md | 6 ++++-- .../editor/kusto/kusto.ts | 10 ++++++++++ .../log_analytics/querystring_builder.test.ts | 14 +++++++------- .../log_analytics/querystring_builder.ts | 13 ++++++++++--- .../partials/annotations.editor.html | 4 ++-- .../partials/query.editor.html | 6 +++--- 
.../grafana-azure-monitor-datasource/query_ctrl.ts | 7 +++++++ 7 files changed, 43 insertions(+), 17 deletions(-) diff --git a/docs/sources/features/datasources/azuremonitor.md b/docs/sources/features/datasources/azuremonitor.md index ee40248fe001..114187499f17 100644 --- a/docs/sources/features/datasources/azuremonitor.md +++ b/docs/sources/features/datasources/azuremonitor.md @@ -254,6 +254,10 @@ To make writing queries easier there are several Grafana macros that can be used `datetimeColumn ≥ datetime(2018-06-05T18:09:58.907Z) and` `datetimeColumn ≤ datetime(2018-06-05T20:09:58.907Z)` where the from and to datetimes are from the Grafana time picker. +- `$__timeFrom()` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T18:09:58.907Z)`. + +- `$__timeTo()` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T20:09:58.907Z)`. + - `$__escapeMulti($myVar)` - is to be used with multi-value template variables that contain illegal characters. If `$myVar` has the following two values as a string `'\\grafana-vm\Network(eth0)\Total','\\hello!'`, then it expands to: `@'\\grafana-vm\Network(eth0)\Total', @'\\hello!'`. If using single value variables there is no need for this macro, simply escape the variable inline instead - `@'\$myVar'`. - `$__contains(colName, $myVar)` - is to be used with multi-value template variables. If `$myVar` has the value `'value1','value2'`, it expands to: `colName in ('value1','value2')`. @@ -264,8 +268,6 @@ To make writing queries easier there are several Grafana macros that can be used There are also some Grafana variables that can be used in Azure Log Analytics queries: -- `$__from` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T18:09:58.907Z)`. -- `$__to` - Returns the From datetime from the Grafana picker. Example: `datetime(2018-06-05T20:09:58.907Z)`. 
- `$__interval` - Grafana calculates the minimum time grain that can be used to group by time in queries. More details on how it works [here]({{< relref "reference/templating.md#interval-variables" >}}). It returns a time grain like `5m` or `1h` that can be used in the bin function. E.g. `summarize count() by bin(TimeGenerated, $__interval)` ### Azure Log Analytics Alerting diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts index 5bf7dfb19eb5..172aa5ee077a 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/editor/kusto/kusto.ts @@ -649,6 +649,16 @@ export const grafanaMacros = [ display: '$__timeFilter()', hint: 'Macro that uses the selected timerange in Grafana to filter the query.', }, + { + text: '$__timeTo', + display: '$__timeTo()', + hint: 'Returns the From datetime from the Grafana picker. Example: datetime(2018-06-05T20:09:58.907Z).', + }, + { + text: '$__timeFrom', + display: '$__timeFrom()', + hint: 'Returns the From datetime from the Grafana picker. 
Example: datetime(2018-06-05T18:09:58.907Z).', + }, { text: '$__escapeMulti', display: '$__escapeMulti()', diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts index fab268a34401..186c78743f8c 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.test.ts @@ -90,27 +90,27 @@ describe('LogAnalyticsDatasource', () => { }); }); - describe('when using $__from and $__to is in the query and range is until now', () => { + describe('when using $__timeFrom and $__timeTo is in the query and range is until now', () => { beforeEach(() => { - builder.rawQueryString = 'query=Tablename | where myTime >= $__from and myTime <= $__to'; + builder.rawQueryString = 'query=Tablename | where myTime >= $__timeFrom() and myTime <= $__timeTo()'; }); - it('should replace $__from and $__to with a datetime and the now() function', () => { + it('should replace $__timeFrom and $__timeTo with a datetime and the now() function', () => { const query = builder.generate().uriString; expect(query).toContain('where%20myTime%20%3E%3D%20datetime('); - expect(query).toContain('myTime%20%3C%3D%20now()'); + expect(query).toContain('myTime%20%3C%3D%20datetime('); }); }); - describe('when using $__from and $__to is in the query and range is a specific interval', () => { + describe('when using $__timeFrom and $__timeTo is in the query and range is a specific interval', () => { beforeEach(() => { - builder.rawQueryString = 'query=Tablename | where myTime >= $__from and myTime <= $__to'; + builder.rawQueryString = 'query=Tablename | where myTime >= $__timeFrom() and myTime <= $__timeTo()'; builder.options.range.to = dateTime().subtract(1, 'hour'); builder.options.rangeRaw.to = 
'now-1h'; }); - it('should replace $__from and $__to with datetimes', () => { + it('should replace $__timeFrom and $__timeTo with datetimes', () => { const query = builder.generate().uriString; expect(query).toContain('where%20myTime%20%3E%3D%20datetime('); diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts index afb64da8f4c6..ad72c4eb2eb9 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/log_analytics/querystring_builder.ts @@ -21,12 +21,16 @@ export default class LogAnalyticsQuerystringBuilder { if (p1 === 'timeFilter') { return this.getTimeFilter(p2, this.options); } + if (p1 === 'timeFrom') { + return this.getFrom(this.options); + } + if (p1 === 'timeTo') { + return this.getUntil(this.options); + } return match; }); queryString = queryString.replace(/\$__interval/gi, this.options.interval); - queryString = queryString.replace(/\$__from/gi, this.getFrom(this.options)); - queryString = queryString.replace(/\$__to/gi, this.getUntil(this.options)); } const rawQuery = queryString; queryString = encodeURIComponent(queryString); @@ -44,7 +48,10 @@ export default class LogAnalyticsQuerystringBuilder { getUntil(options) { if (options.rangeRaw.to === 'now') { - return 'now()'; + const now = Date.now(); + return `datetime(${dateTime(now) + .startOf('minute') + .toISOString()})`; } else { const until = options.range.to; return `datetime(${dateTime(until) diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html index a5b2b2adc5be..7a855a10b44b 100644 --- 
a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/annotations.editor.html @@ -67,8 +67,8 @@ - $__timeFilter(datetimeColumn) -> datetimeColumn ≥ datetime(2018-06-05T18:09:58.907Z) and datetimeColumn ≤ datetime(2018-06-05T20:09:58.907Z) Or build your own conditionals using these built-in variables which just return the values: - - $__from -> datetime(2018-06-05T18:09:58.907Z) - - $__to -> datetime(2018-06-05T20:09:58.907Z) + - $__timeFrom -> datetime(2018-06-05T18:09:58.907Z) + - $__timeTo -> datetime(2018-06-05T20:09:58.907Z) - $__interval -> 5m
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html index 4690bc5be26c..1c2b14f366ed 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/partials/query.editor.html @@ -189,13 +189,13 @@ If using the All option, then check the Include All Option checkbox and in the Custom all value field type in: all. If All is chosen -> 1 == 1 Or build your own conditionals using these built-in variables which just return the values: - - $__from -> datetime(2018-06-05T18:09:58.907Z) - - $__to -> datetime(2018-06-05T20:09:58.907Z) + - $__timeFrom -> datetime(2018-06-05T18:09:58.907Z) + - $__timeTo -> datetime(2018-06-05T20:09:58.907Z) - $__interval -> 5m Examples: - ¡ where $__timeFilter - - | where TimeGenerated ≥ $__from and TimeGenerated ≤ $__to + - | where TimeGenerated ≥ $__timeFrom and TimeGenerated ≤ $__timeTo - | summarize count() by Category, bin(TimeGenerated, $__interval)
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts index cc623d8df981..9fc12e9b9169 100644 --- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts +++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/query_ctrl.ts @@ -110,6 +110,8 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { this.migrateTimeGrains(); + this.migrateToFromTimes(); + this.panelCtrl.events.on('data-received', this.onDataReceived.bind(this), $scope); this.panelCtrl.events.on('data-error', this.onDataError.bind(this), $scope); this.resultFormats = [{ text: 'Time series', value: 'time_series' }, { text: 'Table', value: 'table' }]; @@ -171,6 +173,11 @@ export class AzureMonitorQueryCtrl extends QueryCtrl { } } + migrateToFromTimes() { + this.target.azureLogAnalytics.query = this.target.azureLogAnalytics.query.replace(/\$__from\s/gi, '$__timeFrom() '); + this.target.azureLogAnalytics.query = this.target.azureLogAnalytics.query.replace(/\$__to\s/gi, '$__timeTo() '); + } + replace(variable: string) { return this.templateSrv.replace(variable, this.panelCtrl.panel.scopedVars); } From 2dc660d533088e52378d0eb5ceff6aba927c290b Mon Sep 17 00:00:00 2001 From: Carl Bergquist Date: Tue, 28 May 2019 16:02:14 +0200 Subject: [PATCH 23/49] docs: remove my email from docs examples (#17325) I dont want emails from companies who test alert notifiers :) --- .../alerting_notification_channels.md | 20 +++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/docs/sources/http_api/alerting_notification_channels.md b/docs/sources/http_api/alerting_notification_channels.md index b8db1595aaa1..aa6e7297ac2d 100644 --- a/docs/sources/http_api/alerting_notification_channels.md +++ b/docs/sources/http_api/alerting_notification_channels.md @@ -54,7 +54,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, 
"settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -93,7 +93,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -130,7 +130,7 @@ Content-Type: application/json "sendReminder": false, "disableResolveMessage": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -158,7 +158,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "isDefault": false, "sendReminder": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ -177,7 +177,7 @@ Content-Type: application/json "isDefault": false, "sendReminder": false, "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2018-04-23T14:44:09+02:00", "updated": "2018-08-20T15:47:49+02:00" @@ -206,7 +206,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ -226,7 +226,7 @@ Content-Type: application/json "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2017-01-01 12:34", "updated": "2017-01-01 12:34" @@ -256,7 +256,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` @@ 
-276,7 +276,7 @@ Content-Type: application/json "sendReminder": true, "frequency": "15m", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" }, "created": "2017-01-01 12:34", "updated": "2017-01-01 12:34" @@ -353,7 +353,7 @@ Authorization: Bearer eyJrIjoiT0tTcG1pUlY2RnVKZTFVaDFsNFZXdE9ZWmNrMkZYbk { "type": "email", "settings": { - "addresses": "carl@grafana.com;dev@grafana.com" + "addresses": "dev@grafana.com" } } ``` From 1e508d7288d63e9322ff0f8b1b4cee9646d9b930 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Tue, 28 May 2019 16:34:28 +0200 Subject: [PATCH 24/49] adds auth example for the cli cherrypick task (#17307) --- scripts/cli/tasks/cherrypick.ts | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/scripts/cli/tasks/cherrypick.ts b/scripts/cli/tasks/cherrypick.ts index 966781951828..ac92f223a7eb 100644 --- a/scripts/cli/tasks/cherrypick.ts +++ b/scripts/cli/tasks/cherrypick.ts @@ -7,6 +7,10 @@ const cherryPickRunner: TaskRunner = async () => { let client = axios.create({ baseURL: 'https://api.github.com/repos/grafana/grafana', timeout: 10000, + // auth: { + // username: '', + // password: '', + // }, }); const res = await client.get('/issues', { From 9ff44b5037e909bc0952cdd556ab85901b05cccb Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Tue, 28 May 2019 19:32:14 +0300 Subject: [PATCH 25/49] Build(makefile): improve error handling (#17281) * Build(makefile): improve error handling Ref baa55ab6ae14ae23864f650cec2bb180b36fdabe --- Makefile | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 9f56e3da8f1a..7e5a3798f70c 100644 --- a/Makefile +++ b/Makefile @@ -86,16 +86,25 @@ revive: scripts/go/bin/revive # create docker-compose file with provided sources and start them # example: make devenv sources=postgres,openldap +ifeq ($(sources),) +devenv: + @printf 'You have to define sources for this command \nexample: make devenv sources=postgres,openldap\n' 
+else devenv: devenv-down $(eval targets := $(shell echo '$(sources)' | tr "," " ")) @cd devenv; \ - ./create_docker_compose.sh $(targets); \ + ./create_docker_compose.sh $(targets) || \ + (rm -rf docker-compose.yaml; exit 1) + + @cd devenv; \ docker-compose up -d +endif # drop down the envs devenv-down: - @cd devenv; docker-compose down; + @cd devenv; \ + docker-compose down; # TODO recheck the rules and leave only necessary exclusions gosec: scripts/go/bin/gosec From a1a498f96cec2fe9c7ecb8d89bd9e7f6d49eae13 Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Tue, 28 May 2019 20:08:27 +0300 Subject: [PATCH 26/49] Build: ignore absence of docker-compose (#17331) If devenv/docker-compose.yaml file is missing, `devenv-down` and subsequently `devenv` is not going to work --- Makefile | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 7e5a3798f70c..5b89b178e974 100644 --- a/Makefile +++ b/Makefile @@ -104,7 +104,8 @@ endif # drop down the envs devenv-down: @cd devenv; \ - docker-compose down; + test -f docker-compose.yaml && \ + docker-compose down || exit 0; # TODO recheck the rules and leave only necessary exclusions gosec: scripts/go/bin/gosec From 5fa5d4bdd54cc493d344b41510515115169f6cb9 Mon Sep 17 00:00:00 2001 From: Joshua Piccari Date: Wed, 29 May 2019 00:10:09 -0700 Subject: [PATCH 27/49] CloudWatch: Avoid exception while accessing results (#17283) When accessing the `series` property of query results, if a query is hidden, an exception is thrown. This is caused by lack of checks to verify that the query result exists before accessing the `series` property. 
Closes #17112 --- .../app/plugins/datasource/cloudwatch/datasource.ts | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/public/app/plugins/datasource/cloudwatch/datasource.ts b/public/app/plugins/datasource/cloudwatch/datasource.ts index bd2bd67248ca..5a92122f221c 100644 --- a/public/app/plugins/datasource/cloudwatch/datasource.ts +++ b/public/app/plugins/datasource/cloudwatch/datasource.ts @@ -149,12 +149,14 @@ export default class CloudWatchDatasource extends DataSourceApi if (res.results) { for (const query of request.queries) { const queryRes = res.results[query.refId]; - for (const series of queryRes.series) { - const s = { target: series.name, datapoints: series.points } as any; - if (queryRes.meta.unit) { - s.unit = queryRes.meta.unit; + if (queryRes) { + for (const series of queryRes.series) { + const s = { target: series.name, datapoints: series.points } as any; + if (queryRes.meta.unit) { + s.unit = queryRes.meta.unit; + } + data.push(s); } - data.push(s); } } } From afbdfe7cb4540f1027050f46d93cccc093c6256c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Torkel=20=C3=96degaard?= Date: Wed, 29 May 2019 09:37:29 +0200 Subject: [PATCH 28/49] NewDataSourcePage: Add Grafana Cloud link (#17324) * NewDataSource: adding initial grafana cloud link * Minor update * Updated --- .../datasources/NewDataSourcePage.tsx | 44 ++++++++++++++++--- public/sass/components/_add_data_source.scss | 4 ++ public/sass/components/_buttons.scss | 1 + 3 files changed, 44 insertions(+), 5 deletions(-) diff --git a/public/app/features/datasources/NewDataSourcePage.tsx b/public/app/features/datasources/NewDataSourcePage.tsx index 6420225f2afe..bd16ba6e8840 100644 --- a/public/app/features/datasources/NewDataSourcePage.tsx +++ b/public/app/features/datasources/NewDataSourcePage.tsx @@ -6,7 +6,7 @@ import { StoreState } from 'app/types'; import { addDataSource, loadDataSourceTypes, setDataSourceTypeSearchQuery } from './state/actions'; import { getDataSourceTypes } 
from './state/selectors'; import { FilterInput } from 'app/core/components/FilterInput/FilterInput'; -import { NavModel, DataSourcePluginMeta, List } from '@grafana/ui'; +import { NavModel, DataSourcePluginMeta, List, PluginType } from '@grafana/ui'; export interface Props { navModel: NavModel; @@ -43,6 +43,7 @@ class NewDataSourcePage extends PureComponent { loki: 90, mysql: 80, postgres: 79, + gcloud: -1, }; componentDidMount() { @@ -114,6 +115,8 @@ class NewDataSourcePage extends PureComponent { {} as DataSourceCategories ); + categories['cloud'].push(getGrafanaCloudPhantomPlugin()); + return ( <> {this.categoryInfoList.map(category => ( @@ -174,7 +177,9 @@ interface DataSourceTypeCardProps { } const DataSourceTypeCard: FC = props => { - const { plugin, onClick, onLearnMoreClick } = props; + const { plugin, onLearnMoreClick } = props; + const canSelect = plugin.id !== 'gcloud'; + const onClick = canSelect ? props.onClick : () => {}; // find first plugin info link const learnMoreLink = plugin.info.links && plugin.info.links.length > 0 ? plugin.info.links[0].url : null; @@ -188,16 +193,45 @@ const DataSourceTypeCard: FC = props => {
{learnMoreLink && ( - - Learn more + + Learn more )} - + {canSelect && }
); }; +function getGrafanaCloudPhantomPlugin(): DataSourcePluginMeta { + return { + id: 'gcloud', + name: 'Grafana Cloud', + type: PluginType.datasource, + module: '', + baseUrl: '', + info: { + description: 'Hosted Graphite, Prometheus and Loki', + logos: { small: 'public/img/grafana_icon.svg', large: 'asd' }, + author: { name: 'Grafana Labs' }, + links: [ + { + url: 'https://grafana.com/cloud', + name: 'Learn more', + }, + ], + screenshots: [], + updated: '2019-05-10', + version: '1.0.0', + }, + }; +} + export function getNavModel(): NavModel { const main = { icon: 'gicon gicon-add-datasources', diff --git a/public/sass/components/_add_data_source.scss b/public/sass/components/_add_data_source.scss index c14455d35c14..9e1bcb6ed778 100644 --- a/public/sass/components/_add_data_source.scss +++ b/public/sass/components/_add_data_source.scss @@ -77,6 +77,10 @@ } } +.add-datasource-item-actions__btn-icon { + margin-left: $space-sm; +} + .add-data-source-more { text-align: center; margin: $space-xl; diff --git a/public/sass/components/_buttons.scss b/public/sass/components/_buttons.scss index 1a9936bceab0..254ac5906abb 100644 --- a/public/sass/components/_buttons.scss +++ b/public/sass/components/_buttons.scss @@ -70,6 +70,7 @@ @include button-size($btn-padding-y-lg, $space-lg, $font-size-lg, $border-radius-sm); font-weight: normal; height: $height-lg; + .gicon { //font-size: 31px; margin-right: $space-sm; From fd741cbea41fbdd7c77c9c9691276acab877efd0 Mon Sep 17 00:00:00 2001 From: Carl Bergquist Date: Wed, 29 May 2019 10:27:57 +0200 Subject: [PATCH 29/49] Chore: upgrade webpack analyser (#17340) * webpack: upgrade webpack analyser * yarn.lock update --- package.json | 2 +- yarn.lock | 26 ++++++++++++++++++-------- 2 files changed, 19 insertions(+), 9 deletions(-) diff --git a/package.json b/package.json index e084f705b024..6c101939295d 100644 --- a/package.json +++ b/package.json @@ -131,7 +131,7 @@ "tslint-react": "3.6.0", "typescript": "3.4.1", "webpack": "4.29.6", 
- "webpack-bundle-analyzer": "3.1.0", + "webpack-bundle-analyzer": "3.3.2", "webpack-cleanup-plugin": "0.5.1", "webpack-cli": "3.2.3", "webpack-dev-server": "3.2.1", diff --git a/yarn.lock b/yarn.lock index f48cba0ec948..1936ceb817f0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2284,10 +2284,10 @@ resolved "https://registry.yarnpkg.com/@types/jquery/-/jquery-1.10.35.tgz#4e5c2b1e5b3bf0b863efb8c5e70081f52e6c9518" integrity sha512-SVtqEcudm7yjkTwoRA1gC6CNMhGDdMx4Pg8BPdiqI7bXXdCn1BPmtxgeWYQOgDxrq53/5YTlhq5ULxBEAlWIBg== -"@types/lodash@4.14.119", "@types/lodash@4.14.123": - version "4.14.119" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.119.tgz#be847e5f4bc3e35e46d041c394ead8b603ad8b39" - integrity sha512-Z3TNyBL8Vd/M9D9Ms2S3LmFq2sSMzahodD6rCS9V2N44HUMINb75jNkSuwAx7eo2ufqTdfOdtGQpNbieUjPQmw== +"@types/lodash@4.14.123": + version "4.14.123" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.123.tgz#39be5d211478c8dd3bdae98ee75bb7efe4abfe4d" + integrity sha512-pQvPkc4Nltyx7G1Ww45OjVqUsJP4UsZm+GWJpigXgkikZqJgRm4c48g027o6tdgubWHwFRF15iFd+Y4Pmqv6+Q== "@types/minimatch@*": version "3.0.3" @@ -4470,6 +4470,11 @@ caniuse-api@^3.0.0: lodash.memoize "^4.1.2" lodash.uniq "^4.5.0" +caniuse-db@1.0.30000772: + version "1.0.30000772" + resolved "https://registry.yarnpkg.com/caniuse-db/-/caniuse-db-1.0.30000772.tgz#51aae891768286eade4a3d8319ea76d6a01b512b" + integrity sha1-UarokXaChureSj2DGep21qAbUSs= + caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000929, caniuse-lite@^1.0.30000947, caniuse-lite@^1.0.30000957, caniuse-lite@^1.0.30000963: version "1.0.30000966" resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000966.tgz#f3c6fefacfbfbfb981df6dfa68f2aae7bff41b64" @@ -17029,6 +17034,11 @@ tryor@~0.1.2: resolved "https://registry.yarnpkg.com/tryor/-/tryor-0.1.2.tgz#8145e4ca7caff40acde3ccf946e8b8bb75b4172b" integrity sha1-gUXkynyv9ArN48z5Rui4u3W0Fys= +ts-easing@^0.2.0: + version "0.2.0" + resolved 
"https://registry.yarnpkg.com/ts-easing/-/ts-easing-0.2.0.tgz#c8a8a35025105566588d87dbda05dd7fbfa5a4ec" + integrity sha512-Z86EW+fFFh/IFB1fqQ3/+7Zpf9t2ebOAxNI/V6Wo7r5gqiqtxmgTlQ1qbqQcjLKYeSHPTsEmvlJUDg/EuL0uHQ== + ts-jest@24.0.2: version "24.0.2" resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-24.0.2.tgz#8dde6cece97c31c03e80e474c749753ffd27194d" @@ -17696,10 +17706,10 @@ webidl-conversions@^4.0.2: resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== -webpack-bundle-analyzer@3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.1.0.tgz#2f19cbb87bb6d4f3cb4e59cb67c837bd9436e89d" - integrity sha512-nyDyWEs7C6DZlgvu1pR1zzJfIWSiGPbtaByZr8q+Fd2xp70FuM/8ngCJzj3Er1TYRLSFmp1F1OInbEm4DZH8NA== +webpack-bundle-analyzer@3.3.2: + version "3.3.2" + resolved "https://registry.yarnpkg.com/webpack-bundle-analyzer/-/webpack-bundle-analyzer-3.3.2.tgz#3da733a900f515914e729fcebcd4c40dde71fc6f" + integrity sha512-7qvJLPKB4rRWZGjVp5U1KEjwutbDHSKboAl0IfafnrdXMrgC0tOtZbQD6Rw0u4cmpgRN4O02Fc0t8eAT+FgGzA== dependencies: acorn "^6.0.7" acorn-walk "^6.1.1" From 0a92de623da2deac6b0aa37ebbd6c7546da604b9 Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Wed, 29 May 2019 11:54:07 +0300 Subject: [PATCH 30/49] Build(package.json): improve npm commands (#17022) Remove some of the repetitions in the npm commands --- package.json | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index 6c101939295d..962c443727e6 100644 --- a/package.json +++ b/package.json @@ -140,9 +140,9 @@ }, "scripts": { "dev": "webpack --progress --colors --mode development --config scripts/webpack/webpack.dev.js", - "start": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --watchTheme", 
- "start:hot": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --hot --watchTheme", - "start:ignoreTheme": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts core:start --hot", + "start": "npm run cli -- core:start --watchTheme", + "start:hot": "npm run cli -- core:start --hot --watchTheme", + "start:ignoreTheme": "npm run cli -- core:start --hot", "watch": "yarn start -d watch,start core:start --watchTheme ", "build": "grunt build", "test": "grunt test", @@ -156,13 +156,13 @@ "themes:generate": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/generateSassVariableFiles.ts", "prettier:check": "prettier --list-different \"**/*.{ts,tsx,scss}\"", "prettier:write": "prettier --list-different \"**/*.{ts,tsx,scss}\" --write", + "cli": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts", "gui:tslint": "tslint -c ./packages/grafana-ui/tslint.json --project ./packages/grafana-ui/tsconfig.json", - "gui:build": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:build", - "gui:releasePrepare": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:release", + "gui:build": "npm run cli -- gui:build", + "gui:releasePrepare": "npm run cli -- gui:release", "gui:publish": "cd packages/grafana-ui/dist && npm publish --access public", - "gui:release": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts gui:release -p --createVersionCommit", - "precommit": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts precommit", - "cli": "ts-node --project ./scripts/cli/tsconfig.json ./scripts/cli/index.ts" + "gui:release": "npm run cli -- gui:release -p --createVersionCommit", + "precommit": "npm run cli -- precommit" }, "husky": { "hooks": { From e951e71843e88a8cede85a137e6701d57d91edbd Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Wed, 29 May 2019 13:47:05 +0200 Subject: [PATCH 31/49] Explore: Update time range before running 
queries (#17349) This makes sure that refresh/update/run query are parsing a relative time range to get proper epoch time range before running queries. Fixes #17322 --- public/app/features/explore/state/actionTypes.ts | 1 + public/app/features/explore/state/actions.ts | 6 +++++- public/app/features/explore/state/reducers.test.ts | 11 +++++++++-- public/app/features/explore/state/reducers.ts | 6 ++++-- 4 files changed, 19 insertions(+), 5 deletions(-) diff --git a/public/app/features/explore/state/actionTypes.ts b/public/app/features/explore/state/actionTypes.ts index ff7fdcb55dec..b572b6ca041b 100644 --- a/public/app/features/explore/state/actionTypes.ts +++ b/public/app/features/explore/state/actionTypes.ts @@ -230,6 +230,7 @@ export interface LoadExploreDataSourcesPayload { export interface RunQueriesPayload { exploreId: ExploreId; + range: TimeRange; } export interface ResetQueryErrorPayload { diff --git a/public/app/features/explore/state/actions.ts b/public/app/features/explore/state/actions.ts index 09f950905ae9..bfeb96aef35b 100644 --- a/public/app/features/explore/state/actions.ts +++ b/public/app/features/explore/state/actions.ts @@ -521,6 +521,7 @@ export function runQueries(exploreId: ExploreId, ignoreUIState = false, replaceU datasourceError, containerWidth, mode, + range, } = getState().explore[exploreId]; if (datasourceError) { @@ -538,7 +539,10 @@ export function runQueries(exploreId: ExploreId, ignoreUIState = false, replaceU // but we're using the datasource interval limit for now const interval = datasourceInstance.interval; - dispatch(runQueriesAction({ exploreId })); + const timeZone = getTimeZone(getState().user); + const updatedRange = getTimeRange(timeZone, range.raw); + + dispatch(runQueriesAction({ exploreId, range: updatedRange })); // Keep table queries first since they need to return quickly if ((ignoreUIState || showingTable) && mode === ExploreMode.Metrics) { dispatch( diff --git a/public/app/features/explore/state/reducers.test.ts 
b/public/app/features/explore/state/reducers.test.ts index da9bdfabe261..e7127eb71592 100644 --- a/public/app/features/explore/state/reducers.test.ts +++ b/public/app/features/explore/state/reducers.test.ts @@ -4,6 +4,7 @@ import { exploreReducer, makeInitialUpdateState, initialExploreState, + DEFAULT_RANGE, } from './reducers'; import { ExploreId, @@ -31,7 +32,7 @@ import { ActionOf } from 'app/core/redux/actionCreatorFactory'; import { updateLocation } from 'app/core/actions/location'; import { serializeStateToUrlParam } from 'app/core/utils/explore'; import TableModel from 'app/core/table_model'; -import { DataSourceApi, DataQuery, LogsModel, LogsDedupStrategy } from '@grafana/ui'; +import { DataSourceApi, DataQuery, LogsModel, LogsDedupStrategy, dateTime } from '@grafana/ui'; describe('Explore item reducer', () => { describe('scanning', () => { @@ -193,6 +194,7 @@ describe('Explore item reducer', () => { it('then it should set correct state', () => { const initalState: Partial = { showingStartPage: true, + range: null, }; const expectedState = { queryIntervals: { @@ -200,11 +202,16 @@ describe('Explore item reducer', () => { intervalMs: 1000, }, showingStartPage: false, + range: { + from: dateTime(), + to: dateTime(), + raw: DEFAULT_RANGE, + }, }; reducerTester() .givenReducer(itemReducer, initalState) - .whenActionIsDispatched(runQueriesAction({ exploreId: ExploreId.left })) + .whenActionIsDispatched(runQueriesAction({ exploreId: ExploreId.left, range: expectedState.range })) .thenStateShouldEqual(expectedState); }); }); diff --git a/public/app/features/explore/state/reducers.ts b/public/app/features/explore/state/reducers.ts index 1291f3d749bb..208825c9c19a 100644 --- a/public/app/features/explore/state/reducers.ts +++ b/public/app/features/explore/state/reducers.ts @@ -599,8 +599,9 @@ export const itemReducer = reducerFactory({} as ExploreItemSta }) .addMapper({ filter: runQueriesAction, - mapper: (state): ExploreItemState => { - const { range, 
datasourceInstance, containerWidth } = state; + mapper: (state, action): ExploreItemState => { + const { range } = action.payload; + const { datasourceInstance, containerWidth } = state; let interval = '1s'; if (datasourceInstance && datasourceInstance.interval) { interval = datasourceInstance.interval; @@ -608,6 +609,7 @@ export const itemReducer = reducerFactory({} as ExploreItemSta const queryIntervals = getIntervals(range, interval, containerWidth); return { ...state, + range, queryIntervals, showingStartPage: false, }; From d4ef19737eb64924c7e4802c0241442382f5490d Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Wed, 29 May 2019 15:55:51 +0300 Subject: [PATCH 32/49] Enterprise: remove gofakeit dep (#17344) * Enterprise: remove gofakeit dep Since we decided to use our uuid generation inside util module * Enterprise: result of execution of `go mod ...` --- go.mod | 2 - go.sum | 2 - pkg/extensions/main.go | 1 - .../brianvoe/gofakeit/BENCHMARKS.md | 134 --------- .../brianvoe/gofakeit/CODE_OF_CONDUCT.md | 46 ---- .../brianvoe/gofakeit/CONTRIBUTING.md | 1 - .../github.com/brianvoe/gofakeit/LICENSE.txt | 20 -- vendor/github.com/brianvoe/gofakeit/README.md | 254 ------------------ vendor/github.com/brianvoe/gofakeit/TODO.txt | 3 - .../github.com/brianvoe/gofakeit/address.go | 131 --------- vendor/github.com/brianvoe/gofakeit/beer.go | 45 ---- vendor/github.com/brianvoe/gofakeit/bool.go | 10 - vendor/github.com/brianvoe/gofakeit/color.go | 44 --- .../github.com/brianvoe/gofakeit/company.go | 30 --- .../github.com/brianvoe/gofakeit/contact.go | 40 --- .../github.com/brianvoe/gofakeit/currency.go | 38 --- .../brianvoe/gofakeit/data/address.go | 15 -- .../github.com/brianvoe/gofakeit/data/beer.go | 10 - .../brianvoe/gofakeit/data/colors.go | 7 - .../brianvoe/gofakeit/data/company.go | 9 - .../brianvoe/gofakeit/data/computer.go | 8 - .../brianvoe/gofakeit/data/contact.go | 6 - .../brianvoe/gofakeit/data/currency.go | 7 - .../github.com/brianvoe/gofakeit/data/data.go | 
28 -- .../brianvoe/gofakeit/data/datetime.go | 9 - .../brianvoe/gofakeit/data/files.go | 7 - .../brianvoe/gofakeit/data/hacker.go | 20 -- .../brianvoe/gofakeit/data/hipster.go | 6 - .../brianvoe/gofakeit/data/internet.go | 8 - .../github.com/brianvoe/gofakeit/data/job.go | 8 - .../brianvoe/gofakeit/data/log_level.go | 8 - .../brianvoe/gofakeit/data/lorem.go | 6 - .../brianvoe/gofakeit/data/payment.go | 20 -- .../brianvoe/gofakeit/data/person.go | 9 - .../brianvoe/gofakeit/data/status_code.go | 7 - .../brianvoe/gofakeit/data/vehicle.go | 10 - .../github.com/brianvoe/gofakeit/datetime.go | 77 ------ vendor/github.com/brianvoe/gofakeit/doc.go | 10 - vendor/github.com/brianvoe/gofakeit/faker.go | 15 -- vendor/github.com/brianvoe/gofakeit/file.go | 11 - .../github.com/brianvoe/gofakeit/generate.go | 41 --- vendor/github.com/brianvoe/gofakeit/hacker.go | 35 --- .../github.com/brianvoe/gofakeit/hipster.go | 20 -- vendor/github.com/brianvoe/gofakeit/image.go | 8 - .../github.com/brianvoe/gofakeit/internet.go | 55 ---- vendor/github.com/brianvoe/gofakeit/job.go | 34 --- .../github.com/brianvoe/gofakeit/log_level.go | 15 -- vendor/github.com/brianvoe/gofakeit/logo.png | Bin 36022 -> 0 bytes vendor/github.com/brianvoe/gofakeit/misc.go | 132 --------- vendor/github.com/brianvoe/gofakeit/name.go | 26 -- vendor/github.com/brianvoe/gofakeit/number.go | 84 ------ .../github.com/brianvoe/gofakeit/password.go | 68 ----- .../github.com/brianvoe/gofakeit/payment.go | 81 ------ vendor/github.com/brianvoe/gofakeit/person.go | 45 ---- .../brianvoe/gofakeit/status_code.go | 11 - vendor/github.com/brianvoe/gofakeit/string.go | 48 ---- vendor/github.com/brianvoe/gofakeit/struct.go | 87 ------ vendor/github.com/brianvoe/gofakeit/unique.go | 34 --- .../brianvoe/gofakeit/user_agent.go | 92 ------- .../github.com/brianvoe/gofakeit/vehicle.go | 55 ---- vendor/github.com/brianvoe/gofakeit/words.go | 100 ------- vendor/github.com/robfig/cron/README.md | 2 +- vendor/github.com/robfig/cron/doc.go | 
2 +- vendor/modules.txt | 3 - 64 files changed, 2 insertions(+), 2198 deletions(-) delete mode 100644 vendor/github.com/brianvoe/gofakeit/BENCHMARKS.md delete mode 100644 vendor/github.com/brianvoe/gofakeit/CODE_OF_CONDUCT.md delete mode 100644 vendor/github.com/brianvoe/gofakeit/CONTRIBUTING.md delete mode 100644 vendor/github.com/brianvoe/gofakeit/LICENSE.txt delete mode 100644 vendor/github.com/brianvoe/gofakeit/README.md delete mode 100644 vendor/github.com/brianvoe/gofakeit/TODO.txt delete mode 100644 vendor/github.com/brianvoe/gofakeit/address.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/beer.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/bool.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/color.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/company.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/contact.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/currency.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/address.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/beer.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/colors.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/company.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/computer.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/contact.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/currency.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/data.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/datetime.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/files.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/hacker.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/hipster.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/internet.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/job.go delete mode 100644 
vendor/github.com/brianvoe/gofakeit/data/log_level.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/lorem.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/payment.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/person.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/status_code.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/data/vehicle.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/datetime.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/doc.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/faker.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/file.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/generate.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/hacker.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/hipster.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/image.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/internet.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/job.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/log_level.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/logo.png delete mode 100644 vendor/github.com/brianvoe/gofakeit/misc.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/name.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/number.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/password.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/payment.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/person.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/status_code.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/string.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/struct.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/unique.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/user_agent.go delete mode 100644 vendor/github.com/brianvoe/gofakeit/vehicle.go delete mode 100644 
vendor/github.com/brianvoe/gofakeit/words.go diff --git a/go.mod b/go.mod index 619f5183a5c7..1730235d6173 100644 --- a/go.mod +++ b/go.mod @@ -9,10 +9,8 @@ require ( github.com/aws/aws-sdk-go v1.18.5 github.com/benbjohnson/clock v0.0.0-20161215174838-7dc76406b6d3 github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737 - github.com/brianvoe/gofakeit v3.17.0+incompatible github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd // indirect github.com/codegangsta/cli v1.20.0 - github.com/davecgh/go-spew v1.1.1 github.com/denisenkom/go-mssqldb v0.0.0-20190315220205-a8ed825ac853 github.com/facebookgo/ensure v0.0.0-20160127193407-b4ab57deab51 // indirect github.com/facebookgo/inject v0.0.0-20180706035515-f23751cae28b diff --git a/go.sum b/go.sum index 3c77812fbf7b..55223ecbc74e 100644 --- a/go.sum +++ b/go.sum @@ -16,8 +16,6 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973 h1:xJ4a3vCFaGF/jqvzLM github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737 h1:rRISKWyXfVxvoa702s91Zl5oREZTrR3yv+tXrrX7G/g= github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737/go.mod h1:PmM6Mmwb0LSuEubjR8N7PtNe1KxZLtOUHtbeikc5h60= -github.com/brianvoe/gofakeit v3.17.0+incompatible h1:C1+30+c0GtjgGDtRC+iePZeP1WMiwsWCELNJhmc7aIc= -github.com/brianvoe/gofakeit v3.17.0+incompatible/go.mod h1:kfwdRA90vvNhPutZWfH7WPaDzUjz+CZFqG+rPkOjGOc= github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I= github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ= github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd h1:qMd81Ts1T2OTKmB4acZcyKaMtRnY5Y44NuXGX2GFJ1w= diff --git a/pkg/extensions/main.go b/pkg/extensions/main.go index cbe9ec2b7b07..6ee742a4d8e3 100644 --- a/pkg/extensions/main.go +++ b/pkg/extensions/main.go @@ -1,7 +1,6 @@ package extensions import ( - _ 
"github.com/brianvoe/gofakeit" _ "github.com/gobwas/glob" _ "github.com/robfig/cron" _ "gopkg.in/square/go-jose.v2" diff --git a/vendor/github.com/brianvoe/gofakeit/BENCHMARKS.md b/vendor/github.com/brianvoe/gofakeit/BENCHMARKS.md deleted file mode 100644 index ec6e6d7a3767..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/BENCHMARKS.md +++ /dev/null @@ -1,134 +0,0 @@ -go test -bench=. -benchmem -goos: darwin -goarch: amd64 -pkg: github.com/brianvoe/gofakeit -Table generated with tablesgenerator.com/markdown_tables - -| Benchmark | Ops | CPU | MEM | MEM alloc | -|---------------------------------|-----------|-------------|------------|--------------| -| BenchmarkAddress-4 | 1000000 | 1998 ns/op | 248 B/op | 7 allocs/op | -| BenchmarkStreet-4 | 1000000 | 1278 ns/op | 62 B/op | 3 allocs/op | -| BenchmarkStreetNumber-4 | 5000000 | 344 ns/op | 36 B/op | 2 allocs/op | -| BenchmarkStreetPrefix-4 | 10000000 | 121 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkStreetName-4 | 10000000 | 122 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkStreetSuffix-4 | 10000000 | 122 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkCity-4 | 5000000 | 326 ns/op | 15 B/op | 1 allocs/op | -| BenchmarkState-4 | 10000000 | 120 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkStateAbr-4 | 10000000 | 122 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkZip-4 | 5000000 | 315 ns/op | 5 B/op | 1 allocs/op | -| BenchmarkCountry-4 | 10000000 | 126 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkCountryAbr-4 | 10000000 | 123 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLatitude-4 | 100000000 | 23.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLongitude-4 | 100000000 | 23.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLatitudeInRange-4 | 50000000 | 27.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLongitudeInRange-4 | 50000000 | 27.8 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerName-4 | 20000000 | 104 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerStyle-4 | 10000000 | 119 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerHop-4 | 
20000000 | 105 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerYeast-4 | 20000000 | 106 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerMalt-4 | 20000000 | 114 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBeerIbu-4 | 20000000 | 71.0 ns/op | 8 B/op | 1 allocs/op | -| BenchmarkBeerAlcohol-4 | 5000000 | 335 ns/op | 40 B/op | 3 allocs/op | -| BenchmarkBeerBlg-4 | 5000000 | 338 ns/op | 48 B/op | 3 allocs/op | -| BenchmarkBool-4 | 50000000 | 34.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkColor-4 | 20000000 | 112 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkSafeColor-4 | 20000000 | 102 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHexColor-4 | 3000000 | 491 ns/op | 24 B/op | 3 allocs/op | -| BenchmarkRGBColor-4 | 20000000 | 103 ns/op | 32 B/op | 1 allocs/op | -| BenchmarkCompany-4 | 5000000 | 353 ns/op | 22 B/op | 1 allocs/op | -| BenchmarkCompanySuffix-4 | 20000000 | 89.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBuzzWord-4 | 20000000 | 99.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkBS-4 | 20000000 | 100 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkContact-4 | 1000000 | 1121 ns/op | 178 B/op | 7 allocs/op | -| BenchmarkPhone-4 | 5000000 | 346 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkPhoneFormatted-4 | 3000000 | 456 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkEmail-4 | 2000000 | 715 ns/op | 130 B/op | 5 allocs/op | -| BenchmarkCurrency-4 | 10000000 | 125 ns/op | 32 B/op | 1 allocs/op | -| BenchmarkCurrencyShort-4 | 20000000 | 104 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkCurrencyLong-4 | 20000000 | 105 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkPrice-4 | 50000000 | 27.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkDate-4 | 5000000 | 371 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkDateRange-4 | 10000000 | 238 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkMonth-4 | 30000000 | 44.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkDay-4 | 50000000 | 39.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkWeekDay-4 | 30000000 | 44.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkYear-4 | 20000000 | 115 
ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHour-4 | 30000000 | 39.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkMinute-4 | 50000000 | 40.4 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkSecond-4 | 30000000 | 40.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNanoSecond-4 | 30000000 | 42.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkTimeZone-4 | 20000000 | 105 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkTimeZoneFull-4 | 20000000 | 118 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkTimeZoneAbv-4 | 20000000 | 105 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkTimeZoneOffset-4 | 10000000 | 147 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkMimeType-4 | 20000000 | 99.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkExtension-4 | 20000000 | 109 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkGenerate-4 | 1000000 | 1588 ns/op | 414 B/op | 11 allocs/op | -| BenchmarkHackerPhrase-4 | 300000 | 4576 ns/op | 2295 B/op | 26 allocs/op | -| BenchmarkHackerAbbreviation-4 | 20000000 | 101 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHackerAdjective-4 | 20000000 | 101 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHackerNoun-4 | 20000000 | 104 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHackerVerb-4 | 20000000 | 113 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHackerIngverb-4 | 20000000 | 98.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHipsterWord-4 | 20000000 | 100 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkHipsterSentence-4 | 1000000 | 1636 ns/op | 353 B/op | 3 allocs/op | -| BenchmarkHipsterParagraph-4 | 50000 | 31677 ns/op | 12351 B/op | 64 allocs/op | -| BenchmarkImageURL-4 | 20000000 | 108 ns/op | 38 B/op | 3 allocs/op | -| BenchmarkDomainName-4 | 3000000 | 491 ns/op | 76 B/op | 3 allocs/op | -| BenchmarkDomainSuffix-4 | 20000000 | 99.4 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkURL-4 | 1000000 | 1201 ns/op | 278 B/op | 8 allocs/op | -| BenchmarkHTTPMethod-4 | 20000000 | 100 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkIPv4Address-4 | 3000000 | 407 ns/op | 48 B/op | 5 allocs/op | -| BenchmarkIPv6Address-4 | 3000000 | 
552 ns/op | 96 B/op | 7 allocs/op | -| BenchmarkUsername-4 | 5000000 | 307 ns/op | 16 B/op | 2 allocs/op | -| BenchmarkJob-4 | 2000000 | 726 ns/op | 86 B/op | 2 allocs/op | -| BenchmarkJobTitle-4 | 20000000 | 98.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkJobDescriptor-4 | 20000000 | 98.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkJobLevel-4 | 20000000 | 110 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLogLevel-4 | 20000000 | 107 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkReplaceWithNumbers-4 | 3000000 | 570 ns/op | 32 B/op | 1 allocs/op | -| BenchmarkName-4 | 5000000 | 285 ns/op | 17 B/op | 1 allocs/op | -| BenchmarkFirstName-4 | 20000000 | 102 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLastName-4 | 20000000 | 100 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNamePrefix-4 | 20000000 | 98.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNameSuffix-4 | 20000000 | 109 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNumber-4 | 50000000 | 34.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUint8-4 | 50000000 | 28.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUint16-4 | 50000000 | 28.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUint32-4 | 50000000 | 27.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUint64-4 | 50000000 | 34.6 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkInt8-4 | 50000000 | 28.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkInt16-4 | 50000000 | 28.4 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkInt32-4 | 50000000 | 27.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkInt64-4 | 50000000 | 34.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkFloat32-4 | 50000000 | 27.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkFloat32Range-4 | 50000000 | 27.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkFloat64-4 | 50000000 | 25.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkFloat64Range-4 | 50000000 | 26.5 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkNumerify-4 | 5000000 | 354 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkShuffleInts-4 | 10000000 | 226 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkPassword-4 | 2000000 | 
655 ns/op | 304 B/op | 6 allocs/op | -| BenchmarkCreditCard-4 | 2000000 | 997 ns/op | 88 B/op | 4 allocs/op | -| BenchmarkCreditCardType-4 | 20000000 | 92.7 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkCreditCardNumber-4 | 3000000 | 572 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkCreditCardNumberLuhn-4 | 300000 | 5815 ns/op | 159 B/op | 9 allocs/op | -| BenchmarkCreditCardExp-4 | 10000000 | 129 ns/op | 5 B/op | 1 allocs/op | -| BenchmarkCreditCardCvv-4 | 10000000 | 128 ns/op | 3 B/op | 1 allocs/op | -| BenchmarkSSN-4 | 20000000 | 84.2 ns/op | 16 B/op | 1 allocs/op | -| BenchmarkGender-4 | 50000000 | 38.0 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkPerson-4 | 300000 | 5563 ns/op | 805 B/op | 26 allocs/op | -| BenchmarkSimpleStatusCode-4 | 20000000 | 72.9 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkStatusCode-4 | 20000000 | 75.8 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLetter-4 | 50000000 | 38.4 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkDigit-4 | 50000000 | 38.2 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkLexify-4 | 10000000 | 222 ns/op | 8 B/op | 1 allocs/op | -| BenchmarkShuffleStrings-4 | 10000000 | 197 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkUUID-4 | 20000000 | 106 ns/op | 48 B/op | 1 allocs/op | -| BenchmarkUserAgent-4 | 1000000 | 1236 ns/op | 305 B/op | 5 allocs/op | -| BenchmarkChromeUserAgent-4 | 2000000 | 881 ns/op | 188 B/op | 5 allocs/op | -| BenchmarkFirefoxUserAgent-4 | 1000000 | 1595 ns/op | 386 B/op | 7 allocs/op | -| BenchmarkSafariUserAgent-4 | 1000000 | 1396 ns/op | 551 B/op | 7 allocs/op | -| BenchmarkOperaUserAgent-4 | 2000000 | 950 ns/op | 216 B/op | 5 allocs/op | -| BenchmarkWord-4 | 20000000 | 99.1 ns/op | 0 B/op | 0 allocs/op | -| BenchmarkSentence-4 | 1000000 | 1540 ns/op | 277 B/op | 2 allocs/op | -| BenchmarkParagraph-4 | 50000 | 30978 ns/op | 11006 B/op | 61 allocs/op | \ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/CODE_OF_CONDUCT.md b/vendor/github.com/brianvoe/gofakeit/CODE_OF_CONDUCT.md deleted file mode 
100644 index 99d12c90fecf..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,46 +0,0 @@ -# Contributor Covenant Code of Conduct - -## Our Pledge - -In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. - -## Our Standards - -Examples of behavior that contributes to creating a positive environment include: - -* Using welcoming and inclusive language -* Being respectful of differing viewpoints and experiences -* Gracefully accepting constructive criticism -* Focusing on what is best for the community -* Showing empathy towards other community members - -Examples of unacceptable behavior by participants include: - -* The use of sexualized language or imagery and unwelcome sexual attention or advances -* Trolling, insulting/derogatory comments, and personal or political attacks -* Public or private harassment -* Publishing others' private information, such as a physical or electronic address, without explicit permission -* Other conduct which could reasonably be considered inappropriate in a professional setting - -## Our Responsibilities - -Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. - -Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 
- -## Scope - -This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. - -## Enforcement - -Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at brian@webiswhatido.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. - -Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. - -## Attribution - -This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] - -[homepage]: http://contributor-covenant.org -[version]: http://contributor-covenant.org/version/1/4/ diff --git a/vendor/github.com/brianvoe/gofakeit/CONTRIBUTING.md b/vendor/github.com/brianvoe/gofakeit/CONTRIBUTING.md deleted file mode 100644 index 5a4812c28ee8..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/CONTRIBUTING.md +++ /dev/null @@ -1 +0,0 @@ -# Make a pull request and submit it and ill take a look at it. Thanks! 
diff --git a/vendor/github.com/brianvoe/gofakeit/LICENSE.txt b/vendor/github.com/brianvoe/gofakeit/LICENSE.txt deleted file mode 100644 index 21984c9d5eaa..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/LICENSE.txt +++ /dev/null @@ -1,20 +0,0 @@ -The MIT License (MIT) - -Copyright (c) [year] [fullname] - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/vendor/github.com/brianvoe/gofakeit/README.md b/vendor/github.com/brianvoe/gofakeit/README.md deleted file mode 100644 index 4e3723fd5117..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/README.md +++ /dev/null @@ -1,254 +0,0 @@ -![alt text](https://raw.githubusercontent.com/brianvoe/gofakeit/master/logo.png) - -# gofakeit [![Go Report Card](https://goreportcard.com/badge/github.com/brianvoe/gofakeit)](https://goreportcard.com/report/github.com/brianvoe/gofakeit) [![Build Status](https://travis-ci.org/brianvoe/gofakeit.svg?branch=master)](https://travis-ci.org/brianvoe/gofakeit) [![codecov.io](https://codecov.io/github/brianvoe/gofakeit/branch/master/graph/badge.svg)](https://codecov.io/github/brianvoe/gofakeit) [![GoDoc](https://godoc.org/github.com/brianvoe/gofakeit?status.svg)](https://godoc.org/github.com/brianvoe/gofakeit) [![license](http://img.shields.io/badge/license-MIT-green.svg?style=flat)](https://raw.githubusercontent.com/brianvoe/gofakeit/master/LICENSE.txt) -Random data generator written in go - -Buy Me A Coffee - -### Features -- Every function has an example and a benchmark, -[see benchmarks](https://github.com/brianvoe/gofakeit/blob/master/BENCHMARKS.md) -- Zero dependencies -- Randomizes user defined structs -- Numerous functions for regular use - -### 120+ Functions!!! -If there is something that is generic enough missing from this package [add an issue](https://github.com/brianvoe/gofakeit/issues) and let me know what you need. -Most of the time i'll add it! 
- -## Person -```go -Person() *PersonInfo -Name() string -NamePrefix() string -NameSuffix() string -FirstName() string -LastName() string -Gender() string -SSN() string -Contact() *ContactInfo -Email() string -Phone() string -PhoneFormatted() string -Username() string -Password(lower bool, upper bool, numeric bool, special bool, space bool, num int) string -``` - -## Address -```go -Address() *AddressInfo -City() string -Country() string -CountryAbr() string -State() string -StateAbr() string -StatusCode() string -Street() string -StreetName() string -StreetNumber() string -StreetPrefix() string -StreetSuffix() string -Zip() string -Latitude() float64 -LatitudeInRange() (float64, error) -Longitude() float64 -LongitudeInRange() (float64, error) -``` - -## Beer -```go -BeerAlcohol() string -BeerBlg() string -BeerHop() string -BeerIbu() string -BeerMalt() string -BeerName() string -BeerStyle() string -BeerYeast() string -``` - -## Cars -```go -Vehicle() *VehicleInfo -CarMaker() string -CarModel() string -VehicleType() string -FuelType() string -TransmissionGearType() string -``` - -## Words -```go -Word() string -Sentence(wordCount int) string -Paragraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string -Question() string -Quote() string -``` - -## Misc -```go -Struct(v interface{}) -Generate() string -Bool() bool -UUID() string -``` - -## Colors -```go -Color() string -HexColor() string -RGBColor() string -SafeColor() string -``` - -## Internet -```go -URL() string -ImageURL(width int, height int) string -DomainName() string -DomainSuffix() string -IPv4Address() string -IPv6Address() string -SimpleStatusCode() int -LogLevel(logType string) string -HTTPMethod() string -UserAgent() string -ChromeUserAgent() string -FirefoxUserAgent() string -OperaUserAgent() string -SafariUserAgent() string -``` - -## Date/Time -```go -Date() time.Time -DateRange(start, end time.Time) time.Time -NanoSecond() int -Second() int -Minute() int -Hour() int 
-Month() string -Day() int -WeekDay() string -Year() int -TimeZone() string -TimeZoneAbv() string -TimeZoneFull() string -TimeZoneOffset() float32 -``` - -## Payment -```go -Price(min, max float64) float64 -CreditCard() *CreditCardInfo -CreditCardCvv() string -CreditCardExp() string -CreditCardNumber() int -CreditCardNumberLuhn() int -CreditCardType() string -Currency() *CurrencyInfo -CurrencyLong() string -CurrencyShort() string -``` - -## Company -```go -BS() string -BuzzWord() string -Company() string -CompanySuffix() string -Job() *JobInfo -JobDescriptor() string -JobLevel() string -JobTitle() string -``` - -## Hacker -```go -HackerAbbreviation() string -HackerAdjective() string -HackerIngverb() string -HackerNoun() string -HackerPhrase() string -HackerVerb() string -``` - -## Hipster -```go -HipsterWord() string -HipsterSentence(wordCount int) string -HipsterParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string -``` - -## File -```go -Extension() string -MimeType() string -``` - -## Numbers -```go -Number(min int, max int) int -Numerify(str string) string -Int8() int8 -Int16() int16 -Int32() int32 -Int64() int64 -Uint8() uint8 -Uint16() uint16 -Uint32() uint32 -Uint64() uint64 -Float32() float32 -Float32Range(min, max float32) float32 -Float64() float64 -Float64Range(min, max float64) float64 -ShuffleInts(a []int) -``` - -## String -```go -Digit() string -Letter() string -Lexify(str string) string -RandString(a []string) string -ShuffleStrings(a []string) -``` - -## Documentation -[![GoDoc](https://godoc.org/github.com/brianvoe/gofakeit?status.svg)](https://godoc.org/github.com/brianvoe/gofakeit) - -## Example -```go -import "github.com/brianvoe/gofakeit" - -gofakeit.Name() // Markus Moen -gofakeit.Email() // alaynawuckert@kozey.biz -gofakeit.Phone() // (570)245-7485 -gofakeit.BS() // front-end -gofakeit.BeerName() // Duvel -gofakeit.Color() // MediumOrchid -gofakeit.Company() // Moen, Pagac and Wuckert 
-gofakeit.CreditCardNumber() // 4287271570245748 -gofakeit.HackerPhrase() // Connecting the array won't do anything, we need to generate the haptic COM driver! -gofakeit.JobTitle() // Director -gofakeit.Password(true, true, true, true, true, 32) // WV10MzLxq2DX79w1omH97_0ga59j8!kj -gofakeit.CurrencyShort() // USD -// 120+ more!!! - -// Create structs with random injected data -type Foo struct { - Bar string - Baz string - Int int - Pointer *int - Skip *string `fake:"skip"` // Set to "skip" to not generate data for -} -var f Foo -gofakeit.Struct(&f) -fmt.Printf("f.Bar:%s\n", f.Bar) // f.Bar:hrukpttuezptneuvunh -fmt.Printf("f.Baz:%s\n", f.Baz) // f.Baz:uksqvgzadxlgghejkmv -fmt.Printf("f.Int:%d\n", f.Int) // f.Int:-7825289004089916589 -fmt.Printf("f.Pointer:%d\n", *f.Pointer) // f.Pointer:-343806609094473732 -fmt.Printf("f.Skip:%v\n", f.Skip) // f.Skip: -``` diff --git a/vendor/github.com/brianvoe/gofakeit/TODO.txt b/vendor/github.com/brianvoe/gofakeit/TODO.txt deleted file mode 100644 index 7a492842136b..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/TODO.txt +++ /dev/null @@ -1,3 +0,0 @@ -* Take a look at [chance.js](http://chancejs.com/) and see if i missed anything. -* Look into [National Baby Name List](http://www.ssa.gov/oact/babynames/limits.html) and see if that makes sense to replace over what we currently have. -* Look at [data list](https://github.com/dariusk/corpora/tree/master/data) and see if it makes sense to add that data in or if it seems unncessary. 
diff --git a/vendor/github.com/brianvoe/gofakeit/address.go b/vendor/github.com/brianvoe/gofakeit/address.go deleted file mode 100644 index 82fc6b00e191..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/address.go +++ /dev/null @@ -1,131 +0,0 @@ -package gofakeit - -import ( - "errors" - "math/rand" - "strings" -) - -// AddressInfo is a struct full of address information -type AddressInfo struct { - Address string - Street string - City string - State string - Zip string - Country string - Latitude float64 - Longitude float64 -} - -// Address will generate a struct of address information -func Address() *AddressInfo { - street := Street() - city := City() - state := State() - zip := Zip() - - return &AddressInfo{ - Address: street + ", " + city + ", " + state + " " + zip, - Street: street, - City: city, - State: state, - Zip: zip, - Country: Country(), - Latitude: Latitude(), - Longitude: Longitude(), - } -} - -// Street will generate a random address street string -func Street() (street string) { - switch randInt := randIntRange(1, 2); randInt { - case 1: - street = StreetNumber() + " " + StreetPrefix() + " " + StreetName() + StreetSuffix() - case 2: - street = StreetNumber() + " " + StreetName() + StreetSuffix() - } - - return -} - -// StreetNumber will generate a random address street number string -func StreetNumber() string { - return strings.TrimLeft(replaceWithNumbers(getRandValue([]string{"address", "number"})), "0") -} - -// StreetPrefix will generate a random address street prefix string -func StreetPrefix() string { - return getRandValue([]string{"address", "street_prefix"}) -} - -// StreetName will generate a random address street name string -func StreetName() string { - return getRandValue([]string{"address", "street_name"}) -} - -// StreetSuffix will generate a random address street suffix string -func StreetSuffix() string { - return getRandValue([]string{"address", "street_suffix"}) -} - -// City will generate a random city string -func 
City() (city string) { - switch randInt := randIntRange(1, 3); randInt { - case 1: - city = FirstName() + StreetSuffix() - case 2: - city = LastName() + StreetSuffix() - case 3: - city = StreetPrefix() + " " + LastName() - } - - return -} - -// State will generate a random state string -func State() string { - return getRandValue([]string{"address", "state"}) -} - -// StateAbr will generate a random abbreviated state string -func StateAbr() string { - return getRandValue([]string{"address", "state_abr"}) -} - -// Zip will generate a random Zip code string -func Zip() string { - return replaceWithNumbers(getRandValue([]string{"address", "zip"})) -} - -// Country will generate a random country string -func Country() string { - return getRandValue([]string{"address", "country"}) -} - -// CountryAbr will generate a random abbreviated country string -func CountryAbr() string { - return getRandValue([]string{"address", "country_abr"}) -} - -// Latitude will generate a random latitude float64 -func Latitude() float64 { return (rand.Float64() * 180) - 90 } - -// LatitudeInRange will generate a random latitude within the input range -func LatitudeInRange(min, max float64) (float64, error) { - if min > max || min < -90 || min > 90 || max < -90 || max > 90 { - return 0, errors.New("input range is invalid") - } - return randFloat64Range(min, max), nil -} - -// Longitude will generate a random longitude float64 -func Longitude() float64 { return (rand.Float64() * 360) - 180 } - -// LongitudeInRange will generate a random longitude within the input range -func LongitudeInRange(min, max float64) (float64, error) { - if min > max || min < -180 || min > 180 || max < -180 || max > 180 { - return 0, errors.New("input range is invalid") - } - return randFloat64Range(min, max), nil -} diff --git a/vendor/github.com/brianvoe/gofakeit/beer.go b/vendor/github.com/brianvoe/gofakeit/beer.go deleted file mode 100644 index 53297d537809..000000000000 --- 
a/vendor/github.com/brianvoe/gofakeit/beer.go +++ /dev/null @@ -1,45 +0,0 @@ -package gofakeit - -import "strconv" - -// Faker::Beer.blg #=> "18.5°Blg" - -// BeerName will return a random beer name -func BeerName() string { - return getRandValue([]string{"beer", "name"}) -} - -// BeerStyle will return a random beer style -func BeerStyle() string { - return getRandValue([]string{"beer", "style"}) -} - -// BeerHop will return a random beer hop -func BeerHop() string { - return getRandValue([]string{"beer", "hop"}) -} - -// BeerYeast will return a random beer yeast -func BeerYeast() string { - return getRandValue([]string{"beer", "yeast"}) -} - -// BeerMalt will return a random beer malt -func BeerMalt() string { - return getRandValue([]string{"beer", "malt"}) -} - -// BeerIbu will return a random beer ibu value between 10 and 100 -func BeerIbu() string { - return strconv.Itoa(randIntRange(10, 100)) + " IBU" -} - -// BeerAlcohol will return a random beer alcohol level between 2.0 and 10.0 -func BeerAlcohol() string { - return strconv.FormatFloat(randFloat64Range(2.0, 10.0), 'f', 1, 64) + "%" -} - -// BeerBlg will return a random beer blg between 5.0 and 20.0 -func BeerBlg() string { - return strconv.FormatFloat(randFloat64Range(5.0, 20.0), 'f', 1, 64) + "°Blg" -} diff --git a/vendor/github.com/brianvoe/gofakeit/bool.go b/vendor/github.com/brianvoe/gofakeit/bool.go deleted file mode 100644 index f63eeedd3241..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/bool.go +++ /dev/null @@ -1,10 +0,0 @@ -package gofakeit - -// Bool will generate a random boolean value -func Bool() bool { - if randIntRange(0, 1) == 1 { - return true - } - - return false -} diff --git a/vendor/github.com/brianvoe/gofakeit/color.go b/vendor/github.com/brianvoe/gofakeit/color.go deleted file mode 100644 index 63a737e99a62..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/color.go +++ /dev/null @@ -1,44 +0,0 @@ -package gofakeit - -import "math/rand" - -// Color will generate a random 
color string -func Color() string { - return getRandValue([]string{"color", "full"}) -} - -// SafeColor will generate a random safe color string -func SafeColor() string { - return getRandValue([]string{"color", "safe"}) -} - -// HexColor will generate a random hexadecimal color string -func HexColor() string { - color := make([]byte, 6) - hashQuestion := []byte("?#") - for i := 0; i < 6; i++ { - color[i] = hashQuestion[rand.Intn(2)] - } - - return "#" + replaceWithLetters(replaceWithNumbers(string(color))) - - // color := "" - // for i := 1; i <= 6; i++ { - // color += RandString([]string{"?", "#"}) - // } - - // // Replace # with number - // color = replaceWithNumbers(color) - - // // Replace ? with letter - // for strings.Count(color, "?") > 0 { - // color = strings.Replace(color, "?", RandString(letters), 1) - // } - - // return "#" + color -} - -// RGBColor will generate a random int slice color -func RGBColor() []int { - return []int{randIntRange(0, 255), randIntRange(0, 255), randIntRange(0, 255)} -} diff --git a/vendor/github.com/brianvoe/gofakeit/company.go b/vendor/github.com/brianvoe/gofakeit/company.go deleted file mode 100644 index abdb2aa698f1..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/company.go +++ /dev/null @@ -1,30 +0,0 @@ -package gofakeit - -// Company will generate a random company name string -func Company() (company string) { - switch randInt := randIntRange(1, 3); randInt { - case 1: - company = LastName() + ", " + LastName() + " and " + LastName() - case 2: - company = LastName() + "-" + LastName() - case 3: - company = LastName() + " " + CompanySuffix() - } - - return -} - -// CompanySuffix will generate a random company suffix string -func CompanySuffix() string { - return getRandValue([]string{"company", "suffix"}) -} - -// BuzzWord will generate a random company buzz word string -func BuzzWord() string { - return getRandValue([]string{"company", "buzzwords"}) -} - -// BS will generate a random company bs string -func BS() 
string { - return getRandValue([]string{"company", "bs"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/contact.go b/vendor/github.com/brianvoe/gofakeit/contact.go deleted file mode 100644 index 1eb0ae05303d..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/contact.go +++ /dev/null @@ -1,40 +0,0 @@ -package gofakeit - -import ( - "strings" -) - -// ContactInfo struct full of contact info -type ContactInfo struct { - Phone string - Email string -} - -// Contact will generate a struct with information randomly populated contact information -func Contact() *ContactInfo { - return &ContactInfo{ - Phone: Phone(), - Email: Email(), - } -} - -// Phone will generate a random phone number string -func Phone() string { - return replaceWithNumbers("##########") -} - -// PhoneFormatted will generate a random phone number string -func PhoneFormatted() string { - return replaceWithNumbers(getRandValue([]string{"contact", "phone"})) -} - -// Email will generate a random email string -func Email() string { - var email string - - email = getRandValue([]string{"person", "first"}) + getRandValue([]string{"person", "last"}) - email += "@" - email += getRandValue([]string{"person", "last"}) + "." 
+ getRandValue([]string{"internet", "domain_suffix"}) - - return strings.ToLower(email) -} diff --git a/vendor/github.com/brianvoe/gofakeit/currency.go b/vendor/github.com/brianvoe/gofakeit/currency.go deleted file mode 100644 index c25e4d62a7aa..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/currency.go +++ /dev/null @@ -1,38 +0,0 @@ -package gofakeit - -import ( - "math" - "math/rand" - - "github.com/brianvoe/gofakeit/data" -) - -// CurrencyInfo is a struct of currency information -type CurrencyInfo struct { - Short string - Long string -} - -// Currency will generate a struct with random currency information -func Currency() *CurrencyInfo { - index := rand.Intn(len(data.Data["currency"]["short"])) - return &CurrencyInfo{ - Short: data.Data["currency"]["short"][index], - Long: data.Data["currency"]["long"][index], - } -} - -// CurrencyShort will generate a random short currency value -func CurrencyShort() string { - return getRandValue([]string{"currency", "short"}) -} - -// CurrencyLong will generate a random long currency name -func CurrencyLong() string { - return getRandValue([]string{"currency", "long"}) -} - -// Price will take in a min and max value and return a formatted price -func Price(min, max float64) float64 { - return math.Floor(randFloat64Range(min, max)*100) / 100 -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/address.go b/vendor/github.com/brianvoe/gofakeit/data/address.go deleted file mode 100644 index 671cdda91375..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/address.go +++ /dev/null @@ -1,15 +0,0 @@ -package data - -// Address consists of address information -var Address = map[string][]string{ - "number": {"#####", "####", "###"}, - "street_prefix": {"North", "East", "West", "South", "New", "Lake", "Port"}, - "street_name": {"Alley", "Avenue", "Branch", "Bridge", "Brook", "Brooks", "Burg", "Burgs", "Bypass", "Camp", "Canyon", "Cape", "Causeway", "Center", "Centers", "Circle", "Circles", "Cliff", "Cliffs", 
"Club", "Common", "Corner", "Corners", "Course", "Court", "Courts", "Cove", "Coves", "Creek", "Crescent", "Crest", "Crossing", "Crossroad", "Curve", "Dale", "Dam", "Divide", "Drive", "Drive", "Drives", "Estate", "Estates", "Expressway", "Extension", "Extensions", "Fall", "Falls", "Ferry", "Field", "Fields", "Flat", "Flats", "Ford", "Fords", "Forest", "Forge", "Forges", "Fork", "Forks", "Fort", "Freeway", "Garden", "Gardens", "Gateway", "Glen", "Glens", "Green", "Greens", "Grove", "Groves", "Harbor", "Harbors", "Haven", "Heights", "Highway", "Hill", "Hills", "Hollow", "Inlet", "Inlet", "Island", "Island", "Islands", "Islands", "Isle", "Isle", "Junction", "Junctions", "Key", "Keys", "Knoll", "Knolls", "Lake", "Lakes", "Land", "Landing", "Lane", "Light", "Lights", "Loaf", "Lock", "Locks", "Locks", "Lodge", "Lodge", "Loop", "Mall", "Manor", "Manors", "Meadow", "Meadows", "Mews", "Mill", "Mills", "Mission", "Mission", "Motorway", "Mount", "Mountain", "Mountain", "Mountains", "Mountains", "Neck", "Orchard", "Oval", "Overpass", "Park", "Parks", "Parkway", "Parkways", "Pass", "Passage", "Path", "Pike", "Pine", "Pines", "Place", "Plain", "Plains", "Plains", "Plaza", "Plaza", "Point", "Points", "Port", "Port", "Ports", "Ports", "Prairie", "Prairie", "Radial", "Ramp", "Ranch", "Rapid", "Rapids", "Rest", "Ridge", "Ridges", "River", "Road", "Road", "Roads", "Roads", "Route", "Row", "Rue", "Run", "Shoal", "Shoals", "Shore", "Shores", "Skyway", "Spring", "Springs", "Springs", "Spur", "Spurs", "Square", "Square", "Squares", "Squares", "Station", "Station", "Stravenue", "Stravenue", "Stream", "Stream", "Street", "Street", "Streets", "Summit", "Summit", "Terrace", "Throughway", "Trace", "Track", "Trafficway", "Trail", "Trail", "Tunnel", "Tunnel", "Turnpike", "Turnpike", "Underpass", "Union", "Unions", "Valley", "Valleys", "Via", "Viaduct", "View", "Views", "Village", "Village", "Villages", "Ville", "Vista", "Vista", "Walk", "Walks", "Wall", "Way", "Ways", "Well", "Wells"}, - 
"street_suffix": {"town", "ton", "land", "ville", "berg", "burgh", "borough", "bury", "view", "port", "mouth", "stad", "furt", "chester", "mouth", "fort", "haven", "side", "shire"}, - "city": {"{address.street_prefix} {name.first}{address.street_suffix}", "{address.street_prefix} {name.first}", "{name.first}{address.street_suffix}", "{name.last}{address.street_suffix}"}, - "state": {"Alabama", "Alaska", "Arizona", "Arkansas", "California", "Colorado", "Connecticut", "Delaware", "Florida", "Georgia", "Hawaii", "Idaho", "Illinois", "Indiana", "Iowa", "Kansas", "Kentucky", "Louisiana", "Maine", "Maryland", "Massachusetts", "Michigan", "Minnesota", "Mississippi", "Missouri", "Montana", "Nebraska", "Nevada", "New Hampshire", "New Jersey", "New Mexico", "New York", "North Carolina", "North Dakota", "Ohio", "Oklahoma", "Oregon", "Pennsylvania", "Rhode Island", "South Carolina", "South Dakota", "Tennessee", "Texas", "Utah", "Vermont", "Virginia", "Washington", "West Virginia", "Wisconsin", "Wyoming"}, - "state_abr": {"AL", "AK", "AS", "AZ", "AR", "CA", "CO", "CT", "DE", "DC", "FM", "FL", "GA", "GU", "HI", "ID", "IL", "IN", "IA", "KS", "KY", "LA", "ME", "MH", "MD", "MA", "MI", "MN", "MS", "MO", "MT", "NE", "NV", "NH", "NJ", "NM", "NY", "NC", "ND", "MP", "OH", "OK", "OR", "PW", "PA", "PR", "RI", "SC", "SD", "TN", "TX", "UT", "VT", "VI", "VA", "WA", "WV", "WI", "WY", "AE", "AA", "AP"}, - "zip": {"#####"}, - "country": {"Afghanistan", "Albania", "Algeria", "American Samoa", "Andorra", "Angola", "Anguilla", "Antarctica", "Antigua and Barbuda", "Argentina", "Armenia", "Aruba", "Australia", "Austria", "Azerbaijan", "Bahamas", "Bahrain", "Bangladesh", "Barbados", "Belarus", "Belgium", "Belize", "Benin", "Bermuda", "Bhutan", "Bolivia", "Bosnia and Herzegovina", "Botswana", "Bouvet Island", "Brazil", "British Indian Ocean Territory", "British Virgin Islands", "Brunei Darussalam", "Bulgaria", "Burkina Faso", "Burundi", "Cambodia", "Cameroon", "Canada", "Cape Verde", "Cayman Islands", 
"Central African Republic", "Chad", "Chile", "China", "Christmas Island", "Cocos (Keeling) Islands", "Colombia", "Comoros", "Congo", "Congo", "Cook Islands", "Costa Rica", "Cote Divoire", "Croatia", "Cuba", "Cyprus", "Czech Republic", "Denmark", "Djibouti", "Dominica", "Dominican Republic", "Ecuador", "Egypt", "El Salvador", "Equatorial Guinea", "Eritrea", "Estonia", "Ethiopia", "Faroe Islands", "Falkland Islands", "Fiji", "Finland", "France", "French Guiana", "French Polynesia", "French Southern Territories", "Gabon", "Gambia", "Georgia", "Germany", "Ghana", "Gibraltar", "Greece", "Greenland", "Grenada", "Guadeloupe", "Guam", "Guatemala", "Guernsey", "Guinea", "Guinea-Bissau", "Guyana", "Haiti", "Heard Island and McDonald Islands", "Holy See (Vatican City State)", "Honduras", "Hong Kong", "Hungary", "Iceland", "India", "Indonesia", "Iran", "Iraq", "Ireland", "Isle of Man", "Israel", "Italy", "Jamaica", "Japan", "Jersey", "Jordan", "Kazakhstan", "Kenya", "Kiribati", "Korea", "Korea", "Kuwait", "Kyrgyz Republic", "Lao Peoples Democratic Republic", "Latvia", "Lebanon", "Lesotho", "Liberia", "Libyan Arab Jamahiriya", "Liechtenstein", "Lithuania", "Luxembourg", "Macao", "Macedonia", "Madagascar", "Malawi", "Malaysia", "Maldives", "Mali", "Malta", "Marshall Islands", "Martinique", "Mauritania", "Mauritius", "Mayotte", "Mexico", "Micronesia", "Moldova", "Monaco", "Mongolia", "Montenegro", "Montserrat", "Morocco", "Mozambique", "Myanmar", "Namibia", "Nauru", "Nepal", "Netherlands Antilles", "Netherlands", "New Caledonia", "New Zealand", "Nicaragua", "Niger", "Nigeria", "Niue", "Norfolk Island", "Northern Mariana Islands", "Norway", "Oman", "Pakistan", "Palau", "Palestinian Territory", "Panama", "Papua New Guinea", "Paraguay", "Peru", "Philippines", "Pitcairn Islands", "Poland", "Portugal", "Puerto Rico", "Qatar", "Reunion", "Romania", "Russian Federation", "Rwanda", "Saint Barthelemy", "Saint Helena", "Saint Kitts and Nevis", "Saint Lucia", "Saint Martin", "Saint Pierre 
and Miquelon", "Saint Vincent and the Grenadines", "Samoa", "San Marino", "Sao Tome and Principe", "Saudi Arabia", "Senegal", "Serbia", "Seychelles", "Sierra Leone", "Singapore", "Slovakia (Slovak Republic)", "Slovenia", "Solomon Islands", "Somalia", "South Africa", "South Georgia and the South Sandwich Islands", "Spain", "Sri Lanka", "Sudan", "Suriname", "Svalbard & Jan Mayen Islands", "Swaziland", "Sweden", "Switzerland", "Syrian Arab Republic", "Taiwan", "Tajikistan", "Tanzania", "Thailand", "Timor-Leste", "Togo", "Tokelau", "Tonga", "Trinidad and Tobago", "Tunisia", "Turkey", "Turkmenistan", "Turks and Caicos Islands", "Tuvalu", "Uganda", "Ukraine", "United Arab Emirates", "United Kingdom", "United States of America", "United States Minor Outlying Islands", "United States Virgin Islands", "Uruguay", "Uzbekistan", "Vanuatu", "Venezuela", "Vietnam", "Wallis and Futuna", "Western Sahara", "Yemen", "Zambia", "Zimbabwe"}, - "country_abr": {"AF", "AL", "DZ", "AS", "AD", "AO", "AI", "AQ", "AG", "AR", "AM", "AW", "AU", "AT", "AZ", "BS", "BH", "BD", "BB", "BY", "BE", "BZ", "BJ", "BM", "BT", "BO", "BA", "BW", "BV", "BR", "IO", "BN", "BG", "BF", "BI", "KH", "CM", "CA", "CV", "KY", "CF", "TD", "CL", "CN", "CX", "CC", "CO", "KM", "CG", "CK", "CR", "CI", "HR", "CU", "CY", "CZ", "DK", "DJ", "DM", "DO", "TL", "EC", "EG", "SV", "GQ", "ER", "EE", "ET", "FK", "FO", "FJ", "FI", "FR", "FX", "GF", "PF", "TF", "GA", "GM", "GE", "DE", "GH", "GI", "GR", "GL", "GD", "GP", "GU", "GT", "GN", "GW", "GY", "HT", "HM", "HN", "HK", "HU", "IS", "IN", "ID", "IR", "IQ", "IE", "IL", "IT", "JM", "JP", "JO", "KZ", "KE", "KI", "KP", "KR", "KW", "KG", "LA", "LV", "LB", "LS", "LR", "LY", "LI", "LT", "LU", "MO", "MK", "MG", "MW", "MY", "MV", "ML", "MT", "MH", "MQ", "MR", "MU", "YT", "MX", "FM", "MD", "MC", "MN", "MS", "MA", "MZ", "MM", "NA", "NR", "NP", "NL", "AN", "NC", "NZ", "NI", "NE", "NG", "NU", "NF", "MP", "NO", "OM", "PK", "PW", "PA", "PG", "PY", "PE", "PH", "PN", "PL", "PT", "PR", "QA", "RE", 
"RO", "RU", "RW", "KN", "LC", "VC", "WS", "SM", "ST", "SA", "SN", "RS", "SC", "SL", "SG", "SK", "SI", "SB", "SO", "ZA", "ES", "LK", "SH", "PM", "SD", "SR", "SJ", "SZ", "SE", "CH", "SY", "TW", "TJ", "TZ", "TH", "TG", "TK", "TO", "TT", "TN", "TR", "TM", "TC", "TV", "UG", "UA", "AE", "GB", "US", "UM", "UY", "UZ", "VU", "VA", "VE", "VN", "VG", "VI", "WF", "EH", "YE", "YU", "ZR", "ZM", "ZW"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/beer.go b/vendor/github.com/brianvoe/gofakeit/data/beer.go deleted file mode 100644 index 1192907d5f29..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/beer.go +++ /dev/null @@ -1,10 +0,0 @@ -package data - -// Beer consists of various beer information -var Beer = map[string][]string{ - "name": {"Pliny The Elder", "Founders Kentucky Breakfast", "Trappistes Rochefort 10", "HopSlam Ale", "Stone Imperial Russian Stout", "St. Bernardus Abt 12", "Founders Breakfast Stout", "Weihenstephaner Hefeweissbier", "Péché Mortel", "Celebrator Doppelbock", "Duvel", "Dreadnaught IPA", "Nugget Nectar", "La Fin Du Monde", "Bourbon County Stout", "Old Rasputin Russian Imperial Stout", "Two Hearted Ale", "Ruination IPA", "Schneider Aventinus", "Double Bastard Ale", "90 Minute IPA", "Hop Rod Rye", "Trappistes Rochefort 8", "Chimay Grande Réserve", "Stone IPA", "Arrogant Bastard Ale", "Edmund Fitzgerald Porter", "Chocolate St", "Oak Aged Yeti Imperial Stout", "Ten FIDY", "Storm King Stout", "Shakespeare Oatmeal", "Alpha King Pale Ale", "Westmalle Trappist Tripel", "Samuel Smith’s Imperial IPA", "Yeti Imperial Stout", "Hennepin", "Samuel Smith’s Oatmeal Stout", "Brooklyn Black", "Oaked Arrogant Bastard Ale", "Sublimely Self-Righteous Ale", "Trois Pistoles", "Bell’s Expedition", "Sierra Nevada Celebration Ale", "Sierra Nevada Bigfoot Barleywine Style Ale", "Racer 5 India Pale Ale, Bear Republic Bre", "Orval Trappist Ale", "Hercules Double IPA", "Maharaj", "Maudite"}, - "hop": {"Ahtanum", "Amarillo", "Bitter Gold", "Bravo", "Brewer’s 
Gold", "Bullion", "Cascade", "Cashmere", "Centennial", "Chelan", "Chinook", "Citra", "Cluster", "Columbia", "Columbus", "Comet", "Crystal", "Equinox", "Eroica", "Fuggle", "Galena", "Glacier", "Golding", "Hallertau", "Horizon", "Liberty", "Magnum", "Millennium", "Mosaic", "Mt. Hood", "Mt. Rainier", "Newport", "Northern Brewer", "Nugget", "Olympic", "Palisade", "Perle", "Saaz", "Santiam", "Simcoe", "Sorachi Ace", "Sterling", "Summit", "Tahoma", "Tettnang", "TriplePearl", "Ultra", "Vanguard", "Warrior", "Willamette", "Yakima Gol"}, - "yeast": {"1007 - German Ale", "1010 - American Wheat", "1028 - London Ale", "1056 - American Ale", "1084 - Irish Ale", "1098 - British Ale", "1099 - Whitbread Ale", "1187 - Ringwood Ale", "1272 - American Ale II", "1275 - Thames Valley Ale", "1318 - London Ale III", "1332 - Northwest Ale", "1335 - British Ale II", "1450 - Dennys Favorite 50", "1469 - West Yorkshire Ale", "1728 - Scottish Ale", "1968 - London ESB Ale", "2565 - Kölsch", "1214 - Belgian Abbey", "1388 - Belgian Strong Ale", "1762 - Belgian Abbey II", "3056 - Bavarian Wheat Blend", "3068 - Weihenstephan Weizen", "3278 - Belgian Lambic Blend", "3333 - German Wheat", "3463 - Forbidden Fruit", "3522 - Belgian Ardennes", "3638 - Bavarian Wheat", "3711 - French Saison", "3724 - Belgian Saison", "3763 - Roeselare Ale Blend", "3787 - Trappist High Gravity", "3942 - Belgian Wheat", "3944 - Belgian Witbier", "2000 - Budvar Lager", "2001 - Urquell Lager", "2007 - Pilsen Lager", "2035 - American Lager", "2042 - Danish Lager", "2112 - California Lager", "2124 - Bohemian Lager", "2206 - Bavarian Lager", "2278 - Czech Pils", "2308 - Munich Lager", "2633 - Octoberfest Lager Blend", "5112 - Brettanomyces bruxellensis", "5335 - Lactobacillus", "5526 - Brettanomyces lambicus", "5733 - Pediococcus"}, - "malt": {"Black malt", "Caramel", "Carapils", "Chocolate", "Munich", "Caramel", "Carapils", "Chocolate malt", "Munich", "Pale", "Roasted barley", "Rye malt", "Special roast", "Victory", "Vienna", 
"Wheat mal"}, - "style": {"Light Lager", "Pilsner", "European Amber Lager", "Dark Lager", "Bock", "Light Hybrid Beer", "Amber Hybrid Beer", "English Pale Ale", "Scottish And Irish Ale", "Merican Ale", "English Brown Ale", "Porter", "Stout", "India Pale Ale", "German Wheat And Rye Beer", "Belgian And French Ale", "Sour Ale", "Belgian Strong Ale", "Strong Ale", "Fruit Beer", "Vegetable Beer", "Smoke-flavored", "Wood-aged Beer"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/colors.go b/vendor/github.com/brianvoe/gofakeit/data/colors.go deleted file mode 100644 index 3aca817d69f3..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/colors.go +++ /dev/null @@ -1,7 +0,0 @@ -package data - -// Colors consists of color information -var Colors = map[string][]string{ - "safe": {"black", "maroon", "green", "navy", "olive", "purple", "teal", "lime", "blue", "silver", "gray", "yellow", "fuchsia", "aqua", "white"}, - "full": {"AliceBlue", "AntiqueWhite", "Aqua", "Aquamarine", "Azure", "Beige", "Bisque", "Black", "BlanchedAlmond", "Blue", "BlueViolet", "Brown", "BurlyWood", "CadetBlue", "Chartreuse", "Chocolate", "Coral", "CornflowerBlue", "Cornsilk", "Crimson", "Cyan", "DarkBlue", "DarkCyan", "DarkGoldenRod", "DarkGray", "DarkGreen", "DarkKhaki", "DarkMagenta", "DarkOliveGreen", "Darkorange", "DarkOrchid", "DarkRed", "DarkSalmon", "DarkSeaGreen", "DarkSlateBlue", "DarkSlateGray", "DarkTurquoise", "DarkViolet", "DeepPink", "DeepSkyBlue", "DimGray", "DimGrey", "DodgerBlue", "FireBrick", "FloralWhite", "ForestGreen", "Fuchsia", "Gainsboro", "GhostWhite", "Gold", "GoldenRod", "Gray", "Green", "GreenYellow", "HoneyDew", "HotPink", "IndianRed ", "Indigo ", "Ivory", "Khaki", "Lavender", "LavenderBlush", "LawnGreen", "LemonChiffon", "LightBlue", "LightCoral", "LightCyan", "LightGoldenRodYellow", "LightGray", "LightGreen", "LightPink", "LightSalmon", "LightSeaGreen", "LightSkyBlue", "LightSlateGray", "LightSteelBlue", "LightYellow", "Lime", "LimeGreen", "Linen", 
"Magenta", "Maroon", "MediumAquaMarine", "MediumBlue", "MediumOrchid", "MediumPurple", "MediumSeaGreen", "MediumSlateBlue", "MediumSpringGreen", "MediumTurquoise", "MediumVioletRed", "MidnightBlue", "MintCream", "MistyRose", "Moccasin", "NavajoWhite", "Navy", "OldLace", "Olive", "OliveDrab", "Orange", "OrangeRed", "Orchid", "PaleGoldenRod", "PaleGreen", "PaleTurquoise", "PaleVioletRed", "PapayaWhip", "PeachPuff", "Peru", "Pink", "Plum", "PowderBlue", "Purple", "Red", "RosyBrown", "RoyalBlue", "SaddleBrown", "Salmon", "SandyBrown", "SeaGreen", "SeaShell", "Sienna", "Silver", "SkyBlue", "SlateBlue", "SlateGray", "Snow", "SpringGreen", "SteelBlue", "Tan", "Teal", "Thistle", "Tomato", "Turquoise", "Violet", "Wheat", "White", "WhiteSmoke", "Yellow", "YellowGreen"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/company.go b/vendor/github.com/brianvoe/gofakeit/data/company.go deleted file mode 100644 index b2a3790c7c68..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/company.go +++ /dev/null @@ -1,9 +0,0 @@ -package data - -// Company consists of company information -var Company = map[string][]string{ - "name": {"{person.last} {company.suffix}", "{person.last}-{person.last}", "{person.last}, {person.last} and {person.last}"}, - "suffix": {"Inc", "and Sons", "LLC", "Group"}, - "buzzwords": {"Adaptive", "Advanced", "Ameliorated", "Assimilated", "Automated", "Balanced", "Business-focused", "Centralized", "Cloned", "Compatible", "Configurable", "Cross-group", "Cross-platform", "Customer-focused", "Customizable", "De-engineered", "Decentralized", "Devolved", "Digitized", "Distributed", "Diverse", "Down-sized", "Enhanced", "Enterprise-wide", "Ergonomic", "Exclusive", "Expanded", "Extended", "Face to face", "Focused", "Front-line", "Fully-configurable", "Function-based", "Fundamental", "Future-proofed", "Grass-roots", "Horizontal", "Implemented", "Innovative", "Integrated", "Intuitive", "Inverse", "Managed", "Mandatory", "Monitored", "Multi-channelled", 
"Multi-lateral", "Multi-layered", "Multi-tiered", "Networked", "Object-based", "Open-architected", "Open-source", "Operative", "Optimized", "Optional", "Organic", "Organized", "Persevering", "Persistent", "Phased", "Polarised", "Pre-emptive", "Proactive", "Profit-focused", "Profound", "Programmable", "Progressive", "Public-key", "Quality-focused", "Re-contextualized", "Re-engineered", "Reactive", "Realigned", "Reduced", "Reverse-engineered", "Right-sized", "Robust", "Seamless", "Secured", "Self-enabling", "Sharable", "Stand-alone", "Streamlined", "Switchable", "Synchronised", "Synergistic", "Synergized", "Team-oriented", "Total", "Triple-buffered", "Universal", "Up-sized", "Upgradable", "User-centric", "User-friendly", "Versatile", "Virtual", "Vision-oriented", "Visionary", "24 hour", "24/7", "3rd generation", "4th generation", "5th generation", "6th generation", "actuating", "analyzing", "asymmetric", "asynchronous", "attitude-oriented", "background", "bandwidth-monitored", "bi-directional", "bifurcated", "bottom-line", "clear-thinking", "client-driven", "client-server", "coherent", "cohesive", "composite", "content-based", "context-sensitive", "contextually-based", "dedicated", "demand-driven", "didactic", "directional", "discrete", "disintermediate", "dynamic", "eco-centric", "empowering", "encompassing", "even-keeled", "executive", "explicit", "exuding", "fault-tolerant", "foreground", "fresh-thinking", "full-range", "global", "grid-enabled", "heuristic", "high-level", "holistic", "homogeneous", "human-resource", "hybrid", "impactful", "incremental", "intangible", "interactive", "intermediate", "leading edge", "local", "logistical", "maximized", "methodical", "mission-critical", "mobile", "modular", "motivating", "multi-state", "multi-tasking", "multimedia", "national", "needs-based", "neutral", "next generation", "non-volatile", "object-oriented", "optimal", "optimizing", "radical", "real-time", "reciprocal", "regional", "responsive", "scalable", "secondary", 
"solution-oriented", "stable", "static", "system-worthy", "systematic", "systemic", "tangible", "tertiary", "transitional", "uniform", "upward-trending", "user-facing", "value-added", "web-enabled", "well-modulated", "zero administration", "zero defect", "zero tolerance", "Graphic Interface", "Graphical User Interface", "ability", "access", "adapter", "algorithm", "alliance", "analyzer", "application", "approach", "architecture", "archive", "array", "artificial intelligence", "attitude", "benchmark", "budgetary management", "capability", "capacity", "challenge", "circuit", "collaboration", "complexity", "concept", "conglomeration", "contingency", "core", "customer loyalty", "data-warehouse", "database", "definition", "emulation", "encoding", "encryption", "extranet", "firmware", "flexibility", "focus group", "forecast", "frame", "framework", "function", "functionalities", "groupware", "hardware", "help-desk", "hierarchy", "hub", "implementation", "info-mediaries", "infrastructure", "initiative", "installation", "instruction set", "interface", "internet solution", "intranet", "knowledge base", "knowledge user", "leverage", "local area network", "matrices", "matrix", "methodology", "middleware", "migration", "model", "moderator", "monitoring", "moratorium", "neural-net", "open architecture", "open system", "orchestration", "paradigm", "parallelism", "policy", "portal", "pricing structure", "process improvement", "product", "productivity", "project", "projection", "protocol", "secured line", "service-desk", "software", "solution", "standardization", "strategy", "structure", "success", "superstructure", "support", "synergy", "system engine", "task-force", "throughput", "time-frame", "toolset", "utilisation", "website", "workforce"}, - "bs": {"aggregate", "architect", "benchmark", "brand", "cultivate", "deliver", "deploy", "disintermediate", "drive", "e-enable", "embrace", "empower", "enable", "engage", "engineer", "enhance", "envisioneer", "evolve", "expedite", 
"exploit", "extend", "facilitate", "generate", "grow", "harness", "implement", "incentivize", "incubate", "innovate", "integrate", "iterate", "leverage", "matrix", "maximize", "mesh", "monetize", "morph", "optimize", "orchestrate", "productize", "recontextualize", "redefine", "reintermediate", "reinvent", "repurpose", "revolutionize", "scale", "seize", "strategize", "streamline", "syndicate", "synergize", "synthesize", "target", "transform", "transition", "unleash", "utilize", "visualize", "whiteboard", "24/365", "24/7", "B2B", "B2C", "back-end", "best-of-breed", "bleeding-edge", "bricks-and-clicks", "clicks-and-mortar", "collaborative", "compelling", "cross-media", "cross-platform", "customized", "cutting-edge", "distributed", "dot-com", "dynamic", "e-business", "efficient", "end-to-end", "enterprise", "extensible", "frictionless", "front-end", "global", "granular", "holistic", "impactful", "innovative", "integrated", "interactive", "intuitive", "killer", "leading-edge", "magnetic", "mission-critical", "next-generation", "one-to-one", "open-source", "out-of-the-box", "plug-and-play", "proactive", "real-time", "revolutionary", "rich", "robust", "scalable", "seamless", "sexy", "sticky", "strategic", "synergistic", "transparent", "turn-key", "ubiquitous", "user-centric", "value-added", "vertical", "viral", "virtual", "visionary", "web-enabled", "wireless", "world-class", "ROI", "action-items", "applications", "architectures", "bandwidth", "channels", "communities", "content", "convergence", "deliverables", "e-business", "e-commerce", "e-markets", "e-services", "e-tailers", "experiences", "eyeballs", "functionalities", "infomediaries", "infrastructures", "initiatives", "interfaces", "markets", "methodologies", "metrics", "mindshare", "models", "networks", "niches", "paradigms", "partnerships", "platforms", "portals", "relationships", "schemas", "solutions", "supply-chains", "synergies", "systems", "technologies", "users", "vortals", "web services", "web-readiness"}, 
-} diff --git a/vendor/github.com/brianvoe/gofakeit/data/computer.go b/vendor/github.com/brianvoe/gofakeit/data/computer.go deleted file mode 100644 index b682c6f820cc..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/computer.go +++ /dev/null @@ -1,8 +0,0 @@ -package data - -// Computer consists of computer information -var Computer = map[string][]string{ - "linux_processor": {"i686", "x86_64"}, - "mac_processor": {"Intel", "PPC", "U; Intel", "U; PPC"}, - "windows_platform": {"Windows NT 6.2", "Windows NT 6.1", "Windows NT 6.0", "Windows NT 5.2", "Windows NT 5.1", "Windows NT 5.01", "Windows NT 5.0", "Windows NT 4.0", "Windows 98; Win 9x 4.90", "Windows 98", "Windows 95", "Windows CE"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/contact.go b/vendor/github.com/brianvoe/gofakeit/data/contact.go deleted file mode 100644 index 88b957961dbb..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/contact.go +++ /dev/null @@ -1,6 +0,0 @@ -package data - -// Contact consists of contact information -var Contact = map[string][]string{ - "phone": {"###-###-####", "(###)###-####", "1-###-###-####", "###.###.####"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/currency.go b/vendor/github.com/brianvoe/gofakeit/data/currency.go deleted file mode 100644 index 13b8019973ca..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/currency.go +++ /dev/null @@ -1,7 +0,0 @@ -package data - -// Currency consists of currency information -var Currency = map[string][]string{ - "short": {"AED", "AFN", "ALL", "AMD", "ANG", "AOA", "ARS", "AUD", "AWG", "AZN", "BAM", "BBD", "BDT", "BGN", "BHD", "BIF", "BMD", "BND", "BOB", "BRL", "BSD", "BTN", "BWP", "BYR", "BZD", "CAD", "CDF", "CHF", "CLP", "CNY", "COP", "CRC", "CUC", "CUP", "CVE", "CZK", "DJF", "DKK", "DOP", "DZD", "EGP", "ERN", "ETB", "EUR", "FJD", "FKP", "GBP", "GEL", "GGP", "GHS", "GIP", "GMD", "GNF", "GTQ", "GYD", "HKD", "HNL", "HRK", "HTG", "HUF", "IDR", "ILS", "IMP", "INR", "IQD", "IRR", 
"ISK", "JEP", "JMD", "JOD", "JPY", "KES", "KGS", "KHR", "KMF", "KPW", "KRW", "KWD", "KYD", "KZT", "LAK", "LBP", "LKR", "LRD", "LSL", "LTL", "LYD", "MAD", "MDL", "MGA", "MKD", "MMK", "MNT", "MOP", "MRO", "MUR", "MVR", "MWK", "MXN", "MYR", "MZN", "NAD", "NGN", "NIO", "NOK", "NPR", "NZD", "OMR", "PAB", "PEN", "PGK", "PHP", "PKR", "PLN", "PYG", "QAR", "RON", "RSD", "RUB", "RWF", "SAR", "SBD", "SCR", "SDG", "SEK", "SGD", "SHP", "SLL", "SOS", "SPL", "SRD", "STD", "SVC", "SYP", "SZL", "THB", "TJS", "TMT", "TND", "TOP", "TRY", "TTD", "TVD", "TWD", "TZS", "UAH", "UGX", "USD", "UYU", "UZS", "VEF", "VND", "VUV", "WST", "XAF", "XCD", "XDR", "XOF", "XPF", "YER", "ZAR", "ZMW", "ZWD"}, - "long": {"United Arab Emirates Dirham", "Afghanistan Afghani", "Albania Lek", "Armenia Dram", "Netherlands Antilles Guilder", "Angola Kwanza", "Argentina Peso", "Australia Dollar", "Aruba Guilder", "Azerbaijan New Manat", "Bosnia and Herzegovina Convertible Marka", "Barbados Dollar", "Bangladesh Taka", "Bulgaria Lev", "Bahrain Dinar", "Burundi Franc", "Bermuda Dollar", "Brunei Darussalam Dollar", "Bolivia Boliviano", "Brazil Real", "Bahamas Dollar", "Bhutan Ngultrum", "Botswana Pula", "Belarus Ruble", "Belize Dollar", "Canada Dollar", "Congo/Kinshasa Franc", "Switzerland Franc", "Chile Peso", "China Yuan Renminbi", "Colombia Peso", "Costa Rica Colon", "Cuba Convertible Peso", "Cuba Peso", "Cape Verde Escudo", "Czech Republic Koruna", "Djibouti Franc", "Denmark Krone", "Dominican Republic Peso", "Algeria Dinar", "Egypt Pound", "Eritrea Nakfa", "Ethiopia Birr", "Euro Member Countries", "Fiji Dollar", "Falkland Islands (Malvinas) Pound", "United Kingdom Pound", "Georgia Lari", "Guernsey Pound", "Ghana Cedi", "Gibraltar Pound", "Gambia Dalasi", "Guinea Franc", "Guatemala Quetzal", "Guyana Dollar", "Hong Kong Dollar", "Honduras Lempira", "Croatia Kuna", "Haiti Gourde", "Hungary Forint", "Indonesia Rupiah", "Israel Shekel", "Isle of Man Pound", "India Rupee", "Iraq Dinar", "Iran Rial", "Iceland Krona", 
"Jersey Pound", "Jamaica Dollar", "Jordan Dinar", "Japan Yen", "Kenya Shilling", "Kyrgyzstan Som", "Cambodia Riel", "Comoros Franc", "Korea (North) Won", "Korea (South) Won", "Kuwait Dinar", "Cayman Islands Dollar", "Kazakhstan Tenge", "Laos Kip", "Lebanon Pound", "Sri Lanka Rupee", "Liberia Dollar", "Lesotho Loti", "Lithuania Litas", "Libya Dinar", "Morocco Dirham", "Moldova Leu", "Madagascar Ariary", "Macedonia Denar", "Myanmar (Burma) Kyat", "Mongolia Tughrik", "Macau Pataca", "Mauritania Ouguiya", "Mauritius Rupee", "Maldives (Maldive Islands) Rufiyaa", "Malawi Kwacha", "Mexico Peso", "Malaysia Ringgit", "Mozambique Metical", "Namibia Dollar", "Nigeria Naira", "Nicaragua Cordoba", "Norway Krone", "Nepal Rupee", "New Zealand Dollar", "Oman Rial", "Panama Balboa", "Peru Nuevo Sol", "Papua New Guinea Kina", "Philippines Peso", "Pakistan Rupee", "Poland Zloty", "Paraguay Guarani", "Qatar Riyal", "Romania New Leu", "Serbia Dinar", "Russia Ruble", "Rwanda Franc", "Saudi Arabia Riyal", "Solomon Islands Dollar", "Seychelles Rupee", "Sudan Pound", "Sweden Krona", "Singapore Dollar", "Saint Helena Pound", "Sierra Leone Leone", "Somalia Shilling", "Seborga Luigino", "Suriname Dollar", "São Tomé and Príncipe Dobra", "El Salvador Colon", "Syria Pound", "Swaziland Lilangeni", "Thailand Baht", "Tajikistan Somoni", "Turkmenistan Manat", "Tunisia Dinar", "Tonga Pa'anga", "Turkey Lira", "Trinidad and Tobago Dollar", "Tuvalu Dollar", "Taiwan New Dollar", "Tanzania Shilling", "Ukraine Hryvnia", "Uganda Shilling", "United States Dollar", "Uruguay Peso", "Uzbekistan Som", "Venezuela Bolivar", "Viet Nam Dong", "Vanuatu Vatu", "Samoa Tala", "Communauté Financière Africaine (BEAC) CFA Franc BEAC", "East Caribbean Dollar", "International Monetary Fund (IMF) Special Drawing Rights", "Communauté Financière Africaine (BCEAO) Franc", "Comptoirs Français du Pacifique (CFP) Franc", "Yemen Rial", "South Africa Rand", "Zambia Kwacha", "Zimbabwe Dollar"}, -} diff --git 
a/vendor/github.com/brianvoe/gofakeit/data/data.go b/vendor/github.com/brianvoe/gofakeit/data/data.go deleted file mode 100644 index d751c9994356..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/data.go +++ /dev/null @@ -1,28 +0,0 @@ -package data - -// Data consists of the main set of fake information -var Data = map[string]map[string][]string{ - "person": Person, - "contact": Contact, - "address": Address, - "company": Company, - "job": Job, - "lorem": Lorem, - "internet": Internet, - "file": Files, - "color": Colors, - "computer": Computer, - "payment": Payment, - "hipster": Hipster, - "beer": Beer, - "hacker": Hacker, - "currency": Currency, - "log_level": LogLevels, - "timezone": TimeZone, - "vehicle": Vehicle, -} - -// IntData consists of the main set of fake information (integer only) -var IntData = map[string]map[string][]int{ - "status_code": StatusCodes, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/datetime.go b/vendor/github.com/brianvoe/gofakeit/data/datetime.go deleted file mode 100644 index 3347120a67e2..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/datetime.go +++ /dev/null @@ -1,9 +0,0 @@ -package data - -// TimeZone is an array of short and long timezones -var TimeZone = map[string][]string{ - "offset": {"-12", "-11", "-10", "-8", "-7", "-7", "-8", "-7", "-6", "-6", "-6", "-5", "-5", "-6", "-5", "-4", "-4", "-4.5", "-4", "-3", "-4", "-4", "-4", "-2.5", "-3", "-3", "-3", "-3", "-3", "-3", "-2", "-1", "0", "-1", "1", "0", "0", "1", "1", "0", "2", "2", "2", "2", "1", "1", "3", "3", "2", "3", "3", "2", "3", "3", "3", "2", "3", "3", "3", "3", "3", "3", "4", "4.5", "4", "5", "4", "4", "4", "4.5", "5", "5", "5", "5.5", "5.5", "5.75", "6", "6", "6.5", "7", "7", "8", "8", "8", "8", "8", "8", "9", "9", "9", "9.5", "9.5", "10", "10", "10", "10", "10", "11", "11", "12", "12", "12", "12", "13", "13", "13"}, - "abr": {"DST", "U", "HST", "AKDT", "PDT", "PDT", "PST", "UMST", "MDT", "MDT", "CAST", "CDT", "CDT", "CCST", "SPST", 
"EDT", "UEDT", "VST", "PYT", "ADT", "CBST", "SWST", "PSST", "NDT", "ESAST", "AST", "SEST", "GDT", "MST", "BST", "U", "MDT", "ADT", "CVST", "MDT", "UTC", "GMT", "BST", "GDT", "GST", "WEDT", "CEDT", "RDT", "CEDT", "WCAST", "NST", "GDT", "MEDT", "EST", "SDT", "EEDT", "SAST", "FDT", "TDT", "JDT", "LST", "JST", "AST", "KST", "AST", "EAST", "MSK", "SAMT", "IDT", "AST", "ADT", "MST", "GST", "CST", "AST", "WAST", "YEKT", "PKT", "IST", "SLST", "NST", "CAST", "BST", "MST", "SAST", "NCAST", "CST", "NAST", "MPST", "WAST", "TST", "UST", "NAEST", "JST", "KST", "CAST", "ACST", "EAST", "AEST", "WPST", "TST", "YST", "CPST", "VST", "NZST", "U", "FST", "MST", "KDT", "TST", "SST"}, - "text": {"Dateline Standard Time", "UTC-11", "Hawaiian Standard Time", "Alaskan Standard Time", "Pacific Standard Time (Mexico)", "Pacific Daylight Time", "Pacific Standard Time", "US Mountain Standard Time", "Mountain Standard Time (Mexico)", "Mountain Standard Time", "Central America Standard Time", "Central Standard Time", "Central Standard Time (Mexico)", "Canada Central Standard Time", "SA Pacific Standard Time", "Eastern Standard Time", "US Eastern Standard Time", "Venezuela Standard Time", "Paraguay Standard Time", "Atlantic Standard Time", "Central Brazilian Standard Time", "SA Western Standard Time", "Pacific SA Standard Time", "Newfoundland Standard Time", "E. South America Standard Time", "Argentina Standard Time", "SA Eastern Standard Time", "Greenland Standard Time", "Montevideo Standard Time", "Bahia Standard Time", "UTC-02", "Mid-Atlantic Standard Time", "Azores Standard Time", "Cape Verde Standard Time", "Morocco Standard Time", "UTC", "Greenwich Mean Time", "British Summer Time", "GMT Standard Time", "Greenwich Standard Time", "W. Europe Standard Time", "Central Europe Standard Time", "Romance Standard Time", "Central European Standard Time", "W. 
Central Africa Standard Time", "Namibia Standard Time", "GTB Standard Time", "Middle East Standard Time", "Egypt Standard Time", "Syria Standard Time", "E. Europe Standard Time", "South Africa Standard Time", "FLE Standard Time", "Turkey Standard Time", "Israel Standard Time", "Libya Standard Time", "Jordan Standard Time", "Arabic Standard Time", "Kaliningrad Standard Time", "Arab Standard Time", "E. Africa Standard Time", "Moscow Standard Time", "Samara Time", "Iran Standard Time", "Arabian Standard Time", "Azerbaijan Standard Time", "Mauritius Standard Time", "Georgian Standard Time", "Caucasus Standard Time", "Afghanistan Standard Time", "West Asia Standard Time", "Yekaterinburg Time", "Pakistan Standard Time", "India Standard Time", "Sri Lanka Standard Time", "Nepal Standard Time", "Central Asia Standard Time", "Bangladesh Standard Time", "Myanmar Standard Time", "SE Asia Standard Time", "N. Central Asia Standard Time", "China Standard Time", "North Asia Standard Time", "Singapore Standard Time", "W. Australia Standard Time", "Taipei Standard Time", "Ulaanbaatar Standard Time", "North Asia East Standard Time", "Japan Standard Time", "Korea Standard Time", "Cen. Australia Standard Time", "AUS Central Standard Time", "E. 
Australia Standard Time", "AUS Eastern Standard Time", "West Pacific Standard Time", "Tasmania Standard Time", "Yakutsk Standard Time", "Central Pacific Standard Time", "Vladivostok Standard Time", "New Zealand Standard Time", "UTC+12", "Fiji Standard Time", "Magadan Standard Time", "Kamchatka Standard Time", "Tonga Standard Time", "Samoa Standard Time"}, - "full": {"(UTC-12:00) International Date Line West", "(UTC-11:00) Coordinated Universal Time-11", "(UTC-10:00) Hawaii", "(UTC-09:00) Alaska", "(UTC-08:00) Baja California", "(UTC-07:00) Pacific Time (US & Canada)", "(UTC-08:00) Pacific Time (US & Canada)", "(UTC-07:00) Arizona", "(UTC-07:00) Chihuahua, La Paz, Mazatlan", "(UTC-07:00) Mountain Time (US & Canada)", "(UTC-06:00) Central America", "(UTC-06:00) Central Time (US & Canada)", "(UTC-06:00) Guadalajara, Mexico City, Monterrey", "(UTC-06:00) Saskatchewan", "(UTC-05:00) Bogota, Lima, Quito", "(UTC-05:00) Eastern Time (US & Canada)", "(UTC-05:00) Indiana (East)", "(UTC-04:30) Caracas", "(UTC-04:00) Asuncion", "(UTC-04:00) Atlantic Time (Canada)", "(UTC-04:00) Cuiaba", "(UTC-04:00) Georgetown, La Paz, Manaus, San Juan", "(UTC-04:00) Santiago", "(UTC-03:30) Newfoundland", "(UTC-03:00) Brasilia", "(UTC-03:00) Buenos Aires", "(UTC-03:00) Cayenne, Fortaleza", "(UTC-03:00) Greenland", "(UTC-03:00) Montevideo", "(UTC-03:00) Salvador", "(UTC-02:00) Coordinated Universal Time-02", "(UTC-02:00) Mid-Atlantic - Old", "(UTC-01:00) Azores", "(UTC-01:00) Cape Verde Is.", "(UTC) Casablanca", "(UTC) Coordinated Universal Time", "(UTC) Edinburgh, London", "(UTC+01:00) Edinburgh, London", "(UTC) Dublin, Lisbon", "(UTC) Monrovia, Reykjavik", "(UTC+01:00) Amsterdam, Berlin, Bern, Rome, Stockholm, Vienna", "(UTC+01:00) Belgrade, Bratislava, Budapest, Ljubljana, Prague", "(UTC+01:00) Brussels, Copenhagen, Madrid, Paris", "(UTC+01:00) Sarajevo, Skopje, Warsaw, Zagreb", "(UTC+01:00) West Central Africa", "(UTC+01:00) Windhoek", "(UTC+02:00) Athens, Bucharest", "(UTC+02:00) Beirut", 
"(UTC+02:00) Cairo", "(UTC+02:00) Damascus", "(UTC+02:00) E. Europe", "(UTC+02:00) Harare, Pretoria", "(UTC+02:00) Helsinki, Kyiv, Riga, Sofia, Tallinn, Vilnius", "(UTC+03:00) Istanbul", "(UTC+02:00) Jerusalem", "(UTC+02:00) Tripoli", "(UTC+03:00) Amman", "(UTC+03:00) Baghdad", "(UTC+03:00) Kaliningrad, Minsk", "(UTC+03:00) Kuwait, Riyadh", "(UTC+03:00) Nairobi", "(UTC+03:00) Moscow, St. Petersburg, Volgograd", "(UTC+04:00) Samara, Ulyanovsk, Saratov", "(UTC+03:30) Tehran", "(UTC+04:00) Abu Dhabi, Muscat", "(UTC+04:00) Baku", "(UTC+04:00) Port Louis", "(UTC+04:00) Tbilisi", "(UTC+04:00) Yerevan", "(UTC+04:30) Kabul", "(UTC+05:00) Ashgabat, Tashkent", "(UTC+05:00) Yekaterinburg", "(UTC+05:00) Islamabad, Karachi", "(UTC+05:30) Chennai, Kolkata, Mumbai, New Delhi", "(UTC+05:30) Sri Jayawardenepura", "(UTC+05:45) Kathmandu", "(UTC+06:00) Astana", "(UTC+06:00) Dhaka", "(UTC+06:30) Yangon (Rangoon)", "(UTC+07:00) Bangkok, Hanoi, Jakarta", "(UTC+07:00) Novosibirsk", "(UTC+08:00) Beijing, Chongqing, Hong Kong, Urumqi", "(UTC+08:00) Krasnoyarsk", "(UTC+08:00) Kuala Lumpur, Singapore", "(UTC+08:00) Perth", "(UTC+08:00) Taipei", "(UTC+08:00) Ulaanbaatar", "(UTC+09:00) Irkutsk", "(UTC+09:00) Osaka, Sapporo, Tokyo", "(UTC+09:00) Seoul", "(UTC+09:30) Adelaide", "(UTC+09:30) Darwin", "(UTC+10:00) Brisbane", "(UTC+10:00) Canberra, Melbourne, Sydney", "(UTC+10:00) Guam, Port Moresby", "(UTC+10:00) Hobart", "(UTC+10:00) Yakutsk", "(UTC+11:00) Solomon Is., New Caledonia", "(UTC+11:00) Vladivostok", "(UTC+12:00) Auckland, Wellington", "(UTC+12:00) Coordinated Universal Time+12", "(UTC+12:00) Fiji", "(UTC+12:00) Magadan", "(UTC+12:00) Petropavlovsk-Kamchatsky - Old", "(UTC+13:00) Nuku'alofa", "(UTC+13:00) Samoa"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/files.go b/vendor/github.com/brianvoe/gofakeit/data/files.go deleted file mode 100644 index 363b840017f5..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/files.go +++ /dev/null @@ -1,7 +0,0 @@ -package data 
- -// Files consists of file information -var Files = map[string][]string{ - "mime_type": {"x-world/x-3dmf", "application/octet-stream", "application/x-authorware-bin", "application/x-authorware-map", "application/x-authorware-seg", "text/vnd.abc", "text/html", "video/animaflex", "application/postscript", "audio/aiff", "audio/x-aiff", "audio/aiff", "audio/x-aiff", "audio/aiff", "audio/x-aiff", "application/x-aim", "text/x-audiosoft-intra", "application/x-navi-animation", "application/x-nokia-9000-communicator-add-on-software", "application/mime", "application/octet-stream", "application/arj", "application/octet-stream", "image/x-jg", "video/x-ms-asf", "text/x-asm", "text/asp", "application/x-mplayer2", "video/x-ms-asf", "video/x-ms-asf-plugin", "audio/basic", "audio/x-au", "application/x-troff-msvideo", "video/avi", "video/msvideo", "video/x-msvideo", "video/avs-video", "application/x-bcpio", "application/mac-binary", "application/macbinary", "application/octet-stream", "application/x-binary", "application/x-macbinary", "image/bmp", "image/bmp", "image/x-windows-bmp", "application/book", "application/book", "application/x-bzip2", "application/x-bsh", "application/x-bzip", "application/x-bzip2", "text/plain", "text/x-c", "text/plain", "application/vnd.ms-pki.seccat", "text/plain", "text/x-c", "application/clariscad", "application/x-cocoa", "application/cdf", "application/x-cdf", "application/x-netcdf", "application/pkix-cert", "application/x-x509-ca-cert", "application/x-chat", "application/x-chat", "application/java", "application/java-byte-code", "application/x-java-class", "application/octet-stream", "text/plain", "text/plain", "application/x-cpio", "text/x-c", "application/mac-compactpro", "application/x-compactpro", "application/x-cpt", "application/pkcs-crl", "application/pkix-crl", "application/pkix-cert", "application/x-x509-ca-cert", "application/x-x509-user-cert", "application/x-csh", "text/x-script.csh", "application/x-pointplus", "text/css", 
"text/plain", "application/x-director", "application/x-deepv", "text/plain", "application/x-x509-ca-cert", "video/x-dv", "application/x-director", "video/dl", "video/x-dl", "application/msword", "application/msword", "application/commonground", "application/drafting", "application/octet-stream", "video/x-dv", "application/x-dvi", "drawing/x-dwf (old)", "model/vnd.dwf", "application/acad", "image/vnd.dwg", "image/x-dwg", "application/dxf", "image/vnd.dwg", "image/x-dwg", "application/x-director", "text/x-script.elisp", "application/x-bytecode.elisp (compiled elisp)", "application/x-elc", "application/x-envoy", "application/postscript", "application/x-esrehber", "text/x-setext", "application/envoy", "application/x-envoy", "application/octet-stream", "text/plain", "text/x-fortran", "text/x-fortran", "text/plain", "text/x-fortran", "application/vnd.fdf", "application/fractals", "image/fif", "video/fli", "video/x-fli", "image/florian", "text/vnd.fmi.flexstor", "video/x-atomic3d-feature", "text/plain", "text/x-fortran", "image/vnd.fpx", "image/vnd.net-fpx", "application/freeloader", "audio/make", "text/plain", "image/g3fax", "image/gif", "video/gl", "video/x-gl", "audio/x-gsm", "audio/x-gsm", "application/x-gsp", "application/x-gss", "application/x-gtar", "application/x-compressed", "application/x-gzip", "application/x-gzip", "multipart/x-gzip", "text/plain", "text/x-h", "application/x-hdf", "application/x-helpfile", "application/vnd.hp-hpgl", "text/plain", "text/x-h", "text/x-script", "application/hlp", "application/x-helpfile", "application/x-winhelp", "application/vnd.hp-hpgl", "application/vnd.hp-hpgl", "application/binhex", "application/binhex4", "application/mac-binhex", "application/mac-binhex40", "application/x-binhex40", "application/x-mac-binhex40", "application/hta", "text/x-component", "text/html", "text/html", "text/html", "text/webviewhtml", "text/html", "x-conference/x-cooltalk", "image/x-icon", "text/plain", "image/ief", "image/ief", "application/iges", 
"model/iges", "application/iges", "model/iges", "application/x-ima", "application/x-httpd-imap", "application/inf", "application/x-internett-signup", "application/x-ip2", "video/x-isvideo", "audio/it", "application/x-inventor", "i-world/i-vrml", "application/x-livescreen", "audio/x-jam", "text/plain", "text/x-java-source", "text/plain", "text/x-java-source", "application/x-java-commerce", "image/jpeg", "image/pjpeg", "image/jpeg", "image/jpeg", "image/pjpeg", "image/jpeg", "image/pjpeg", "image/jpeg", "image/pjpeg", "image/x-jps", "application/x-javascript", "image/jutvision", "audio/midi", "music/x-karaoke", "application/x-ksh", "text/x-script.ksh", "audio/nspaudio", "audio/x-nspaudio", "audio/x-liveaudio", "application/x-latex", "application/lha", "application/octet-stream", "application/x-lha", "application/octet-stream", "text/plain", "audio/nspaudio", "audio/x-nspaudio", "text/plain", "application/x-lisp", "text/x-script.lisp", "text/plain", "text/x-la-asf", "application/x-latex", "application/octet-stream", "application/x-lzh", "application/lzx", "application/octet-stream", "application/x-lzx", "text/plain", "text/x-m", "video/mpeg", "audio/mpeg", "video/mpeg", "audio/x-mpequrl", "application/x-troff-man", "application/x-navimap", "text/plain", "application/mbedlet", "application/mcad", "application/x-mathcad", "image/vasa", "text/mcf", "application/netmc", "application/x-troff-me", "message/rfc822", "message/rfc822", "application/x-midi", "audio/midi", "audio/x-mid", "audio/x-midi", "music/crescendo", "x-music/x-midi", "application/x-midi", "audio/midi", "audio/x-mid", "audio/x-midi", "music/crescendo", "x-music/x-midi", "application/x-frame", "application/x-mif", "message/rfc822", "www/mime", "video/x-motion-jpeg", "application/base64", "application/x-meme", "application/base64", "audio/mod", "audio/x-mod", "video/quicktime", "video/quicktime", "video/x-sgi-movie", "audio/mpeg", "audio/x-mpeg", "video/mpeg", "video/x-mpeg", "video/x-mpeq2a", "audio/mpeg3", 
"audio/x-mpeg-3", "video/mpeg", "video/x-mpeg", "audio/mpeg", "video/mpeg", "application/x-project", "video/mpeg", "video/mpeg", "audio/mpeg", "video/mpeg", "audio/mpeg", "application/vnd.ms-project", "application/x-project", "application/x-project", "application/x-project", "application/marc", "application/x-troff-ms", "video/x-sgi-movie", "audio/make", "application/x-vnd.audioexplosion.mzz", "image/naplps", "image/naplps", "application/x-netcdf", "application/vnd.nokia.configuration-message", "image/x-niff", "image/x-niff", "application/x-mix-transfer", "application/x-conference", "application/x-navidoc", "application/octet-stream", "application/oda", "application/x-omc", "application/x-omcdatamaker", "application/x-omcregerator", "text/x-pascal", "application/pkcs10", "application/x-pkcs10", "application/pkcs-12", "application/x-pkcs12", "application/x-pkcs7-signature", "application/pkcs7-mime", "application/x-pkcs7-mime", "application/pkcs7-mime", "application/x-pkcs7-mime", "application/x-pkcs7-certreqresp", "application/pkcs7-signature", "application/pro_eng", "text/pascal", "image/x-portable-bitmap", "application/vnd.hp-pcl", "application/x-pcl", "image/x-pict", "image/x-pcx", "chemical/x-pdb", "application/pdf", "audio/make", "audio/make.my.funk", "image/x-portable-graymap", "image/x-portable-greymap", "image/pict", "image/pict", "application/x-newton-compatible-pkg", "application/vnd.ms-pki.pko", "text/plain", "text/x-script.perl", "application/x-pixclscript", "image/x-xpixmap", "text/x-script.perl-module", "application/x-pagemaker", "application/x-pagemaker", "image/png", "application/x-portable-anymap", "image/x-portable-anymap", "application/mspowerpoint", "application/vnd.ms-powerpoint", "model/x-pov", "application/vnd.ms-powerpoint", "image/x-portable-pixmap", "application/mspowerpoint", "application/vnd.ms-powerpoint", "application/mspowerpoint", "application/powerpoint", "application/vnd.ms-powerpoint", "application/x-mspowerpoint", 
"application/mspowerpoint", "application/x-freelance", "application/pro_eng", "application/postscript", "application/octet-stream", "paleovu/x-pv", "application/vnd.ms-powerpoint", "text/x-script.phyton", "application/x-bytecode.python", "audio/vnd.qcelp", "x-world/x-3dmf", "x-world/x-3dmf", "image/x-quicktime", "video/quicktime", "video/x-qtc", "image/x-quicktime", "image/x-quicktime", "audio/x-pn-realaudio", "audio/x-pn-realaudio-plugin", "audio/x-realaudio", "audio/x-pn-realaudio", "application/x-cmu-raster", "image/cmu-raster", "image/x-cmu-raster", "image/cmu-raster", "text/x-script.rexx", "image/vnd.rn-realflash", "image/x-rgb", "application/vnd.rn-realmedia", "audio/x-pn-realaudio", "audio/mid", "audio/x-pn-realaudio", "audio/x-pn-realaudio", "audio/x-pn-realaudio-plugin", "application/ringing-tones", "application/vnd.nokia.ringing-tone", "application/vnd.rn-realplayer", "application/x-troff", "image/vnd.rn-realpix", "audio/x-pn-realaudio-plugin", "text/richtext", "text/vnd.rn-realtext", "application/rtf", "application/x-rtf", "text/richtext", "application/rtf", "text/richtext", "video/vnd.rn-realvideo", "text/x-asm", "audio/s3m", "application/octet-stream", "application/x-tbook", "application/x-lotusscreencam", "text/x-script.guile", "text/x-script.scheme", "video/x-scm", "text/plain", "application/sdp", "application/x-sdp", "application/sounder", "application/sea", "application/x-sea", "application/set", "text/sgml", "text/x-sgml", "text/sgml", "text/x-sgml", "application/x-bsh", "application/x-sh", "application/x-shar", "text/x-script.sh", "application/x-bsh", "application/x-shar", "text/html", "text/x-server-parsed-html", "audio/x-psid", "application/x-sit", "application/x-stuffit", "application/x-koan", "application/x-koan", "application/x-koan", "application/x-koan", "application/x-seelogo", "application/smil", "application/smil", "audio/basic", "audio/x-adpcm", "application/solids", "application/x-pkcs7-certificates", "text/x-speech", 
"application/futuresplash", "application/x-sprite", "application/x-sprite", "application/x-wais-source", "text/x-server-parsed-html", "application/streamingmedia", "application/vnd.ms-pki.certstore", "application/step", "application/sla", "application/vnd.ms-pki.stl", "application/x-navistyle", "application/step", "application/x-sv4cpio", "application/x-sv4crc", "image/vnd.dwg", "image/x-dwg", "application/x-world", "x-world/x-svr", "application/x-shockwave-flash", "application/x-troff", "text/x-speech", "application/x-tar", "application/toolbook", "application/x-tbook", "application/x-tcl", "text/x-script.tcl", "text/x-script.tcsh", "application/x-tex", "application/x-texinfo", "application/x-texinfo", "application/plain", "text/plain", "application/gnutar", "application/x-compressed", "image/tiff", "image/x-tiff", "image/tiff", "image/x-tiff", "application/x-troff", "audio/tsp-audio", "application/dsptype", "audio/tsplayer", "text/tab-separated-values", "image/florian", "text/plain", "text/x-uil", "text/uri-list", "text/uri-list", "application/i-deas", "text/uri-list", "text/uri-list", "application/x-ustar", "multipart/x-ustar", "application/octet-stream", "text/x-uuencode", "text/x-uuencode", "application/x-cdlink", "text/x-vcalendar", "application/vda", "video/vdo", "application/groupwise", "video/vivo", "video/vnd.vivo", "video/vivo", "video/vnd.vivo", "application/vocaltec-media-desc", "application/vocaltec-media-file", "audio/voc", "audio/x-voc", "video/vosaic", "audio/voxware", "audio/x-twinvq-plugin", "audio/x-twinvq", "audio/x-twinvq-plugin", "application/x-vrml", "model/vrml", "x-world/x-vrml", "x-world/x-vrt", "application/x-visio", "application/x-visio", "application/x-visio", "application/wordperfect6.0", "application/wordperfect6.1", "application/msword", "audio/wav", "audio/x-wav", "application/x-qpro", "image/vnd.wap.wbmp", "application/vnd.xara", "application/msword", "application/x-123", "windows/metafile", "text/vnd.wap.wml", 
"application/vnd.wap.wmlc", "text/vnd.wap.wmlscript", "application/vnd.wap.wmlscriptc", "application/msword", "application/wordperfect", "application/wordperfect", "application/wordperfect6.0", "application/wordperfect", "application/wordperfect", "application/x-wpwin", "application/x-lotus", "application/mswrite", "application/x-wri", "application/x-world", "model/vrml", "x-world/x-vrml", "model/vrml", "x-world/x-vrml", "text/scriplet", "application/x-wais-source", "application/x-wintalk", "image/x-xbitmap", "image/x-xbm", "image/xbm", "video/x-amt-demorun", "xgl/drawing", "image/vnd.xiff", "application/excel", "application/excel", "application/x-excel", "application/x-msexcel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/x-msexcel", "application/excel", "application/x-excel", "application/excel", "application/x-excel", "application/excel", "application/vnd.ms-excel", "application/x-excel", "application/x-msexcel", "audio/xm", "application/xml", "text/xml", "xgl/movie", "application/x-vnd.ls-xpix", "image/x-xpixmap", "image/xpm", "image/png", "video/x-amt-showrun", "image/x-xwd", "image/x-xwindowdump", "chemical/x-pdb", "application/x-compress", "application/x-compressed", "application/x-compressed", "application/x-zip-compressed", "application/zip", "multipart/x-zip", "application/octet-stream", "text/x-script.zsh"}, - "extension": {"doc", "docx", "log", "msg", "odt", "pages", "rtf", "tex", "txt", "wpd", "wps", "csv", "dat", "gbr", "ged", "key", "keychain", "pps", "ppt", "pptx", "sdf", "tar", "vcf", "xml", "aif", "iff", "mid", "mpa", "ra", "wav", "wma", 
"asf", "asx", "avi", "flv", "mov", "mpg", "rm", "srt", "swf", "vob", "wmv", "max", "obj", "bmp", "dds", "gif", "jpg", "png", "psd", "pspimage", "tga", "thm", "tif", "tiff", "yuv", "ai", "eps", "ps", "svg", "indd", "pct", "pdf", "xlr", "xls", "xlsx", "accdb", "db", "dbf", "mdb", "pdb", "sql", "apk", "app", "bat", "cgi", "com", "exe", "gadget", "jar", "pif", "vb", "wsf", "dem", "gam", "nes", "rom", "sav", "dwg", "dxf", "gpx", "kml", "kmz", "asp", "aspx", "cer", "cfm", "csr", "css", "htm", "html", "js", "jsp", "php", "rss", "xhtml", "crx", "plugin", "fnt", "fon", "otf", "ttf", "cab", "cpl", "cur", "deskthemepack", "dll", "dmp", "drv", "icns", "ico", "lnk", "sys", "cfg", "ini", "prf", "hqx", "mim", "uue", "cbr", "deb", "gz", "pkg", "rar", "rpm", "sitx", "gz", "zip", "zipx", "bin", "cue", "dmg", "iso", "mdf", "toast", "vcd", "class", "cpp", "cs", "dtd", "fla", "java", "lua", "pl", "py", "sh", "sln", "swift", "vcxproj", "xcodeproj", "bak", "tmp", "crdownload", "ics", "msi", "part", "torrent"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/hacker.go b/vendor/github.com/brianvoe/gofakeit/data/hacker.go deleted file mode 100644 index 4735f7d560af..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/hacker.go +++ /dev/null @@ -1,20 +0,0 @@ -package data - -// Hacker consists of random hacker phrases -var Hacker = map[string][]string{ - "abbreviation": {"TCP", "HTTP", "SDD", "RAM", "GB", "CSS", "SSL", "AGP", "SQL", "FTP", "PCI", "AI", "ADP", "RSS", "XML", "EXE", "COM", "HDD", "THX", "SMTP", "SMS", "USB", "PNG", "SAS", "IB", "SCSI", "JSON", "XSS", "JBOD"}, - "adjective": {"auxiliary", "primary", "back-end", "digital", "open-source", "virtual", "cross-platform", "redundant", "online", "haptic", "multi-byte", "bluetooth", "wireless", "1080p", "neural", "optical", "solid state", "mobile"}, - "noun": {"driver", "protocol", "bandwidth", "panel", "microchip", "program", "port", "card", "array", "interface", "system", "sensor", "firewall", "hard drive", "pixel", 
"alarm", "feed", "monitor", "application", "transmitter", "bus", "circuit", "capacitor", "matrix"}, - "verb": {"back up", "bypass", "hack", "override", "compress", "copy", "navigate", "index", "connect", "generate", "quantify", "calculate", "synthesize", "input", "transmit", "program", "reboot", "parse"}, - "ingverb": {"backing up", "bypassing", "hacking", "overriding", "compressing", "copying", "navigating", "indexing", "connecting", "generating", "quantifying", "calculating", "synthesizing", "transmitting", "programming", "parsing"}, - "phrase": { - "If we {hacker.verb} the {hacker.noun}, we can get to the {hacker.abbreviation} {hacker.noun} through the {hacker.adjective} {hacker.abbreviation} {hacker.noun}!", - "We need to {hacker.verb} the {hacker.adjective} {hacker.abbreviation} {hacker.noun}!", - "Try to {hacker.verb} the {hacker.abbreviation} {hacker.noun}, maybe it will {hacker.verb} the {hacker.adjective} {hacker.noun}!", - "You can't {hacker.verb} the {hacker.noun} without {hacker.ingverb} the {hacker.adjective} {hacker.abbreviation} {hacker.noun}!", - "Use the {hacker.adjective} {hacker.abbreviation} {hacker.noun}, then you can {hacker.verb} the {hacker.adjective} {hacker.noun}!", - "The {hacker.abbreviation} {hacker.noun} is down, {hacker.verb} the {hacker.adjective} {hacker.noun} so we can {hacker.verb} the {hacker.abbreviation} {hacker.noun}!", - "{hacker.ingverb} the {hacker.noun} won't do anything, we need to {hacker.verb} the {hacker.adjective} {hacker.abbreviation} {hacker.noun}!", - "I'll {hacker.verb} the {hacker.adjective} {hacker.abbreviation} {hacker.noun}, that should {hacker.verb} the {hacker.abbreviation} {hacker.noun}!", - }, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/hipster.go b/vendor/github.com/brianvoe/gofakeit/data/hipster.go deleted file mode 100644 index f036f4639bc8..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/hipster.go +++ /dev/null @@ -1,6 +0,0 @@ -package data - -// Hipster consists of random 
hipster words -var Hipster = map[string][]string{ - "word": {"Wes Anderson", "chicharrones", "narwhal", "food truck", "marfa", "aesthetic", "keytar", "art party", "sustainable", "forage", "mlkshk", "gentrify", "locavore", "swag", "hoodie", "microdosing", "VHS", "before they sold out", "pabst", "plaid", "Thundercats", "freegan", "scenester", "hella", "occupy", "truffaut", "raw denim", "beard", "post-ironic", "photo booth", "twee", "90's", "pitchfork", "cray", "cornhole", "kale chips", "pour-over", "yr", "five dollar toast", "kombucha", "you probably haven't heard of them", "mustache", "fixie", "try-hard", "franzen", "kitsch", "austin", "stumptown", "keffiyeh", "whatever", "tumblr", "DIY", "shoreditch", "biodiesel", "vegan", "pop-up", "banjo", "kogi", "cold-pressed", "letterpress", "chambray", "butcher", "synth", "trust fund", "hammock", "farm-to-table", "intelligentsia", "loko", "ugh", "offal", "poutine", "gastropub", "Godard", "jean shorts", "sriracha", "dreamcatcher", "leggings", "fashion axe", "church-key", "meggings", "tote bag", "disrupt", "readymade", "helvetica", "flannel", "meh", "roof", "hashtag", "knausgaard", "cronut", "schlitz", "green juice", "waistcoat", "normcore", "viral", "ethical", "actually", "fingerstache", "humblebrag", "deep v", "wayfarers", "tacos", "taxidermy", "selvage", "put a bird on it", "ramps", "portland", "retro", "kickstarter", "bushwick", "brunch", "distillery", "migas", "flexitarian", "XOXO", "small batch", "messenger bag", "heirloom", "tofu", "bicycle rights", "bespoke", "salvia", "wolf", "selfies", "echo", "park", "listicle", "craft beer", "chartreuse", "sartorial", "pinterest", "mumblecore", "kinfolk", "vinyl", "etsy", "umami", "8-bit", "polaroid", "banh mi", "crucifix", "bitters", "brooklyn", "PBR&B", "drinking", "vinegar", "squid", "tattooed", "skateboard", "vice", "authentic", "literally", "lomo", "celiac", "health", "goth", "artisan", "chillwave", "blue bottle", "pickled", "next level", "neutra", "organic", "Yuccie", "paleo", 
"blog", "single-origin coffee", "seitan", "street", "gluten-free", "mixtape", "venmo", "irony", "everyday", "carry", "slow-carb", "3 wolf moon", "direct trade", "lo-fi", "tousled", "tilde", "semiotics", "cred", "chia", "master", "cleanse", "ennui", "quinoa", "pug", "iPhone", "fanny pack", "cliche", "cardigan", "asymmetrical", "meditation", "YOLO", "typewriter", "pork belly", "shabby chic", "+1", "lumbersexual", "williamsburg"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/internet.go b/vendor/github.com/brianvoe/gofakeit/data/internet.go deleted file mode 100644 index 1f16db95c765..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/internet.go +++ /dev/null @@ -1,8 +0,0 @@ -package data - -// Internet consists of various internet information -var Internet = map[string][]string{ - "browser": {"firefox", "chrome", "internetExplorer", "opera", "safari"}, - "domain_suffix": {"com", "biz", "info", "name", "net", "org", "io"}, - "http_method": {"HEAD", "GET", "POST", "PUT", "PATCH", "DELETE"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/job.go b/vendor/github.com/brianvoe/gofakeit/data/job.go deleted file mode 100644 index 905dd74ee023..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/job.go +++ /dev/null @@ -1,8 +0,0 @@ -package data - -// Job consists of job data -var Job = map[string][]string{ - "title": {"Administrator", "Agent", "Analyst", "Architect", "Assistant", "Associate", "Consultant", "Coordinator", "Designer", "Developer", "Director", "Engineer", "Executive", "Facilitator", "Liaison", "Manager", "Officer", "Orchestrator", "Planner", "Producer", "Representative", "Specialist", "Strategist", "Supervisor", "Technician"}, - "descriptor": {"Central", "Chief", "Corporate", "Customer", "Direct", "District", "Dynamic", "Dynamic", "Forward", "Future", "Global", "Human", "Internal", "International", "Investor", "Lead", "Legacy", "National", "Principal", "Product", "Regional", "Senior"}, - "level": {"Accountability", 
"Accounts", "Applications", "Assurance", "Brand", "Branding", "Communications", "Configuration", "Creative", "Data", "Directives", "Division", "Factors", "Functionality", "Group", "Identity", "Implementation", "Infrastructure", "Integration", "Interactions", "Intranet", "Marketing", "Markets", "Metrics", "Mobility", "Operations", "Optimization", "Paradigm", "Program", "Quality", "Research", "Response", "Security", "Solutions", "Tactics", "Usability", "Web"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/log_level.go b/vendor/github.com/brianvoe/gofakeit/data/log_level.go deleted file mode 100644 index 01d98b63c6b6..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/log_level.go +++ /dev/null @@ -1,8 +0,0 @@ -package data - -// LogLevels consists of log levels for several types -var LogLevels = map[string][]string{ - "general": {"error", "warning", "info", "fatal", "trace", "debug"}, - "syslog": {"emerg", "alert", "crit", "err", "warning", "notice", "info", "debug"}, - "apache": {"emerg", "alert", "crit", "error", "warn", "notice", "info", "debug", "trace1-8"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/lorem.go b/vendor/github.com/brianvoe/gofakeit/data/lorem.go deleted file mode 100644 index b0a8f8a1378f..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/lorem.go +++ /dev/null @@ -1,6 +0,0 @@ -package data - -// Lorem consists of lorem ipsum information -var Lorem = map[string][]string{ - "word": {"alias", "consequatur", "aut", "perferendis", "sit", "voluptatem", "accusantium", "doloremque", "aperiam", "eaque", "ipsa", "quae", "ab", "illo", "inventore", "veritatis", "et", "quasi", "architecto", "beatae", "vitae", "dicta", "sunt", "explicabo", "aspernatur", "aut", "odit", "aut", "fugit", "sed", "quia", "consequuntur", "magni", "dolores", "eos", "qui", "ratione", "voluptatem", "sequi", "nesciunt", "neque", "dolorem", "ipsum", "quia", "dolor", "sit", "amet", "consectetur", "adipisci", "velit", "sed", "quia", "non", 
"numquam", "eius", "modi", "tempora", "incidunt", "ut", "labore", "et", "dolore", "magnam", "aliquam", "quaerat", "voluptatem", "ut", "enim", "ad", "minima", "veniam", "quis", "nostrum", "exercitationem", "ullam", "corporis", "nemo", "enim", "ipsam", "voluptatem", "quia", "voluptas", "sit", "suscipit", "laboriosam", "nisi", "ut", "aliquid", "ex", "ea", "commodi", "consequatur", "quis", "autem", "vel", "eum", "iure", "reprehenderit", "qui", "in", "ea", "voluptate", "velit", "esse", "quam", "nihil", "molestiae", "et", "iusto", "odio", "dignissimos", "ducimus", "qui", "blanditiis", "praesentium", "laudantium", "totam", "rem", "voluptatum", "deleniti", "atque", "corrupti", "quos", "dolores", "et", "quas", "molestias", "excepturi", "sint", "occaecati", "cupiditate", "non", "provident", "sed", "ut", "perspiciatis", "unde", "omnis", "iste", "natus", "error", "similique", "sunt", "in", "culpa", "qui", "officia", "deserunt", "mollitia", "animi", "id", "est", "laborum", "et", "dolorum", "fuga", "et", "harum", "quidem", "rerum", "facilis", "est", "et", "expedita", "distinctio", "nam", "libero", "tempore", "cum", "soluta", "nobis", "est", "eligendi", "optio", "cumque", "nihil", "impedit", "quo", "porro", "quisquam", "est", "qui", "minus", "id", "quod", "maxime", "placeat", "facere", "possimus", "omnis", "voluptas", "assumenda", "est", "omnis", "dolor", "repellendus", "temporibus", "autem", "quibusdam", "et", "aut", "consequatur", "vel", "illum", "qui", "dolorem", "eum", "fugiat", "quo", "voluptas", "nulla", "pariatur", "at", "vero", "eos", "et", "accusamus", "officiis", "debitis", "aut", "rerum", "necessitatibus", "saepe", "eveniet", "ut", "et", "voluptates", "repudiandae", "sint", "et", "molestiae", "non", "recusandae", "itaque", "earum", "rerum", "hic", "tenetur", "a", "sapiente", "delectus", "ut", "aut", "reiciendis", "voluptatibus", "maiores", "doloribus", "asperiores", "repellat"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/payment.go 
b/vendor/github.com/brianvoe/gofakeit/data/payment.go deleted file mode 100644 index e50903a72af6..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/payment.go +++ /dev/null @@ -1,20 +0,0 @@ -package data - -// Payment contains payment information -var Payment = map[string][]string{ - "card_type": {"Visa", "MasterCard", "American Express", "Discover"}, - "number": { - // Visa - "4###############", - "4###############", - // Mastercard - "222100##########", - "272099##########", - // American Express - "34#############", - "37#############", - // Discover - "65##############", - "65##############", - }, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/person.go b/vendor/github.com/brianvoe/gofakeit/data/person.go deleted file mode 100644 index 129b59ba6e3c..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/person.go +++ /dev/null @@ -1,9 +0,0 @@ -package data - -// Person consists of a slice of people information -var Person = map[string][]string{ - "prefix": {"Mr.", "Mrs.", "Ms.", "Miss", "Dr."}, - "suffix": {"Jr.", "Sr.", "I", "II", "III", "IV", "V", "MD", "DDS", "PhD", "DVM"}, - "first": {"Aaliyah", "Aaron", "Abagail", "Abbey", "Abbie", "Abbigail", "Abby", "Abdiel", "Abdul", "Abdullah", "Abe", "Abel", "Abelardo", "Abigail", "Abigale", "Abigayle", "Abner", "Abraham", "Ada", "Adah", "Adalberto", "Adaline", "Adam", "Adan", "Addie", "Addison", "Adela", "Adelbert", "Adele", "Adelia", "Adeline", "Adell", "Adella", "Adelle", "Aditya", "Adolf", "Adolfo", "Adolph", "Adolphus", "Adonis", "Adrain", "Adrian", "Adriana", "Adrianna", "Adriel", "Adrien", "Adrienne", "Afton", "Aglae", "Agnes", "Agustin", "Agustina", "Ahmad", "Ahmed", "Aida", "Aidan", "Aiden", "Aileen", "Aimee", "Aisha", "Aiyana", "Akeem", "Al", "Alaina", "Alan", "Alana", "Alanis", "Alanna", "Alayna", "Alba", "Albert", "Alberta", "Albertha", "Alberto", "Albin", "Albina", "Alda", "Alden", "Alec", "Aleen", "Alejandra", "Alejandrin", "Alek", "Alena", "Alene", "Alessandra", "Alessandro", 
"Alessia", "Aletha", "Alex", "Alexa", "Alexander", "Alexandra", "Alexandre", "Alexandrea", "Alexandria", "Alexandrine", "Alexandro", "Alexane", "Alexanne", "Alexie", "Alexis", "Alexys", "Alexzander", "Alf", "Alfonso", "Alfonzo", "Alford", "Alfred", "Alfreda", "Alfredo", "Ali", "Alia", "Alice", "Alicia", "Alisa", "Alisha", "Alison", "Alivia", "Aliya", "Aliyah", "Aliza", "Alize", "Allan", "Allen", "Allene", "Allie", "Allison", "Ally", "Alphonso", "Alta", "Althea", "Alva", "Alvah", "Alvena", "Alvera", "Alverta", "Alvina", "Alvis", "Alyce", "Alycia", "Alysa", "Alysha", "Alyson", "Alysson", "Amalia", "Amanda", "Amani", "Amara", "Amari", "Amaya", "Amber", "Ambrose", "Amelia", "Amelie", "Amely", "America", "Americo", "Amie", "Amina", "Amir", "Amira", "Amiya", "Amos", "Amparo", "Amy", "Amya", "Ana", "Anabel", "Anabelle", "Anahi", "Anais", "Anastacio", "Anastasia", "Anderson", "Andre", "Andreane", "Andreanne", "Andres", "Andrew", "Andy", "Angel", "Angela", "Angelica", "Angelina", "Angeline", "Angelita", "Angelo", "Angie", "Angus", "Anibal", "Anika", "Anissa", "Anita", "Aniya", "Aniyah", "Anjali", "Anna", "Annabel", "Annabell", "Annabelle", "Annalise", "Annamae", "Annamarie", "Anne", "Annetta", "Annette", "Annie", "Ansel", "Ansley", "Anthony", "Antoinette", "Antone", "Antonetta", "Antonette", "Antonia", "Antonietta", "Antonina", "Antonio", "Antwan", "Antwon", "Anya", "April", "Ara", "Araceli", "Aracely", "Arch", "Archibald", "Ardella", "Arden", "Ardith", "Arely", "Ari", "Ariane", "Arianna", "Aric", "Ariel", "Arielle", "Arjun", "Arlene", "Arlie", "Arlo", "Armand", "Armando", "Armani", "Arnaldo", "Arne", "Arno", "Arnold", "Arnoldo", "Arnulfo", "Aron", "Art", "Arthur", "Arturo", "Arvel", "Arvid", "Arvilla", "Aryanna", "Asa", "Asha", "Ashlee", "Ashleigh", "Ashley", "Ashly", "Ashlynn", "Ashton", "Ashtyn", "Asia", "Assunta", "Astrid", "Athena", "Aubree", "Aubrey", "Audie", "Audra", "Audreanne", "Audrey", "August", "Augusta", "Augustine", "Augustus", "Aurelia", "Aurelie", 
"Aurelio", "Aurore", "Austen", "Austin", "Austyn", "Autumn", "Ava", "Avery", "Avis", "Axel", "Ayana", "Ayden", "Ayla", "Aylin", "Baby", "Bailee", "Bailey", "Barbara", "Barney", "Baron", "Barrett", "Barry", "Bart", "Bartholome", "Barton", "Baylee", "Beatrice", "Beau", "Beaulah", "Bell", "Bella", "Belle", "Ben", "Benedict", "Benjamin", "Bennett", "Bennie", "Benny", "Benton", "Berenice", "Bernadette", "Bernadine", "Bernard", "Bernardo", "Berneice", "Bernhard", "Bernice", "Bernie", "Berniece", "Bernita", "Berry", "Bert", "Berta", "Bertha", "Bertram", "Bertrand", "Beryl", "Bessie", "Beth", "Bethany", "Bethel", "Betsy", "Bette", "Bettie", "Betty", "Bettye", "Beulah", "Beverly", "Bianka", "Bill", "Billie", "Billy", "Birdie", "Blair", "Blaise", "Blake", "Blanca", "Blanche", "Blaze", "Bo", "Bobbie", "Bobby", "Bonita", "Bonnie", "Boris", "Boyd", "Brad", "Braden", "Bradford", "Bradley", "Bradly", "Brady", "Braeden", "Brain", "Brandi", "Brando", "Brandon", "Brandt", "Brandy", "Brandyn", "Brannon", "Branson", "Brant", "Braulio", "Braxton", "Brayan", "Breana", "Breanna", "Breanne", "Brenda", "Brendan", "Brenden", "Brendon", "Brenna", "Brennan", "Brennon", "Brent", "Bret", "Brett", "Bria", "Brian", "Briana", "Brianne", "Brice", "Bridget", "Bridgette", "Bridie", "Brielle", "Brigitte", "Brionna", "Brisa", "Britney", "Brittany", "Brock", "Broderick", "Brody", "Brook", "Brooke", "Brooklyn", "Brooks", "Brown", "Bruce", "Bryana", "Bryce", "Brycen", "Bryon", "Buck", "Bud", "Buddy", "Buford", "Bulah", "Burdette", "Burley", "Burnice", "Buster", "Cade", "Caden", "Caesar", "Caitlyn", "Cale", "Caleb", "Caleigh", "Cali", "Calista", "Callie", "Camden", "Cameron", "Camila", "Camilla", "Camille", "Camren", "Camron", "Camryn", "Camylle", "Candace", "Candelario", "Candice", "Candida", "Candido", "Cara", "Carey", "Carissa", "Carlee", "Carleton", "Carley", "Carli", "Carlie", "Carlo", "Carlos", "Carlotta", "Carmel", "Carmela", "Carmella", "Carmelo", "Carmen", "Carmine", "Carol", "Carolanne", 
"Carole", "Carolina", "Caroline", "Carolyn", "Carolyne", "Carrie", "Carroll", "Carson", "Carter", "Cary", "Casandra", "Casey", "Casimer", "Casimir", "Casper", "Cassandra", "Cassandre", "Cassidy", "Cassie", "Catalina", "Caterina", "Catharine", "Catherine", "Cathrine", "Cathryn", "Cathy", "Cayla", "Ceasar", "Cecelia", "Cecil", "Cecile", "Cecilia", "Cedrick", "Celestine", "Celestino", "Celia", "Celine", "Cesar", "Chad", "Chadd", "Chadrick", "Chaim", "Chance", "Chandler", "Chanel", "Chanelle", "Charity", "Charlene", "Charles", "Charley", "Charlie", "Charlotte", "Chase", "Chasity", "Chauncey", "Chaya", "Chaz", "Chelsea", "Chelsey", "Chelsie", "Chesley", "Chester", "Chet", "Cheyanne", "Cheyenne", "Chloe", "Chris", "Christ", "Christa", "Christelle", "Christian", "Christiana", "Christina", "Christine", "Christop", "Christophe", "Christopher", "Christy", "Chyna", "Ciara", "Cicero", "Cielo", "Cierra", "Cindy", "Citlalli", "Clair", "Claire", "Clara", "Clarabelle", "Clare", "Clarissa", "Clark", "Claud", "Claude", "Claudia", "Claudie", "Claudine", "Clay", "Clemens", "Clement", "Clementina", "Clementine", "Clemmie", "Cleo", "Cleora", "Cleta", "Cletus", "Cleve", "Cleveland", "Clifford", "Clifton", "Clint", "Clinton", "Clotilde", "Clovis", "Cloyd", "Clyde", "Coby", "Cody", "Colby", "Cole", "Coleman", "Colin", "Colleen", "Collin", "Colt", "Colten", "Colton", "Columbus", "Concepcion", "Conner", "Connie", "Connor", "Conor", "Conrad", "Constance", "Constantin", "Consuelo", "Cooper", "Cora", "Coralie", "Corbin", "Cordelia", "Cordell", "Cordia", "Cordie", "Corene", "Corine", "Cornelius", "Cornell", "Corrine", "Cortez", "Cortney", "Cory", "Coty", "Courtney", "Coy", "Craig", "Crawford", "Creola", "Cristal", "Cristian", "Cristina", "Cristobal", "Cristopher", "Cruz", "Crystal", "Crystel", "Cullen", "Curt", "Curtis", "Cydney", "Cynthia", "Cyril", "Cyrus", "Dagmar", "Dahlia", "Daija", "Daisha", "Daisy", "Dakota", "Dale", "Dallas", "Dallin", "Dalton", "Damaris", "Dameon", "Damian", "Damien", 
"Damion", "Damon", "Dan", "Dana", "Dandre", "Dane", "Dangelo", "Dangelo", "Danial", "Daniela", "Daniella", "Danielle", "Danika", "Dannie", "Danny", "Dante", "Danyka", "Daphne", "Daphnee", "Daphney", "Darby", "Daren", "Darian", "Dariana", "Darien", "Dario", "Darion", "Darius", "Darlene", "Daron", "Darrel", "Darrell", "Darren", "Darrick", "Darrin", "Darrion", "Darron", "Darryl", "Darwin", "Daryl", "Dashawn", "Dasia", "Dave", "David", "Davin", "Davion", "Davon", "Davonte", "Dawn", "Dawson", "Dax", "Dayana", "Dayna", "Dayne", "Dayton", "Dean", "Deangelo", "Deanna", "Deborah", "Declan", "Dedric", "Dedrick", "Dee", "Deion", "Deja", "Dejah", "Dejon", "Dejuan", "Delaney", "Delbert", "Delfina", "Delia", "Delilah", "Dell", "Della", "Delmer", "Delores", "Delpha", "Delphia", "Delphine", "Delta", "Demarco", "Demarcus", "Demario", "Demetris", "Demetrius", "Demond", "Dena", "Denis", "Dennis", "Deon", "Deondre", "Deontae", "Deonte", "Dereck", "Derek", "Derick", "Deron", "Derrick", "Deshaun", "Deshawn", "Desiree", "Desmond", "Dessie", "Destany", "Destin", "Destinee", "Destiney", "Destini", "Destiny", "Devan", "Devante", "Deven", "Devin", "Devon", "Devonte", "Devyn", "Dewayne", "Dewitt", "Dexter", "Diamond", "Diana", "Dianna", "Diego", "Dillan", "Dillon", "Dimitri", "Dina", "Dino", "Dion", "Dixie", "Dock", "Dolly", "Dolores", "Domenic", "Domenica", "Domenick", "Domenico", "Domingo", "Dominic", "Dominique", "Don", "Donald", "Donato", "Donavon", "Donna", "Donnell", "Donnie", "Donny", "Dora", "Dorcas", "Dorian", "Doris", "Dorothea", "Dorothy", "Dorris", "Dortha", "Dorthy", "Doug", "Douglas", "Dovie", "Doyle", "Drake", "Drew", "Duane", "Dudley", "Dulce", "Duncan", "Durward", "Dustin", "Dusty", "Dwight", "Dylan", "Earl", "Earlene", "Earline", "Earnest", "Earnestine", "Easter", "Easton", "Ebba", "Ebony", "Ed", "Eda", "Edd", "Eddie", "Eden", "Edgar", "Edgardo", "Edison", "Edmond", "Edmund", "Edna", "Eduardo", "Edward", "Edwardo", "Edwin", "Edwina", "Edyth", "Edythe", "Effie", "Efrain", 
"Efren", "Eileen", "Einar", "Eino", "Eladio", "Elaina", "Elbert", "Elda", "Eldon", "Eldora", "Eldred", "Eldridge", "Eleanora", "Eleanore", "Eleazar", "Electa", "Elena", "Elenor", "Elenora", "Eleonore", "Elfrieda", "Eli", "Elian", "Eliane", "Elias", "Eliezer", "Elijah", "Elinor", "Elinore", "Elisa", "Elisabeth", "Elise", "Eliseo", "Elisha", "Elissa", "Eliza", "Elizabeth", "Ella", "Ellen", "Ellie", "Elliot", "Elliott", "Ellis", "Ellsworth", "Elmer", "Elmira", "Elmo", "Elmore", "Elna", "Elnora", "Elody", "Eloisa", "Eloise", "Elouise", "Eloy", "Elroy", "Elsa", "Else", "Elsie", "Elta", "Elton", "Elva", "Elvera", "Elvie", "Elvis", "Elwin", "Elwyn", "Elyse", "Elyssa", "Elza", "Emanuel", "Emelia", "Emelie", "Emely", "Emerald", "Emerson", "Emery", "Emie", "Emil", "Emile", "Emilia", "Emiliano", "Emilie", "Emilio", "Emily", "Emma", "Emmalee", "Emmanuel", "Emmanuelle", "Emmet", "Emmett", "Emmie", "Emmitt", "Emmy", "Emory", "Ena", "Enid", "Enoch", "Enola", "Enos", "Enrico", "Enrique", "Ephraim", "Era", "Eriberto", "Eric", "Erica", "Erich", "Erick", "Ericka", "Erik", "Erika", "Erin", "Erling", "Erna", "Ernest", "Ernestina", "Ernestine", "Ernesto", "Ernie", "Ervin", "Erwin", "Eryn", "Esmeralda", "Esperanza", "Esta", "Esteban", "Estefania", "Estel", "Estell", "Estella", "Estelle", "Estevan", "Esther", "Estrella", "Etha", "Ethan", "Ethel", "Ethelyn", "Ethyl", "Ettie", "Eudora", "Eugene", "Eugenia", "Eula", "Eulah", "Eulalia", "Euna", "Eunice", "Eusebio", "Eva", "Evalyn", "Evan", "Evangeline", "Evans", "Eve", "Eveline", "Evelyn", "Everardo", "Everett", "Everette", "Evert", "Evie", "Ewald", "Ewell", "Ezekiel", "Ezequiel", "Ezra", "Fabian", "Fabiola", "Fae", "Fannie", "Fanny", "Fatima", "Faustino", "Fausto", "Favian", "Fay", "Faye", "Federico", "Felicia", "Felicita", "Felicity", "Felipa", "Felipe", "Felix", "Felton", "Fermin", "Fern", "Fernando", "Ferne", "Fidel", "Filiberto", "Filomena", "Finn", "Fiona", "Flavie", "Flavio", "Fleta", "Fletcher", "Flo", "Florence", "Florencio", 
"Florian", "Florida", "Florine", "Flossie", "Floy", "Floyd", "Ford", "Forest", "Forrest", "Foster", "Frances", "Francesca", "Francesco", "Francis", "Francisca", "Francisco", "Franco", "Frank", "Frankie", "Franz", "Fred", "Freda", "Freddie", "Freddy", "Frederic", "Frederick", "Frederik", "Frederique", "Fredrick", "Fredy", "Freeda", "Freeman", "Freida", "Frida", "Frieda", "Friedrich", "Fritz", "Furman", "Gabe", "Gabriel", "Gabriella", "Gabrielle", "Gaetano", "Gage", "Gail", "Gardner", "Garett", "Garfield", "Garland", "Garnet", "Garnett", "Garret", "Garrett", "Garrick", "Garrison", "Garry", "Garth", "Gaston", "Gavin", "Gay", "Gayle", "Gaylord", "Gene", "General", "Genesis", "Genevieve", "Gennaro", "Genoveva", "Geo", "Geoffrey", "George", "Georgette", "Georgiana", "Georgianna", "Geovanni", "Geovanny", "Geovany", "Gerald", "Geraldine", "Gerard", "Gerardo", "Gerda", "Gerhard", "Germaine", "German", "Gerry", "Gerson", "Gertrude", "Gia", "Gianni", "Gideon", "Gilbert", "Gilberto", "Gilda", "Giles", "Gillian", "Gina", "Gino", "Giovani", "Giovanna", "Giovanni", "Giovanny", "Gisselle", "Giuseppe", "Gladyce", "Gladys", "Glen", "Glenda", "Glenna", "Glennie", "Gloria", "Godfrey", "Golda", "Golden", "Gonzalo", "Gordon", "Grace", "Gracie", "Graciela", "Grady", "Graham", "Grant", "Granville", "Grayce", "Grayson", "Green", "Greg", "Gregg", "Gregoria", "Gregorio", "Gregory", "Greta", "Gretchen", "Greyson", "Griffin", "Grover", "Guadalupe", "Gudrun", "Guido", "Guillermo", "Guiseppe", "Gunnar", "Gunner", "Gus", "Gussie", "Gust", "Gustave", "Guy", "Gwen", "Gwendolyn", "Hadley", "Hailee", "Hailey", "Hailie", "Hal", "Haleigh", "Haley", "Halie", "Halle", "Hallie", "Hank", "Hanna", "Hannah", "Hans", "Hardy", "Harley", "Harmon", "Harmony", "Harold", "Harrison", "Harry", "Harvey", "Haskell", "Hassan", "Hassie", "Hattie", "Haven", "Hayden", "Haylee", "Hayley", "Haylie", "Hazel", "Hazle", "Heath", "Heather", "Heaven", "Heber", "Hector", "Heidi", "Helen", "Helena", "Helene", "Helga", "Hellen", 
"Helmer", "Heloise", "Henderson", "Henri", "Henriette", "Henry", "Herbert", "Herman", "Hermann", "Hermina", "Herminia", "Herminio", "Hershel", "Herta", "Hertha", "Hester", "Hettie", "Hilario", "Hilbert", "Hilda", "Hildegard", "Hillard", "Hillary", "Hilma", "Hilton", "Hipolito", "Hiram", "Hobart", "Holden", "Hollie", "Hollis", "Holly", "Hope", "Horace", "Horacio", "Hortense", "Hosea", "Houston", "Howard", "Howell", "Hoyt", "Hubert", "Hudson", "Hugh", "Hulda", "Humberto", "Hunter", "Hyman", "Ian", "Ibrahim", "Icie", "Ida", "Idell", "Idella", "Ignacio", "Ignatius", "Ike", "Ila", "Ilene", "Iliana", "Ima", "Imani", "Imelda", "Immanuel", "Imogene", "Ines", "Irma", "Irving", "Irwin", "Isaac", "Isabel", "Isabell", "Isabella", "Isabelle", "Isac", "Isadore", "Isai", "Isaiah", "Isaias", "Isidro", "Ismael", "Isobel", "Isom", "Israel", "Issac", "Itzel", "Iva", "Ivah", "Ivory", "Ivy", "Izabella", "Izaiah", "Jabari", "Jace", "Jacey", "Jacinthe", "Jacinto", "Jack", "Jackeline", "Jackie", "Jacklyn", "Jackson", "Jacky", "Jaclyn", "Jacquelyn", "Jacques", "Jacynthe", "Jada", "Jade", "Jaden", "Jadon", "Jadyn", "Jaeden", "Jaida", "Jaiden", "Jailyn", "Jaime", "Jairo", "Jakayla", "Jake", "Jakob", "Jaleel", "Jalen", "Jalon", "Jalyn", "Jamaal", "Jamal", "Jamar", "Jamarcus", "Jamel", "Jameson", "Jamey", "Jamie", "Jamil", "Jamir", "Jamison", "Jammie", "Jan", "Jana", "Janae", "Jane", "Janelle", "Janessa", "Janet", "Janice", "Janick", "Janie", "Janis", "Janiya", "Jannie", "Jany", "Jaquan", "Jaquelin", "Jaqueline", "Jared", "Jaren", "Jarod", "Jaron", "Jarred", "Jarrell", "Jarret", "Jarrett", "Jarrod", "Jarvis", "Jasen", "Jasmin", "Jason", "Jasper", "Jaunita", "Javier", "Javon", "Javonte", "Jay", "Jayce", "Jaycee", "Jayda", "Jayde", "Jayden", "Jaydon", "Jaylan", "Jaylen", "Jaylin", "Jaylon", "Jayme", "Jayne", "Jayson", "Jazlyn", "Jazmin", "Jazmyn", "Jazmyne", "Jean", "Jeanette", "Jeanie", "Jeanne", "Jed", "Jedediah", "Jedidiah", "Jeff", "Jefferey", "Jeffery", "Jeffrey", "Jeffry", "Jena", 
"Jenifer", "Jennie", "Jennifer", "Jennings", "Jennyfer", "Jensen", "Jerad", "Jerald", "Jeramie", "Jeramy", "Jerel", "Jeremie", "Jeremy", "Jermain", "Jermaine", "Jermey", "Jerod", "Jerome", "Jeromy", "Jerrell", "Jerrod", "Jerrold", "Jerry", "Jess", "Jesse", "Jessica", "Jessie", "Jessika", "Jessy", "Jessyca", "Jesus", "Jett", "Jettie", "Jevon", "Jewel", "Jewell", "Jillian", "Jimmie", "Jimmy", "Jo", "Joan", "Joana", "Joanie", "Joanne", "Joannie", "Joanny", "Joany", "Joaquin", "Jocelyn", "Jodie", "Jody", "Joe", "Joel", "Joelle", "Joesph", "Joey", "Johan", "Johann", "Johanna", "Johathan", "John", "Johnathan", "Johnathon", "Johnnie", "Johnny", "Johnpaul", "Johnson", "Jolie", "Jon", "Jonas", "Jonatan", "Jonathan", "Jonathon", "Jordan", "Jordane", "Jordi", "Jordon", "Jordy", "Jordyn", "Jorge", "Jose", "Josefa", "Josefina", "Joseph", "Josephine", "Josh", "Joshua", "Joshuah", "Josiah", "Josiane", "Josianne", "Josie", "Josue", "Jovan", "Jovani", "Jovanny", "Jovany", "Joy", "Joyce", "Juana", "Juanita", "Judah", "Judd", "Jude", "Judge", "Judson", "Judy", "Jules", "Julia", "Julian", "Juliana", "Julianne", "Julie", "Julien", "Juliet", "Julio", "Julius", "June", "Junior", "Junius", "Justen", "Justice", "Justina", "Justine", "Juston", "Justus", "Justyn", "Juvenal", "Juwan", "Kacey", "Kaci", "Kacie", "Kade", "Kaden", "Kadin", "Kaela", "Kaelyn", "Kaia", "Kailee", "Kailey", "Kailyn", "Kaitlin", "Kaitlyn", "Kale", "Kaleb", "Kaleigh", "Kaley", "Kali", "Kallie", "Kameron", "Kamille", "Kamren", "Kamron", "Kamryn", "Kane", "Kara", "Kareem", "Karelle", "Karen", "Kari", "Kariane", "Karianne", "Karina", "Karine", "Karl", "Karlee", "Karley", "Karli", "Karlie", "Karolann", "Karson", "Kasandra", "Kasey", "Kassandra", "Katarina", "Katelin", "Katelyn", "Katelynn", "Katharina", "Katherine", "Katheryn", "Kathleen", "Kathlyn", "Kathryn", "Kathryne", "Katlyn", "Katlynn", "Katrina", "Katrine", "Kattie", "Kavon", "Kay", "Kaya", "Kaycee", "Kayden", "Kayla", "Kaylah", "Kaylee", "Kayleigh", "Kayley", 
"Kayli", "Kaylie", "Kaylin", "Keagan", "Keanu", "Keara", "Keaton", "Keegan", "Keeley", "Keely", "Keenan", "Keira", "Keith", "Kellen", "Kelley", "Kelli", "Kellie", "Kelly", "Kelsi", "Kelsie", "Kelton", "Kelvin", "Ken", "Kendall", "Kendra", "Kendrick", "Kenna", "Kennedi", "Kennedy", "Kenneth", "Kennith", "Kenny", "Kenton", "Kenya", "Kenyatta", "Kenyon", "Keon", "Keshaun", "Keshawn", "Keven", "Kevin", "Kevon", "Keyon", "Keyshawn", "Khalid", "Khalil", "Kian", "Kiana", "Kianna", "Kiara", "Kiarra", "Kiel", "Kiera", "Kieran", "Kiley", "Kim", "Kimberly", "King", "Kip", "Kira", "Kirk", "Kirsten", "Kirstin", "Kitty", "Kobe", "Koby", "Kody", "Kolby", "Kole", "Korbin", "Korey", "Kory", "Kraig", "Kris", "Krista", "Kristian", "Kristin", "Kristina", "Kristofer", "Kristoffer", "Kristopher", "Kristy", "Krystal", "Krystel", "Krystina", "Kurt", "Kurtis", "Kyla", "Kyle", "Kylee", "Kyleigh", "Kyler", "Kylie", "Kyra", "Lacey", "Lacy", "Ladarius", "Lafayette", "Laila", "Laisha", "Lamar", "Lambert", "Lamont", "Lance", "Landen", "Lane", "Laney", "Larissa", "Laron", "Larry", "Larue", "Laura", "Laurel", "Lauren", "Laurence", "Lauretta", "Lauriane", "Laurianne", "Laurie", "Laurine", "Laury", "Lauryn", "Lavada", "Lavern", "Laverna", "Laverne", "Lavina", "Lavinia", "Lavon", "Lavonne", "Lawrence", "Lawson", "Layla", "Layne", "Lazaro", "Lea", "Leann", "Leanna", "Leanne", "Leatha", "Leda", "Lee", "Leif", "Leila", "Leilani", "Lela", "Lelah", "Leland", "Lelia", "Lempi", "Lemuel", "Lenna", "Lennie", "Lenny", "Lenora", "Lenore", "Leo", "Leola", "Leon", "Leonard", "Leonardo", "Leone", "Leonel", "Leonie", "Leonor", "Leonora", "Leopold", "Leopoldo", "Leora", "Lera", "Lesley", "Leslie", "Lesly", "Lessie", "Lester", "Leta", "Letha", "Letitia", "Levi", "Lew", "Lewis", "Lexi", "Lexie", "Lexus", "Lia", "Liam", "Liana", "Libbie", "Libby", "Lila", "Lilian", "Liliana", "Liliane", "Lilla", "Lillian", "Lilliana", "Lillie", "Lilly", "Lily", "Lilyan", "Lina", "Lincoln", "Linda", "Lindsay", "Lindsey", "Linnea", 
"Linnie", "Linwood", "Lionel", "Lisa", "Lisandro", "Lisette", "Litzy", "Liza", "Lizeth", "Lizzie", "Llewellyn", "Lloyd", "Logan", "Lois", "Lola", "Lolita", "Loma", "Lon", "London", "Lonie", "Lonnie", "Lonny", "Lonzo", "Lora", "Loraine", "Loren", "Lorena", "Lorenz", "Lorenza", "Lorenzo", "Lori", "Lorine", "Lorna", "Lottie", "Lou", "Louie", "Louisa", "Lourdes", "Louvenia", "Lowell", "Loy", "Loyal", "Loyce", "Lucas", "Luciano", "Lucie", "Lucienne", "Lucile", "Lucinda", "Lucio", "Lucious", "Lucius", "Lucy", "Ludie", "Ludwig", "Lue", "Luella", "Luigi", "Luis", "Luisa", "Lukas", "Lula", "Lulu", "Luna", "Lupe", "Lura", "Lurline", "Luther", "Luz", "Lyda", "Lydia", "Lyla", "Lynn", "Lyric", "Lysanne", "Mabel", "Mabelle", "Mable", "Mac", "Macey", "Maci", "Macie", "Mack", "Mackenzie", "Macy", "Madaline", "Madalyn", "Maddison", "Madeline", "Madelyn", "Madelynn", "Madge", "Madie", "Madilyn", "Madisen", "Madison", "Madisyn", "Madonna", "Madyson", "Mae", "Maegan", "Maeve", "Mafalda", "Magali", "Magdalen", "Magdalena", "Maggie", "Magnolia", "Magnus", "Maia", "Maida", "Maiya", "Major", "Makayla", "Makenna", "Makenzie", "Malachi", "Malcolm", "Malika", "Malinda", "Mallie", "Mallory", "Malvina", "Mandy", "Manley", "Manuel", "Manuela", "Mara", "Marc", "Marcel", "Marcelina", "Marcelino", "Marcella", "Marcelle", "Marcellus", "Marcelo", "Marcia", "Marco", "Marcos", "Marcus", "Margaret", "Margarete", "Margarett", "Margaretta", "Margarette", "Margarita", "Marge", "Margie", "Margot", "Margret", "Marguerite", "Maria", "Mariah", "Mariam", "Marian", "Mariana", "Mariane", "Marianna", "Marianne", "Mariano", "Maribel", "Marie", "Mariela", "Marielle", "Marietta", "Marilie", "Marilou", "Marilyne", "Marina", "Mario", "Marion", "Marisa", "Marisol", "Maritza", "Marjolaine", "Marjorie", "Marjory", "Mark", "Markus", "Marlee", "Marlen", "Marlene", "Marley", "Marlin", "Marlon", "Marques", "Marquis", "Marquise", "Marshall", "Marta", "Martin", "Martina", "Martine", "Marty", "Marvin", "Mary", "Maryam", 
"Maryjane", "Maryse", "Mason", "Mateo", "Mathew", "Mathias", "Mathilde", "Matilda", "Matilde", "Matt", "Matteo", "Mattie", "Maud", "Maude", "Maudie", "Maureen", "Maurice", "Mauricio", "Maurine", "Maverick", "Mavis", "Max", "Maxie", "Maxime", "Maximilian", "Maximillia", "Maximillian", "Maximo", "Maximus", "Maxine", "Maxwell", "May", "Maya", "Maybell", "Maybelle", "Maye", "Maymie", "Maynard", "Mayra", "Mazie", "Mckayla", "Mckenna", "Mckenzie", "Meagan", "Meaghan", "Meda", "Megane", "Meggie", "Meghan", "Mekhi", "Melany", "Melba", "Melisa", "Melissa", "Mellie", "Melody", "Melvin", "Melvina", "Melyna", "Melyssa", "Mercedes", "Meredith", "Merl", "Merle", "Merlin", "Merritt", "Mertie", "Mervin", "Meta", "Mia", "Micaela", "Micah", "Michael", "Michaela", "Michale", "Micheal", "Michel", "Michele", "Michelle", "Miguel", "Mikayla", "Mike", "Mikel", "Milan", "Miles", "Milford", "Miller", "Millie", "Milo", "Milton", "Mina", "Minerva", "Minnie", "Miracle", "Mireille", "Mireya", "Misael", "Missouri", "Misty", "Mitchel", "Mitchell", "Mittie", "Modesta", "Modesto", "Mohamed", "Mohammad", "Mohammed", "Moises", "Mollie", "Molly", "Mona", "Monica", "Monique", "Monroe", "Monserrat", "Monserrate", "Montana", "Monte", "Monty", "Morgan", "Moriah", "Morris", "Mortimer", "Morton", "Mose", "Moses", "Moshe", "Mossie", "Mozell", "Mozelle", "Muhammad", "Muriel", "Murl", "Murphy", "Murray", "Mustafa", "Mya", "Myah", "Mylene", "Myles", "Myra", "Myriam", "Myrl", "Myrna", "Myron", "Myrtice", "Myrtie", "Myrtis", "Myrtle", "Nadia", "Nakia", "Name", "Nannie", "Naomi", "Naomie", "Napoleon", "Narciso", "Nash", "Nasir", "Nat", "Natalia", "Natalie", "Natasha", "Nathan", "Nathanael", "Nathanial", "Nathaniel", "Nathen", "Nayeli", "Neal", "Ned", "Nedra", "Neha", "Neil", "Nelda", "Nella", "Nelle", "Nellie", "Nels", "Nelson", "Neoma", "Nestor", "Nettie", "Neva", "Newell", "Newton", "Nia", "Nicholas", "Nicholaus", "Nichole", "Nick", "Nicklaus", "Nickolas", "Nico", "Nicola", "Nicolas", "Nicole", "Nicolette", 
"Nigel", "Nikita", "Nikki", "Nikko", "Niko", "Nikolas", "Nils", "Nina", "Noah", "Noble", "Noe", "Noel", "Noelia", "Noemi", "Noemie", "Noemy", "Nola", "Nolan", "Nona", "Nora", "Norbert", "Norberto", "Norene", "Norma", "Norris", "Norval", "Norwood", "Nova", "Novella", "Nya", "Nyah", "Nyasia", "Obie", "Oceane", "Ocie", "Octavia", "Oda", "Odell", "Odessa", "Odie", "Ofelia", "Okey", "Ola", "Olaf", "Ole", "Olen", "Oleta", "Olga", "Olin", "Oliver", "Ollie", "Oma", "Omari", "Omer", "Ona", "Onie", "Opal", "Ophelia", "Ora", "Oral", "Oran", "Oren", "Orie", "Orin", "Orion", "Orland", "Orlando", "Orlo", "Orpha", "Orrin", "Orval", "Orville", "Osbaldo", "Osborne", "Oscar", "Osvaldo", "Oswald", "Oswaldo", "Otha", "Otho", "Otilia", "Otis", "Ottilie", "Ottis", "Otto", "Ova", "Owen", "Ozella", "Pablo", "Paige", "Palma", "Pamela", "Pansy", "Paolo", "Paris", "Parker", "Pascale", "Pasquale", "Pat", "Patience", "Patricia", "Patrick", "Patsy", "Pattie", "Paul", "Paula", "Pauline", "Paxton", "Payton", "Pearl", "Pearlie", "Pearline", "Pedro", "Peggie", "Penelope", "Percival", "Percy", "Perry", "Pete", "Peter", "Petra", "Peyton", "Philip", "Phoebe", "Phyllis", "Pierce", "Pierre", "Pietro", "Pink", "Pinkie", "Piper", "Polly", "Porter", "Precious", "Presley", "Preston", "Price", "Prince", "Princess", "Priscilla", "Providenci", "Prudence", "Queen", "Queenie", "Quentin", "Quincy", "Quinn", "Quinten", "Quinton", "Rachael", "Rachel", "Rachelle", "Rae", "Raegan", "Rafael", "Rafaela", "Raheem", "Rahsaan", "Rahul", "Raina", "Raleigh", "Ralph", "Ramiro", "Ramon", "Ramona", "Randal", "Randall", "Randi", "Randy", "Ransom", "Raoul", "Raphael", "Raphaelle", "Raquel", "Rashad", "Rashawn", "Rasheed", "Raul", "Raven", "Ray", "Raymond", "Raymundo", "Reagan", "Reanna", "Reba", "Rebeca", "Rebecca", "Rebeka", "Rebekah", "Reece", "Reed", "Reese", "Regan", "Reggie", "Reginald", "Reid", "Reilly", "Reina", "Reinhold", "Remington", "Rene", "Renee", "Ressie", "Reta", "Retha", "Retta", "Reuben", "Reva", "Rex", "Rey", 
"Reyes", "Reymundo", "Reyna", "Reynold", "Rhea", "Rhett", "Rhianna", "Rhiannon", "Rhoda", "Ricardo", "Richard", "Richie", "Richmond", "Rick", "Rickey", "Rickie", "Ricky", "Rico", "Rigoberto", "Riley", "Rita", "River", "Robb", "Robbie", "Robert", "Roberta", "Roberto", "Robin", "Robyn", "Rocio", "Rocky", "Rod", "Roderick", "Rodger", "Rodolfo", "Rodrick", "Rodrigo", "Roel", "Rogelio", "Roger", "Rogers", "Rolando", "Rollin", "Roma", "Romaine", "Roman", "Ron", "Ronaldo", "Ronny", "Roosevelt", "Rory", "Rosa", "Rosalee", "Rosalia", "Rosalind", "Rosalinda", "Rosalyn", "Rosamond", "Rosanna", "Rosario", "Roscoe", "Rose", "Rosella", "Roselyn", "Rosemarie", "Rosemary", "Rosendo", "Rosetta", "Rosie", "Rosina", "Roslyn", "Ross", "Rossie", "Rowan", "Rowena", "Rowland", "Roxane", "Roxanne", "Roy", "Royal", "Royce", "Rozella", "Ruben", "Rubie", "Ruby", "Rubye", "Rudolph", "Rudy", "Rupert", "Russ", "Russel", "Russell", "Rusty", "Ruth", "Ruthe", "Ruthie", "Ryan", "Ryann", "Ryder", "Rylan", "Rylee", "Ryleigh", "Ryley", "Sabina", "Sabrina", "Sabryna", "Sadie", "Sadye", "Sage", "Saige", "Sallie", "Sally", "Salma", "Salvador", "Salvatore", "Sam", "Samanta", "Samantha", "Samara", "Samir", "Sammie", "Sammy", "Samson", "Sandra", "Sandrine", "Sandy", "Sanford", "Santa", "Santiago", "Santina", "Santino", "Santos", "Sarah", "Sarai", "Sarina", "Sasha", "Saul", "Savanah", "Savanna", "Savannah", "Savion", "Scarlett", "Schuyler", "Scot", "Scottie", "Scotty", "Seamus", "Sean", "Sebastian", "Sedrick", "Selena", "Selina", "Selmer", "Serena", "Serenity", "Seth", "Shad", "Shaina", "Shakira", "Shana", "Shane", "Shanel", "Shanelle", "Shania", "Shanie", "Shaniya", "Shanna", "Shannon", "Shanny", "Shanon", "Shany", "Sharon", "Shaun", "Shawn", "Shawna", "Shaylee", "Shayna", "Shayne", "Shea", "Sheila", "Sheldon", "Shemar", "Sheridan", "Sherman", "Sherwood", "Shirley", "Shyann", "Shyanne", "Sibyl", "Sid", "Sidney", "Sienna", "Sierra", "Sigmund", "Sigrid", "Sigurd", "Silas", "Sim", "Simeon", "Simone", 
"Sincere", "Sister", "Skye", "Skyla", "Skylar", "Sofia", "Soledad", "Solon", "Sonia", "Sonny", "Sonya", "Sophia", "Sophie", "Spencer", "Stacey", "Stacy", "Stan", "Stanford", "Stanley", "Stanton", "Stefan", "Stefanie", "Stella", "Stephan", "Stephania", "Stephanie", "Stephany", "Stephen", "Stephon", "Sterling", "Steve", "Stevie", "Stewart", "Stone", "Stuart", "Summer", "Sunny", "Susan", "Susana", "Susanna", "Susie", "Suzanne", "Sven", "Syble", "Sydnee", "Sydney", "Sydni", "Sydnie", "Sylvan", "Sylvester", "Sylvia", "Tabitha", "Tad", "Talia", "Talon", "Tamara", "Tamia", "Tania", "Tanner", "Tanya", "Tara", "Taryn", "Tate", "Tatum", "Tatyana", "Taurean", "Tavares", "Taya", "Taylor", "Teagan", "Ted", "Telly", "Terence", "Teresa", "Terrance", "Terrell", "Terrence", "Terrill", "Terry", "Tess", "Tessie", "Tevin", "Thad", "Thaddeus", "Thalia", "Thea", "Thelma", "Theo", "Theodora", "Theodore", "Theresa", "Therese", "Theresia", "Theron", "Thomas", "Thora", "Thurman", "Tia", "Tiana", "Tianna", "Tiara", "Tierra", "Tiffany", "Tillman", "Timmothy", "Timmy", "Timothy", "Tina", "Tito", "Titus", "Tobin", "Toby", "Tod", "Tom", "Tomas", "Tomasa", "Tommie", "Toney", "Toni", "Tony", "Torey", "Torrance", "Torrey", "Toy", "Trace", "Tracey", "Tracy", "Travis", "Travon", "Tre", "Tremaine", "Tremayne", "Trent", "Trenton", "Tressa", "Tressie", "Treva", "Trever", "Trevion", "Trevor", "Trey", "Trinity", "Trisha", "Tristian", "Tristin", "Triston", "Troy", "Trudie", "Trycia", "Trystan", "Turner", "Twila", "Tyler", "Tyra", "Tyree", "Tyreek", "Tyrel", "Tyrell", "Tyrese", "Tyrique", "Tyshawn", "Tyson", "Ubaldo", "Ulices", "Ulises", "Una", "Unique", "Urban", "Uriah", "Uriel", "Ursula", "Vada", "Valentin", "Valentina", "Valentine", "Valerie", "Vallie", "Van", "Vance", "Vanessa", "Vaughn", "Veda", "Velda", "Vella", "Velma", "Velva", "Vena", "Verda", "Verdie", "Vergie", "Verla", "Verlie", "Vern", "Verna", "Verner", "Vernice", "Vernie", "Vernon", "Verona", "Veronica", "Vesta", "Vicenta", "Vicente", 
"Vickie", "Vicky", "Victor", "Victoria", "Vida", "Vidal", "Vilma", "Vince", "Vincent", "Vincenza", "Vincenzo", "Vinnie", "Viola", "Violet", "Violette", "Virgie", "Virgil", "Virginia", "Virginie", "Vita", "Vito", "Viva", "Vivian", "Viviane", "Vivianne", "Vivien", "Vivienne", "Vladimir", "Wade", "Waino", "Waldo", "Walker", "Wallace", "Walter", "Walton", "Wanda", "Ward", "Warren", "Watson", "Wava", "Waylon", "Wayne", "Webster", "Weldon", "Wellington", "Wendell", "Wendy", "Werner", "Westley", "Weston", "Whitney", "Wilber", "Wilbert", "Wilburn", "Wiley", "Wilford", "Wilfred", "Wilfredo", "Wilfrid", "Wilhelm", "Wilhelmine", "Will", "Willa", "Willard", "William", "Willie", "Willis", "Willow", "Willy", "Wilma", "Wilmer", "Wilson", "Wilton", "Winfield", "Winifred", "Winnifred", "Winona", "Winston", "Woodrow", "Wyatt", "Wyman", "Xander", "Xavier", "Xzavier", "Yadira", "Yasmeen", "Yasmin", "Yasmine", "Yazmin", "Yesenia", "Yessenia", "Yolanda", "Yoshiko", "Yvette", "Yvonne", "Zachariah", "Zachary", "Zachery", "Zack", "Zackary", "Zackery", "Zakary", "Zander", "Zane", "Zaria", "Zechariah", "Zelda", "Zella", "Zelma", "Zena", "Zetta", "Zion", "Zita", "Zoe", "Zoey", "Zoie", "Zoila", "Zola", "Zora", "Zula"}, - "last": {"Abbott", "Abernathy", "Abshire", "Adams", "Altenwerth", "Anderson", "Ankunding", "Armstrong", "Auer", "Aufderhar", "Bahringer", "Bailey", "Balistreri", "Barrows", "Bartell", "Bartoletti", "Barton", "Bashirian", "Batz", "Bauch", "Baumbach", "Bayer", "Beahan", "Beatty", "Bechtelar", "Becker", "Bednar", "Beer", "Beier", "Berge", "Bergnaum", "Bergstrom", "Bernhard", "Bernier", "Bins", "Blanda", "Blick", "Block", "Bode", "Boehm", "Bogan", "Bogisich", "Borer", "Bosco", "Botsford", "Boyer", "Boyle", "Bradtke", "Brakus", "Braun", "Breitenberg", "Brekke", "Brown", "Bruen", "Buckridge", "Carroll", "Carter", "Cartwright", "Casper", "Cassin", "Champlin", "Christiansen", "Cole", "Collier", "Collins", "Conn", "Connelly", "Conroy", "Considine", "Corkery", "Cormier", "Corwin", 
"Cremin", "Crist", "Crona", "Cronin", "Crooks", "Cruickshank", "Cummerata", "Cummings", "Dach", "Damore", "Daniel", "Dare", "Daugherty", "Davis", "Deckow", "Denesik", "Dibbert", "Dickens", "Dicki", "Dickinson", "Dietrich", "Donnelly", "Dooley", "Douglas", "Doyle", "DuBuque", "Durgan", "Ebert", "Effertz", "Eichmann", "Emard", "Emmerich", "Erdman", "Ernser", "Fadel", "Fahey", "Farrell", "Fay", "Feeney", "Feest", "Feil", "Ferry", "Fisher", "Flatley", "Frami", "Franecki", "Friesen", "Fritsch", "Funk", "Gaylord", "Gerhold", "Gerlach", "Gibson", "Gislason", "Gleason", "Gleichner", "Glover", "Goldner", "Goodwin", "Gorczany", "Gottlieb", "Goyette", "Grady", "Graham", "Grant", "Green", "Greenfelder", "Greenholt", "Grimes", "Gulgowski", "Gusikowski", "Gutkowski", "Gutmann", "Haag", "Hackett", "Hagenes", "Hahn", "Haley", "Halvorson", "Hamill", "Hammes", "Hand", "Hane", "Hansen", "Harber", "Harris", "Hartmann", "Harvey", "Hauck", "Hayes", "Heaney", "Heathcote", "Hegmann", "Heidenreich", "Heller", "Herman", "Hermann", "Hermiston", "Herzog", "Hessel", "Hettinger", "Hickle", "Hilll", "Hills", "Hilpert", "Hintz", "Hirthe", "Hodkiewicz", "Hoeger", "Homenick", "Hoppe", "Howe", "Howell", "Hudson", "Huel", "Huels", "Hyatt", "Jacobi", "Jacobs", "Jacobson", "Jakubowski", "Jaskolski", "Jast", "Jenkins", "Jerde", "Jewess", "Johns", "Johnson", "Johnston", "Jones", "Kassulke", "Kautzer", "Keebler", "Keeling", "Kemmer", "Kerluke", "Kertzmann", "Kessler", "Kiehn", "Kihn", "Kilback", "King", "Kirlin", "Klein", "Kling", "Klocko", "Koch", "Koelpin", "Koepp", "Kohler", "Konopelski", "Koss", "Kovacek", "Kozey", "Krajcik", "Kreiger", "Kris", "Kshlerin", "Kub", "Kuhic", "Kuhlman", "Kuhn", "Kulas", "Kunde", "Kunze", "Kuphal", "Kutch", "Kuvalis", "Labadie", "Lakin", "Lang", "Langosh", "Langworth", "Larkin", "Larson", "Leannon", "Lebsack", "Ledner", "Leffler", "Legros", "Lehner", "Lemke", "Lesch", "Leuschke", "Lind", "Lindgren", "Littel", "Little", "Lockman", "Lowe", "Lubowitz", "Lueilwitz", 
"Luettgen", "Lynch", "Macejkovic", "Maggio", "Mann", "Mante", "Marks", "Marquardt", "Marvin", "Mayer", "Mayert", "McClure", "McCullough", "McDermott", "McGlynn", "McKenzie", "McLaughlin", "Medhurst", "Mertz", "Metz", "Miller", "Mills", "Mitchell", "Moen", "Mohr", "Monahan", "Moore", "Morar", "Morissette", "Mosciski", "Mraz", "Mueller", "Muller", "Murazik", "Murphy", "Murray", "Nader", "Nicolas", "Nienow", "Nikolaus", "Nitzsche", "Nolan", "Oberbrunner", "Okuneva", "Olson", "Ondricka", "OReilly", "Orn", "Ortiz", "Osinski", "Pacocha", "Padberg", "Pagac", "Parisian", "Parker", "Paucek", "Pfannerstill", "Pfeffer", "Pollich", "Pouros", "Powlowski", "Predovic", "Price", "Prohaska", "Prosacco", "Purdy", "Quigley", "Quitzon", "Rath", "Ratke", "Rau", "Raynor", "Reichel", "Reichert", "Reilly", "Reinger", "Rempel", "Renner", "Reynolds", "Rice", "Rippin", "Ritchie", "Robel", "Roberts", "Rodriguez", "Rogahn", "Rohan", "Rolfson", "Romaguera", "Roob", "Rosenbaum", "Rowe", "Ruecker", "Runolfsdottir", "Runolfsson", "Runte", "Russel", "Rutherford", "Ryan", "Sanford", "Satterfield", "Sauer", "Sawayn", "Schaden", "Schaefer", "Schamberger", "Schiller", "Schimmel", "Schinner", "Schmeler", "Schmidt", "Schmitt", "Schneider", "Schoen", "Schowalter", "Schroeder", "Schulist", "Schultz", "Schumm", "Schuppe", "Schuster", "Senger", "Shanahan", "Shields", "Simonis", "Sipes", "Skiles", "Smith", "Smitham", "Spencer", "Spinka", "Sporer", "Stamm", "Stanton", "Stark", "Stehr", "Steuber", "Stiedemann", "Stokes", "Stoltenberg", "Stracke", "Streich", "Stroman", "Strosin", "Swaniawski", "Swift", "Terry", "Thiel", "Thompson", "Tillman", "Torp", "Torphy", "Towne", "Toy", "Trantow", "Tremblay", "Treutel", "Tromp", "Turcotte", "Turner", "Ullrich", "Upton", "Vandervort", "Veum", "Volkman", "Von", "VonRueden", "Waelchi", "Walker", "Walsh", "Walter", "Ward", "Waters", "Watsica", "Weber", "Wehner", "Weimann", "Weissnat", "Welch", "West", "White", "Wiegand", "Wilderman", "Wilkinson", "Will", "Williamson", 
"Willms", "Windler", "Wintheiser", "Wisoky", "Wisozk", "Witting", "Wiza", "Wolf", "Wolff", "Wuckert", "Wunsch", "Wyman", "Yost", "Yundt", "Zboncak", "Zemlak", "Ziemann", "Zieme", "Zulauf"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/status_code.go b/vendor/github.com/brianvoe/gofakeit/data/status_code.go deleted file mode 100644 index 7d78fd995026..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/status_code.go +++ /dev/null @@ -1,7 +0,0 @@ -package data - -// StatusCodes consists of commonly used HTTP status codes -var StatusCodes = map[string][]int{ - "simple": {200, 301, 302, 400, 404, 500}, - "general": {100, 200, 201, 203, 204, 205, 301, 302, 304, 400, 401, 403, 404, 405, 406, 416, 500, 501, 502, 503, 504}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/data/vehicle.go b/vendor/github.com/brianvoe/gofakeit/data/vehicle.go deleted file mode 100644 index 3b96728bccad..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/data/vehicle.go +++ /dev/null @@ -1,10 +0,0 @@ -package data - -// Vehicle Beer consists of various beer information -var Vehicle = map[string][]string{ - "vehicle_type": {"Passenger car mini", "Passenger car light", "Passenger car compact", "Passenger car medium", "Passenger car heavy", "Sport utility vehicle", "Pickup truck", "Van"}, - "fuel_type": {"Gasoline", "Methanol", "Ethanol", "Diesel", "LPG", "CNG", "Electric"}, - "transmission_type": {"Manual", "Automatic"}, - "maker": {"Alfa Romeo", "Aston Martin", "Audi", "Bentley", "Benz", "BMW", "Bugatti", "Cadillac", "Chevrolet", "Chrysler", "Citroen", "Corvette", "DAF", "Dacia", "Daewoo", "Daihatsu", "Datsun", "De Lorean", "Dino", "Dodge", "Farboud", "Ferrari", "Fiat", "Ford", "Honda", "Hummer", "Hyundai", "Jaguar", "Jeep", "KIA", "Koenigsegg", "Lada", "Lamborghini", "Lancia", "Land Rover", "Lexus", "Ligier", "Lincoln", "Lotus", "Martini", "Maserati", "Maybach", "Mazda", "McLaren", "Mercedes", "Mercedes-Benz", "Mini", "Mitsubishi", "Nissan", "Noble", "Opel", 
"Peugeot", "Pontiac", "Porsche", "Renault", "Rolls-Royce", "Rover", "Saab", "Seat", "Skoda", "Smart", "Spyker", "Subaru", "Suzuki", "Toyota", "Tesla", "Vauxhall", "Volkswagen", "Volvo"}, - "model": {"Db9 Coupe", "Db9 Coupe Manual", "Db9 Volante", "V12 Vanquish S", "V8 Vantage", "A3", "A4", "A4 Avant Quattro", "A4 Cabriolet", "A4 Cabriolet Quattro", "A4 Quattro", "A6", "A6 Avant Quattro", "A6 Quattro", "A8 L", "Gti", "Passat", "S4", "S4 Avant", "S4 Cabriolet", "Tt Coupe", "Tt Roadster", "Bentley Arnage", "Continental Flying Spur", "Continental Gt", " 325ci Convertible", " 325i", " 325xi", " 325xi Sport Wagon", " 330ci Convertible", " 330i", " 330xi", " 525i", " 525xi", " 530i", " 530xi", " 530xi Sport Wagon", " 550i", " 650ci", " 650ci Convertible", " 750li", " 760li", " M3", " M3 Convertible", " M5", " M6", " Mini Cooper", " Mini Cooper Convertible", " Mini Cooper S", " Mini Cooper S Convertible", " X3", " X5", " X5 4.8is", " Z4 3.0 Si Coupe", " Z4 3.0i", " Z4 3.0si", " Z4 M Roadster", "Veyron", "300c/srt-8", "Caravan 2wd", "Charger", "Commander 4wd", "Crossfire Roadster", "Dakota Pickup 2wd", "Dakota Pickup 4wd", "Durango 2wd", "Durango 4wd", "Grand Cherokee 2wd", "Grand Cherokee 4wd", "Liberty/cherokee 2wd", "Liberty/cherokee 4wd", "Pacifica 2wd", "Pacifica Awd", "Pt Cruiser", "Ram 1500 Pickup 2wd", "Ram 1500 Pickup 4wd", "Sebring 4-dr", "Stratus 4-dr", "Town & Country 2wd", "Viper Convertible", "Wrangler/tj 4wd", "F430", "Ferrari 612 Scaglietti", "Ferrari F141", "B4000 4wd", "Crown Victoria Police", "E150 Club Wagon", "E150 Econoline 2wd", "Escape 4wd", "Escape Fwd", "Escape Hybrid 4wd", "Escape Hybrid Fwd", "Expedition 2wd", "Explorer 2wd", "Explorer 4wd", "F150 Ffv 2wd", "F150 Ffv 4wd", "F150 Pickup 2wd", "F150 Pickup 4wd", "Five Hundred Awd", "Focus Fwd", "Focus Station Wag", "Freestar Wagon Fwd", "Freestyle Awd", "Freestyle Fwd", "Grand Marquis", "Gt 2wd", "Ls", "Mark Lt", "Milan", "Monterey Wagon Fwd", "Mountaineer 4wd", "Mustang", "Navigator 2wd", "Ranger 
Pickup 2wd", "Ranger Pickup 4wd", "Taurus", "Taurus Ethanol Ffv", "Thunderbird", "Town Car", "Zephyr", "B9 Tribeca Awd", "Baja Awd", "Forester Awd", "Impreza Awd", "Impreza Wgn/outback Spt Awd", "Legacy Awd", "Legacy Wagon Awd", "Outback Awd", "Outback Wagon Awd", "9-3 Convertible", "9-3 Sport Sedan", "9-5 Sedan", "C15 Silverado Hybrid 2wd", "C1500 Silverado 2wd", "C1500 Suburban 2wd", "C1500 Tahoe 2wd", "C1500 Yukon 2wd", "Cobalt", "Colorado 2wd", "Colorado 4wd", "Colorado Cab Chassis Inc 2wd", "Colorado Crew Cab 2wd", "Colorado Crew Cab 4wd", "Corvette", "Cts", "Dts", "Envoy 2wd", "Envoy Xl 4wd", "Equinox Awd", "Equinox Fwd", "Escalade 2wd", "Escalade Esv Awd", "G15/25chev Van 2wd Conv", "G1500/2500 Chevy Express 2wd", "G1500/2500 Chevy Van 2wd", "G6", "G6 Gt/gtp Convertible", "Grand Prix", "Gto", "H3 4wd", "Hhr Fwd", "I-280 2wd Ext Cab", "Impala", "K15 Silverado Hybrid 4wd", "K1500 Avalanche 4wd", "K1500 Silverado 4wd", "K1500 Tahoe 4wd", "Lacrosse/allure", "Limousine", "Malibu", "Montana Sv6 Awd", "Monte Carlo", "Rendezvous Awd", "Rendezvous Fwd", "Solstice", "Srx 2wd", "Srx Awd", "Ssr Pickup 2wd", "Sts", "Sts Awd", "Terraza Fwd", "Trailblazer 2wd", "Trailblazer 4wd", "Trailblazer Awd", "Trailblazer Ext 4wd", "Uplander Fwd", "Vue Awd", "Vue Fwd", "Xlr", "Aveo", "Forenza", "Forenza Wagon", "Verona", "Accord", "Accord Hybrid", "Civic", "Civic Hybrid", "Cr-v 4wd", "Element 2wd", "Element 4wd", "Insight", "Mdx 4wd", "Odyssey 2wd", "Pilot 2wd", "Pilot 4wd", "Ridgeline 4wd", "Rl", "Rsx", "S2000", "Tl", "Tsx", "Accent", "Azera", "Elantra", "Santafe 2wd", "Santafe 4wd", "Sonata", "Tiburon", "Tucson 2wd", "Tucson 4wd", "S-type 3.0 Litre", "S-type 4.2 Litre", "S-type R", "Vdp Lwb", "Xj8", "Xk8 Convertible", "Xkr Convertible", "X-type", "X-type Sport Brake", "Amanti", "Optima", "Optima(ms)", "Rio", "Sedona", "Sorento 2wd", "Sorento 4wd", "Spectra(ld)", "Sportage 2wd", "Sportage 4wd", "L-140/715 Gallardo", "L-147/148 Murcielago", "Lr3", "Range Rover", "Range Rover Sport", 
"Elise/exige", "Coupe Cambiocorsa/gt/g-sport", "Quattroporte", "Mazda 3", "Mazda 5", "Mazda 6", "Mazda 6 Sport Wagon", "Mazda Rx-8", "Mpv", "Mx-5", "C230", "C280", "C280 4matic", "C350", "C350 4matic", "C55 Amg", "Cl65 Amg", "Clk350", "Clk350 (cabriolet)", "Clk55 Amg (cabriolet)", "Cls500", "Cls55 Amg", "E320 Cdi", "E350", "E350 (wagon)", "E350 4matic", "E350 4matic (wagon)", "E500", "E55 Amg", "E55 Amg (wagon)", "Maybach 57s", "Maybach 62", "Ml350", "Ml500", "R350", "R500", "S350", "S430", "Sl500", "Sl600", "Sl65 Amg", "Slk280", "Slk350", "Slr", "Eclipse", "Endeavor 2wd", "Endeavor 4wd", "Galant", "Lancer", "Lancer Evolution", "Lancer Sportback", "Montero", "Outlander 2wd", "Outlander 4wd", "Vibe", "350z", "350z Roadster", "Altima", "Armada 2wd", "Armada 4wd", "Frontier 2wd", "Frontier V6-2wd", "Frontier V6-4wd", "Fx35 Awd", "Fx35 Rwd", "Fx45 Awd", "G35", "M35", "M35x", "M45", "Maxima", "Murano Awd", "Murano Fwd", "Pathfinder 2wd", "Pathfinder 4wd", "Q45", "Q45 Sport", "Quest", "Qx56 4wd", "Sentra", "Titan 2wd", "Titan 4wd", "Xterra 2wd", "Xterra 4wd", "Boxster", "Boxster S", "Carrera 2 Coupe", "Cayenne", "Cayenne S", "Cayenne Turbo", "Cayman S", "Phantom", "F150 Supercrew 4wd", "C8 Spyder", "Aerio", "Aerio Sx", "Aerio Sx Awd", "Grand Vitara Xl-7", "Grand Vitara Xl-7 4wd", "Grand Vitara Xv6", "Grand Vitara Xv6 Awd", "4runner 2wd", "4runner 4wd", "Avalon", "Camry", "Camry Solara", "Camry Solara Convertible", "Corolla", "Corolla Matrix", "Es 330", "Gs 300 4wd", "Gs 300/gs 430", "Gx 470", "Highlander 2wd", "Highlander 4wd", "Highlander Hybrid 2wd", "Highlander Hybrid 4wd", "Is 250", "Is 250 Awd", "Is 350", "Ls 430", "Lx 470", "Prius", "Rav4 2wd", "Rav4 4wd", "Rx 330 2wd", "Rx 330 4wd", "Rx 400h 4wd", "Sc 430", "Scion Tc", "Scion Xa", "Scion Xb", "Sequoia 2wd", "Sequoia 4wd", "Sienna 2wd", "Sienna 4wd", "Toyota Tacoma 2wd", "Toyota Tacoma 4wd", "Toyota Tundra 2wd", "Toyota Tundra 4wd", "Yaris", "A3 Quattro", "Golf", "Jetta", "New Beetle", "New Beetle Convertible", 
"Passat Wagon 4motion", "Phaeton", "Rabbit", "Touareg", "Tt Coupe Quattro", "Tt Roadster Quattro", "C70 Convertible", "S40 Awd", "S40 Fwd", "S60 Awd", "S60 Fwd", "S60 R Awd", "S80 Fwd", "V50 Awd", "V70 Fwd", "V70 R Awd", "Xc 70 Awd", "Xc 90 Awd", "Xc 90 Fwd"}, -} diff --git a/vendor/github.com/brianvoe/gofakeit/datetime.go b/vendor/github.com/brianvoe/gofakeit/datetime.go deleted file mode 100644 index 8c064473d3d8..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/datetime.go +++ /dev/null @@ -1,77 +0,0 @@ -package gofakeit - -import ( - "strconv" - "time" -) - -// Date will generate a random time.Time struct -func Date() time.Time { - return time.Date(Year(), time.Month(Number(0, 12)), Day(), Hour(), Minute(), Second(), NanoSecond(), time.UTC) -} - -// DateRange will generate a random time.Time struct between a start and end date -func DateRange(start, end time.Time) time.Time { - return time.Unix(0, int64(Number(int(start.UnixNano()), int(end.UnixNano())))).UTC() -} - -// Month will generate a random month string -func Month() string { - return time.Month(Number(1, 12)).String() -} - -// Day will generate a random day between 1 - 31 -func Day() int { - return Number(1, 31) -} - -// WeekDay will generate a random weekday string (Monday-Sunday) -func WeekDay() string { - return time.Weekday(Number(0, 6)).String() -} - -// Year will generate a random year between 1900 - current year -func Year() int { - return Number(1900, time.Now().Year()) -} - -// Hour will generate a random hour - in military time -func Hour() int { - return Number(0, 23) -} - -// Minute will generate a random minute -func Minute() int { - return Number(0, 59) -} - -// Second will generate a random second -func Second() int { - return Number(0, 59) -} - -// NanoSecond will generate a random nano second -func NanoSecond() int { - return Number(0, 999999999) -} - -// TimeZone will select a random timezone string -func TimeZone() string { - return getRandValue([]string{"timezone", "text"}) -} 
- -// TimeZoneFull will select a random full timezone string -func TimeZoneFull() string { - return getRandValue([]string{"timezone", "full"}) -} - -// TimeZoneAbv will select a random timezone abbreviation string -func TimeZoneAbv() string { - return getRandValue([]string{"timezone", "abr"}) -} - -// TimeZoneOffset will select a random timezone offset -func TimeZoneOffset() float32 { - value, _ := strconv.ParseFloat(getRandValue([]string{"timezone", "offset"}), 32) - return float32(value) -} diff --git a/vendor/github.com/brianvoe/gofakeit/doc.go b/vendor/github.com/brianvoe/gofakeit/doc.go deleted file mode 100644 index c53335e634f1..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/doc.go +++ /dev/null @@ -1,10 +0,0 @@ -/* -Package gofakeit is a random data generator written in go - -Every function has an example and a benchmark - -See the full list here https://godoc.org/github.com/brianvoe/gofakeit - -80+ Functions!!! -*/ -package gofakeit diff --git a/vendor/github.com/brianvoe/gofakeit/faker.go b/vendor/github.com/brianvoe/gofakeit/faker.go deleted file mode 100644 index 38062d5cdf91..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/faker.go +++ /dev/null @@ -1,15 +0,0 @@ -package gofakeit - -import ( - "math/rand" - "time" -) - -// Seed random. 
Setting seed to 0 will use time.Now().UnixNano() -func Seed(seed int64) { - if seed == 0 { - rand.Seed(time.Now().UTC().UnixNano()) - } else { - rand.Seed(seed) - } -} diff --git a/vendor/github.com/brianvoe/gofakeit/file.go b/vendor/github.com/brianvoe/gofakeit/file.go deleted file mode 100644 index 6c1e8d56cba1..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/file.go +++ /dev/null @@ -1,11 +0,0 @@ -package gofakeit - -// MimeType will generate a random mime file type -func MimeType() string { - return getRandValue([]string{"file", "mime_type"}) -} - -// Extension will generate a random file extension -func Extension() string { - return getRandValue([]string{"file", "extension"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/generate.go b/vendor/github.com/brianvoe/gofakeit/generate.go deleted file mode 100644 index 284eef8bb108..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/generate.go +++ /dev/null @@ -1,41 +0,0 @@ -package gofakeit - -import ( - "strings" -) - -// Generate fake information from given string. String should contain {category.subcategory} -// -// Ex: {person.first} - random firstname -// -// Ex: {person.first}###{person.last}@{person.last}.{internet.domain_suffix} - billy834smith@smith.com -// -// Ex: ### - 481 - random numbers -// -// Ex: ??? - fda - random letters -// -// For a complete list possible categories use the Categories() function. 
-func Generate(dataVal string) string { - // Identify items between brackets: {person.first} - for strings.Count(dataVal, "{") > 0 && strings.Count(dataVal, "}") > 0 { - catValue := "" - startIndex := strings.Index(dataVal, "{") - endIndex := strings.Index(dataVal, "}") - replace := dataVal[(startIndex + 1):endIndex] - categories := strings.Split(replace, ".") - - if len(categories) >= 2 && dataCheck([]string{categories[0], categories[1]}) { - catValue = getRandValue([]string{categories[0], categories[1]}) - } - - dataVal = strings.Replace(dataVal, "{"+replace+"}", catValue, 1) - } - - // Replace # with numbers - dataVal = replaceWithNumbers(dataVal) - - // Replace ? with letters - dataVal = replaceWithLetters(dataVal) - - return dataVal -} diff --git a/vendor/github.com/brianvoe/gofakeit/hacker.go b/vendor/github.com/brianvoe/gofakeit/hacker.go deleted file mode 100644 index 0ac73b7109f3..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/hacker.go +++ /dev/null @@ -1,35 +0,0 @@ -package gofakeit - -import "strings" - -// HackerPhrase will return a random hacker sentence -func HackerPhrase() string { - words := strings.Split(Generate(getRandValue([]string{"hacker", "phrase"})), " ") - words[0] = strings.Title(words[0]) - return strings.Join(words, " ") -} - -// HackerAbbreviation will return a random hacker abbreviation -func HackerAbbreviation() string { - return getRandValue([]string{"hacker", "abbreviation"}) -} - -// HackerAdjective will return a random hacker adjective -func HackerAdjective() string { - return getRandValue([]string{"hacker", "adjective"}) -} - -// HackerNoun will return a random hacker noun -func HackerNoun() string { - return getRandValue([]string{"hacker", "noun"}) -} - -// HackerVerb will return a random hacker verb -func HackerVerb() string { - return getRandValue([]string{"hacker", "verb"}) -} - -// HackerIngverb will return a random hacker ingverb -func HackerIngverb() string { - return getRandValue([]string{"hacker", "ingverb"}) -} 
diff --git a/vendor/github.com/brianvoe/gofakeit/hipster.go b/vendor/github.com/brianvoe/gofakeit/hipster.go deleted file mode 100644 index 3166a9966a13..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/hipster.go +++ /dev/null @@ -1,20 +0,0 @@ -package gofakeit - -// HipsterWord will return a single hipster word -func HipsterWord() string { - return getRandValue([]string{"hipster", "word"}) -} - -// HipsterSentence will generate a random sentence -func HipsterSentence(wordCount int) string { - return sentence(wordCount, HipsterWord) -} - -// HipsterParagraph will generate a random paragraphGenerator -// Set Paragraph Count -// Set Sentence Count -// Set Word Count -// Set Paragraph Separator -func HipsterParagraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { - return paragraphGenerator(paragrapOptions{paragraphCount, sentenceCount, wordCount, separator}, HipsterSentence) -} diff --git a/vendor/github.com/brianvoe/gofakeit/image.go b/vendor/github.com/brianvoe/gofakeit/image.go deleted file mode 100644 index de5a2e6d916c..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/image.go +++ /dev/null @@ -1,8 +0,0 @@ -package gofakeit - -import "strconv" - -// ImageURL will generate a random Image Based Upon Height And Width. https://picsum.photos/ -func ImageURL(width int, height int) string { - return "https://picsum.photos/" + strconv.Itoa(width) + "/" + strconv.Itoa(height) -} diff --git a/vendor/github.com/brianvoe/gofakeit/internet.go b/vendor/github.com/brianvoe/gofakeit/internet.go deleted file mode 100644 index 69dd700e5231..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/internet.go +++ /dev/null @@ -1,55 +0,0 @@ -package gofakeit - -import ( - "fmt" - "math/rand" - "strings" -) - -// DomainName will generate a random url domain name -func DomainName() string { - return strings.ToLower(JobDescriptor()+BS()) + "." 
+ DomainSuffix() -} - -// DomainSuffix will generate a random domain suffix -func DomainSuffix() string { - return getRandValue([]string{"internet", "domain_suffix"}) -} - -// URL will generate a random url string -func URL() string { - url := "http" + RandString([]string{"s", ""}) + "://www." - url += DomainName() - - // Slugs - num := Number(1, 4) - slug := make([]string, num) - for i := 0; i < num; i++ { - slug[i] = BS() - } - url += "/" + strings.ToLower(strings.Join(slug, "/")) - - return url -} - -// HTTPMethod will generate a random http method -func HTTPMethod() string { - return getRandValue([]string{"internet", "http_method"}) -} - -// IPv4Address will generate a random version 4 ip address -func IPv4Address() string { - num := func() int { return 2 + rand.Intn(254) } - return fmt.Sprintf("%d.%d.%d.%d", num(), num(), num(), num()) -} - -// IPv6Address will generate a random version 6 ip address -func IPv6Address() string { - num := 65536 - return fmt.Sprintf("2001:cafe:%x:%x:%x:%x:%x:%x", rand.Intn(num), rand.Intn(num), rand.Intn(num), rand.Intn(num), rand.Intn(num), rand.Intn(num)) -} - -// Username will genrate a random username based upon picking a random lastname and random numbers at the end -func Username() string { - return getRandValue([]string{"person", "last"}) + replaceWithNumbers("####") -} diff --git a/vendor/github.com/brianvoe/gofakeit/job.go b/vendor/github.com/brianvoe/gofakeit/job.go deleted file mode 100644 index c156bde77243..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/job.go +++ /dev/null @@ -1,34 +0,0 @@ -package gofakeit - -// JobInfo is a struct of job information -type JobInfo struct { - Company string - Title string - Descriptor string - Level string -} - -// Job will generate a struct with random job information -func Job() *JobInfo { - return &JobInfo{ - Company: Company(), - Title: JobTitle(), - Descriptor: JobDescriptor(), - Level: JobLevel(), - } -} - -// JobTitle will generate a random job title string -func 
JobTitle() string { - return getRandValue([]string{"job", "title"}) -} - -// JobDescriptor will generate a random job descriptor string -func JobDescriptor() string { - return getRandValue([]string{"job", "descriptor"}) -} - -// JobLevel will generate a random job level string -func JobLevel() string { - return getRandValue([]string{"job", "level"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/log_level.go b/vendor/github.com/brianvoe/gofakeit/log_level.go deleted file mode 100644 index bde9bf310588..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/log_level.go +++ /dev/null @@ -1,15 +0,0 @@ -package gofakeit - -import ( - "github.com/brianvoe/gofakeit/data" -) - -// LogLevel will generate a random log level -// See data/LogLevels for list of available levels -func LogLevel(logType string) string { - if _, ok := data.LogLevels[logType]; ok { - return getRandValue([]string{"log_level", logType}) - } - - return getRandValue([]string{"log_level", "general"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/logo.png b/vendor/github.com/brianvoe/gofakeit/logo.png deleted file mode 100644 index a97962030afda2145a035db6a01b2f3c256384fa..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 36022 zcmYgXWl$VllpTB+Y;e~QoZ#*RcN;uNaCd_1;I6?5?yey?!2$$#2oOAY1efh^tF~&3 zH#JoM=e%2$5cMXT1Au4+7DE6lJ7zeAZ3_ zKLk)LE?#?7oH^=Uq*{$rZm9CDWU|tGO5@q%(a_S;_kZjzO-@c0;HXMgl9on9Kn$J9 z9Uu|tN^l}w?<8LR73iPq-Y3znz54vY*2m%O4GWyZ#!D%uikQqc8GI!wum1Jr@o+-l5Ol&nnOh)P_TXh66 zAXdgZ5~lRYf_}^284811k!+D%*_tWQX2AV~s~ey^(Bpl{d=dUq6&gl}DCje&8k9%6 zdVt78v3s+G{E4bx7etL{i?R)xsl(4v(-&?g|D5847=nRNi_Gbt*D?MD^aJ(MTE^i7 zVS83qHYyQ6P85^|3Lx))06#?kWq&#&1er!>brHSG^E@IEiU!3Z7~{{9cn*^0pEZEg zkj$24)uIyr(t?;ku8bjS`3h=l*PMgm5+!d^g+2d<%CuHaAW+Q4(6M${ksC+it1}%n0f|<8IM*)dOhn2m ziu;n-6njBK(OvoB=&Fqw^jsSE5+h*hX_<%!e~ZKLs=Him^fm&~j3eYVSpLTei7cld zz9oE`Ry1t^-7H5O!?DPIh~eY+IJr;@{q3z!}Sq@$CriqeQo(}FG~J~)tr0NPBS 
zdk6YqXK%lX1j@)5wsmw=KO{&3-G0*#ipCt0gd$&x;uIXxf_M7ZCPH@wT`9lFpZuVvz8FbnlrrAmWT3$dSVou@VQjIqrX^D>qO2y(1jN!H_A!H2k=s%TSuqtC za=e;~`@Q_05XA9I)eQn&y8m}$emqBA+}-WSJd{k0D2Q`hccKMyEK3)kU0C={16n!r z7R>)1`oNBQDJmyH=_LjWY2e*V3ztjyNQ zDd8_^ZogY54{4-I1wk2HvXp?k%$8ARlSA=0+SyrtZ)3*$x7umSjI}9}zP{)=x$7F5 zn%>m3duXqGY=N(u(If#%#73eWM9dP@+$mr|Y8D}B1r}1iq4wv zF&wNBZUcgDpTvDcseskFNmW^$Q@6I18eOQ2Pe@oc0XFZ&Gy~k7H*phj+Y459BOuUm z%E8L|R#`btO@ByMR(91Coz_(Sr()>(F(NGiY6)Rj|0{x2=wE+H=7?cyq@l*}u@Gq3 z4->SF^Ay!TPP}9(X%(%=c-J;J6C@x*#@t8J&|NPBD5Pgp^Vs-!45cY!c@s{gxg;7u`X@E0*S^JI$NjUWiHRTk z5GRfWGEa$?Qs4T*f_t#9FLO?=k=LM-X-zdkStuluHUX)RheW0G$tJk}t6jy=p<6}X z_)ioB%%Sbo;AdP1(|eq?b&*KhC4-c-w79`RL;?bW5G#ZHypbX~qSxFN^D*b=&VR43 z2ai1mE+l9dhZBwJGGRhO*YXH?b!`?*Bya*qn3VxE6F~=}m`V#AGgQr*Y0(Y9DcWLV zwRrigBEN0F012kl`<4SUtR{Xgs zA3w&Wrs5G05=y}q1tp@2nf!35Y(Axr6nt5Aj9hgbfurO1b7B(6O4-;j3JVMGxC+WC zW7N2R5%ZVTgwHaKU}eHVCLL(|i)~q4H*9(0FUAYsvrzoi#Pt)ytE(^lj0Ib5(0)m* zL{m~bNxSHYU>3WX72erfSsA!kSWMg$3_VZ6sM4?CtG+s;wp1-{0q=T_}2`-N=R%@~Ni>iJhGtFxTZ>z$-s&;Lv~B zgh+#>2WZqx8=(R((=USsxo90F#76?FoXMUDG>i?{{p;zyoynSs zd7pRaySKk@{w;mHan*6iyZ2-!rD|llnGLXp>)x1Vr|}zwRNlSa{XOEJ%P+Sazah$z zqANL*x&gbt|471oK}9=F<>dz)CuEuJ-v10jx{2oNElG*r1@c(u`-%$|$OmcKDTUgK zx!1%qsL68)`l;kY`^pJ-B_zt_6~0to>R7?uP%(hHG&mN1FeXOCJgMZ#v!| z9cau03Z4;40@;DbyFWmPX?MIM+1!kopuWDo{#pCFX4_3@?%D#q*e$t87CgDSMzlKf zU<68+-9kn*0i)s$givcSMRybgfn(XiXJeE`1*58~^&8E^naX@#o^ZDRVe>!y2)MFS z5f8*Bs_Uj#O`zqH{{_#W_y;1A+49+ZLo#LE-FHvg%1g0OrW2KjT>jJdkR3jKIP`E4 z%*o_=sQ&U0xOYJO{@~F=i;9)xZJ!VdxAKrI z6^2^>}HigY!_Xpl=qJ=mk4D!&yV6e)nI!gDy z4cL^sxQmPUjgG5YB$(10hg;7-S)hY(?eFyCp3MiJ8Sw%tter|l##2!%w98c#v&H3r zT5U}*=uz-XBAfkoNV_H_*Hn;+H%u#by`dV3@$0A%KR>^Xlf39L!%j7fwIN^F zs~Xs9&v;NZklLQ_4o81HbW?5onL07&HbOgdm>5T!XhKpv9e3>QOOw`D{`e*#b+H8W zTo;#(P5BBfZ9JNgHFL-wD^15?p3Ar+fdBb&{^IM|%}C1&B587QB-AgMe`_6G&q24| zIHu!=A4ZLC-3H<|=DTj778g6+3I<_eVSz%z1`!|qH=R54#mq>EwmSc^k;ES$kY(B< zhCP>;7u0qa;*%{k+Al(C4gL`6)*1RD_sjfmHUCZ}m!HtO1-&q+3r%cO?6Py{1obUK 
zFk*8t2QOdSeal^tT2^G+`(a4!yDvn*qaBeoisI(R5fd_Uk{;s%3q)DbDcM*`FJw)* z4v!o*sk34EdC=CjHjB|j%ErTk-PuLzpV!kINT0B2*mU^$_yiLC2MycoNtbF3qF^<; zR7e_SaT=(9rC}NwkuO>hvZ{@@%qSCA&?zF41nX#P4CGnx0F5+f7ESc~+a8**T@=?a zP~z@DBO7>Vq{BKWWjZwebhl}574W7ZOcS;YRTo`89IJU?)h7H9)*u7VNL(H2F2j5Rb z3>RRuuro?a%Uz-(`AEUdphFG52ypXV-g2L*7(`9?7|cRO2hXG!vKWZ*U%un{NxBD3 zSOi-HjiS^TEwh{};CC9FAZCJ)t=-7!-LU2T5MYfiVkFfNOBLe4&FL&BznYvJy>)OP z_defvZvVV0kCYcsOa+SVS4N=gZo1m;pME-OIjEVt z=DbFd;Ca2lf@(jZ2OavAhQJcTtZEkUZTF?W82ENDck3i_eHKYioiR@=-}Rs8;O;LJ z{6+3p!K3(zRAR9}CtK1jni;fF#xwvVr8mOo=njk3gddRh4tGP-5ko=13L5}P7il<_ zq+jpf#AfL=(-$>ca}(8fA|KXuW5OlX4@GmPnTx$-c|v{idM*Y1x81G>&rU;2YAAas z>yFMGBbLuVLycxa&*{8U=(H>CP9h+PCa)yy`0K$zW+a!}S&7#+A**hP`4}6hFb{%b zph+;+j9GA~X=DTxebUI`QmyUnpPHH)^3vANB4=&kG+>}rDa$Oah6rYk`Vt#Q!t{)B zTBfjJ>Q@1Sv!?5G@a=i0i`~Uw33$jfh9+;n8I(5my<*Pe!)HKOyy}h`s422<$S&N?LPKZ!;q!{DP=&)WeVevSkl2^isOc*ip>4PP! zN$>$K#YT^qu4cJ}6nYqfTnQMc>NR=84(}A^6cH15N7CiWt67%n1O1spuyzX?f#`6@ zP1|Y7hO49Rp95D+V=xC?EKAtS{wuTEz#mZ=F41o}D&_Hs7gs))gBa5mQ#pJq-FL5$ z5eKBQVY@NP1Fx5Zuxjc%>rJT|2fqJn>|^~c{J6WqHIlE+`bZb5T_{t`rtsJsT?n-m zyksu$y+xqd?DKcU3Pq;uX|2WlbMSq*+ZTh=<%g(GkICug7h?HDs)43RR6&YFt{rj* zCnv14jnB$5!e!oSvV(?A?@5`gstqzJC^4QZfC^b9PY}1o`n${`9NqP=@b;D8pfzb( zGClfp2(|M^45}nF@j@Jv1~jz_{Geb^kp>i@kNQ~>gwTgGpaIr_;a;LiN&y|_y>&m` z>w>T%sHELomt4Fms(s63GT;f zSVxHjVMifRC5ap)CnTxUxqbf~`pT2&->LY|xSm9NGhV#DMUKokE!S?4zX?Ln{mn-j z<)YdzW&V9ZuLGRXqzB9% zVO0zhsai>j108&&^M~HTFA?0C#)J^q4C47!cl4qjJLBuOJ-<*UgHvYUGo5Dpf9?Abh zNl6dNfEf+{Q68M@;xmWIixvO+y7Sk_hT%Bk@WVSpkS}8g4$u*a9@#oXRI|{)k7mCs zwvj-NHUj6+bfv+@*l2k@J;2L-_C)~_lN1z=@5hej?NrlHSFev@MvkzmSai)4yS6#A zlEWtplaFuwFV+K===Dvd5+RU{EU6&x+01XEkmzE_$k}LtrI99SKEt7nY#1L~h}Gl; zdz8)Hn1M|dOeFuu&-{kH@Yr40`uZOWD#fUomX|CV_%2JUD^lzJ4U-cZ(r8K~(M!J# z|5~8447jGaJIl%0`4B-xaQ44o=Gopcxa&Qf*DTCUQ(N29e$~Uqyms|Pcx`6&2V~Y( z#!XS^R&$|C2Nq$*31<}k$x#mOw?U^ZJX2R_!)INWIvyqPN!02p@s^ggfLVQh@Udti z*sLh;MwZW6SEDeWD&#?0_dbp3DbzRk*23mc^hq1koD5nHP0j{fP_XESCJZ*a`Xgs1 
zrvB9v|K!|xtoB+AAM$SqDx%;IR}vaY17AX!5<4Bs-p*3YgwaGAN>c=cSgA2tjAf}Fw zScC*bg_TXji9e`3y;@IX_%iyidIekNs>?|w=zZmmVMu5psVae=Cuk$4M+ zlfDIKtArGWY%_xT8`?zO(RaG3TxO+~O!0X8Mv zgD2q`Ih<|6RxrS90l+HasFJ)X5Fyg~oR|pagNXtBcwdH_F;|Wbn|$hB zW+eRI>9t{N7te#pc!WfLM_Wtk;b%93bbqOB3GRF& z>LmYl5*u}MP}yATUgDg7o`gav4bxi^PDfKBw9MlNHeuz-W;Q`-NpysvW>DGo{WvH7 zc;8~-#v)S;o@8da0(oR%cyVN5N^$ikrQ6ReDgAn|G7URHb$$I*Ix4zIkf&{07#t|G z_>tIEnd-7YX{(~rN!*S^sE@C5ebS*?nA=7vSZw+WI(n0Wb2I6hO5|_u&|L$qiFvzv zORh7^6+##lTXyXF64688UwB5{Il&Y@;h++!kb(Xo8ZZ=@iee(nV)YV?11XNQ3z-vt z6>%M>QNbuOs?+zVHR}+YN;t|b=-P>^)*-JwlqxNiU_*>8m|=x((t%(9!+DHpor)A{}&wDU}6`guwBpRcEE&%vJ)9I1J@el`~ zVFOTUTGCXu2p7iYz39Iaua?#VDB$KAuy;s_yHSCgLcS=r4#MKUb_kJgi9=fCM(9g# zy}QCoxrW-jvhNKNsn)0&k?m!6qs65Lp(H-o)-AY10Mw}oAD{^#)`xOx6d@{g6D2Ez zFqb9oEyxRN^xz^X+9ndRfg6JnIx=|BI^-&j(Q_QFPeG!>y3>IkbZ9!$nLIbc* zy7FBpT8{PhWdUXQ8ER&PEw9B|UJK{L>2PHMKZf+idyBJONEk>lldtt_nv2%8SjE7V zv(l-=S39p_+YlzQ_smH5@>lpYnZAUdlIc-^d-ebDEte-EfZyPEHav(r6PW8CIiFU88=`W!jBdPB3gY>IT5{yjJdZMpowX@eyE}+kZM6pL*I>o`0-G zw}XIUxAB&zP+d_FwP47{b#j#59t{*RVwnS#sj~BXJ=9XxaW<&eBr)gDt@O4_kSN4w;m6@Ja7;5;A5yA`we7 zUlOJ25jm;t$P8_nCgau1@Pjn33FCaDnhfWb;w+VK5Xp4)v}L^dYFd>dzQlQjnuW*v z0X#wmwFvd^8x4GA-gK3CVe@C9L;I2IXW_M#+xpfrj6DjG-(B9skX2i{3mL{7b4T88LF zepd>NM2r3w3X(`5sS;N!4K!C!na31If3y2z#+d`0u>WulN#@W}_&O`S;_U{Z-j0V} z;2Yz3IxXKQ+F3-xon4hIPF#x7;ooUiziV~)*hFSY`zuxcIY9_yLnQ#f`~(1*;X|t! 
z@-7?{Jz_^^XOR=fK&u`xx*xf`8@YTEV^v{>ssk`_icpZXTA1lhk+tNvW(bKO5z1$8 zg1uGT{7p+R6jSNw^juc2!(t`TH>W5zFS$_%GCiK`R-oCB6Iy$ZpIlw7K?sz-q_3^4 zn3&n+W%rTOGhv@POG!{7UUTM7nHgYHjE3$H-o?K!s~`Eq|9ht6(ghWcQNwT>!Wfe;KxO> zO=wu6Jcr^h%TX7mhyXjXS}_e z4e3Uuee)4RypXuLHpHBS zBtZ^9p3RQutZx4`1S2(!A~h_B|0sC#_}FV&)Y9?_g*KtLY=9G4+nyzY^2^Nw&rYmV z=i}h(eFQ~&VV_$cL$q-YHaWHPndWZ_sc?seJ362XGjB6A=QS{cVwwr@m(v~Y@6$3e z%G;FjYP@?#7^lj@JSnT5WT61Zj8)0glY<3e#`Ekwvyb{mV%M+!9I zZf5WXkCc?pj$WY%3Sb6)BI~nD3n2%Q;Yaok(jSa?B*FUi4IyvkxL-}&{&xDXbGcKp za(mz6hVg@ymGx?pXZqiMrc%hphB1IIC#>3RY;H!JH1uJwceo_oZjq?6N1Bra{K@Uu z@!xs~I$fX_lu1_84-fH zN-;uFGP0CH-PCPQ0WJR0#%GTv$Gd9%jGX6cXC1HQahAXhzY~G0D2YcZwh3unokmB$ zlfl8y^RG#!h*S#w`OCkHV^{&n_mHsPwmoe3PN8XeGOl|`C5gkEMUmF$9kyVXh@0n@ z+Ua@|DVn!4fXJB5SBk!cK>)(zqT%T+g~8oZ&_>`*5^!FvR=HGlKWsg{6@9$ix;oi< z!j<@Zv25#PH-OXYffgX{GH8SMx<%sYIBBKBg_pi^tAzJ!BPcDUIe-@%Oh8SQDP3 zk$^0y<%`pw{hAIA(%{iFfWb1H5v`+{wHauk;C^Smw=he%^!O$++vv_@*Yx{ewLBVH ze7K+QWOo*kf5o8FB^;=od}#VVEr6|_CFz+emS9(~0DsRlCV+nAxby7)U0wO&U;`A* z`64(t;hndeuFjFhIL4x?mjxRf@Wz(@6%+!ha@tT(|oXHoR`slTAq&YATQA2DTX`9(*TR6omz$ zVWZT+6hI3Oa^24;R9@?IFCF|wV{~= z5NntTn-cSeS`@#z8(TWUa<}V_G{B`B{OPo^va7?qKXkeaJo+MElXz-HpX8Qc$3WS$!gymXR;7j>X$sVas{d zG(Q{XNeob-T*1$XstByi%(DhIS%#U@hsr_-71a=hOjt3vl!h%_DrCbY&`wqsg@{f6 z6SK?k@UX>iq2~jEzS!KUhoPrFX$KD;eg3(1u%r(9&Ew^RU4x)G39A3iwH5C3xcWI| zsg!ELMnxT2f*hM^Z@|jUdrokcP!IpetMs{-kH=hicuN-34MTA?0#v9oRBb|xI+?{? z&AddaR4|T*nw->hDuZv$@!34;%Nlo|-H306=EkNu`h^bjFc6DBtGyxTs|}g1xa+Zn zev$zz!(bbp^HVtoGy4A`o)0C|2hI|cwhvJd;=bo3Z9V-G@9w!L6uqA*Yruo18ZedRX1MWC`25#q2Z0kwhS9~+KuS}JN-b9yH+7?;N^^$&W1V5@ zESHdzs=|{f?PU28Q-(#y&-h1U&uhVLPv1{W5YZpKSq<%(ayn|2ioRO1ni@tTb{b4? 
z|B6OLLu_(nusO<)-s#ph;*>Ol{*A-p?vrx5I zyS6ON^g;|-7|jhMtJ2Gsa*cph-Vd6pG~2u1zCLdM%0GaC-#sp|e)Gn-0D7SvUAXHJAIdMeT1 zR8NoC-|4^nOosM!z-s!L>&%rHG1aq&7Pe3Yx}0P^kxy&{Z5O`yVsq35jjC3;`!&ar zfeV+@xUW7AzgNsD7R)6oEr4Y4IkcB2Te(y<>@k?|{!84t7(HF}P*e2atfovv1=D6p z1V4Y8E(xkte)F0B*u~;NAhdk-d$VF?1j};o?LqlSk%_;d8xlu-(4jU=lM%@kr~67K zVblArAyKDyY2-b^u%DG|!+)?&{g{t}F+}vDGQviMS+-{l`6kaQ$ zS7c&hQdlub*E06mvEkjo{3w;$%4e37BkUw5!k#J;GcXz-UWEy}K_DRj6t|z&?~FWajb$XrOl2BYzLOg` zC^W1PQ7Rk;mCB`!54Gysm!e517~e@|ExzXo;UCbWSm?lpJ9__}rH={%+B)Tb=PzfuN9X5DtB{b@_o}$ovPG2^(T%oQjV!FJysF|-P_>w!*ty3#AF7SQm+~ZKmFrE*!89Q;(bQ;^?CgA{L4jgR~Lk%`%-!>hh)^v zXD8y&ZG`mC2%mY3T#5Bar0r~YQ<$wC90h~O*5pQC^;H@e&@zl>vKvL6+E^M8t+$`e zU|s=y4f%QRgS;JOLt`V)r!)blnExKpd?WRcuZGg} z0xdWSyoH6AmtqvA7bt#)5UCaG`{!WiXx1W!Cm3|OpsjA*O8?!G-!2C(_hWlZcrevg zMAe2er1N^ z657*90{P_#dCll+bKJ<;B_(D`?F=uH$e3EMiRIJEY_;up!j0e;BwJ_aVSM#R@`}PB zzfOKDPB1{gK_a&Q+-Vpa$A3A$7f+sRLJNnLVt0ghdKK#+`w27(IR32DLub*6wz zpH30p&rgQt4ak53jkLh;w<4RqD=LF(0X*0Fo<+B=3)IkaZO^t5Y-S;L+5fcC?dzce zGWFT_QV|oQSTr^eL;vTGy~8s>RJ3!HlD^CgMhv>2FSJGdn*WW&f1pz*w=pc$T5Q19 zd|jI~j+kTM=dkk6SR{GA85KryKR-WCuV1b4LAw~Yyrd3*2*+m0PQNgqF|u3x=K1o|+hLHMmmd)PZz&dB0u5Ih-j9 z7Yw+)?4+-qjm;7_Aj>Rf%K5%?m42J@wOS@h(T|CzBvg`SS8D3y%HCeTBj`nMp!QX5 zqibVMs5;Pn%A&|>;FZ=!b5N{C)igZ6&%^ooH`^nbN#6*ne;MG{%bqqKpFl1UuRPeS zKUtxMCM#GgMUeJTDd&LoqbgQKTfXA^H)*-4QBKFfpF+B`zhCbi9z=N<6B53*dU-As z;O8GHb43YJ7q0?9RYMp^5mF|GclM*l-xyLl?l@~5!es%CjB54CwFzuo0hm!0`A_y-!~KgPzA@L><*>l}Eo)TMobZ8IAJ4b(@xs(h<(%#lC$U~JrN zBRRjCkdSa!t6u@~CuV}5u}#-qkWBSw1&6ytHkod-xH@wLjce$v@J%E5B+rAT|H?Qm z^MHOrORzmXxsm1#QC4ZN# zi^KdvV|;N_v-9}eL?*jQ>O1(qohVhP{&i>OjC>2e5-l?`>kV?r`bl7`aG&0Sh%+Aj_>Ao$O+H45tx5bRleOJ@dtEbqxUZmJWkx3wV(zFXUR zuDIMNm)$5V%=->Ag+$5QcXKZMqVMH7x*(SnVMiEQbud+mIbwcaL3!w>FO|goY?53+ z`Qe0VuiXYDYhr&+$GX``mz{8vldZ%+PzSgd@zm5*&MT!pM1r);W%iz%7G*t49CPN_ z0Da|%igZBV`|jPQ885h|FL~2_LI4%kO^LnXEYSP>uOZ0@!9$p$s&(&lu2HLVQ805+ zWk!Kn)qqKA^ioBd{VZ!v(D1Iw=-P;w!z7OqzVBM|fxGsaExnitfW($JDuTHX(#GKx zh0hJ;7JT>{Zln1*i!e2>zrucPs?yeu!|Ys`aR&znrHQ7}(d9|XqcZ^Fxbx4CuK+71 
z!#qwLJCqRHX|45m4}ac7?EeJ`)A zF91ShUKp$ZZ*x?9r_+Uv7&82RjzG#F6M)x*dGowt$aXsV0zn^v<}Z2C-yrkydi`Y7 zYp-ScB;o7eYky(-8ZgF8B6-{;q4&j)x?^MNgcx$l{*)m!zFiuj#Ber7zc+gy#XEQ% zrO5Hj>()CFCO$3%Ql}{R<__1APT@mP2HYqN4O1KTmF1LzpZ}P;-fT($nv`tcF*k}I z{ue{>dc}pY2cHO()bueYLO2c5IpRQw(`7(cQl&Z1OUnEqEYat5t3#cKFkyg(KA#tt z?Xd#X`1kRTdTV5xK8Ut2x>q1KAtyz2zzhV3o<4Z0YkBL!_>L=m)}6KnpPwH7YXZlb zYqQA!Y2W+v4d0cH3lM`^p5Qr3ZP{l74;f1#?`X9TYaP&RtAS=?6&^_LmFHNz6&@Dz zlw9sqWMt$X;Rp=2I5nu*zcUn-HhrxPsaY*0ycO}fOYK@c*$VtHc1S%pqXZ^Dlv3uS zLk%}aJ4&P!>31_8w6l>0Q(&XT#>L@N0r)C0NAE2jaP*#UMutnbni;y#e*17LV1qIZXeB-Z1ZZ-d$6Hp;?pRvF%XpTre@~!pB;tPbdamb3v{g; z?Rfr`Org(m@+QbkA1e5WlPg9|3x z9nc6TrtzDPCaM8n(N8qoi80DJjp8rS_{J^JK+TSdluKB&67NQUvofB_4YC=1h_xN$ z9es*;r)i0dh|t>FN+TiRCVuW|?Om(uu(l}X2@;W5E&3ckXi31=Mq5C_6GTYeM|S$GIA=!@So_Y;WT1Bn&yphY66Z zZ!ab|`r>n)yM(GXj>wjnpi4#JIXELsBv5b_vta4@k{7waqm>R{t<_;ZTEj9L)OxG` z!ZIQZidvO<1P((0YOz6}(@Z@tVTvu_a!0=UUY?L+m^PK!nJzYsadd7!8vK0E>ga33 zek!!%*mMvSH*Y7Htt;`0>%Da|8=xfwR#*_VuEZa(N#O5`%g+4UIm(;u4XZ+w(JMup zkTZUfkdZxWHriwIRoxak#@zVPN@JKZ7$d3v@oPBn})vCSFqOM!h| zSMLs=RNDwy$W4OXJOlJke)!{9e+eW)s0~MX8!_AOP<@$703r5x+#V`LlmXAO@+Z;c z80GN&SHhZ^@^lp#%uH9EE&+j+@ZjCSD(IU-rH; zOA~Zq|9qT3ZOLuE-d>mE{*f(RPX1t}`R-MtqdR5Pp}vAv&P1{P&P{eIYz?sP)q6IB z)?}!(xMC$KGqSI*x0CnvgCuz1hnPR_$5^1Uf5$Bco|d=~jVC)Gq}>U)Xf6D>uXKo^o+)(!AwxJ4vDkDe2{y82v3tytRl%Y6Y;vJgGtnljBThW1tk)LwvPbg_pPpQg2uIKjf zWIli^i+j;`LzUT}pQ4k1gBo68OgottYEuxT#DkhZi;Ox;z;hz{hB4?eLpfPbBUxWL zHaTw7Sw6^_N~Q#yHjCV{N>KAd8gz?<@b~ZEZ69rItwA&Jz-aD|uV0xfb~dc8_LnDx z?sl>nbR9}xYe+UZt@8sGrD0@btP%zA+08p~#L9-Pf(2?N=7>f6uv$y#AB8JDkSVDq z7Hd0dbn+A#Bla{wxxyaf1UHyg|6ysZ6b_gWlXEeJ%KLOBv_!tufuEZVsf3TC7PpG! 
zywgv}Cp=k$g!S8m8xZregnc1Ov9abI>nR^|F~X&y?_#Bkpn&{_BvMLB3c-3hFGlt- zKk9WHaS06qIyYwcA=Mq_WV_FqXh$brvE~;tEl<9b0ki%tuM=L3yd&qxl!4wJe_p=x zM+$OorF5yWk003$?8!6-*IMoHGZz>03+yEfbb0@{My1t9*m!gai!}K!*Oxy0k_sLg zy$F8!GKOj94i-goFyR6ZZJ+5CL{1g(D1d8NSh)g!@#?edj-|K~_$N{_q;XZPGJ+As z?{6&vI(>Qgdmi3=s?OrKf@HE8^4h|62~qx0JH|h-H8|3_pYb~lBybYZA}2{aXO&ms zfsvaVW=2Sct}I4*V+vz*NT$29k9%I757DJ}f3&BDl>r264t)QQuM`+z`j5}qq8?vF z?K_L6R=~NQg1WDNvF70Or)#aSTkmD4fgip+-$hAn6^ifjX5meO(V49`fh{aoEx?3n z0%y7W=0~_KC!mGT3p~gMUy~=3L=KNE7bHCN@G(*kqX;p7)RFBwKR+Lorw*)S_U{tK z!r|z-?JjSaF@w^K+mGxF!jmuF7-`u&Zhg6+A{{g|>1ZWGzSenAglHJ1R|uv0iUxka zekbz5?qHF$^HdrJ1IAU}xzF|A^nTDI8`ft2{pXB{sUehZB7BM5ux<&C(0jle7xN!y zvpQF{ed~HVSTE-PeviwQM0ep9vxUtiWWY=zdpS17_OIyl?oQN8{c**d;Yi6VK7L32 z3essCE^=DtZ^`bPV;m1%JZh2^A#IU0hv-<28-fWf5m*=fPOK!VwX$8Xwx0gdWZkPR` ztvA8<66*H$U)U!4-*dgyib^3IW?M>xM|v5c+EdaiDl%I-cf7(Qq}JVk`8QUQ)9$9N zt-bLe=6}8YZ`pJ{n;BtkZSC^+1V^|zm< zNr%Nm=2XsugB2zz4lpQkdxdlHooRnAxZts)yl<6>?9&)`WJY$*Oa-u~@Z?}icQvI@ z^o-%0>hfZy%ZG>7S_>?!l~%X3^8WvtGQP`;pa&2(rD!G30%*f$_ah(~Ss@rysEVEY z89)GA+CKacK$;}(sy-Nx1&0Q7x!|NDcr2#U4;{OgvHyibdHDn?S}*0#W9u`#FNDZP%CYHHX>DDGC^=6qjAu98EFbkS(!)t&N3>JO>=L5jfUAX3uSkV zbmAl1RK7%T&;h)ywH|xoB-7gXy__yc6Uv+Ku~UmF)+A?PK%_2y1-JfW%S0fpuC6|5 zpbQ?sT7H*wvE}vgyB4nKzlF4wCJQu@R6il&X*W)==ntQL6wh{E5RD=lDPkBB8qTM- zI(9V?2+G@|(^DnicYwRh_xQWH_T$k4tcGem!e}#+_OHlyXePmeIt&TEj%l{=DaY_( zOQ{cf0sxnEk@J8SF6HUvmB4!Eb656}^TTysJS!w{*THnrI1b>90Bl^x>vs4j)6q94 zbYoOzbcd%%tWW9I#%;8vP=#j1G)0l;(Tf|}h3Rn?MLeq8v!@*2Y(IbhUBBYt_XBYp zz3@N81yPYD@}pwrPnS^=pi#H&tOc|9s1J+&$QXi`MVrvnie67B*rAkB0A}@SI3!$fscfNE2Eg3r zmnmRY3KB{Tf>i3rDnN>AzL1+V5I3{o%(bHgL^{ zh1OZ=Ji>->I{t4*pG9w*4-MrU%Bj*&S$x4aqJ}(};rI7`#sTBQit-D3ra`akk$}+) zK?>|fim2;${>R-5oVO4~!9TSvBUlDc0LcV^VtQ}4Q5xyn#z>H{%U;V~-Xk7)`ALx! 
zD;mO~oN$Eyv}LFySaRSzP)4T zG_3VGh~QiC-)1tV;s~R)OEw_!Cgz0t5cTwYhi{E$Ly$RRiGX`09?q=K$F;Zf>(}W< zqayu=K>C~507T9x@67$PS(~AHRk|GcQp4!S`Y%*Q?T!ev$bfJP7^`$k^pFPE_w7WM zPCwr}Gz`zaiqa}@`27{45F7hRMXoeRkk;}cGJ*VqRjE0<&;$l zii=1!f43UeHLjmE9`1druEuLQ{65g_`zIwA7()Dsr@D2b-#>tacKLGt7%UiYq%rA; zgB)%~f|@}{==P+dJaj)Cc(ZdA{G{FDgAUp|SkZu)z>_g=!(YLUBKUiI%-_P~;BDQ` zRT${MO|0G%K@gMbM6S!PE3)dCSXdJH%B!mdPx4x>78?SclEXQ1%{7%~NwWXpJ_wz^ zR+L_Zvjp7Ke`yT3`_1**XEcd&6acluoQyht9;LhjrgKpyns;uik@J-_0a2e_sb&!Y z#(|2wLaJM3k%vr!keo>~GBD6_i71#kuGw#7;zDW7rKG2JHK0HYD175t#&;Zy;M+vYITFbcTk?(7| z+BTOuswM%;|I-2t3yVi{-$&7%_R4UiNuK&2XB z*dqSrKbMWzi8p<04iV=OhXtcKd@euJC=2^{Ttg1;TIB+iV7E`lu<`HHZ6)2UP+G;2 zrt$(V&F9)~dbHxusL_+56pJalcP#^EODen-uj6Jh%UW!vM~a8zT&KL22ORlrtkzE} z8%AA%{y8{qb`cgcVY_}-(9q#4^3I5F%*oBY8m_6AHZuCvh?Bb*4`^jI2ngR$4@Ae} z4U?up{&a}{J#ATo|NArXW=O8W0VSj((nCBHNhUPxP{?t!iK8x;nS~`{+%g8gVEv*S ziAPO@{d%>oBS37Y)iIbo4vP2|k+GU>Vr*h!h`8Z1Sx3;HT!UGILf?D4?&n_)|KZj? zkx9GU=Vt#y(pg4D^}TJF8akzh21Qyr1_bE_rMtVkySqWUo1q1yyOfabZjhGl_x#@f zT6|(LoHH|L@BPGmUytFBLd(<7{nmLB-MQf;d=%RbBk@0(W^|J&-7$H|mrz5m;*uo(V#S6(AjLIOPM zm{$rZ4Pea^GU>CG44gd|C#gIeS6CPOAwcR7fP66*vUyr%6n{wvLj@pIVq;voK7$tb$!aK?;|s=u@!-gJ z+D0;!i_P)XWCMA+VNHK zKjx?O;ZiEh;dJYsuaL}ZPrt9)bCqp}*~PkxF-r_Gg%RGYKTY32VC;jr|0SmuU;|Z?Ukn>^-jS3Il3-C`eeOQW;!aC!#0X z*?L+^g_?p?G8(SYgaw!PQ;jL8;)<;Pw=U&SR8`PwU}6TD9IMdcg8lO0GqC^B$hyW5vPbt^3A{hOBRMd!(A2JXC^i#Yc`#+6}IzW;>!|O5=lK$!X zla6UA4me5-IEfv%%)bPSM-v*q27!Q$j#P~VDCJ?*cHi>7z6{_Zhl+e)b^S^?DjXtm zl;!-RYO}F|3mcsuEaFhrid2xmZ?*dlg9v;?)Vz!mN=aSCW%FcWFEM-ofd|YNA9DIx zv|8>)CUhf#y})O2ixY?w0Dai-(|Kn&mwiWMX(O7(pK%s{@o8qcueH07?k=T zD~=q;(^N2g^d}u3(ui_^0A;uY-;E+{v3`4 z&CCedJy-(bH-zs>AtLN?T=6vx0^rErcajiVe{svjhp+ULYsW&)*5(p-!pQK!z5G@M zXi9Vl;;QuyTb>Z}E#Ck9R5GI@32A9a(Sz^EA@t&u;E>@{3441ME$x-bSr5{io6mK= zE8LtsT*9nic4k?E8_gR@RnwAv`&BzaKoiP7;%d(wmCkM{Lal<}@E|GK9)Y}GF^DvYHIw6`JTO>wJ1ZC zi}bXb4pl`Jq}soPUqnMV`a{$yyNqLAN(nm&PpGuA2@=OUZ-l-+S1^&dApwCjuPhm&BUP;vt zOr1_u%x?@8A^N0{AM?XJKO`|>?>hqWQvXk;Z>Q~l9$p+jivOrF`JMV{X@2F~m%$%C 
z!$cFF8wa|0n-C#2m9g9}C|Sf^GDj(BA7$O$e)byG7v|>!wSBJaW3xu-s}nbm_nd4) zV7P+K=`Z$RI|XG3xJUtyDC#{3#VLus=T9~_l?@F7HHbVfh0`zxTAoSJyvMr;%Vrx|I!=|yk6%mvn_&^@mRUmQSEU#!73tCzO@iY~UI`++y zQ9uu%Ed$LpE##iPzkqW2DEUA|k#j7A_hXQ{#Pa(U++XbdqAkM{S2LcHa6PoRyPzf} z)uMD>U-q9qtshM1vavCfsC$oW5KFv%iRtMDuCwdnQ6U$eT^jV1D;8~HArv;XQf^>J zMNCK-#)!R?HAfLi!#Yclt~ka{RLU4Z!*%w96hNJOukqo@x2eo!>}*cVUqjVbbxil;6wu(ZCeV_?l;BQ0+rb9= zW*F9M7;R(gA&7IlUUlaGoClLk~*44WpB!zg## z4hQ5sKI%%xu+`*Ycumm-681&24~P#EWg^^3p%*{4v->Kt5$|$X$nmCLTkzpIn~@Tz zehgL&%Xva9y;Y7d_@*{pu%t;9F>{)hgPJnjR~3GCh@S<419B*rXReS}FDv_@@=C}tK3c#+pdmXh%z zbe^0&jxH8$kEPxeHF^h{wS7U1D#VOTDLQH5b`{cL^|p#Rm}0@9Tiz6qwYEN1#L#XQ zyRnaM{4IkgPE?mwNgP<5KyH#7wZuVFRM^2O{*RM35pB$h%XsUgLeT&OV@uX`9i*Bd zmG)Hs6;D-MDCu!-F+~YcQrktYpsGCG_ui0>o_UN*{&ZN2f$3L&5O2LfNjX@hF3FTA zBq9AaNMxYGlI$=t6|a*^xj3}~O3W}KqoAr87FNA6FpD)I4TqbcB$vfct`^Np^SE-H zJUO)|o%PcY?-=*n({wswecGXubn4TfZL7dLCK^Fhv?|334PDi^Dg2Vfe|t3{AZq7# zXll;pU#BX=Z#867N(A-m7J}sk@x~otWfk$5#W*pk9Xvn6c_X0sNBB>%IzLidK*K)i zpAQMOWQNadGcf$>-;JE>{Sr83jGutV$&1Mqi98qbS2jk2?pW|aRN5YG!kNECuS1Cw zEVZ-%X(cGrLn)@*SPu5vtamu$B(-rXT$EG=$A*CbmH# z2&8s|Wo)cN(vpIkG>Qq_;T_BI1jB^Yx}OCvko?L?!QofU9S^ch9u`$o=`SsXl^%R8 z{VFgkH1k+ISeJj4_P2Kr%Y)5pOal*}KqQH}@$*WNfELObAu>%txVFA5y4R6WdeI3p9I?f)(Lpnv)W1uOkL=uR50Tgj85XR=>L}u^Ud$XT zIh6y7u#YVS;stk$bRq$%+&r8k3M$xHX73pphT@oq@BcDDR)F%kK5k20v%Lv2RRC1! 
zg1@(=UX_mJQ&1t)3#T7)Rc-Z1J`P^Pk@SqJ$3cXz>$^*GWnVWlLutXqaLdByD3b@A z)d{Ac@#<3x>+_&e%S}&AQc2cqk9mbEEonvAA@z8~VH|~#!X#~PndBlgGF|cupMNMD z1pyPU=sAUSbd_|X&d#_VxssIFLWWxf@E0#w>N_i=>R`lFnxZ}NOi%s^5+^4FA?s`0e0swy7gev!#7r$dOrhAoknuVp2L)8?+x17;e;PjXYcy64rm>TP z#LYPW>x0d0D>6LLqqQ1J8Q^*Y8&87!)2B?q+NzjwJDWETQzs6ED|;YQ^gisVZ6ad@ zdJt^jAoY8}jkxJ;k=-SxO^+{Qx_^;m&yfdUJf>$~XvhM@k*BudQ|S;z0=O3K3=svq zQn6@cY0)^SVgX%!0n`Z(${-0+_3ea(Geu9T>Y+&3yhEatW{3L$pu_T^TC9}uWIs|g zZ9?j%B6BT#uJ!9hCH)U2;gCtOk~Kr-Y$Wt@F&^9)tJIt!C3FZhzdd%4Vfos84iB$` zcJ3YeLBuLoNM4v&Sx#U-r_&~wfbH#f3Nlp4aD1OXQ;o8xGV2nH|LeY5Z}+%Pl)M-g zT+bnJma82WD+)MBZ3GX+bte z1kyH@D| z!#T@;V)F2{@q?8Wg9=5om0-wu2@C1+i(6m-_uCefbchkA7d?fv7FyD&W z+=H-t-v1r&(aPiuE6p4frX>U5@y8>?|Nl1L1r(F$7G zX-)y`vLcmdC^P~G**8v(1eJwYOmqsAlo)zZ@(kN7i&>GUB^8DAX@ z&@V>4CW*hnD8k5X|B9TLUTa4mkNfJ<{^E-Xo#Vt%%fcXKq1?Q>-GZ5N=#!@1^fu2W zQd=<9#Dt##YCHXu^Ru(r7rZ`SC{Emf-mS=YmE8lv-UGK@lb^->+_xQYfU{WmaPW7& zVp#B}91AxWstG$`Ra2Y+$X;aF9v>W~bwJhO&4MOD?cFaMl7_*Pujm>nHLwovs1N|Z z5}*&)ykaMY=L8|swe1h$3K;2O2dT=lU5mw8K0hKbLH5P? 
z9afs^BpP*Bx?YaU&(&2_#9nl?w4wl-6(r}9QIwO1he}XTaKt2w-5NGZ;(O5#FF3!e z#Ze1u%Ra)&je+pmFGzZLxIt;aH7>PTjAzU#jOMUvhj;rAdK?{CxN9C@js<;40t2uL zE#TNFA5`TEcxHb;3*sOXGJRiDoR-WB=xhRlHs) z4C%xV<9d3KK>u#dUBpwYM+o{~z8=8TpxMIZO{6J}AIJr#m#Xg|d&PMr>)0P@8&rT`o zs{JPQg*Tgic;8cnqawlqQe+%zmDU7(eR~)N8LRzr2dHllSH+%qlHX2}3=HMnB23E$_rQmwh^go$w zuUCA?lO)gPXF`1pcXcGboa)6JQvu?6|EL^&?tlYvfVD`DDgFsmYU9sK&``~XI6?^Qm0YvWo|>z9_MPy+87pu^eK9GCM<7> z_}`+$uct^M5*SwZ`4gg)=}(E#1yxl{X)o(Dt4er40S6FH2T%*A;)~t4U}0D})R*=l z{J>8CrIapN1y_|ytq{k=D-AfETLnd54KL6(Fg^-_FGyrdUx+S6% zpSY{i|pt#vdBbTmX#7~FG6t#1VA3}??fDWL4-y}(>uN3!GTCzHqU>% z7kBE_d}zlDwZSt>aX{_U==u|_K7?h`al>3WJo$8XZ`jnFpQs!JOv03#pRd<@>-Uk7`W%550aP)c z`)%dLc3(ydjnDIiLtW zV8+EdVix|zxqS^lh_?2A%^wLHVqO}8?{p02pC~GCBu%kWFrmlHCl#An*S&b^NN>HM zW8yp0uG=H*$<+^d?lVHB!A+MiZY=XyIHp(SN~T15W(A?~u@7BLiuw;p)8?RSrcO;y zzXNGVn8_FWT=XK*F)>AK*3!jk+}f5$B_;hV=#EFvB~;K;jr|9;lFMnQL&oN(E4grS z?KyVrJw6NDI2I3Y0OfT(>BrP$gg_l}uj|dxRc3?=LhrSA2?>fiJ^lOO`L3&epM|e> z9qG2UwHbXO(bc?>*ENQJ2d9>R=@%8aM$#%2_AZSpMTj}q0$M03NgH8AS6wb*DgJ${ z32dxDU7x49Kg5jb;%zF&C+Q(dNv%_?EouvA#HclYEt zB~CbGK_Ym6`RT$_;DzFw%*|{OrMD$uYUg61C}J2cc2R&q$b{~~SR~wuOhl2B4%_-Y zy||;x2hiR9-fndkM%J_)SU<2i6V4Gou~}U`7Q@=zrwZ8|+IZn&u*_gXOsgcRRLcpS zt5h#y+eSD!J40Pq9hsOI4upTYoY%JBF3qy*+x?A86*XmUd|B=!71)z+9~W{yme{zj zwO{Ohba=?B<$X6GsDy3;tc9wBT`eYpewXNM^q3fO9+WbDKi{FiD}VdWkcC{&0f(#h zH9NpB0>FksrrVII4i6{WWwooh)5q)A0o}{xCK`q5TKf4~7ydc_OY`bB3AG{~C?_YU z-YkzYh8C{d9k4n?a#bORJbA}NV&lvLA=Al?mvh6^H%r^)EYIpx>}-#r*7VlRL-2So zc2H0d91tK$^+F4%C>a6Bt)gNgl?MHfn5&u&UPi8rScg*G*{ z90-h}JzAXpUtsucuJ-^=pB}})@lD^(<*v%HAL?9V)gg6p1w+mKLym^qUj_u9E-Nwc)nz?#%^o1CFb%k*~}{C3CZYkX%ht zR^0Q!R>BNsc#s8Kl3rXr?My3;nm){DT-wU0fiT}@tYs{iwj`a;95znd4>9xhR5*Ml zFkk~-58I2A1j9>fC796H6R269nVmJM${1{a=vZ?0tFc_xmFNb{(i~MJdk$h{uNu|! 
z*EV^{y}OpeK4s1KEjeNT*uaue1yDx~gJD61Mk#74 zhH~T7F*W2^(c?hYWcYJ22t&Bx+ZWFlPn(qsX}hT#MWAui*4|#^(2GnPu$W$neEL^N z*I3lG@Ci&~^(nIfnRu5L77VARtxYmAGGe6Co4bAY)a$4tVP%bhq=?T;1EQNTgAN+O zQz0muvkbII&j1^M7bSzK5I6=IA{;Ph6+D+qcxz$CsKGW=XS((lqnz$ET?zDN;MJ&5 zE6hvf+})q65aV|K9G>cham?;oh&1$X5$S5S#;|dJ$<-o6ty9MP$UH^G6xl!FneOp`6aX^Hj$JjDz%heAdO!V+!q2c(n^?X`|n>*1r52ieD#w z-!g*8&ux>x8%s+*_Nnnp#Kb`~npz?RXS?a$7LeKtdbp2g>RHndf~qi@vJ@8<>%wKhUZu@crcp(QMe&_RQ z-c!!;%;Pq9y|DI50Uoe))W&)E%#qv^A0EP1M)5&)GYeBX4^}4hyOPWbq7k-?j7;IO zaIA}TBUp|Ajk)EBIf55SqI>wh^MJfRm*u!uq{PoD$)RI;Wd%uWlu-O~QQoyFuN0$B zgx&r&JDgMg=~4(if)W{^!x2qI7Zw)6ndf-h)q-#Vx&tBJ`=%cO{ohoY?lScpxj?l@ zI$}k0^8`qGUR$31VylVFKwc3DE2Bgka(~>+S&FBdI}6q4wxZC9lt%* z4@omhc%ZLk4f)NbsJ5>Z9AEmcrp5p z-(he0=2#VGQKS63eeFLku~%om6V7*am0jcwJ$ZDYRD8N9KB>D8A4X89 zrQZP$4^VY@a&>qo!ad8yt;6AVqH%~C?IF_9B#&Hgzu1xJtJUqd3ByQDa?HcnK^YS# zF84aU9QIg#B2^d*%ng8(4vzLs&=Wu)QN-mAW(4h^E2O$-LYBf-EN^Vp_9;-cvvS&;kYZnTMgFCSrx z$?EpUv%;4j+X!+G>{0bY5`42r{^n^w-1QoXcso^aA2e?%1NQRqR^eZ(~>8t zVepxIVd2I0b+r|cqz!MvsM*?n+MmdX2HvVSByI&0rtmjAJ`DH#oJ3Y)6G@%L`g_PS z>7BIGBSBuNrG>S%D6ld5e9`Z2tWmJsh`B)QJzM6DB+yg~>LlkV^0m-(OSldn#h5Dv zo)v&fX$^Vf5ilO?RO>Eryal#-#T9JX+=)C6Lf2Rf|4rm0xPYaGG`S2OdvflDxtoVv zUxp{J(QO)&Ca*kRNCgSEeFRB)eSsT1G8MOe4^s0)&O8PnUmo5Owzg&jzRv`i(&Oa?QLa+f!bc+0}f_px|`UZBF!uIyhr^X{!&#u4+ z|3`z>y^Bgid^?0PYOauuz<#YQcW!?E_PFb1`4b6G$2j-gf~}y_ddC3ImI25m&9bZ# z*9L|ly^zkr-X7RVLRL<{@&#q=Ko)Jd0kEO)2Hg*);YMzXio&dCCAR?aR2VgYX3r@f zt=-B&&I9~sfZE`Agj8p6)J*x{3qRm{c&TG+YrFOMIkJaw*15jA8pE#Hdvc7E*ZdtK zmppzFwRx;svh?IEyfOUNdsv`SA?@1@IB5T$7l@=q!2y#5S$#7m7~2Q-E`2y5Utix5 zjYP#BnXR6Y^WVas9_Ei)_Fl_?XrGsImxBr+v$Pj@$|E1eHdS?W(1$c2z0Rx*GUZUd zLU|Dp7hH#Y?nMO-6j5I9g8XqH$PtCUM^FO7BnJp7VAxfB`G91^@&NZ*c1%q2XDZ+H z+5`vD-0+?C^?*Vrmxm)DE;!JiydNa)xL9bXLRU~?p+bN{|0g?+DEyD34?xCq5Bh6t zq6gumlllgS#1zF86{i9$g@Hd)p2I;=YeS(k-v~Ap>VF@S-`-B**DYvO9m@HZFn@6M zKq)<)|J{5W8L4|WoMjWChK4`XPTqutLW9E21Y^qUkZprSILLsB8&(Y_~-MHczn+`k3J_8Z-~jG+VCIUg&9 zwUXe1f1CkjEx@9>GYKMy05PM)O|d29p$d)Fa)=+W#V`$} 
zeUrd6z!AC6{mhm4Ssw@qV;P297oJH+Ct49I^*s@@<~s!GdcKbpMk6q{uaWQfJ{mE$ zw*jSbP2Q?rnVCl6QP4?k3n0JQZKKK`p;0tJ0ZLqhDCXJWC^O~&zok41T^L)~OC{F` z-H4n2%D_@X1s^n60=fDJ(0YRyFMCJ78Nd8(?g@x#=8H#QZdgKZU&BoP zo$}5h^w<0rQ&geQOqc}fJ3tujC19yd0fqJ*T-|&o`7oY^6ZPKs7H8OS>F6NeX%(S7 zs_?ESrp^WXs$+~liIYio;`gr7qzv60Wdx1S)9R?h5D;$r*9wHoQ*>jQU%?_5E>1ig z>qqK{Ae(50QA=-nl6!grfg02Ayu*IHIxxJ6W}kxeC}6WLr0W54#B=M=$MewblSd4- z7$O)cs8$VX^jz4(aBfWUpJ!NvDFmpc0tSDa%1jIs3?tJH%mU$`IKHFMQ)6prrH_+2~EqMFg3y1XzrK)H3Y9i4azy zl*o>O8Q5^mD*`7yQA((qscb4EkSsa4mi&b_mMfg2j!2inV$n*Y7^CZ!NC(ozEcVkL zaBwNKJeMC|GS`EcfLS{(V1fj(q{SZzvG2ymEr}-xCR+0YAqy2LLST5|^_q$THOd@r z*>(_#ABGghrQ5tAP94ENL?_)hDZXwV@LDBKd za`Fnr`O@63j{UDqcd@k^lV|mXj=b~A%wz4SvK61tQb$#Z{CFV~fY+KtQxU z2T$qz)_#y1dDZWDcZ|_i(>OkbiK1<|L14`21CZ!bt6Of{o08CTSGh>Z+N+Hqv!Re{RfsaKi{9@A|v#XN&Wlx&wx9N z3hCQ^flhFw5&PDvkoxk`WQ*#()rE2 zTR(~+x$GcWyH|`cHIlB?4$S}7w(U3))q}6%=HMk9`P15j`RqNY+(Tt(I_)!TjAaJ@NxubgSE6!;NHzKs`)(L^(3NT?nve~CEh~_Vc z<_h?{a?YGpPmq%_Hf<(4I_AOpvh2vvQ8l4dxR-_E?Q?a9j>W5nM0kymp4FKz6Jpy{FP8=1!ZIXD(d1PNDo_bNX$O9!tsP*D7mOHk z%@dlLrg$#@^)I1nkrhm4Gd-%F;SEw3K?aVE{v}KS;V3phQa?366?l8=ww9F&Gx@%@ z{ch7wZqq6ujqu3ExV$PipwhmZB8n;8xBz*0D^R;8V982!>9AhI?{*L@=t_g9iCVh_ zyH3v*SDD-R^$+z}2yfA^x&y-P&uMySaR3+UE`O60w7#5VtqXI`vj;PqUIq4Y30(#-9=vQnvh;afuY7@H)=Ui_&;VxyP@QX*e^D+k)93DpA2br_(5REDJvpxJ43i}H10!Lc z#7D|dLz|@h_~OyIMbs4V}uyG(+CJ4~bSC?uzK>E5#V- zB~z3a~ArURkrAlhsTGCh{QS*LmauH7WU-i#NNMkNe{}!9{TzF$?Y2$wTPG{Vhso^?`e+<2v^4+rF7 z%L{&3`a9iqr>EYI&p6@zb_DfLMd8~)1jgvZ&A&30Ppjukv&fQ0zES=?4oP-<1~y6BPZq>fzD&b&6Nr2zAHCMgtvr z`Jp@>#Z!jvjScf26xRax=N)%N~rrM44!qN--}OAouI(a5p`X_})EBa%jN zaTBl>I0(stGos@HH;#L0?K>kT)?ZmX6hnaVSmS1*K%xQ&!KZN0<%)$bu*JzsibfPP zH3KsN)+{O2xjTC??Hk>qiRX4*<9y4 zs+z+>Cx5>Q;6ID+bx$nYrIaYnvSKm#}a@HB#kn|6Eab!f5l~`PNFXP*t%o_ zU|G4#w)oxyc%s~=yKcvY^Rp9Y$&5)4md{3rc5VA|V>$c;p=umriAW_()iftvscv#l z^EY9o1$3DgwRb$G^8Dn+=#}~}JnJ536p9m-bXumsjIIx5h8Q2VK(J;FAf(%qfy_I- z;nImWB=aj_kJYt=pzko-maxCQY5=}!AsrBjwVbbeUz9zaiHn;KK#|2H^e6T(~)OAc3&r{}~_!{^T{kw(yCMNm+owb=X_ 
z`(qAm41Ir;>DA?WCSUMF3eJV6VUxLdEI;;PvG=#Hao88U7}gQdlu@i%pB5W$jeRbb z{V&Gny<8fb-eZs55sy*I$j}3ouZv0j*QksJvyU<7G9Rr3a#R(8e)MB;Ysft`{>vjW z&)dmRiqV3I*hA=s0A{m}BO*=AC-{lWVDh*5J&b&--}qtHMt^XIHwoPZgKR43J70I2 zFURy>!K%v=GqahcG{arlVO$|0JDwU$YCdxs(eC;WJU{1bg+>EAx9~`cVqS#3x#L6u zAXu0GdMo50+ZjMlxc4oHuE-1_kXk6n&t2mnkh`^$b$9My@;{w$NXLIF!P_ed^|*Hz z?3;d2RxoH5QRE6RH0X(fRci-r^4{yLr{6Jss@uB%V7|R`vtsnx2#}2|pm4bpai1*f z_Xks!s6%%9h{J+_)Fs8r<9zPm^0&9W!32+f-?5`PY5qId1;Fi%)I1E}cu2qY6T zAL`|!Rz;OOXl3xuEZ1cMR*gS_J!9+6>*CS*+h#Nug~5Tkc3}Yh%GDd7HP*K8ilkRA z+Xg^z!5b7(pYPEd)5mEd3L}pFy=;{X#OViDQwWm(-<; zMZm?cjoW%W&V9SM^Y;&c|8%PHl(%8@XV&q8ks+!y7fr?z@9t)L<=El2IXs+^4mRRUkrIpDjlPYNZVh3&X}R=&S};i>xoSBwakFS080tzP@n zuLf;Ogvu{a264$|TCLCTix++Xpr)%%Vs#>>{#3IXf+@B8QL=5K?~cKzlQ!ZWiDNO^Lo_6f4Tz)k_*G5W6ff6m$W zjlTJSvleQkKTC@Ij>|*M|l+8cjTuHXTiIHf`@emsPwZ zh5S8wiqfjzj_3Ux*0*qKMm~iS;n&x5taq=kC-xL5BjB%^+*lw_?iT*GWYgX%X^*1< z5*Qf_Zxr?+hgYKEEn0VeLx?#*o8b2L8@dAu6Fl2>OdmjXQmYi~Od;4dkI%-d4-Jgd zh)Ps5l|6QYY|*7V>Ebi2=NBmH9MyIM@urfA0PPRomG@E_^jiA2>;aV;F(MDWC8 z7=DLO;7Tpt%`*LS?DaWF(`L2aN*dE9DYQuFpJ+*h(@>Q3W)MyargC3%G3^duxMs#3 zO5Fig@a8~~nc>p*Q8}?PB28uQ^1&nXmG?7BK0ntz)+I-puRVPY-@G;FIi*-y-V@rpfMP4XogJ@CWDF3Q^nh~ zd!{XyD1Z{n6~bT#1+c)BPF?8Hgivm#DWsUsV3FGAFVKvWQbEqIjW3=_@`j(!Uext_ zPQ)cmvxm21&?u6CyJu?GdrCN2d*Grg+Eh9ut{!G#OZkqsIbdwIlTvc!@OXKpsF9Au zOBu>V*1f`Gab8tHJm2L+NW=2N=5}r$3tP3kEDxl?zNm0bTGf`)liaApTu^;e_6&z} zfx7`vr7@xWFw(bIrGq#`3mp`y3vaBPhSywjiTS(gF>rsZdbT052FVxQ9Q@+;rA)$h zTNEcl3?cy_JN@qoUslCD^fY5k{WJWrGSzT;fGt5+k1QW~^X`%3@k^H-%d6Xy>HYkm z`%uTsnQlAazeWa) zU8M|NgWvnzzfWe=*23SaD-cSUBuLa$@uS|0;&DV{kRIhM6S0x1{fwZDx<4FG^8!_T zyei#1QBK{2QZR*UZuavhcaP?%+Djrk{6j&*;AtP-`|DgsZH&&HRjse3HZfqXh}U~H zgfv)d-^q36W8<^OJc)iQj0Ykge%ZvVT6=b#Juv+UvRJ#1*GLvz*HI*%H847mNiB|n zPgR6rG|u=oPQ=s`l_@S^spg<@cE-$kx?x>zIG(K)8{O4_$D`wfN(T&>!Bt)r>Fn-62B~p0qSui1S`7v6 z-=>=v{mmq8{U;3Nbj3 ztHt1vTq+kq$X*k@8U(z3@hlS7oB#>PM-gh&u(Zvce@JiEK(udAp__YPz#iGTe;Wp6 z0pm=7xMkI-k@R0XP49n;mxaKrRSBC5vL1Y>s1{^c%wK?TM1L~i#%*G*4YhUhl3;gvg=4VkoE&1 
znQxSNhlo#*D>Dg1uBx#v{t2{usAEHKF>ChQS_!?S6=hFgDrO8b%O4N*_=An-Rfl?c zupsO!XEUT(t2%s$(&)LciAj07w=A<16l@vG<6kx!+{SWeG(T&Fj6$4fR;yM^F%K;* z=$yfIM??6LKO{JD$u=5+&8;MLm%EOQXyVZd7+ReS@?f{VI?S%pBVQ8~_JG}Z`7Xs@ zFm;@dH@2{TLF_KK?l1D1iGL0eqt|PVd)y_5rIU&dflDd;Ji13ohbd5ZvU#Hivl}&w zkQRP~ly0B-ewEAPN6czI>kX+l1*>M~y-?!o96_N4zn4PQ%y}Sz@URBa$~$^R8RyE< z=mIS!CSq;4B5(qPEv05|N^oDkt;&CsgWC)(xCLZ-TY!f1*7dH)ZY8(8`^D#K;c{CY zQT&#x+4z{}$yg|zEo>)Fo6lmR*O96V>R!hVolxcQ6sFP3Tt&>rj38>{W=Z8)i2JZ(Sn>S zTz)~EfwkD4I*JajS=;`KsPi^sjlqc(hBa^3iz4}FGSVkbw4nh_zFKz^lJBI#HX?J? zYhHMb1zSEeIdv-D(181+D&#%R})Uxt=#W}>MJUCI(^hvdzk_YCXXa3NIf z#0yfLK7UUv?o+RT5-f=6wz+7oV!Tue3WP6^cM+9{d$O%605>rH=j-I=#!|Hz9^tZL zx_R*LfncmKSJt8J5F6sFuT-?%67-xhRKFQ~U{Sv0p{Gq?X{y17(S3@?aKS)-2-u`UKREg-nLNd9BXU`G`F_KNOEM}?q)iIi-X>)?P47?gu0f5p!e-a#=F`(}1?&*_2(M;0 z!H}IO!Jq?_QhDNNSv9hF)9BK&PyCdh3{0B~_tPseK`pllk-&Je-oDTaOoh>VkB5NY z|5Pt`6D2A7aiR&pW?~Mc;X7UapVz&g3F}tI=xYY^hs3tE+)yTz6(_X=Y*##C>}xBA z+m=rk&ktc>ALmf$m4@c!MlijiKVJU6;6%xoFZyUagd*9!VGSVxdH%epQc1xxSc~qC zWi$POScQ5D_MiO@**uKs$5DKKe)D59F!)YKF?pz|beRjltd+kCU;y?`D^f~I4ZkJb zmff+EmVG3~O{`-5ey%Jr1)2ITM_N6%O#hp(r8&v_Nk4_IyrE>OX+K>UDEx(40Xf zEi=86gAEsWV89)DczA21yXMx>S&Dh1e;O)C3E0w7>V>nchQLv>qfS5d&3Kv0soM^? 
zG{T(w$_v7V4cMg~cOgMbjP_wm)P-#6GWA=dof!%F9wEr>&_pj&nmY*SCcmZ5Bj+IoJN8SIsY!l$0> zP-xN=A)!1?|A&>GXo2xOk0bxZu6EwP4bZ4U;RD>NP&cM8F9bF=t4r27SK+UZtOK0Z zbwNF}x+IKbFIb{_!{KkQbTuu{KZ@@uH5I8C8RqBCdzK-VGE`u8;y$^b(Pb&y3t0S# zm(%RLJ#ZEixQsKzsY^C&8hrsPB*>Dq;U<5!c8a>(_dtxSZYopxp{yF*OnaIzFzC3_ zV!|p{wkPh{tlz;4!4gK%Qw@(;BOtFpUJ&&PIGeg zidyMQTJDo%9dCILO1WaFmhKruU^LYpdAaLpXpPZF(`tLt*^C`uJrB%Gzy12vUr=Sl zSvr;*zjPH3=$XO`^2_lEDlvec!rutFgzr+@jj;^G;!=HrD_Sy}VS#@u@SqcKR8 zVb_b2@=o>`z2thpyPJr^Y%_V{OaB9a2J?NoHAQGC46H!CN+S*DjgFU?YeD%1jP&p` z1c)67h=>vuTOJBUI=wonUvFUus2m1!2b?7IJLtX!a!RXFI%HGPVHBfSe5x~a-eF$7 zU!lER+CNpm<2w3j3}jy38+i^VjsiO577b|G6RL_LT#F~C$~?APW@l8*6$r~F*%#VdGvo|B?N5lsq6Kp*Beg*m*0gi z4X%|5rGR%i--;pPxL8_tz2DC|_1HfuyUZihiXqQHwFq!RswrEja+BqyNa~U-@SY(s1UKScyc3tbZF3pPL51)7CthGd z@TyLM*l@DB_)oXI4jVi6+tu;bm*cr+FkV)Y3?#8iQ4MN2u~~LqAkJ8>f5``x$kftj zcfR9hMI))dBbi3aD~RItH{3q^`L2*Hw!359o~*|LrDfEWpSFX6zE4wAOR(K<3JlhI zdgM`2(TaL%VVRkiV0Go-k<3WC$|{M$LpC}hE>uz~1=UX@F3Lf~#cM_);j{_(A+Inq z5Q)dxUz0#n^pq%EoI~eI5*vi~JtnA1z-rD_j=QsY-SNT8TNq1#{l!eln# z_S=7hQ%*e%KA$gbV3itQMf+W#kPzdhs_EpOQsLBCN}&>Z+6o=!9zDi1qx89q=cQ#N zHHQ#R4{L~*GCvqbs-|J$w8LWnix7gB-&vP3U=aY8JS!Tv?!lukeS~LTS%Z#l4+y7M zm+UJsOesY{VIh9$tj-3^}OdT%Mk0ISW6woW%5 zdto&mdF})3IpFL!NQKOnS+i&3f(tLkiWMv1^?K6^R6wX-An6sX2mvHX>9c` z!gVYK4vhh@n??9#6~mduVw4)Kije_WghUtc<2VkoEGJz=ZJ=ay^?1Y2JGwnE<=f!z z_Qd6TEO}PE^S@f$``GLF;IoE4b5cFEDXZ0rl`Buj1s7e6v17*~5D38Q^`?0~QsJu@ z{Qmwshf?(fX0sI9&2YMW!Y_IK9% z114vZrGu}E*5rnV2s{UY=MuIg8|tEJYRnoTgpCZqVvI%YJdOaWlJSrZSS(hIn{XHo z?A;lof&Bijui)-m&IbTkttK>lxexa|{s#W_-!&kF10nt6rOXx!7A#(Zm1mxVGtN90 ziXuZ*WOQ{j4H&N%imyrrtkS_(5kg=xiNj74Q8fyS$ipWM+W^sE7i-p2!x||jA&Jqj zXclN#imG7gDc`{}f4etkqdouDT0C^eW%z94*Z9fpkE3?m?mlZ%Ax^5Ws2Fn>EXJzy zFT#l{PK6YZplRx%dg}w&MJW}NOqTo4SEU*zkum8bnA$BOya5%%)hbktgMl+1DLW{D zrD;rw3Y;d$cvu6V6N@wFEsi1H0iet6#bL)^iyiw~`UF%VkSZ!J!Ggt0aOqXoV!`4i z@cVtBl)@K>m*VY((0s0R9u`RhM>`P+=gr4e*WZH0OOA!#=LKU7UT^QEig`;u zG`~A793Rhf5JfTR=awX;-x|XqPv8*Pm4pKo`WPTg0JF$L(S~PugXH(Yz$G?fG%VUP 
zH7#4#v)OFHqGM0Qrq4d=yQan%IF3Vg%>?}ThFfszcUB<~@Pjc1pEvb->ZX!0P`e$V zn%YO;A`nFZf{;W$AOjRtg{t=7yfK-?q;n86u3d8isHGTSeFGdy87w9-qur7Q7O?zA zF8qv>c7p;~PABzORz{c%gg}+EBXLw!Rh+u&0&MtXb)U4U!onh~SouBN^y}Y1;00)! z)~_CbsuY0RdEiiw^FFF7EQKS1q$#+>;~gSiGs1&_fsi)jmyHE)>RZ%(zRiSgUm)#G zD6&BU0bk6zUWI<7ilcOh4D4>N(C*@Sruj|hWR+JIx2R{_X6Wvf51M**v8cqT)8-Q6(` z1xo42;IPuZQqnA%rs0h5UxwAMzi4O{3W9*8E6%{(5B@dW9&8|zUkM?|FROyLy(PN_ zXc$Z0sLFmQ#0)mGfPga0;;|Tm+tmp!Hq6F9VgXB4m8^0JkFA-6Ig5`*IP*Nnv^ARrGfv(V#m$7ooAfkb#$F%kG2VCBSEc*21nM(cwBP~;f8 zs(qvZRs<`_pbfFu6@kxTdrpjng2BQI!{&WBo{uR`z!*An8rDz(EG{=xIf(;UltDBN zpYk`BWe2n3*#_}mO62`iRna49hlqP62UtT(ND99osb0C~A22qoyvUlSK~v0K?cD#+YG%sZoO!qh=6#d@3`%zmc#w*NyY-q^`@1ziRvrN#7~_ zay?cBGkyoKBrj_GT>2cb*xRRYJ|_ESjA2;CVFi3XXqpOvi!80pT1PHm5dyEa9sD$(IHbP-Ff7lvAuW!kz4-t+G8&XQXI5*@>6Ys1 zF}neb5B)4l@}-Ve=M%Z*k&~QcpHx?mVbPknR(bGXizCy|H7KO}q)8J%DP0br0eZyj w-*RA;lbmF)Fow&Z^Hv8LV`pXqSOLKQ2YNj4@79%l_W%F@07*qoM6N<$f^r-x '9' { - return false - } - if i&1 == odd { - sum += t[c-'0'] - } else { - sum += int(c - '0') - } - } - return sum%10 == 0 -} diff --git a/vendor/github.com/brianvoe/gofakeit/person.go b/vendor/github.com/brianvoe/gofakeit/person.go deleted file mode 100644 index 5fd6cbe22a11..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/person.go +++ /dev/null @@ -1,45 +0,0 @@ -package gofakeit - -import "strconv" - -// SSN will generate a random Social Security Number -func SSN() string { - return strconv.Itoa(randIntRange(100000000, 999999999)) -} - -// Gender will generate a random gender string -func Gender() string { - if Bool() == true { - return "male" - } - - return "female" -} - -// PersonInfo is a struct of person information -type PersonInfo struct { - FirstName string - LastName string - Gender string - SSN string - Image string - Job *JobInfo - Address *AddressInfo - Contact *ContactInfo - CreditCard *CreditCardInfo -} - -// Person will generate a struct with person information -func Person() *PersonInfo { - return &PersonInfo{ - FirstName: FirstName(), - 
LastName: LastName(), - Gender: Gender(), - SSN: SSN(), - Image: ImageURL(300, 300) + "/people", - Job: Job(), - Address: Address(), - Contact: Contact(), - CreditCard: CreditCard(), - } -} diff --git a/vendor/github.com/brianvoe/gofakeit/status_code.go b/vendor/github.com/brianvoe/gofakeit/status_code.go deleted file mode 100644 index 1751c0fbe401..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/status_code.go +++ /dev/null @@ -1,11 +0,0 @@ -package gofakeit - -// SimpleStatusCode will generate a random simple status code -func SimpleStatusCode() int { - return getRandIntValue([]string{"status_code", "simple"}) -} - -// StatusCode will generate a random status code -func StatusCode() int { - return getRandIntValue([]string{"status_code", "general"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/string.go b/vendor/github.com/brianvoe/gofakeit/string.go deleted file mode 100644 index fc646cf38ac1..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/string.go +++ /dev/null @@ -1,48 +0,0 @@ -package gofakeit - -import ( - "math/rand" -) - -// Letter will generate a single random lower case ASCII letter -func Letter() string { - return string(randLetter()) -} - -// Digit will generate a single ASCII digit -func Digit() string { - return string(randDigit()) -} - -// Lexify will replace ? 
will random generated letters -func Lexify(str string) string { - return replaceWithLetters(str) -} - -// ShuffleStrings will randomize a slice of strings -func ShuffleStrings(a []string) { - swap := func(i, j int) { - a[i], a[j] = a[j], a[i] - } - //to avoid upgrading to 1.10 I copied the algorithm - n := len(a) - if n <= 1 { - return - } - - //if size is > int32 probably it will never finish, or ran out of entropy - i := n - 1 - for ; i > 0; i-- { - j := int(rand.Int31n(int32(i + 1))) - swap(i, j) - } -} - -// RandString will take in a slice of string and return a randomly selected value -func RandString(a []string) string { - size := len(a) - if size == 0 { - return "" - } - return a[rand.Intn(size)] -} diff --git a/vendor/github.com/brianvoe/gofakeit/struct.go b/vendor/github.com/brianvoe/gofakeit/struct.go deleted file mode 100644 index 2c68a9a3cb21..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/struct.go +++ /dev/null @@ -1,87 +0,0 @@ -package gofakeit - -import ( - "reflect" -) - -// Struct fills in exported elements of a struct with random data -// based on the value of `fake` tag of exported elements. -// Use `fake:"skip"` to explicitly skip an element. -// All built-in types are supported, with templating support -// for string types. 
-func Struct(v interface{}) { - r(reflect.TypeOf(v), reflect.ValueOf(v), "") -} - -func r(t reflect.Type, v reflect.Value, template string) { - switch t.Kind() { - case reflect.Ptr: - rPointer(t, v, template) - case reflect.Struct: - rStruct(t, v) - case reflect.String: - rString(template, v) - case reflect.Uint8: - v.SetUint(uint64(Uint8())) - case reflect.Uint16: - v.SetUint(uint64(Uint16())) - case reflect.Uint32: - v.SetUint(uint64(Uint32())) - case reflect.Uint64: - //capped at [0, math.MaxInt64) - v.SetUint(uint64(Uint64())) - case reflect.Int: - v.SetInt(int64(Int64())) - case reflect.Int8: - v.SetInt(int64(Int8())) - case reflect.Int16: - v.SetInt(int64(Int16())) - case reflect.Int32: - v.SetInt(int64(Int32())) - case reflect.Int64: - v.SetInt(int64(Int64())) - case reflect.Float64: - v.SetFloat(Float64()) - case reflect.Float32: - v.SetFloat(float64(Float32())) - case reflect.Bool: - v.SetBool(Bool()) - } -} - -func rString(template string, v reflect.Value) { - if template != "" { - r := Generate(template) - v.SetString(r) - } else { - v.SetString(Generate("???????????????????")) - // we don't have a String(len int) string function!! 
- } -} - -func rStruct(t reflect.Type, v reflect.Value) { - n := t.NumField() - for i := 0; i < n; i++ { - elementT := t.Field(i) - elementV := v.Field(i) - fake := true - t, ok := elementT.Tag.Lookup("fake") - if ok && t == "skip" { - fake = false - } - if fake && elementV.CanSet() { - r(elementT.Type, elementV, t) - } - } -} - -func rPointer(t reflect.Type, v reflect.Value, template string) { - elemT := t.Elem() - if v.IsNil() { - nv := reflect.New(elemT) - r(elemT, nv.Elem(), template) - v.Set(nv) - } else { - r(elemT, v.Elem(), template) - } -} diff --git a/vendor/github.com/brianvoe/gofakeit/unique.go b/vendor/github.com/brianvoe/gofakeit/unique.go deleted file mode 100644 index 4b969a7e9b8f..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/unique.go +++ /dev/null @@ -1,34 +0,0 @@ -package gofakeit - -import ( - "encoding/hex" - "math/rand" -) - -// UUID (version 4) will generate a random unique identifier based upon random nunbers -// Format: xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx -func UUID() string { - version := byte(4) - uuid := make([]byte, 16) - rand.Read(uuid) - - // Set version - uuid[6] = (uuid[6] & 0x0f) | (version << 4) - - // Set variant - uuid[8] = (uuid[8] & 0xbf) | 0x80 - - buf := make([]byte, 36) - var dash byte = '-' - hex.Encode(buf[0:8], uuid[0:4]) - buf[8] = dash - hex.Encode(buf[9:13], uuid[4:6]) - buf[13] = dash - hex.Encode(buf[14:18], uuid[6:8]) - buf[18] = dash - hex.Encode(buf[19:23], uuid[8:10]) - buf[23] = dash - hex.Encode(buf[24:], uuid[10:]) - - return string(buf) -} diff --git a/vendor/github.com/brianvoe/gofakeit/user_agent.go b/vendor/github.com/brianvoe/gofakeit/user_agent.go deleted file mode 100644 index 2ba334121452..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/user_agent.go +++ /dev/null @@ -1,92 +0,0 @@ -package gofakeit - -import "strconv" - -// UserAgent will generate a random broswer user agent -func UserAgent() string { - randNum := randIntRange(0, 4) - switch randNum { - case 0: - return 
ChromeUserAgent() - case 1: - return FirefoxUserAgent() - case 2: - return SafariUserAgent() - case 3: - return OperaUserAgent() - default: - return ChromeUserAgent() - } -} - -// ChromeUserAgent will generate a random chrome browser user agent string -func ChromeUserAgent() string { - randNum1 := strconv.Itoa(randIntRange(531, 536)) + strconv.Itoa(randIntRange(0, 2)) - randNum2 := strconv.Itoa(randIntRange(36, 40)) - randNum3 := strconv.Itoa(randIntRange(800, 899)) - return "Mozilla/5.0 " + "(" + randomPlatform() + ") AppleWebKit/" + randNum1 + " (KHTML, like Gecko) Chrome/" + randNum2 + ".0." + randNum3 + ".0 Mobile Safari/" + randNum1 -} - -// FirefoxUserAgent will generate a random firefox broswer user agent string -func FirefoxUserAgent() string { - ver := "Gecko/" + Date().Format("2006-02-01") + " Firefox/" + strconv.Itoa(randIntRange(35, 37)) + ".0" - platforms := []string{ - "(" + windowsPlatformToken() + "; " + "en-US" + "; rv:1.9." + strconv.Itoa(randIntRange(0, 3)) + ".20) " + ver, - "(" + linuxPlatformToken() + "; rv:" + strconv.Itoa(randIntRange(5, 8)) + ".0) " + ver, - "(" + macPlatformToken() + " rv:" + strconv.Itoa(randIntRange(2, 7)) + ".0) " + ver, - } - - return "Mozilla/5.0 " + RandString(platforms) -} - -// SafariUserAgent will generate a random safari browser user agent string -func SafariUserAgent() string { - randNum := strconv.Itoa(randIntRange(531, 536)) + "." + strconv.Itoa(randIntRange(1, 51)) + "." + strconv.Itoa(randIntRange(1, 8)) - ver := strconv.Itoa(randIntRange(4, 6)) + "." 
+ strconv.Itoa(randIntRange(0, 2)) - - mobileDevices := []string{ - "iPhone; CPU iPhone OS", - "iPad; CPU OS", - } - - platforms := []string{ - "(Windows; U; " + windowsPlatformToken() + ") AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + ver + " Safari/" + randNum, - "(" + macPlatformToken() + " rv:" + strconv.Itoa(randIntRange(4, 7)) + ".0; en-US) AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + ver + " Safari/" + randNum, - "(" + RandString(mobileDevices) + " " + strconv.Itoa(randIntRange(7, 9)) + "_" + strconv.Itoa(randIntRange(0, 3)) + "_" + strconv.Itoa(randIntRange(1, 3)) + " like Mac OS X; " + "en-US" + ") AppleWebKit/" + randNum + " (KHTML, like Gecko) Version/" + strconv.Itoa(randIntRange(3, 5)) + ".0.5 Mobile/8B" + strconv.Itoa(randIntRange(111, 120)) + " Safari/6" + randNum, - } - - return "Mozilla/5.0 " + RandString(platforms) -} - -// OperaUserAgent will generate a random opera browser user agent string -func OperaUserAgent() string { - platform := "(" + randomPlatform() + "; en-US) Presto/2." + strconv.Itoa(randIntRange(8, 13)) + "." + strconv.Itoa(randIntRange(160, 355)) + " Version/" + strconv.Itoa(randIntRange(10, 13)) + ".00" - - return "Opera/" + strconv.Itoa(randIntRange(8, 10)) + "." 
+ strconv.Itoa(randIntRange(10, 99)) + " " + platform -} - -// linuxPlatformToken will generate a random linux platform -func linuxPlatformToken() string { - return "X11; Linux " + getRandValue([]string{"computer", "linux_processor"}) -} - -// macPlatformToken will generate a random mac platform -func macPlatformToken() string { - return "Macintosh; " + getRandValue([]string{"computer", "mac_processor"}) + " Mac OS X 10_" + strconv.Itoa(randIntRange(5, 9)) + "_" + strconv.Itoa(randIntRange(0, 10)) -} - -// windowsPlatformToken will generate a random windows platform -func windowsPlatformToken() string { - return getRandValue([]string{"computer", "windows_platform"}) -} - -// randomPlatform will generate a random platform -func randomPlatform() string { - platforms := []string{ - linuxPlatformToken(), - macPlatformToken(), - windowsPlatformToken(), - } - - return RandString(platforms) -} diff --git a/vendor/github.com/brianvoe/gofakeit/vehicle.go b/vendor/github.com/brianvoe/gofakeit/vehicle.go deleted file mode 100644 index 093fe3a1d84c..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/vehicle.go +++ /dev/null @@ -1,55 +0,0 @@ -package gofakeit - -// VehicleInfo is a struct dataset of all vehicle information -type VehicleInfo struct { - // Vehicle type - VehicleType string - // Fuel type - Fuel string - // Transmission type - TransmissionGear string - // Brand name - Brand string - // Vehicle model - Model string - // Vehicle model year - Year int -} - -// Vehicle will generate a struct with vehicle information -func Vehicle() *VehicleInfo { - return &VehicleInfo{ - VehicleType: VehicleType(), - Fuel: FuelType(), - TransmissionGear: TransmissionGearType(), - Brand: CarMaker(), - Model: CarModel(), - Year: Year(), - } - -} - -// VehicleType will generate a random vehicle type string -func VehicleType() string { - return getRandValue([]string{"vehicle", "vehicle_type"}) -} - -// FuelType will return a random fuel type -func FuelType() string { - return 
getRandValue([]string{"vehicle", "fuel_type"}) -} - -// TransmissionGearType will return a random transmission gear type -func TransmissionGearType() string { - return getRandValue([]string{"vehicle", "transmission_type"}) -} - -// CarMaker will return a random car maker -func CarMaker() string { - return getRandValue([]string{"vehicle", "maker"}) -} - -// CarModel will return a random car model -func CarModel() string { - return getRandValue([]string{"vehicle", "model"}) -} diff --git a/vendor/github.com/brianvoe/gofakeit/words.go b/vendor/github.com/brianvoe/gofakeit/words.go deleted file mode 100644 index 631e45c7ddd7..000000000000 --- a/vendor/github.com/brianvoe/gofakeit/words.go +++ /dev/null @@ -1,100 +0,0 @@ -package gofakeit - -import ( - "bytes" - "strings" - "unicode" -) - -type paragrapOptions struct { - paragraphCount int - sentenceCount int - wordCount int - separator string -} - -const bytesPerWordEstimation = 6 - -type sentenceGenerator func(wordCount int) string -type wordGenerator func() string - -// Word will generate a random word -func Word() string { - return getRandValue([]string{"lorem", "word"}) -} - -// Sentence will generate a random sentence -func Sentence(wordCount int) string { - return sentence(wordCount, Word) -} - -// Paragraph will generate a random paragraphGenerator -// Set Paragraph Count -// Set Sentence Count -// Set Word Count -// Set Paragraph Separator -func Paragraph(paragraphCount int, sentenceCount int, wordCount int, separator string) string { - return paragraphGenerator(paragrapOptions{paragraphCount, sentenceCount, wordCount, separator}, Sentence) -} - -func sentence(wordCount int, word wordGenerator) string { - if wordCount <= 0 { - return "" - } - - wordSeparator := ' ' - sentence := bytes.Buffer{} - sentence.Grow(wordCount * bytesPerWordEstimation) - - for i := 0; i < wordCount; i++ { - word := word() - if i == 0 { - runes := []rune(word) - runes[0] = unicode.ToTitle(runes[0]) - word = string(runes) - } - 
sentence.WriteString(word) - if i < wordCount-1 { - sentence.WriteRune(wordSeparator) - } - } - sentence.WriteRune('.') - return sentence.String() -} - -func paragraphGenerator(opts paragrapOptions, sentecer sentenceGenerator) string { - if opts.paragraphCount <= 0 || opts.sentenceCount <= 0 || opts.wordCount <= 0 { - return "" - } - - //to avoid making Go 1.10 dependency, we cannot use strings.Builder - paragraphs := bytes.Buffer{} - //we presume the length - paragraphs.Grow(opts.paragraphCount * opts.sentenceCount * opts.wordCount * bytesPerWordEstimation) - wordSeparator := ' ' - - for i := 0; i < opts.paragraphCount; i++ { - for e := 0; e < opts.sentenceCount; e++ { - paragraphs.WriteString(sentecer(opts.wordCount)) - if e < opts.sentenceCount-1 { - paragraphs.WriteRune(wordSeparator) - } - } - - if i < opts.paragraphCount-1 { - paragraphs.WriteString(opts.separator) - } - } - - return paragraphs.String() -} - -// Question will return a random question -func Question() string { - return strings.Replace(HipsterSentence(Number(3, 10)), ".", "?", 1) -} - -// Quote will return a random quote from a random person -func Quote() string { - return `"` + HipsterSentence(Number(3, 10)) + `" - ` + FirstName() + " " + LastName() -} diff --git a/vendor/github.com/robfig/cron/README.md b/vendor/github.com/robfig/cron/README.md index 4e0ae1c25f39..ec40c95fcb9d 100644 --- a/vendor/github.com/robfig/cron/README.md +++ b/vendor/github.com/robfig/cron/README.md @@ -1,4 +1,4 @@ -[![GoDoc](http://godoc.org/github.com/robfig/cron?status.png)](http://godoc.org/github.com/robfig/cron) +[![GoDoc](http://godoc.org/github.com/robfig/cron?status.png)](http://godoc.org/github.com/robfig/cron) [![Build Status](https://travis-ci.org/robfig/cron.svg?branch=master)](https://travis-ci.org/robfig/cron) # cron diff --git a/vendor/github.com/robfig/cron/doc.go b/vendor/github.com/robfig/cron/doc.go index 1ce84f7bf462..d02ec2f3b563 100644 --- a/vendor/github.com/robfig/cron/doc.go +++ 
b/vendor/github.com/robfig/cron/doc.go @@ -84,7 +84,7 @@ You may use one of several pre-defined schedules in place of a cron expression. Intervals -You may also schedule a job to execute at fixed intervals, starting at the time it's added +You may also schedule a job to execute at fixed intervals, starting at the time it's added or cron is run. This is supported by formatting the cron spec like this: @every diff --git a/vendor/modules.txt b/vendor/modules.txt index 1cf623aa57d4..19e66874848b 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -56,9 +56,6 @@ github.com/benbjohnson/clock github.com/beorn7/perks/quantile # github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737 github.com/bradfitz/gomemcache/memcache -# github.com/brianvoe/gofakeit v3.17.0+incompatible -github.com/brianvoe/gofakeit -github.com/brianvoe/gofakeit/data # github.com/codegangsta/cli v1.20.0 github.com/codegangsta/cli # github.com/davecgh/go-spew v1.1.1 From a3092dc57b7a91d9d38ece34c142a51912e33a65 Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Wed, 29 May 2019 16:12:42 +0300 Subject: [PATCH 33/49] LDAP: remove unused function (#17351) --- pkg/services/ldap/ldap.go | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/pkg/services/ldap/ldap.go b/pkg/services/ldap/ldap.go index c64533d34922..418673be4463 100644 --- a/pkg/services/ldap/ldap.go +++ b/pkg/services/ldap/ldap.go @@ -30,7 +30,6 @@ type IConnection interface { type IServer interface { Login(*models.LoginUserQuery) (*models.ExternalUserInfo, error) Users([]string) ([]*models.ExternalUserInfo, error) - ExtractGrafanaUser(*UserInfo) (*models.ExternalUserInfo, error) InitialBind(string, string) error Dial() error Close() @@ -148,6 +147,11 @@ func (server *Server) Login(query *models.LoginUserQuery) ( // Check if a second user bind is needed user := users[0] + + if err := server.validateGrafanaUser(user); err != nil { + return nil, err + } + if server.requireSecondBind { err = 
server.secondBind(user, query.Password) if err != nil { @@ -188,16 +192,6 @@ func (server *Server) Users(logins []string) ( return serializedUsers, nil } -// ExtractGrafanaUser extracts external user info from LDAP user -func (server *Server) ExtractGrafanaUser(user *UserInfo) (*models.ExternalUserInfo, error) { - result := server.buildGrafanaUser(user) - if err := server.validateGrafanaUser(result); err != nil { - return nil, err - } - - return result, nil -} - // validateGrafanaUser validates user access. // If there are no ldap group mappings access is true // otherwise a single group must match From 5e7537878e857c6d0b751860bf431e3ca699af41 Mon Sep 17 00:00:00 2001 From: Dan Cech Date: Thu, 30 May 2019 01:07:19 -0400 Subject: [PATCH 34/49] Security: Prevent csv formula injection attack (#17363) * mitigate https://www.owasp.org/index.php/CSV_Injection - prepend csv cell values that begin with -, +, = or @ with ' - trim trailing whitespace from all csv values * test for csv formula injection mitigation --- public/app/core/specs/file_export.test.ts | 4 +++- public/app/core/utils/file_export.ts | 6 +++++- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/public/app/core/specs/file_export.test.ts b/public/app/core/specs/file_export.test.ts index 9e2ff0a7ce16..ab254a94f2b4 100644 --- a/public/app/core/specs/file_export.test.ts +++ b/public/app/core/specs/file_export.test.ts @@ -92,6 +92,7 @@ describe('file_export', () => { [0x123, 'some string with \n in the middle', 10.01, false], [0b1011, 'some string with ; in the middle', -12.34, true], [123, 'some string with ;; in the middle', -12.34, true], + [1234, '=a bogus formula ', '-and another', '+another', '@ref'], ], }; @@ -108,7 +109,8 @@ describe('file_export', () => { '501;"some string with "" at the end""";0.01;false\r\n' + '291;"some string with \n in the middle";10.01;false\r\n' + '11;"some string with ; in the middle";-12.34;true\r\n' + - '123;"some string with ;; in the middle";-12.34;true'; + 
'123;"some string with ;; in the middle";-12.34;true\r\n' + + '1234;"\'=a bogus formula";"\'-and another";"\'+another";"\'@ref"'; expect(returnedText).toBe(expectedText); }); diff --git a/public/app/core/utils/file_export.ts b/public/app/core/utils/file_export.ts index 6d341b5582e2..ae8d0ad06dea 100644 --- a/public/app/core/utils/file_export.ts +++ b/public/app/core/utils/file_export.ts @@ -17,7 +17,11 @@ function csvEscaped(text) { return text; } - return text.split(QUOTE).join(QUOTE + QUOTE); + return text + .split(QUOTE) + .join(QUOTE + QUOTE) + .replace(/^([-+=@])/, "'$1") + .replace(/\s+$/, ''); } const domParser = new DOMParser(); From a0bb01103eebfa7e0d94201db5536b295c4f4529 Mon Sep 17 00:00:00 2001 From: Bernard Duggan Date: Thu, 30 May 2019 17:58:29 +1000 Subject: [PATCH 35/49] MySQL/Postgres/MSSQL: Add parsing for day, weeks and year intervals in macros (#13086) closes #11431 --- pkg/components/gtime/gtime.go | 28 ++++++++++++++++++++++++ pkg/components/gtime/gtime_test.go | 34 ++++++++++++++++++++++++++++++ pkg/tsdb/mssql/macros.go | 5 +++-- pkg/tsdb/mysql/macros.go | 6 +++--- pkg/tsdb/postgres/macros.go | 5 +++-- 5 files changed, 71 insertions(+), 7 deletions(-) create mode 100644 pkg/components/gtime/gtime.go create mode 100644 pkg/components/gtime/gtime_test.go diff --git a/pkg/components/gtime/gtime.go b/pkg/components/gtime/gtime.go new file mode 100644 index 000000000000..e3e4e449f661 --- /dev/null +++ b/pkg/components/gtime/gtime.go @@ -0,0 +1,28 @@ +package gtime + +import ( + "regexp" + "strconv" + "time" +) + +// ParseInterval parses and interval with support for all units that Grafana uses. 
+func ParseInterval(interval string) (time.Duration, error) { + re := regexp.MustCompile(`(\d+)([wdy])`) + result := re.FindSubmatch([]byte(interval)) + + if len(result) == 3 { + num, _ := strconv.Atoi(string(result[1])) + period := string(result[2]) + + if period == `d` { + return time.Hour * 24 * time.Duration(num), nil + } else if period == `w` { + return time.Hour * 24 * 7 * time.Duration(num), nil + } else { + return time.Hour * 24 * 7 * 365 * time.Duration(num), nil + } + } else { + return time.ParseDuration(interval) + } +} diff --git a/pkg/components/gtime/gtime_test.go b/pkg/components/gtime/gtime_test.go new file mode 100644 index 000000000000..e683184023fa --- /dev/null +++ b/pkg/components/gtime/gtime_test.go @@ -0,0 +1,34 @@ +package gtime + +import ( + "errors" + "fmt" + "testing" + "time" +) + +func TestParseInterval(t *testing.T) { + tcs := []struct { + interval string + duration time.Duration + err error + }{ + {interval: "1d", duration: time.Hour * 24}, + {interval: "1w", duration: time.Hour * 24 * 7}, + {interval: "1y", duration: time.Hour * 24 * 7 * 365}, + {interval: "1M", err: errors.New("time: unknown unit M in duration 1M")}, + {interval: "invalid-duration", err: errors.New("time: invalid duration invalid-duration")}, + } + + for i, tc := range tcs { + t.Run(fmt.Sprintf("testcase %d", i), func(t *testing.T) { + res, err := ParseInterval(tc.interval) + if err != nil && err.Error() != tc.err.Error() { + t.Fatalf("expected '%v' got '%v'", tc.err, err) + } + if res != tc.duration { + t.Errorf("expected %v got %v", tc.duration, res) + } + }) + } +} diff --git a/pkg/tsdb/mssql/macros.go b/pkg/tsdb/mssql/macros.go index ec3f103a4ef2..c0794863efe5 100644 --- a/pkg/tsdb/mssql/macros.go +++ b/pkg/tsdb/mssql/macros.go @@ -6,6 +6,7 @@ import ( "strings" "time" + "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -74,7 +75,7 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if 
len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'"`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'"`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -109,7 +110,7 @@ func (m *msSqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } diff --git a/pkg/tsdb/mysql/macros.go b/pkg/tsdb/mysql/macros.go index bbd928b05631..8cd83d2f9991 100644 --- a/pkg/tsdb/mysql/macros.go +++ b/pkg/tsdb/mysql/macros.go @@ -4,8 +4,8 @@ import ( "fmt" "regexp" "strings" - "time" + "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -69,7 +69,7 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'"`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'"`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -104,7 +104,7 @@ func (m *mySqlMacroEngine) evaluateMacro(name string, args []string) (string, er if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } diff --git a/pkg/tsdb/postgres/macros.go b/pkg/tsdb/postgres/macros.go index 2efba13d31ac..f7a194e63cd1 100644 --- 
a/pkg/tsdb/postgres/macros.go +++ b/pkg/tsdb/postgres/macros.go @@ -6,6 +6,7 @@ import ( "strings" "time" + "github.com/grafana/grafana/pkg/components/gtime" "github.com/grafana/grafana/pkg/tsdb" ) @@ -95,7 +96,7 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } @@ -139,7 +140,7 @@ func (m *postgresMacroEngine) evaluateMacro(name string, args []string) (string, if len(args) < 2 { return "", fmt.Errorf("macro %v needs time column and interval and optional fill value", name) } - interval, err := time.ParseDuration(strings.Trim(args[1], `'`)) + interval, err := gtime.ParseInterval(strings.Trim(args[1], `'`)) if err != nil { return "", fmt.Errorf("error parsing interval %v", args[1]) } From 1497f3d79aa901f7d3aff67e3a44b6771b4e4db2 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Fri, 31 May 2019 09:45:53 +0200 Subject: [PATCH 36/49] Chore: Remove unused properties in explore (#17359) This removes unused properties in explore and datasource meta data (tables and explore properties in plugin.json). 
--- packages/grafana-ui/src/types/datasource.ts | 2 -- public/app/features/explore/state/reducers.test.ts | 7 ------- public/app/features/explore/state/reducers.ts | 7 ------- public/app/plugins/datasource/graphite/plugin.json | 1 - public/app/plugins/datasource/influxdb/plugin.json | 1 - public/app/plugins/datasource/input/plugin.json | 1 - public/app/plugins/datasource/loki/plugin.json | 2 -- public/app/plugins/datasource/mssql/plugin.json | 1 - public/app/plugins/datasource/mysql/plugin.json | 1 - public/app/plugins/datasource/opentsdb/plugin.json | 1 - public/app/plugins/datasource/postgres/plugin.json | 1 - public/app/plugins/datasource/prometheus/plugin.json | 2 -- .../app/plugins/datasource/stackdriver/plugin.json | 1 - public/app/types/explore.ts | 12 ------------ 14 files changed, 40 deletions(-) diff --git a/packages/grafana-ui/src/types/datasource.ts b/packages/grafana-ui/src/types/datasource.ts index dd364a7d9c9e..dbb80e2fdf59 100644 --- a/packages/grafana-ui/src/types/datasource.ts +++ b/packages/grafana-ui/src/types/datasource.ts @@ -75,9 +75,7 @@ export class DataSourcePlugin< export interface DataSourcePluginMeta extends PluginMeta { builtIn?: boolean; // Is this for all metrics?: boolean; - tables?: boolean; logs?: boolean; - explore?: boolean; annotations?: boolean; alerting?: boolean; mixed?: boolean; diff --git a/public/app/features/explore/state/reducers.test.ts b/public/app/features/explore/state/reducers.test.ts index e7127eb71592..0c37a4b388ea 100644 --- a/public/app/features/explore/state/reducers.test.ts +++ b/public/app/features/explore/state/reducers.test.ts @@ -144,7 +144,6 @@ describe('Explore item reducer', () => { meta: { metrics: true, logs: true, - tables: true, }, components: { ExploreStartPage: StartPage, @@ -154,9 +153,6 @@ describe('Explore item reducer', () => { const queryKeys: string[] = []; const initalState: Partial = { datasourceInstance: null, - supportsGraph: false, - supportsLogs: false, - supportsTable: false, 
StartPage: null, showingStartPage: false, queries, @@ -164,9 +160,6 @@ describe('Explore item reducer', () => { }; const expectedState = { datasourceInstance, - supportsGraph: true, - supportsLogs: true, - supportsTable: true, StartPage, showingStartPage: true, queries, diff --git a/public/app/features/explore/state/reducers.ts b/public/app/features/explore/state/reducers.ts index 208825c9c19a..969ecd020667 100644 --- a/public/app/features/explore/state/reducers.ts +++ b/public/app/features/explore/state/reducers.ts @@ -103,9 +103,6 @@ export const makeExploreItemState = (): ExploreItemState => ({ graphIsLoading: false, logIsLoading: false, tableIsLoading: false, - supportsGraph: null, - supportsLogs: null, - supportsTable: null, queryKeys: [], urlState: null, update: makeInitialUpdateState(), @@ -246,7 +243,6 @@ export const itemReducer = reducerFactory({} as ExploreItemSta // Capabilities const supportsGraph = datasourceInstance.meta.metrics; const supportsLogs = datasourceInstance.meta.logs; - const supportsTable = datasourceInstance.meta.tables; let mode = ExploreMode.Metrics; const supportedModes: ExploreMode[] = []; @@ -274,9 +270,6 @@ export const itemReducer = reducerFactory({} as ExploreItemSta graphIsLoading: false, logIsLoading: false, tableIsLoading: false, - supportsGraph, - supportsLogs, - supportsTable, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), diff --git a/public/app/plugins/datasource/graphite/plugin.json b/public/app/plugins/datasource/graphite/plugin.json index 01a95727ee79..a1cc0335b68e 100644 --- a/public/app/plugins/datasource/graphite/plugin.json +++ b/public/app/plugins/datasource/graphite/plugin.json @@ -10,7 +10,6 @@ "metrics": true, "alerting": true, "annotations": true, - "tables": false, "queryOptions": { "maxDataPoints": true, diff --git a/public/app/plugins/datasource/influxdb/plugin.json b/public/app/plugins/datasource/influxdb/plugin.json index 
fa660ee12329..785706dfc401 100644 --- a/public/app/plugins/datasource/influxdb/plugin.json +++ b/public/app/plugins/datasource/influxdb/plugin.json @@ -8,7 +8,6 @@ "metrics": true, "annotations": true, "alerting": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/input/plugin.json b/public/app/plugins/datasource/input/plugin.json index 91782a348065..dbfa0ad489a4 100644 --- a/public/app/plugins/datasource/input/plugin.json +++ b/public/app/plugins/datasource/input/plugin.json @@ -8,7 +8,6 @@ "alerting": false, "annotations": false, "logs": false, - "explore": false, "info": { "description": "Data source that supports manual table & CSV input", diff --git a/public/app/plugins/datasource/loki/plugin.json b/public/app/plugins/datasource/loki/plugin.json index cd14a7fe48ad..1c880bce8111 100644 --- a/public/app/plugins/datasource/loki/plugin.json +++ b/public/app/plugins/datasource/loki/plugin.json @@ -8,8 +8,6 @@ "alerting": false, "annotations": false, "logs": true, - "explore": true, - "tables": false, "info": { "description": "Like Prometheus but for logs. 
OSS logging solution from Grafana Labs", diff --git a/public/app/plugins/datasource/mssql/plugin.json b/public/app/plugins/datasource/mssql/plugin.json index b3269b91100e..ef280e9209ee 100644 --- a/public/app/plugins/datasource/mssql/plugin.json +++ b/public/app/plugins/datasource/mssql/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/mysql/plugin.json b/public/app/plugins/datasource/mysql/plugin.json index 49d1996332fa..be0714560927 100644 --- a/public/app/plugins/datasource/mysql/plugin.json +++ b/public/app/plugins/datasource/mysql/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/opentsdb/plugin.json b/public/app/plugins/datasource/opentsdb/plugin.json index e7cae327c5b5..a19916482369 100644 --- a/public/app/plugins/datasource/opentsdb/plugin.json +++ b/public/app/plugins/datasource/opentsdb/plugin.json @@ -8,7 +8,6 @@ "defaultMatchFormat": "pipe", "annotations": true, "alerting": true, - "tables": false, "info": { "description": "Open source time series database", diff --git a/public/app/plugins/datasource/postgres/plugin.json b/public/app/plugins/datasource/postgres/plugin.json index 994578a7f2c8..ce72d3b0f2f8 100644 --- a/public/app/plugins/datasource/postgres/plugin.json +++ b/public/app/plugins/datasource/postgres/plugin.json @@ -19,7 +19,6 @@ "alerting": true, "annotations": true, "metrics": true, - "tables": true, "queryOptions": { "minInterval": true diff --git a/public/app/plugins/datasource/prometheus/plugin.json b/public/app/plugins/datasource/prometheus/plugin.json index fb9ebbb52b10..ba1144549489 100644 --- a/public/app/plugins/datasource/prometheus/plugin.json +++ b/public/app/plugins/datasource/prometheus/plugin.json @@ -24,8 +24,6 @@ "metrics": true, "alerting": true, 
"annotations": true, - "explore": true, - "tables": true, "queryOptions": { "minInterval": true }, diff --git a/public/app/plugins/datasource/stackdriver/plugin.json b/public/app/plugins/datasource/stackdriver/plugin.json index 620a7b1c8cea..20cac315400a 100644 --- a/public/app/plugins/datasource/stackdriver/plugin.json +++ b/public/app/plugins/datasource/stackdriver/plugin.json @@ -7,7 +7,6 @@ "metrics": true, "alerting": true, "annotations": true, - "tables": false, "queryOptions": { "maxDataPoints": true, "cacheTimeout": true diff --git a/public/app/types/explore.ts b/public/app/types/explore.ts index c852d92ee29a..289ae02b0d71 100644 --- a/public/app/types/explore.ts +++ b/public/app/types/explore.ts @@ -214,18 +214,6 @@ export interface ExploreItemState { * True if table result viewer is expanded. Query runs will contain table queries. */ showingTable: boolean; - /** - * True if `datasourceInstance` supports graph queries. - */ - supportsGraph: boolean | null; - /** - * True if `datasourceInstance` supports logs queries. - */ - supportsLogs: boolean | null; - /** - * True if `datasourceInstance` supports table queries. 
- */ - supportsTable: boolean | null; graphIsLoading: boolean; logIsLoading: boolean; From 60ddad8fdbb1345cfb93022e3eb27760beed8fe2 Mon Sep 17 00:00:00 2001 From: Alexander Zobnin Date: Fri, 31 May 2019 13:22:22 +0300 Subject: [PATCH 37/49] Batch disable users (#17254) * batch disable users * batch revoke users tokens * split batch disable user and revoke token * fix tests for batch disable users * Chore: add BatchDisableUsers() to the bus --- pkg/models/user.go | 5 ++++ pkg/services/auth/auth_token.go | 31 +++++++++++++++++++++++++ pkg/services/auth/auth_token_test.go | 20 ++++++++++++++++ pkg/services/sqlstore/user.go | 26 +++++++++++++++++++++ pkg/services/sqlstore/user_test.go | 34 ++++++++++++++++++++++++++++ 5 files changed, 116 insertions(+) diff --git a/pkg/models/user.go b/pkg/models/user.go index 5ced373f248f..de61150512d2 100644 --- a/pkg/models/user.go +++ b/pkg/models/user.go @@ -94,6 +94,11 @@ type DisableUserCommand struct { IsDisabled bool } +type BatchDisableUsersCommand struct { + UserIds []int64 + IsDisabled bool +} + type DeleteUserCommand struct { UserId int64 } diff --git a/pkg/services/auth/auth_token.go b/pkg/services/auth/auth_token.go index 527d054f6ee9..af23d773f65c 100644 --- a/pkg/services/auth/auth_token.go +++ b/pkg/services/auth/auth_token.go @@ -4,6 +4,7 @@ import ( "context" "crypto/sha256" "encoding/hex" + "strings" "time" "github.com/grafana/grafana/pkg/infra/serverlock" @@ -305,6 +306,36 @@ func (s *UserAuthTokenService) RevokeAllUserTokens(ctx context.Context, userId i }) } +func (s *UserAuthTokenService) BatchRevokeAllUserTokens(ctx context.Context, userIds []int64) error { + return s.SQLStore.WithTransactionalDbSession(ctx, func(dbSession *sqlstore.DBSession) error { + if len(userIds) == 0 { + return nil + } + + user_id_params := strings.Repeat(",?", len(userIds)-1) + sql := "DELETE from user_auth_token WHERE user_id IN (?" 
+ user_id_params + ")" + + params := []interface{}{sql} + for _, v := range userIds { + params = append(params, v) + } + + res, err := dbSession.Exec(params...) + if err != nil { + return err + } + + affected, err := res.RowsAffected() + if err != nil { + return err + } + + s.log.Debug("all user tokens for given users revoked", "usersCount", len(userIds), "count", affected) + + return err + }) +} + func (s *UserAuthTokenService) GetUserToken(ctx context.Context, userId, userTokenId int64) (*models.UserToken, error) { var result models.UserToken diff --git a/pkg/services/auth/auth_token_test.go b/pkg/services/auth/auth_token_test.go index 802b4602cbfc..bf12d914e970 100644 --- a/pkg/services/auth/auth_token_test.go +++ b/pkg/services/auth/auth_token_test.go @@ -117,6 +117,26 @@ func TestUserAuthToken(t *testing.T) { So(model2, ShouldBeNil) }) }) + + Convey("When revoking users tokens in a batch", func() { + Convey("Can revoke all users tokens", func() { + userIds := []int64{} + for i := 0; i < 3; i++ { + userId := userID + int64(i+1) + userIds = append(userIds, userId) + userAuthTokenService.CreateToken(context.Background(), userId, "192.168.10.11:1234", "some user agent") + } + + err := userAuthTokenService.BatchRevokeAllUserTokens(context.Background(), userIds) + So(err, ShouldBeNil) + + for _, v := range userIds { + tokens, err := userAuthTokenService.GetUserTokens(context.Background(), v) + So(err, ShouldBeNil) + So(len(tokens), ShouldEqual, 0) + } + }) + }) }) Convey("expires correctly", func() { diff --git a/pkg/services/sqlstore/user.go b/pkg/services/sqlstore/user.go index 3c94e0617f08..641fc5f1344f 100644 --- a/pkg/services/sqlstore/user.go +++ b/pkg/services/sqlstore/user.go @@ -28,6 +28,7 @@ func (ss *SqlStore) addUserQueryAndCommandHandlers() { bus.AddHandler("sql", SearchUsers) bus.AddHandler("sql", GetUserOrgList) bus.AddHandler("sql", DisableUser) + bus.AddHandler("sql", BatchDisableUsers) bus.AddHandler("sql", DeleteUser) bus.AddHandler("sql", 
UpdateUserPermissions) bus.AddHandler("sql", SetUserHelpFlag) @@ -487,6 +488,31 @@ func DisableUser(cmd *m.DisableUserCommand) error { return err } +func BatchDisableUsers(cmd *m.BatchDisableUsersCommand) error { + return inTransaction(func(sess *DBSession) error { + userIds := cmd.UserIds + + if len(userIds) == 0 { + return nil + } + + user_id_params := strings.Repeat(",?", len(userIds)-1) + disableSQL := "UPDATE " + dialect.Quote("user") + " SET is_disabled=? WHERE Id IN (?" + user_id_params + ")" + + disableParams := []interface{}{disableSQL, cmd.IsDisabled} + for _, v := range userIds { + disableParams = append(disableParams, v) + } + + _, err := sess.Exec(disableParams...) + if err != nil { + return err + } + + return nil + }) +} + func DeleteUser(cmd *m.DeleteUserCommand) error { return inTransaction(func(sess *DBSession) error { return deleteUserInTransaction(sess, cmd) diff --git a/pkg/services/sqlstore/user_test.go b/pkg/services/sqlstore/user_test.go index 84640687ed9f..e5807ea7bf57 100644 --- a/pkg/services/sqlstore/user_test.go +++ b/pkg/services/sqlstore/user_test.go @@ -175,6 +175,40 @@ func TestUserDataAccess(t *testing.T) { So(found, ShouldBeTrue) }) }) + + Convey("When batch disabling users", func() { + userIdsToDisable := []int64{} + for i := 0; i < 3; i++ { + userIdsToDisable = append(userIdsToDisable, users[i].Id) + } + disableCmd := m.BatchDisableUsersCommand{UserIds: userIdsToDisable, IsDisabled: true} + + err = BatchDisableUsers(&disableCmd) + So(err, ShouldBeNil) + + Convey("Should disable all provided users", func() { + query := m.SearchUsersQuery{} + err = SearchUsers(&query) + + So(query.Result.TotalCount, ShouldEqual, 5) + for _, user := range query.Result.Users { + shouldBeDisabled := false + + // Check if user id is in the userIdsToDisable list + for _, disabledUserId := range userIdsToDisable { + if user.Id == disabledUserId { + So(user.IsDisabled, ShouldBeTrue) + shouldBeDisabled = true + } + } + + // Otherwise user shouldn't be 
disabled + if !shouldBeDisabled { + So(user.IsDisabled, ShouldBeFalse) + } + } + }) + }) }) Convey("Given one grafana admin user", func() { From d8736a25475d146bd7111bddc5739e1380a1b0d3 Mon Sep 17 00:00:00 2001 From: Carl Bergquist Date: Mon, 3 Jun 2019 10:25:58 +0200 Subject: [PATCH 38/49] Alerting: golint fixes for alerting (#17246) --- Makefile | 7 +++- pkg/api/alerting.go | 4 +- pkg/services/alerting/conditions/evaluator.go | 27 ++++++++----- pkg/services/alerting/conditions/query.go | 39 ++++++++++-------- .../alerting/conditions/query_test.go | 9 ++--- pkg/services/alerting/conditions/reducer.go | 16 ++++---- .../alerting/conditions/reducer_test.go | 18 ++++----- pkg/services/alerting/engine.go | 30 +++++++------- .../alerting/engine_integration_test.go | 2 +- pkg/services/alerting/engine_test.go | 2 +- pkg/services/alerting/eval_context.go | 14 +++---- pkg/services/alerting/interfaces.go | 2 +- pkg/services/alerting/notifier.go | 22 +++++----- .../alerting/notifiers/alertmanager.go | 10 ++--- pkg/services/alerting/notifiers/base.go | 4 +- pkg/services/alerting/notifiers/dingding.go | 4 +- pkg/services/alerting/notifiers/discord.go | 35 ++++++++-------- .../alerting/notifiers/discord_test.go | 4 +- pkg/services/alerting/notifiers/email.go | 6 +-- pkg/services/alerting/notifiers/googlechat.go | 6 +-- pkg/services/alerting/notifiers/hipchat.go | 10 ++--- pkg/services/alerting/notifiers/kafka.go | 8 ++-- pkg/services/alerting/notifiers/line.go | 12 +++--- pkg/services/alerting/notifiers/opsgenie.go | 14 +++---- pkg/services/alerting/notifiers/pagerduty.go | 8 ++-- pkg/services/alerting/notifiers/pushover.go | 2 +- pkg/services/alerting/notifiers/sensu.go | 10 ++--- pkg/services/alerting/notifiers/slack.go | 6 +-- pkg/services/alerting/notifiers/teams.go | 10 ++--- pkg/services/alerting/notifiers/telegram.go | 10 ++--- pkg/services/alerting/notifiers/threema.go | 6 +-- pkg/services/alerting/notifiers/victorops.go | 8 ++-- 
pkg/services/alerting/notifiers/webhook.go | 8 ++-- pkg/services/alerting/result_handler.go | 14 +++---- pkg/services/alerting/rule.go | 40 +++++++++---------- pkg/services/alerting/scheduler.go | 8 ++-- pkg/services/alerting/test_notification.go | 6 +-- pkg/services/alerting/test_rule.go | 10 ++--- scripts/backend-lint.sh | 1 + 39 files changed, 235 insertions(+), 217 deletions(-) diff --git a/Makefile b/Makefile index 5b89b178e974..7025dff42aa2 100644 --- a/Makefile +++ b/Makefile @@ -1,6 +1,6 @@ -include local/Makefile -.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go test-go test-js test run clean gosec revive devenv devenv-down +.PHONY: all deps-go deps-js deps build-go build-server build-cli build-js build build-docker-dev build-docker-full lint-go test-go test-js test run clean gosec revive devenv devenv-down revive-alerting GO := GO111MODULE=on go GO_FILES := ./pkg/... @@ -84,6 +84,11 @@ revive: scripts/go/bin/revive -config ./scripts/go/configs/revive.toml \ $(GO_FILES) +revive-alerting: scripts/go/bin/revive + @scripts/go/bin/revive \ + -formatter stylish \ + ./pkg/services/alerting/... 
+ # create docker-compose file with provided sources and start them # example: make devenv sources=postgres,openldap ifeq ($(sources),) diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go index 0cd00d3b015f..e5e943260275 100644 --- a/pkg/api/alerting.go +++ b/pkg/api/alerting.go @@ -131,9 +131,9 @@ func AlertTest(c *m.ReqContext, dto dtos.AlertTestCommand) Response { } backendCmd := alerting.AlertTestCommand{ - OrgId: c.OrgId, + OrgID: c.OrgId, Dashboard: dto.Dashboard, - PanelId: dto.PanelId, + PanelID: dto.PanelId, User: c.SignedInUser, } diff --git a/pkg/services/alerting/conditions/evaluator.go b/pkg/services/alerting/conditions/evaluator.go index eef593d39e23..3045b633f1e4 100644 --- a/pkg/services/alerting/conditions/evaluator.go +++ b/pkg/services/alerting/conditions/evaluator.go @@ -14,22 +14,25 @@ var ( rangedTypes = []string{"within_range", "outside_range"} ) +// AlertEvaluator evaluates the reduced value of a timeserie. +// Returning true if a timeserie is violating the condition +// ex: ThresholdEvaluator, NoValueEvaluator, RangeEvaluator type AlertEvaluator interface { Eval(reducedValue null.Float) bool } -type NoValueEvaluator struct{} +type noValueEvaluator struct{} -func (e *NoValueEvaluator) Eval(reducedValue null.Float) bool { +func (e *noValueEvaluator) Eval(reducedValue null.Float) bool { return !reducedValue.Valid } -type ThresholdEvaluator struct { +type thresholdEvaluator struct { Type string Threshold float64 } -func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvaluator, error) { +func newThresholdEvaluator(typ string, model *simplejson.Json) (*thresholdEvaluator, error) { params := model.Get("params").MustArray() if len(params) == 0 { return nil, fmt.Errorf("Evaluator missing threshold parameter") @@ -40,12 +43,12 @@ func newThresholdEvaluator(typ string, model *simplejson.Json) (*ThresholdEvalua return nil, fmt.Errorf("Evaluator has invalid parameter") } - defaultEval := &ThresholdEvaluator{Type: typ} + 
defaultEval := &thresholdEvaluator{Type: typ} defaultEval.Threshold, _ = firstParam.Float64() return defaultEval, nil } -func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool { +func (e *thresholdEvaluator) Eval(reducedValue null.Float) bool { if !reducedValue.Valid { return false } @@ -60,13 +63,13 @@ func (e *ThresholdEvaluator) Eval(reducedValue null.Float) bool { return false } -type RangedEvaluator struct { +type rangedEvaluator struct { Type string Lower float64 Upper float64 } -func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, error) { +func newRangedEvaluator(typ string, model *simplejson.Json) (*rangedEvaluator, error) { params := model.Get("params").MustArray() if len(params) == 0 { return nil, alerting.ValidationError{Reason: "Evaluator missing threshold parameter"} @@ -82,13 +85,13 @@ func newRangedEvaluator(typ string, model *simplejson.Json) (*RangedEvaluator, e return nil, alerting.ValidationError{Reason: "Evaluator has invalid second parameter"} } - rangedEval := &RangedEvaluator{Type: typ} + rangedEval := &rangedEvaluator{Type: typ} rangedEval.Lower, _ = firstParam.Float64() rangedEval.Upper, _ = secondParam.Float64() return rangedEval, nil } -func (e *RangedEvaluator) Eval(reducedValue null.Float) bool { +func (e *rangedEvaluator) Eval(reducedValue null.Float) bool { if !reducedValue.Valid { return false } @@ -105,6 +108,8 @@ func (e *RangedEvaluator) Eval(reducedValue null.Float) bool { return false } +// NewAlertEvaluator is a factory function for returning +// an `AlertEvaluator` depending on the json model. 
func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) { typ := model.Get("type").MustString() if typ == "" { @@ -120,7 +125,7 @@ func NewAlertEvaluator(model *simplejson.Json) (AlertEvaluator, error) { } if typ == "no_value" { - return &NoValueEvaluator{}, nil + return &noValueEvaluator{}, nil } return nil, fmt.Errorf("Evaluator invalid evaluator type: %s", typ) diff --git a/pkg/services/alerting/conditions/query.go b/pkg/services/alerting/conditions/query.go index 37dbd9b3f7a6..b29f39b49169 100644 --- a/pkg/services/alerting/conditions/query.go +++ b/pkg/services/alerting/conditions/query.go @@ -17,26 +17,31 @@ import ( func init() { alerting.RegisterCondition("query", func(model *simplejson.Json, index int) (alerting.Condition, error) { - return NewQueryCondition(model, index) + return newQueryCondition(model, index) }) } +// QueryCondition is responsible for issue and query, reduce the +// timeseries into single values and evaluate if they are firing or not. type QueryCondition struct { Index int Query AlertQuery - Reducer QueryReducer + Reducer *queryReducer Evaluator AlertEvaluator Operator string HandleRequest tsdb.HandleRequestFunc } +// AlertQuery contains information about what datasource a query +// should be sent to and the query object. type AlertQuery struct { Model *simplejson.Json - DatasourceId int64 + DatasourceID int64 From string To string } +// Eval evaluates the `QueryCondition`. 
func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.ConditionResult, error) { timeRange := tsdb.NewTimeRange(c.Query.From, c.Query.To) @@ -101,8 +106,8 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) (*alerting.Conditio func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) { getDsInfo := &models.GetDataSourceByIdQuery{ - Id: c.Query.DatasourceId, - OrgId: context.Rule.OrgId, + Id: c.Query.DatasourceID, + OrgId: context.Rule.OrgID, } if err := bus.Dispatch(getDsInfo); err != nil { @@ -154,16 +159,16 @@ func (c *QueryCondition) getRequestForAlertRule(datasource *models.DataSource, t return req } -func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) { +func newQueryCondition(model *simplejson.Json, index int) (*QueryCondition, error) { condition := QueryCondition{} condition.Index = index condition.HandleRequest = tsdb.HandleRequest - queryJson := model.Get("query") + queryJSON := model.Get("query") - condition.Query.Model = queryJson.Get("model") - condition.Query.From = queryJson.Get("params").MustArray()[1].(string) - condition.Query.To = queryJson.Get("params").MustArray()[2].(string) + condition.Query.Model = queryJSON.Get("model") + condition.Query.From = queryJSON.Get("params").MustArray()[1].(string) + condition.Query.To = queryJSON.Get("params").MustArray()[2].(string) if err := validateFromValue(condition.Query.From); err != nil { return nil, err @@ -173,20 +178,20 @@ func NewQueryCondition(model *simplejson.Json, index int) (*QueryCondition, erro return nil, err } - condition.Query.DatasourceId = queryJson.Get("datasourceId").MustInt64() + condition.Query.DatasourceID = queryJSON.Get("datasourceId").MustInt64() - reducerJson := model.Get("reducer") - condition.Reducer = NewSimpleReducer(reducerJson.Get("type").MustString()) + reducerJSON := model.Get("reducer") + condition.Reducer = 
newSimpleReducer(reducerJSON.Get("type").MustString()) - evaluatorJson := model.Get("evaluator") - evaluator, err := NewAlertEvaluator(evaluatorJson) + evaluatorJSON := model.Get("evaluator") + evaluator, err := NewAlertEvaluator(evaluatorJSON) if err != nil { return nil, err } condition.Evaluator = evaluator - operatorJson := model.Get("operator") - operator := operatorJson.Get("type").MustString("and") + operatorJSON := model.Get("operator") + operator := operatorJSON.Get("type").MustString("and") condition.Operator = operator return &condition, nil diff --git a/pkg/services/alerting/conditions/query_test.go b/pkg/services/alerting/conditions/query_test.go index 2e1ecf5f39c5..4c2b1689277a 100644 --- a/pkg/services/alerting/conditions/query_test.go +++ b/pkg/services/alerting/conditions/query_test.go @@ -27,16 +27,15 @@ func TestQueryCondition(t *testing.T) { So(ctx.condition.Query.From, ShouldEqual, "5m") So(ctx.condition.Query.To, ShouldEqual, "now") - So(ctx.condition.Query.DatasourceId, ShouldEqual, 1) + So(ctx.condition.Query.DatasourceID, ShouldEqual, 1) Convey("Can read query reducer", func() { - reducer, ok := ctx.condition.Reducer.(*SimpleReducer) - So(ok, ShouldBeTrue) + reducer := ctx.condition.Reducer So(reducer.Type, ShouldEqual, "avg") }) Convey("Can read evaluator", func() { - evaluator, ok := ctx.condition.Evaluator.(*ThresholdEvaluator) + evaluator, ok := ctx.condition.Evaluator.(*thresholdEvaluator) So(ok, ShouldBeTrue) So(evaluator.Type, ShouldEqual, "gt") }) @@ -163,7 +162,7 @@ func (ctx *queryConditionTestContext) exec() (*alerting.ConditionResult, error) }`)) So(err, ShouldBeNil) - condition, err := NewQueryCondition(jsonModel, 0) + condition, err := newQueryCondition(jsonModel, 0) So(err, ShouldBeNil) ctx.condition = condition diff --git a/pkg/services/alerting/conditions/reducer.go b/pkg/services/alerting/conditions/reducer.go index f55545be311f..bf57110ea1c9 100644 --- a/pkg/services/alerting/conditions/reducer.go +++ 
b/pkg/services/alerting/conditions/reducer.go @@ -9,15 +9,15 @@ import ( "github.com/grafana/grafana/pkg/tsdb" ) -type QueryReducer interface { - Reduce(timeSeries *tsdb.TimeSeries) null.Float -} +// queryReducer reduces an timeserie to a nullable float +type queryReducer struct { -type SimpleReducer struct { + // Type is how the timeserie should be reduced. + // Ex avg, sum, max, min, count Type string } -func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { +func (s *queryReducer) Reduce(series *tsdb.TimeSeries) null.Float { if len(series.Points) == 0 { return null.FloatFromPtr(nil) } @@ -31,7 +31,7 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { for _, point := range series.Points { if point[0].Valid { value += point[0].Float64 - validPointsCount += 1 + validPointsCount++ allNull = false } } @@ -117,8 +117,8 @@ func (s *SimpleReducer) Reduce(series *tsdb.TimeSeries) null.Float { return null.FloatFrom(value) } -func NewSimpleReducer(typ string) *SimpleReducer { - return &SimpleReducer{Type: typ} +func newSimpleReducer(t string) *queryReducer { + return &queryReducer{Type: t} } func calculateDiff(series *tsdb.TimeSeries, allNull bool, value float64, fn func(float64, float64) float64) (bool, float64) { diff --git a/pkg/services/alerting/conditions/reducer_test.go b/pkg/services/alerting/conditions/reducer_test.go index d2c21771d0b1..eac71378f3d7 100644 --- a/pkg/services/alerting/conditions/reducer_test.go +++ b/pkg/services/alerting/conditions/reducer_test.go @@ -53,7 +53,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("median should ignore null values", func() { - reducer := NewSimpleReducer("median") + reducer := newSimpleReducer("median") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -76,7 +76,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("avg with only nulls", func() { - reducer := NewSimpleReducer("avg") + reducer := newSimpleReducer("avg") series := &tsdb.TimeSeries{ Name: "test time serie", } 
@@ -87,7 +87,7 @@ func TestSimpleReducer(t *testing.T) { Convey("count_non_null", func() { Convey("with null values and real values", func() { - reducer := NewSimpleReducer("count_non_null") + reducer := newSimpleReducer("count_non_null") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -102,7 +102,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("with null values", func() { - reducer := NewSimpleReducer("count_non_null") + reducer := newSimpleReducer("count_non_null") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -115,7 +115,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("avg of number values and null values should ignore nulls", func() { - reducer := NewSimpleReducer("avg") + reducer := newSimpleReducer("avg") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -144,7 +144,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("diff with only nulls", func() { - reducer := NewSimpleReducer("diff") + reducer := newSimpleReducer("diff") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -171,7 +171,7 @@ func TestSimpleReducer(t *testing.T) { }) Convey("percent_diff with only nulls", func() { - reducer := NewSimpleReducer("percent_diff") + reducer := newSimpleReducer("percent_diff") series := &tsdb.TimeSeries{ Name: "test time serie", } @@ -184,8 +184,8 @@ func TestSimpleReducer(t *testing.T) { }) } -func testReducer(typ string, datapoints ...float64) float64 { - reducer := NewSimpleReducer(typ) +func testReducer(reducerType string, datapoints ...float64) float64 { + reducer := newSimpleReducer(reducerType) series := &tsdb.TimeSeries{ Name: "test time serie", } diff --git a/pkg/services/alerting/engine.go b/pkg/services/alerting/engine.go index f2fd002704ac..8794eb51c325 100644 --- a/pkg/services/alerting/engine.go +++ b/pkg/services/alerting/engine.go @@ -17,10 +17,10 @@ import ( "golang.org/x/sync/errgroup" ) -// AlertingService is the background process that +// AlertEngine is the background process that // schedules 
alert evaluations and makes sure notifications // are sent. -type AlertingService struct { +type AlertEngine struct { RenderService rendering.Service `inject:""` execQueue chan *Job @@ -33,16 +33,16 @@ type AlertingService struct { } func init() { - registry.RegisterService(&AlertingService{}) + registry.RegisterService(&AlertEngine{}) } // IsDisabled returns true if the alerting service is disable for this instance. -func (e *AlertingService) IsDisabled() bool { +func (e *AlertEngine) IsDisabled() bool { return !setting.AlertingEnabled || !setting.ExecuteAlerts } // Init initalizes the AlertingService. -func (e *AlertingService) Init() error { +func (e *AlertEngine) Init() error { e.ticker = NewTicker(time.Now(), time.Second*0, clock.New()) e.execQueue = make(chan *Job, 1000) e.scheduler = newScheduler() @@ -54,7 +54,7 @@ func (e *AlertingService) Init() error { } // Run starts the alerting service background process. -func (e *AlertingService) Run(ctx context.Context) error { +func (e *AlertEngine) Run(ctx context.Context) error { alertGroup, ctx := errgroup.WithContext(ctx) alertGroup.Go(func() error { return e.alertingTicker(ctx) }) alertGroup.Go(func() error { return e.runJobDispatcher(ctx) }) @@ -63,7 +63,7 @@ func (e *AlertingService) Run(ctx context.Context) error { return err } -func (e *AlertingService) alertingTicker(grafanaCtx context.Context) error { +func (e *AlertEngine) alertingTicker(grafanaCtx context.Context) error { defer func() { if err := recover(); err != nil { e.log.Error("Scheduler Panic: stopping alertingTicker", "error", err, "stack", log.Stack(1)) @@ -88,7 +88,7 @@ func (e *AlertingService) alertingTicker(grafanaCtx context.Context) error { } } -func (e *AlertingService) runJobDispatcher(grafanaCtx context.Context) error { +func (e *AlertEngine) runJobDispatcher(grafanaCtx context.Context) error { dispatcherGroup, alertCtx := errgroup.WithContext(grafanaCtx) for { @@ -105,7 +105,7 @@ var ( unfinishedWorkTimeout = time.Second * 5 ) -func 
(e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *Job) error { +func (e *AlertEngine) processJobWithRetry(grafanaCtx context.Context, job *Job) error { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) @@ -140,7 +140,7 @@ func (e *AlertingService) processJobWithRetry(grafanaCtx context.Context, job *J } } -func (e *AlertingService) endJob(err error, cancelChan chan context.CancelFunc, job *Job) error { +func (e *AlertEngine) endJob(err error, cancelChan chan context.CancelFunc, job *Job) error { job.Running = false close(cancelChan) for cancelFn := range cancelChan { @@ -149,7 +149,7 @@ func (e *AlertingService) endJob(err error, cancelChan chan context.CancelFunc, return err } -func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancelChan chan context.CancelFunc, job *Job) { +func (e *AlertEngine) processJob(attemptID int, attemptChan chan int, cancelChan chan context.CancelFunc, job *Job) { defer func() { if err := recover(); err != nil { e.log.Error("Alert Panic", "error", err, "stack", log.Stack(1)) @@ -180,8 +180,8 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel e.evalHandler.Eval(evalContext) - span.SetTag("alertId", evalContext.Rule.Id) - span.SetTag("dashboardId", evalContext.Rule.DashboardId) + span.SetTag("alertId", evalContext.Rule.ID) + span.SetTag("dashboardId", evalContext.Rule.DashboardID) span.SetTag("firing", evalContext.Firing) span.SetTag("nodatapoints", evalContext.NoDataFound) span.SetTag("attemptID", attemptID) @@ -194,7 +194,7 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel ) if attemptID < setting.AlertingMaxAttempts { span.Finish() - e.log.Debug("Job Execution attempt triggered retry", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) + e.log.Debug("Job 
Execution attempt triggered retry", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.ID, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) attemptChan <- (attemptID + 1) return } @@ -212,7 +212,7 @@ func (e *AlertingService) processJob(attemptID int, attemptChan chan int, cancel evalContext.Rule.State = evalContext.GetNewState() e.resultHandler.handle(evalContext) span.Finish() - e.log.Debug("Job Execution completed", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.Id, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) + e.log.Debug("Job Execution completed", "timeMs", evalContext.GetDurationMs(), "alertId", evalContext.Rule.ID, "name", evalContext.Rule.Name, "firing", evalContext.Firing, "attemptID", attemptID) close(attemptChan) }() } diff --git a/pkg/services/alerting/engine_integration_test.go b/pkg/services/alerting/engine_integration_test.go index 7d0d3360ad5a..6b6fab389d02 100644 --- a/pkg/services/alerting/engine_integration_test.go +++ b/pkg/services/alerting/engine_integration_test.go @@ -17,7 +17,7 @@ import ( func TestEngineTimeouts(t *testing.T) { Convey("Alerting engine timeout tests", t, func() { - engine := &AlertingService{} + engine := &AlertEngine{} engine.Init() setting.AlertingNotificationTimeout = 30 * time.Second setting.AlertingMaxAttempts = 3 diff --git a/pkg/services/alerting/engine_test.go b/pkg/services/alerting/engine_test.go index 4ed317d982f4..86980c21bd48 100644 --- a/pkg/services/alerting/engine_test.go +++ b/pkg/services/alerting/engine_test.go @@ -39,7 +39,7 @@ func (handler *FakeResultHandler) handle(evalContext *EvalContext) error { func TestEngineProcessJob(t *testing.T) { Convey("Alerting engine job processing", t, func() { - engine := &AlertingService{} + engine := &AlertEngine{} engine.Init() setting.AlertingEvaluationTimeout = 30 * time.Second setting.AlertingNotificationTimeout = 30 * time.Second diff --git 
a/pkg/services/alerting/eval_context.go b/pkg/services/alerting/eval_context.go index 480303fa6d71..8436e9c9a780 100644 --- a/pkg/services/alerting/eval_context.go +++ b/pkg/services/alerting/eval_context.go @@ -26,7 +26,7 @@ type EvalContext struct { dashboardRef *models.DashboardRef - ImagePublicUrl string + ImagePublicURL string ImageOnDiskPath string NoDataFound bool PrevAlertState models.AlertStateType @@ -102,7 +102,7 @@ func (c *EvalContext) GetDashboardUID() (*models.DashboardRef, error) { return c.dashboardRef, nil } - uidQuery := &models.GetDashboardRefByIdQuery{Id: c.Rule.DashboardId} + uidQuery := &models.GetDashboardRefByIdQuery{Id: c.Rule.DashboardID} if err := bus.Dispatch(uidQuery); err != nil { return nil, err } @@ -113,8 +113,8 @@ func (c *EvalContext) GetDashboardUID() (*models.DashboardRef, error) { const urlFormat = "%s?fullscreen&edit&tab=alert&panelId=%d&orgId=%d" -// GetRuleUrl returns the url to the dashboard containing the alert. -func (c *EvalContext) GetRuleUrl() (string, error) { +// GetRuleURL returns the url to the dashboard containing the alert. +func (c *EvalContext) GetRuleURL() (string, error) { if c.IsTestRun { return setting.AppUrl, nil } @@ -123,7 +123,7 @@ func (c *EvalContext) GetRuleUrl() (string, error) { if err != nil { return "", err } - return fmt.Sprintf(urlFormat, models.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelId, c.Rule.OrgId), nil + return fmt.Sprintf(urlFormat, models.GetFullDashboardUrl(ref.Uid, ref.Slug), c.Rule.PanelID, c.Rule.OrgID), nil } // GetNewState returns the new state from the alert rule evaluation. 
@@ -148,7 +148,7 @@ func (c *EvalContext) GetNewState() models.AlertStateType { func getNewStateInternal(c *EvalContext) models.AlertStateType { if c.Error != nil { c.log.Error("Alert Rule Result Error", - "ruleId", c.Rule.Id, + "ruleId", c.Rule.ID, "name", c.Rule.Name, "error", c.Error, "changing state to", c.Rule.ExecutionErrorState.ToAlertState()) @@ -165,7 +165,7 @@ func getNewStateInternal(c *EvalContext) models.AlertStateType { if c.NoDataFound { c.log.Info("Alert Rule returned no data", - "ruleId", c.Rule.Id, + "ruleId", c.Rule.ID, "name", c.Rule.Name, "changing state to", c.Rule.NoDataState.ToAlertState()) diff --git a/pkg/services/alerting/interfaces.go b/pkg/services/alerting/interfaces.go index be364d6f4cc2..93d3127d6bdd 100644 --- a/pkg/services/alerting/interfaces.go +++ b/pkg/services/alerting/interfaces.go @@ -25,7 +25,7 @@ type Notifier interface { // ShouldNotify checks this evaluation should send an alert notification ShouldNotify(ctx context.Context, evalContext *EvalContext, notificationState *models.AlertNotificationState) bool - GetNotifierUid() string + GetNotifierUID() string GetIsDefault() bool GetSendReminder() bool GetDisableResolveMessage() bool diff --git a/pkg/services/alerting/notifier.go b/pkg/services/alerting/notifier.go index de6d74239ae7..84a26e9a64a0 100644 --- a/pkg/services/alerting/notifier.go +++ b/pkg/services/alerting/notifier.go @@ -35,7 +35,7 @@ type notificationService struct { } func (n *notificationService) SendIfNeeded(context *EvalContext) error { - notifierStates, err := n.getNeededNotifiers(context.Rule.OrgId, context.Rule.Notifications, context) + notifierStates, err := n.getNeededNotifiers(context.Rule.OrgID, context.Rule.Notifications, context) if err != nil { return err } @@ -56,13 +56,13 @@ func (n *notificationService) SendIfNeeded(context *EvalContext) error { func (n *notificationService) sendAndMarkAsComplete(evalContext *EvalContext, notifierState *notifierState) error { notifier := 
notifierState.notifier - n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUid(), "isDefault", notifier.GetIsDefault()) + n.log.Debug("Sending notification", "type", notifier.GetType(), "uid", notifier.GetNotifierUID(), "isDefault", notifier.GetIsDefault()) metrics.M_Alerting_Notification_Sent.WithLabelValues(notifier.GetType()).Inc() err := notifier.Notify(evalContext) if err != nil { - n.log.Error("failed to send notification", "uid", notifier.GetNotifierUid(), "error", err) + n.log.Error("failed to send notification", "uid", notifier.GetNotifierUID(), "error", err) } if evalContext.IsTestRun { @@ -106,7 +106,7 @@ func (n *notificationService) sendNotifications(evalContext *EvalContext, notifi for _, notifierState := range notifierStates { err := n.sendNotification(evalContext, notifierState) if err != nil { - n.log.Error("failed to send notification", "uid", notifierState.notifier.GetNotifierUid(), "error", err) + n.log.Error("failed to send notification", "uid", notifierState.notifier.GetNotifierUID(), "error", err) } } @@ -123,7 +123,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { Width: 1000, Height: 500, Timeout: setting.AlertingEvaluationTimeout, - OrgId: context.Rule.OrgId, + OrgId: context.Rule.OrgID, OrgRole: models.ROLE_ADMIN, ConcurrentLimit: setting.AlertingRenderLimit, } @@ -133,7 +133,7 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { return err } - renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?orgId=%d&panelId=%d", ref.Uid, ref.Slug, context.Rule.OrgId, context.Rule.PanelId) + renderOpts.Path = fmt.Sprintf("d-solo/%s/%s?orgId=%d&panelId=%d", ref.Uid, ref.Slug, context.Rule.OrgID, context.Rule.PanelID) result, err := n.renderService.Render(context.Ctx, renderOpts) if err != nil { @@ -141,13 +141,13 @@ func (n *notificationService) uploadImage(context *EvalContext) (err error) { } context.ImageOnDiskPath = result.FilePath - 
context.ImagePublicUrl, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) + context.ImagePublicURL, err = uploader.Upload(context.Ctx, context.ImageOnDiskPath) if err != nil { return err } - if context.ImagePublicUrl != "" { - n.log.Info("uploaded screenshot of alert to external image store", "url", context.ImagePublicUrl) + if context.ImagePublicURL != "" { + n.log.Info("uploaded screenshot of alert to external image store", "url", context.ImagePublicURL) } return nil @@ -170,8 +170,8 @@ func (n *notificationService) getNeededNotifiers(orgID int64, notificationUids [ query := &models.GetOrCreateNotificationStateQuery{ NotifierId: notification.Id, - AlertId: evalContext.Rule.Id, - OrgId: evalContext.Rule.OrgId, + AlertId: evalContext.Rule.ID, + OrgId: evalContext.Rule.OrgID, } err = bus.DispatchCtx(evalContext.Ctx, query) diff --git a/pkg/services/alerting/notifiers/alertmanager.go b/pkg/services/alerting/notifiers/alertmanager.go index bc2807d0d3cd..a8fd7db2f5ee 100644 --- a/pkg/services/alerting/notifiers/alertmanager.go +++ b/pkg/services/alerting/notifiers/alertmanager.go @@ -51,7 +51,7 @@ type AlertmanagerNotifier struct { // ShouldNotify returns true if the notifiers should be used depending on state func (am *AlertmanagerNotifier) ShouldNotify(ctx context.Context, evalContext *alerting.EvalContext, notificationState *models.AlertNotificationState) bool { - am.log.Debug("Should notify", "ruleId", evalContext.Rule.Id, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) + am.log.Debug("Should notify", "ruleId", evalContext.Rule.ID, "state", evalContext.Rule.State, "previousState", evalContext.PrevAlertState) // Do not notify when we become OK for the first time. 
if (evalContext.PrevAlertState == models.AlertStatePending) && (evalContext.Rule.State == models.AlertStateOK) { @@ -89,8 +89,8 @@ func (am *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext, m if description != "" { alertJSON.SetPath([]string{"annotations", "description"}, description) } - if evalContext.ImagePublicUrl != "" { - alertJSON.SetPath([]string{"annotations", "image"}, evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + alertJSON.SetPath([]string{"annotations", "image"}, evalContext.ImagePublicURL) } // Labels (from metrics tags + mandatory alertname). @@ -111,9 +111,9 @@ func (am *AlertmanagerNotifier) createAlert(evalContext *alerting.EvalContext, m // Notify sends alert notifications to the alert manager func (am *AlertmanagerNotifier) Notify(evalContext *alerting.EvalContext) error { - am.log.Info("Sending Alertmanager alert", "ruleId", evalContext.Rule.Id, "notification", am.Name) + am.log.Info("Sending Alertmanager alert", "ruleId", evalContext.Rule.ID, "notification", am.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { am.log.Error("Failed get rule link", "error", err) return err diff --git a/pkg/services/alerting/notifiers/base.go b/pkg/services/alerting/notifiers/base.go index 3ebe23c2d1b3..f31c8b36d9c0 100644 --- a/pkg/services/alerting/notifiers/base.go +++ b/pkg/services/alerting/notifiers/base.go @@ -120,8 +120,8 @@ func (n *NotifierBase) NeedsImage() bool { return n.UploadImage } -// GetNotifierUid returns the notifier `uid`. -func (n *NotifierBase) GetNotifierUid() string { +// GetNotifierUID returns the notifier `uid`. 
+func (n *NotifierBase) GetNotifierUID() string { return n.UID } diff --git a/pkg/services/alerting/notifiers/dingding.go b/pkg/services/alerting/notifiers/dingding.go index a418adc7e651..fc8ce477ecb9 100644 --- a/pkg/services/alerting/notifiers/dingding.go +++ b/pkg/services/alerting/notifiers/dingding.go @@ -64,7 +64,7 @@ type DingDingNotifier struct { func (dd *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { dd.log.Info("Sending dingding") - messageURL, err := evalContext.GetRuleUrl() + messageURL, err := evalContext.GetRuleURL() if err != nil { dd.log.Error("Failed to get messageUrl", "error", err, "dingding", dd.Name) messageURL = "" @@ -82,7 +82,7 @@ func (dd *DingDingNotifier) Notify(evalContext *alerting.EvalContext) error { dd.log.Info("messageUrl:" + messageURL) message := evalContext.Rule.Message - picURL := evalContext.ImagePublicUrl + picURL := evalContext.ImagePublicURL title := evalContext.GetNotificationTitle() if message == "" { message = title diff --git a/pkg/services/alerting/notifiers/discord.go b/pkg/services/alerting/notifiers/discord.go index 160c76528dd9..e011ec0c3e93 100644 --- a/pkg/services/alerting/notifiers/discord.go +++ b/pkg/services/alerting/notifiers/discord.go @@ -21,7 +21,7 @@ func init() { Type: "discord", Name: "Discord", Description: "Sends notifications to Discord", - Factory: NewDiscordNotifier, + Factory: newDiscordNotifier, OptionsTemplate: `

Discord settings

@@ -43,7 +43,7 @@ func init() { }) } -func NewDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, error) { +func newDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, error) { content := model.Settings.Get("content").MustString() url := model.Settings.Get("url").MustString() if url == "" { @@ -58,6 +58,8 @@ func NewDiscordNotifier(model *models.AlertNotification) (alerting.Notifier, err }, nil } +// DiscordNotifier is responsible for sending alert +// notifications to discord. type DiscordNotifier struct { NotifierBase Content string @@ -65,20 +67,21 @@ type DiscordNotifier struct { log log.Logger } -func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { - this.log.Info("Sending alert notification to", "webhook_url", this.WebhookURL) +// Notify send an alert notification to Discord. +func (dn *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { + dn.log.Info("Sending alert notification to", "webhook_url", dn.WebhookURL) - ruleUrl, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { - this.log.Error("Failed get rule link", "error", err) + dn.log.Error("Failed get rule link", "error", err) return err } bodyJSON := simplejson.New() bodyJSON.Set("username", "Grafana") - if this.Content != "" { - bodyJSON.Set("content", this.Content) + if dn.Content != "" { + bodyJSON.Set("content", dn.Content) } fields := make([]map[string]interface{}, 0) @@ -103,7 +106,7 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { embed.Set("title", evalContext.GetNotificationTitle()) //Discord takes integer for color embed.Set("color", color) - embed.Set("url", ruleUrl) + embed.Set("url", ruleURL) embed.Set("description", evalContext.Rule.Message) embed.Set("type", "rich") embed.Set("fields", fields) @@ -112,9 +115,9 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { var image map[string]interface{} var embeddedImage = 
false - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { image = map[string]interface{}{ - "url": evalContext.ImagePublicUrl, + "url": evalContext.ImagePublicURL, } embed.Set("image", image) } else { @@ -130,7 +133,7 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { json, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: this.WebhookURL, + Url: dn.WebhookURL, HttpMethod: "POST", ContentType: "application/json", } @@ -138,22 +141,22 @@ func (this *DiscordNotifier) Notify(evalContext *alerting.EvalContext) error { if !embeddedImage { cmd.Body = string(json) } else { - err := this.embedImage(cmd, evalContext.ImageOnDiskPath, json) + err := dn.embedImage(cmd, evalContext.ImageOnDiskPath, json) if err != nil { - this.log.Error("failed to embed image", "error", err) + dn.log.Error("failed to embed image", "error", err) return err } } if err := bus.DispatchCtx(evalContext.Ctx, cmd); err != nil { - this.log.Error("Failed to send notification to Discord", "error", err) + dn.log.Error("Failed to send notification to Discord", "error", err) return err } return nil } -func (this *DiscordNotifier) embedImage(cmd *models.SendWebhookSync, imagePath string, existingJSONBody []byte) error { +func (dn *DiscordNotifier) embedImage(cmd *models.SendWebhookSync, imagePath string, existingJSONBody []byte) error { f, err := os.Open(imagePath) defer f.Close() if err != nil { diff --git a/pkg/services/alerting/notifiers/discord_test.go b/pkg/services/alerting/notifiers/discord_test.go index 5fe700245e23..d1cbff6b859a 100644 --- a/pkg/services/alerting/notifiers/discord_test.go +++ b/pkg/services/alerting/notifiers/discord_test.go @@ -22,7 +22,7 @@ func TestDiscordNotifier(t *testing.T) { Settings: settingsJSON, } - _, err := NewDiscordNotifier(model) + _, err := newDiscordNotifier(model) So(err, ShouldNotBeNil) }) @@ -40,7 +40,7 @@ func TestDiscordNotifier(t *testing.T) { Settings: settingsJSON, } - not, err := 
NewDiscordNotifier(model) + not, err := newDiscordNotifier(model) discordNotifier := not.(*DiscordNotifier) So(err, ShouldBeNil) diff --git a/pkg/services/alerting/notifiers/email.go b/pkg/services/alerting/notifiers/email.go index 44a6b97653ea..5d3422e608b5 100644 --- a/pkg/services/alerting/notifiers/email.go +++ b/pkg/services/alerting/notifiers/email.go @@ -67,7 +67,7 @@ func NewEmailNotifier(model *models.AlertNotification) (alerting.Notifier, error func (en *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { en.log.Info("Sending alert notification to", "addresses", en.Addresses) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { en.log.Error("Failed get rule link", "error", err) return err @@ -100,8 +100,8 @@ func (en *EmailNotifier) Notify(evalContext *alerting.EvalContext) error { }, } - if evalContext.ImagePublicUrl != "" { - cmd.Data["ImageLink"] = evalContext.ImagePublicUrl + if evalContext.ImagePublicURL != "" { + cmd.Data["ImageLink"] = evalContext.ImagePublicURL } else { file, err := os.Stat(evalContext.ImageOnDiskPath) if err == nil { diff --git a/pkg/services/alerting/notifiers/googlechat.go b/pkg/services/alerting/notifiers/googlechat.go index a7e452991b04..2d81787fb916 100644 --- a/pkg/services/alerting/notifiers/googlechat.go +++ b/pkg/services/alerting/notifiers/googlechat.go @@ -120,7 +120,7 @@ func (gcn *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error { "Content-Type": "application/json; charset=UTF-8", } - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { gcn.log.Error("evalContext returned an invalid rule URL") } @@ -152,10 +152,10 @@ func (gcn *GoogleChatNotifier) Notify(evalContext *alerting.EvalContext) error { widgets = append(widgets, fields) // if an image exists, add it as an image widget - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { widgets = append(widgets, 
imageWidget{ Image: image{ - ImageURL: evalContext.ImagePublicUrl, + ImageURL: evalContext.ImagePublicURL, }, }) } else { diff --git a/pkg/services/alerting/notifiers/hipchat.go b/pkg/services/alerting/notifiers/hipchat.go index e817fe9a076c..2e8be00576bb 100644 --- a/pkg/services/alerting/notifiers/hipchat.go +++ b/pkg/services/alerting/notifiers/hipchat.go @@ -81,9 +81,9 @@ type HipChatNotifier struct { // Notify sends an alert notification to HipChat func (hc *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { - hc.log.Info("Executing hipchat notification", "ruleId", evalContext.Rule.Id, "notification", hc.Name) + hc.log.Info("Executing hipchat notification", "ruleId", evalContext.Rule.ID, "notification", hc.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { hc.log.Error("Failed get rule link", "error", err) return err @@ -148,10 +148,10 @@ func (hc *HipChatNotifier) Notify(evalContext *alerting.EvalContext) error { "date": evalContext.EndTime.Unix(), "attributes": attributes, } - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { card["thumbnail"] = map[string]interface{}{ - "url": evalContext.ImagePublicUrl, - "url@2x": evalContext.ImagePublicUrl, + "url": evalContext.ImagePublicURL, + "url@2x": evalContext.ImagePublicURL, "width": 1193, "height": 564, } diff --git a/pkg/services/alerting/notifiers/kafka.go b/pkg/services/alerting/notifiers/kafka.go index 9761adf2f6ae..ed795453c42a 100644 --- a/pkg/services/alerting/notifiers/kafka.go +++ b/pkg/services/alerting/notifiers/kafka.go @@ -78,20 +78,20 @@ func (kn *KafkaNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("description", evalContext.Rule.Name+" - "+evalContext.Rule.Message) bodyJSON.Set("client", "Grafana") bodyJSON.Set("details", customData) - bodyJSON.Set("incident_key", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("incident_key", 
"alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { kn.log.Error("Failed get rule link", "error", err) return err } bodyJSON.Set("client_url", ruleURL) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { contexts := make([]interface{}, 1) imageJSON := simplejson.New() imageJSON.Set("type", "image") - imageJSON.Set("src", evalContext.ImagePublicUrl) + imageJSON.Set("src", evalContext.ImagePublicURL) contexts[0] = imageJSON bodyJSON.Set("contexts", contexts) } diff --git a/pkg/services/alerting/notifiers/line.go b/pkg/services/alerting/notifiers/line.go index 6b84ba8f091e..2048495b6465 100644 --- a/pkg/services/alerting/notifiers/line.go +++ b/pkg/services/alerting/notifiers/line.go @@ -56,7 +56,7 @@ type LineNotifier struct { // Notify send an alert notification to LINE func (ln *LineNotifier) Notify(evalContext *alerting.EvalContext) error { - ln.log.Info("Executing line notification", "ruleId", evalContext.Rule.Id, "notification", ln.Name) + ln.log.Info("Executing line notification", "ruleId", evalContext.Rule.ID, "notification", ln.Name) var err error switch evalContext.Rule.State { @@ -67,8 +67,8 @@ func (ln *LineNotifier) Notify(evalContext *alerting.EvalContext) error { } func (ln *LineNotifier) createAlert(evalContext *alerting.EvalContext) error { - ln.log.Info("Creating Line notify", "ruleId", evalContext.Rule.Id, "notification", ln.Name) - ruleURL, err := evalContext.GetRuleUrl() + ln.log.Info("Creating Line notify", "ruleId", evalContext.Rule.ID, "notification", ln.Name) + ruleURL, err := evalContext.GetRuleURL() if err != nil { ln.log.Error("Failed get rule link", "error", err) return err @@ -78,9 +78,9 @@ func (ln *LineNotifier) createAlert(evalContext *alerting.EvalContext) error { body := fmt.Sprintf("%s - %s\n%s", evalContext.Rule.Name, ruleURL, evalContext.Rule.Message) form.Add("message", body) - if 
evalContext.ImagePublicUrl != "" { - form.Add("imageThumbnail", evalContext.ImagePublicUrl) - form.Add("imageFullsize", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + form.Add("imageThumbnail", evalContext.ImagePublicURL) + form.Add("imageFullsize", evalContext.ImagePublicURL) } cmd := &models.SendWebhookSync{ diff --git a/pkg/services/alerting/notifiers/opsgenie.go b/pkg/services/alerting/notifiers/opsgenie.go index 3adcdbc74ade..833927dee9f5 100644 --- a/pkg/services/alerting/notifiers/opsgenie.go +++ b/pkg/services/alerting/notifiers/opsgenie.go @@ -90,9 +90,9 @@ func (on *OpsGenieNotifier) Notify(evalContext *alerting.EvalContext) error { } func (on *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error { - on.log.Info("Creating OpsGenie alert", "ruleId", evalContext.Rule.Id, "notification", on.Name) + on.log.Info("Creating OpsGenie alert", "ruleId", evalContext.Rule.ID, "notification", on.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { on.log.Error("Failed get rule link", "error", err) return err @@ -106,13 +106,13 @@ func (on *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error bodyJSON := simplejson.New() bodyJSON.Set("message", evalContext.Rule.Name) bodyJSON.Set("source", "Grafana") - bodyJSON.Set("alias", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("alias", "alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) bodyJSON.Set("description", fmt.Sprintf("%s - %s\n%s\n%s", evalContext.Rule.Name, ruleURL, evalContext.Rule.Message, customData)) details := simplejson.New() details.Set("url", ruleURL) - if evalContext.ImagePublicUrl != "" { - details.Set("image", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + details.Set("image", evalContext.ImagePublicURL) } bodyJSON.Set("details", details) @@ -136,14 +136,14 @@ func (on *OpsGenieNotifier) createAlert(evalContext *alerting.EvalContext) error } 
func (on *OpsGenieNotifier) closeAlert(evalContext *alerting.EvalContext) error { - on.log.Info("Closing OpsGenie alert", "ruleId", evalContext.Rule.Id, "notification", on.Name) + on.log.Info("Closing OpsGenie alert", "ruleId", evalContext.Rule.ID, "notification", on.Name) bodyJSON := simplejson.New() bodyJSON.Set("source", "Grafana") body, _ := bodyJSON.MarshalJSON() cmd := &models.SendWebhookSync{ - Url: fmt.Sprintf("%s/alertId-%d/close?identifierType=alias", on.APIUrl, evalContext.Rule.Id), + Url: fmt.Sprintf("%s/alertId-%d/close?identifierType=alias", on.APIUrl, evalContext.Rule.ID), Body: string(body), HttpMethod: "POST", HttpHeader: map[string]string{ diff --git a/pkg/services/alerting/notifiers/pagerduty.go b/pkg/services/alerting/notifiers/pagerduty.go index 99302c1af778..d771bfd1ad68 100644 --- a/pkg/services/alerting/notifiers/pagerduty.go +++ b/pkg/services/alerting/notifiers/pagerduty.go @@ -100,10 +100,10 @@ func (pn *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON := simplejson.New() bodyJSON.Set("routing_key", pn.Key) bodyJSON.Set("event_action", eventType) - bodyJSON.Set("dedup_key", "alertId-"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("dedup_key", "alertId-"+strconv.FormatInt(evalContext.Rule.ID, 10)) bodyJSON.Set("payload", payloadJSON) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { pn.log.Error("Failed get rule link", "error", err) return err @@ -116,10 +116,10 @@ func (pn *PagerdutyNotifier) Notify(evalContext *alerting.EvalContext) error { links[0] = linkJSON bodyJSON.Set("links", links) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { contexts := make([]interface{}, 1) imageJSON := simplejson.New() - imageJSON.Set("src", evalContext.ImagePublicUrl) + imageJSON.Set("src", evalContext.ImagePublicURL) contexts[0] = imageJSON bodyJSON.Set("images", contexts) } diff --git a/pkg/services/alerting/notifiers/pushover.go 
b/pkg/services/alerting/notifiers/pushover.go index 19de6ce08a23..5da1a457e679 100644 --- a/pkg/services/alerting/notifiers/pushover.go +++ b/pkg/services/alerting/notifiers/pushover.go @@ -146,7 +146,7 @@ type PushoverNotifier struct { // Notify sends a alert notification to Pushover func (pn *PushoverNotifier) Notify(evalContext *alerting.EvalContext) error { - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { pn.log.Error("Failed get rule link", "error", err) return err diff --git a/pkg/services/alerting/notifiers/sensu.go b/pkg/services/alerting/notifiers/sensu.go index 7650cb222d92..7f60178d10f5 100644 --- a/pkg/services/alerting/notifiers/sensu.go +++ b/pkg/services/alerting/notifiers/sensu.go @@ -79,7 +79,7 @@ func (sn *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { sn.log.Info("Sending sensu result") bodyJSON := simplejson.New() - bodyJSON.Set("ruleId", evalContext.Rule.Id) + bodyJSON.Set("ruleId", evalContext.Rule.ID) // Sensu alerts cannot have spaces in them bodyJSON.Set("name", strings.Replace(evalContext.Rule.Name, " ", "_", -1)) // Sensu alerts require a source. 
We set it to the user-specified value (optional), @@ -87,7 +87,7 @@ func (sn *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { if sn.Source != "" { bodyJSON.Set("source", sn.Source) } else { - bodyJSON.Set("source", "grafana_rule_"+strconv.FormatInt(evalContext.Rule.Id, 10)) + bodyJSON.Set("source", "grafana_rule_"+strconv.FormatInt(evalContext.Rule.ID, 10)) } // Finally, sensu expects an output // We set it to a default output @@ -106,13 +106,13 @@ func (sn *SensuNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("handler", sn.Handler) } - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { bodyJSON.Set("ruleUrl", ruleURL) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("imageUrl", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("imageUrl", evalContext.ImagePublicURL) } if evalContext.Rule.Message != "" { diff --git a/pkg/services/alerting/notifiers/slack.go b/pkg/services/alerting/notifiers/slack.go index f8cad904270f..b9a10c4d5d10 100644 --- a/pkg/services/alerting/notifiers/slack.go +++ b/pkg/services/alerting/notifiers/slack.go @@ -145,9 +145,9 @@ type SlackNotifier struct { // Notify send alert notification to Slack. 
func (sn *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { - sn.log.Info("Executing slack notification", "ruleId", evalContext.Rule.Id, "notification", sn.Name) + sn.log.Info("Executing slack notification", "ruleId", evalContext.Rule.ID, "notification", sn.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { sn.log.Error("Failed get rule link", "error", err) return err @@ -181,7 +181,7 @@ func (sn *SlackNotifier) Notify(evalContext *alerting.EvalContext) error { imageURL := "" // default to file.upload API method if a token is provided if sn.Token == "" { - imageURL = evalContext.ImagePublicUrl + imageURL = evalContext.ImagePublicURL } body := map[string]interface{}{ diff --git a/pkg/services/alerting/notifiers/teams.go b/pkg/services/alerting/notifiers/teams.go index 4a2cfa1ca911..4d0c47ddad23 100644 --- a/pkg/services/alerting/notifiers/teams.go +++ b/pkg/services/alerting/notifiers/teams.go @@ -50,9 +50,9 @@ type TeamsNotifier struct { // Notify send an alert notification to Microsoft teams. 
func (tn *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { - tn.log.Info("Executing teams notification", "ruleId", evalContext.Rule.Id, "notification", tn.Name) + tn.log.Info("Executing teams notification", "ruleId", evalContext.Rule.ID, "notification", tn.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { tn.log.Error("Failed get rule link", "error", err) return err @@ -83,9 +83,9 @@ func (tn *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { } images := make([]map[string]interface{}, 0) - if evalContext.ImagePublicUrl != "" { + if evalContext.ImagePublicURL != "" { images = append(images, map[string]interface{}{ - "image": evalContext.ImagePublicUrl, + "image": evalContext.ImagePublicURL, }) } @@ -122,7 +122,7 @@ func (tn *TeamsNotifier) Notify(evalContext *alerting.EvalContext) error { "name": "View Graph", "targets": []map[string]interface{}{ { - "os": "default", "uri": evalContext.ImagePublicUrl, + "os": "default", "uri": evalContext.ImagePublicURL, }, }, }, diff --git a/pkg/services/alerting/notifiers/telegram.go b/pkg/services/alerting/notifiers/telegram.go index 0c2e85579823..be354bc2733e 100644 --- a/pkg/services/alerting/notifiers/telegram.go +++ b/pkg/services/alerting/notifiers/telegram.go @@ -104,13 +104,13 @@ func (tn *TelegramNotifier) buildMessage(evalContext *alerting.EvalContext, send func (tn *TelegramNotifier) buildMessageLinkedImage(evalContext *alerting.EvalContext) *models.SendWebhookSync { message := fmt.Sprintf("%s\nState: %s\nMessage: %s\n", evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { message = message + fmt.Sprintf("URL: %s\n", ruleURL) } - if evalContext.ImagePublicUrl != "" { - message = message + fmt.Sprintf("Image: %s\n", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + message = message 
+ fmt.Sprintf("Image: %s\n", evalContext.ImagePublicURL) } metrics := generateMetricsMessage(evalContext) @@ -141,7 +141,7 @@ func (tn *TelegramNotifier) buildMessageInlineImage(evalContext *alerting.EvalCo return nil, err } - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { return nil, err } @@ -232,7 +232,7 @@ func appendIfPossible(message string, extra string, sizeLimit int) string { // Notify send an alert notification to Telegram. func (tn *TelegramNotifier) Notify(evalContext *alerting.EvalContext) error { var cmd *models.SendWebhookSync - if evalContext.ImagePublicUrl == "" && tn.UploadImage { + if evalContext.ImagePublicURL == "" && tn.UploadImage { cmd = tn.buildMessage(evalContext, true) } else { cmd = tn.buildMessage(evalContext, false) diff --git a/pkg/services/alerting/notifiers/threema.go b/pkg/services/alerting/notifiers/threema.go index 621a04a85d5f..560e8c12e80b 100644 --- a/pkg/services/alerting/notifiers/threema.go +++ b/pkg/services/alerting/notifiers/threema.go @@ -143,12 +143,12 @@ func (notifier *ThreemaNotifier) Notify(evalContext *alerting.EvalContext) error message := fmt.Sprintf("%s%s\n\n*State:* %s\n*Message:* %s\n", stateEmoji, evalContext.GetNotificationTitle(), evalContext.Rule.Name, evalContext.Rule.Message) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { message = message + fmt.Sprintf("*URL:* %s\n", ruleURL) } - if evalContext.ImagePublicUrl != "" { - message = message + fmt.Sprintf("*Image:* %s\n", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + message = message + fmt.Sprintf("*Image:* %s\n", evalContext.ImagePublicURL) } data.Set("text", message) diff --git a/pkg/services/alerting/notifiers/victorops.go b/pkg/services/alerting/notifiers/victorops.go index c118d811b9c4..d19ea356547b 100644 --- a/pkg/services/alerting/notifiers/victorops.go +++ b/pkg/services/alerting/notifiers/victorops.go @@ -70,9 +70,9 
@@ type VictoropsNotifier struct { // Notify sends notification to Victorops via POST to URL endpoint func (vn *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { - vn.log.Info("Executing victorops notification", "ruleId", evalContext.Rule.Id, "notification", vn.Name) + vn.log.Info("Executing victorops notification", "ruleId", evalContext.Rule.ID, "notification", vn.Name) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err != nil { vn.log.Error("Failed get rule link", "error", err) return err @@ -116,8 +116,8 @@ func (vn *VictoropsNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON.Set("error_message", evalContext.Error.Error()) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("image_url", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("image_url", evalContext.ImagePublicURL) } data, _ := bodyJSON.MarshalJSON() diff --git a/pkg/services/alerting/notifiers/webhook.go b/pkg/services/alerting/notifiers/webhook.go index 3c10c50c5d03..f5ee99245127 100644 --- a/pkg/services/alerting/notifiers/webhook.go +++ b/pkg/services/alerting/notifiers/webhook.go @@ -76,18 +76,18 @@ func (wn *WebhookNotifier) Notify(evalContext *alerting.EvalContext) error { bodyJSON := simplejson.New() bodyJSON.Set("title", evalContext.GetNotificationTitle()) - bodyJSON.Set("ruleId", evalContext.Rule.Id) + bodyJSON.Set("ruleId", evalContext.Rule.ID) bodyJSON.Set("ruleName", evalContext.Rule.Name) bodyJSON.Set("state", evalContext.Rule.State) bodyJSON.Set("evalMatches", evalContext.EvalMatches) - ruleURL, err := evalContext.GetRuleUrl() + ruleURL, err := evalContext.GetRuleURL() if err == nil { bodyJSON.Set("ruleUrl", ruleURL) } - if evalContext.ImagePublicUrl != "" { - bodyJSON.Set("imageUrl", evalContext.ImagePublicUrl) + if evalContext.ImagePublicURL != "" { + bodyJSON.Set("imageUrl", evalContext.ImagePublicURL) } if evalContext.Rule.Message != "" { diff --git 
a/pkg/services/alerting/result_handler.go b/pkg/services/alerting/result_handler.go index 7141c6cec46a..814a3f8a21e2 100644 --- a/pkg/services/alerting/result_handler.go +++ b/pkg/services/alerting/result_handler.go @@ -46,11 +46,11 @@ func (handler *defaultResultHandler) handle(evalContext *EvalContext) error { metrics.M_Alerting_Result_State.WithLabelValues(string(evalContext.Rule.State)).Inc() if evalContext.shouldUpdateAlertState() { - handler.log.Info("New state change", "alertId", evalContext.Rule.Id, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState) + handler.log.Info("New state change", "alertId", evalContext.Rule.ID, "newState", evalContext.Rule.State, "prev state", evalContext.PrevAlertState) cmd := &models.SetAlertStateCommand{ - AlertId: evalContext.Rule.Id, - OrgId: evalContext.Rule.OrgId, + AlertId: evalContext.Rule.ID, + OrgId: evalContext.Rule.OrgID, State: evalContext.Rule.State, Error: executionError, EvalData: annotationData, @@ -81,10 +81,10 @@ func (handler *defaultResultHandler) handle(evalContext *EvalContext) error { // save annotation item := annotations.Item{ - OrgId: evalContext.Rule.OrgId, - DashboardId: evalContext.Rule.DashboardId, - PanelId: evalContext.Rule.PanelId, - AlertId: evalContext.Rule.Id, + OrgId: evalContext.Rule.OrgID, + DashboardId: evalContext.Rule.DashboardID, + PanelId: evalContext.Rule.PanelID, + AlertId: evalContext.Rule.ID, Text: "", NewState: string(evalContext.Rule.State), PrevState: string(evalContext.PrevAlertState), diff --git a/pkg/services/alerting/rule.go b/pkg/services/alerting/rule.go index 422148bc42f7..9a4065e279da 100644 --- a/pkg/services/alerting/rule.go +++ b/pkg/services/alerting/rule.go @@ -21,10 +21,10 @@ var ( // Rule is the in-memory version of an alert rule. 
type Rule struct { - Id int64 - OrgId int64 - DashboardId int64 - PanelId int64 + ID int64 + OrgID int64 + DashboardID int64 + PanelID int64 Frequency int64 Name string Message string @@ -44,23 +44,23 @@ type Rule struct { type ValidationError struct { Reason string Err error - Alertid int64 - DashboardId int64 - PanelId int64 + AlertID int64 + DashboardID int64 + PanelID int64 } func (e ValidationError) Error() string { extraInfo := e.Reason - if e.Alertid != 0 { - extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.Alertid) + if e.AlertID != 0 { + extraInfo = fmt.Sprintf("%s AlertId: %v", extraInfo, e.AlertID) } - if e.PanelId != 0 { - extraInfo = fmt.Sprintf("%s PanelId: %v", extraInfo, e.PanelId) + if e.PanelID != 0 { + extraInfo = fmt.Sprintf("%s PanelId: %v", extraInfo, e.PanelID) } - if e.DashboardId != 0 { - extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardId) + if e.DashboardID != 0 { + extraInfo = fmt.Sprintf("%s DashboardId: %v", extraInfo, e.DashboardID) } if e.Err != nil { @@ -113,10 +113,10 @@ func getTimeDurationStringToSeconds(str string) (int64, error) { // alert to an in-memory version. 
func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { model := &Rule{} - model.Id = ruleDef.Id - model.OrgId = ruleDef.OrgId - model.DashboardId = ruleDef.DashboardId - model.PanelId = ruleDef.PanelId + model.ID = ruleDef.Id + model.OrgID = ruleDef.OrgId + model.DashboardID = ruleDef.DashboardId + model.PanelID = ruleDef.PanelId model.Name = ruleDef.Name model.Message = ruleDef.Message model.State = ruleDef.State @@ -140,7 +140,7 @@ func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { } else { uid, err := jsonModel.Get("uid").String() if err != nil { - return nil, ValidationError{Reason: "Neither id nor uid is specified, " + err.Error(), DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Reason: "Neither id nor uid is specified, " + err.Error(), DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } model.Notifications = append(model.Notifications, uid) } @@ -151,11 +151,11 @@ func NewRuleFromDBAlert(ruleDef *models.Alert) (*Rule, error) { conditionType := conditionModel.Get("type").MustString() factory, exist := conditionFactories[conditionType] if !exist { - return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Reason: "Unknown alert condition: " + conditionType, DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } queryCondition, err := factory(conditionModel, index) if err != nil { - return nil, ValidationError{Err: err, DashboardId: model.DashboardId, Alertid: model.Id, PanelId: model.PanelId} + return nil, ValidationError{Err: err, DashboardID: model.DashboardID, AlertID: model.ID, PanelID: model.PanelID} } model.Conditions = append(model.Conditions, queryCondition) } diff --git a/pkg/services/alerting/scheduler.go b/pkg/services/alerting/scheduler.go index 62ef35298fcb..b01618f40955 100644 --- 
a/pkg/services/alerting/scheduler.go +++ b/pkg/services/alerting/scheduler.go @@ -27,8 +27,8 @@ func (s *schedulerImpl) Update(rules []*Rule) { for i, rule := range rules { var job *Job - if s.jobs[rule.Id] != nil { - job = s.jobs[rule.Id] + if s.jobs[rule.ID] != nil { + job = s.jobs[rule.ID] } else { job = &Job{ Running: false, @@ -42,7 +42,7 @@ func (s *schedulerImpl) Update(rules []*Rule) { if job.Offset == 0 { //zero offset causes division with 0 panics. job.Offset = 1 } - jobs[rule.Id] = job + jobs[rule.ID] = job } s.jobs = jobs @@ -73,6 +73,6 @@ func (s *schedulerImpl) Tick(tickTime time.Time, execQueue chan *Job) { } func (s *schedulerImpl) enqueue(job *Job, execQueue chan *Job) { - s.log.Debug("Scheduler: Putting job on to exec queue", "name", job.Rule.Name, "id", job.Rule.Id) + s.log.Debug("Scheduler: Putting job on to exec queue", "name", job.Rule.Name, "id", job.Rule.ID) execQueue <- job } diff --git a/pkg/services/alerting/test_notification.go b/pkg/services/alerting/test_notification.go index 3651fffa68bf..311109ed6078 100644 --- a/pkg/services/alerting/test_notification.go +++ b/pkg/services/alerting/test_notification.go @@ -49,8 +49,8 @@ func handleNotificationTestCommand(cmd *NotificationTestCommand) error { func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { testRule := &Rule{ - DashboardId: 1, - PanelId: 1, + DashboardID: 1, + PanelID: 1, Name: "Test notification", Message: "Someone is testing the alert notification within grafana.", State: models.AlertStateAlerting, @@ -58,7 +58,7 @@ func createTestEvalContext(cmd *NotificationTestCommand) *EvalContext { ctx := NewEvalContext(context.Background(), testRule) if cmd.Settings.Get("uploadImage").MustBool(true) { - ctx.ImagePublicUrl = "https://grafana.com/assets/img/blog/mixed_styles.png" + ctx.ImagePublicURL = "https://grafana.com/assets/img/blog/mixed_styles.png" } ctx.IsTestRun = true ctx.Firing = true diff --git a/pkg/services/alerting/test_rule.go 
b/pkg/services/alerting/test_rule.go index 7a44845614b1..1575490ea324 100644 --- a/pkg/services/alerting/test_rule.go +++ b/pkg/services/alerting/test_rule.go @@ -13,8 +13,8 @@ import ( // of an alert rule. type AlertTestCommand struct { Dashboard *simplejson.Json - PanelId int64 - OrgId int64 + PanelID int64 + OrgID int64 User *models.SignedInUser Result *EvalContext @@ -28,14 +28,14 @@ func handleAlertTestCommand(cmd *AlertTestCommand) error { dash := models.NewDashboardFromJson(cmd.Dashboard) - extractor := NewDashAlertExtractor(dash, cmd.OrgId, cmd.User) + extractor := NewDashAlertExtractor(dash, cmd.OrgID, cmd.User) alerts, err := extractor.GetAlerts() if err != nil { return err } for _, alert := range alerts { - if alert.PanelId == cmd.PanelId { + if alert.PanelId == cmd.PanelID { rule, err := NewRuleFromDBAlert(alert) if err != nil { return err @@ -46,7 +46,7 @@ func handleAlertTestCommand(cmd *AlertTestCommand) error { } } - return fmt.Errorf("Could not find alert with panel id %d", cmd.PanelId) + return fmt.Errorf("Could not find alert with panel id %d", cmd.PanelID) } func testAlertRule(rule *Rule) *EvalContext { diff --git a/scripts/backend-lint.sh b/scripts/backend-lint.sh index 6e7305364fdf..09b035bff6ee 100755 --- a/scripts/backend-lint.sh +++ b/scripts/backend-lint.sh @@ -36,4 +36,5 @@ exit_if_fail golangci-lint run --deadline 10m --disable-all \ exit_if_fail go vet ./pkg/... 
exit_if_fail make revive +exit_if_fail make revive-alerting exit_if_fail make gosec From 76e5657b47a4a85dc39a93806bd4c0a21bdabf43 Mon Sep 17 00:00:00 2001 From: Johannes Schill Date: Mon, 3 Jun 2019 11:22:54 +0200 Subject: [PATCH 39/49] Chore: Bump axios to 0.19.0 (#17403) --- package.json | 2 +- yarn.lock | 25 ++++++++++++++++--------- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/package.json b/package.json index 962c443727e6..b4e4ee4f63ee 100644 --- a/package.json +++ b/package.json @@ -44,7 +44,7 @@ "@types/react-window": "1.7.0", "angular-mocks": "1.6.6", "autoprefixer": "9.5.0", - "axios": "0.18.0", + "axios": "0.19.0", "babel-core": "7.0.0-bridge.0", "babel-jest": "24.8.0", "babel-loader": "8.0.5", diff --git a/yarn.lock b/yarn.lock index 1936ceb817f0..57a84a7d860d 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3510,13 +3510,13 @@ aws4@^1.8.0: resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f" integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ== -axios@0.18.0: - version "0.18.0" - resolved "https://registry.yarnpkg.com/axios/-/axios-0.18.0.tgz#32d53e4851efdc0a11993b6cd000789d70c05102" - integrity sha1-MtU+SFHv3AoRmTts0AB4nXDAUQI= +axios@0.19.0: + version "0.19.0" + resolved "https://registry.yarnpkg.com/axios/-/axios-0.19.0.tgz#8e09bff3d9122e133f7b8101c8fbdd00ed3d2ab8" + integrity sha512-1uvKqKQta3KBxIz14F2v06AEHZ/dIoeKfbTRkK1E5oqjDnuEerLmYTgJB5AiQZHJcljpg1TuRzdjDR06qNk0DQ== dependencies: - follow-redirects "^1.3.0" - is-buffer "^1.1.5" + follow-redirects "1.5.10" + is-buffer "^2.0.2" babel-code-frame@^6.22.0: version "6.26.0" @@ -6113,7 +6113,7 @@ debug@2.6.9, debug@^2.1.1, debug@^2.1.3, debug@^2.2.0, debug@^2.3.3, debug@^2.6. 
dependencies: ms "2.0.0" -debug@3.1.0: +debug@3.1.0, debug@=3.1.0: version "3.1.0" resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261" integrity sha512-OX8XqP7/1a9cqkxYw2yXss15f26NKWBpDXQd0/uK/KPqdQhxbPa994hnzjcE2VqQpDslf55723cKPUOGSmMY3g== @@ -7636,7 +7636,14 @@ focus-lock@^0.6.3: resolved "https://registry.yarnpkg.com/focus-lock/-/focus-lock-0.6.3.tgz#ef0e82ebac0023f841039d60bf329725d6438028" integrity sha512-EU6ePgEauhWrzJEN5RtG1d1ayrWXhEnfzTjnieHj+jG9tNHDEhKTAnCn1TN3gs9h6XWCDH6cpeX1VXY/lzLwZg== -follow-redirects@^1.0.0, follow-redirects@^1.3.0: +follow-redirects@1.5.10: + version "1.5.10" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.5.10.tgz#7b7a9f9aea2fdff36786a94ff643ed07f4ff5e2a" + integrity sha512-0V5l4Cizzvqt5D44aTXbFZz+FtyXV1vrDN6qrelxtfYQKW0KO0W2T/hkE8xvGa/540LkZlkaUjO4ailYTFtHVQ== + dependencies: + debug "=3.1.0" + +follow-redirects@^1.0.0: version "1.7.0" resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.7.0.tgz#489ebc198dc0e7f64167bd23b03c4c19b5784c76" integrity sha512-m/pZQy4Gj287eNy94nivy5wchN3Kp+Q5WgUPNy5lJSZ3sgkVKSYV/ZChMAQVIgx1SqfZ2zBZtPA2YlXIWxxJOQ== @@ -9252,7 +9259,7 @@ is-buffer@^1.0.2, is-buffer@^1.1.5: resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== -is-buffer@^2.0.0: +is-buffer@^2.0.0, is-buffer@^2.0.2: version "2.0.3" resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.3.tgz#4ecf3fcf749cbd1e472689e109ac66261a25e725" integrity sha512-U15Q7MXTuZlrbymiz95PJpZxu8IlipAp4dtS3wOdgPXx3mqBnslrWU14kxfHB+Py/+2PVKSr37dMAgM2A4uArw== From 936308366e2c09d96c6c2a87aef906c43b6e9a08 Mon Sep 17 00:00:00 2001 From: Matthias Steffen Date: Mon, 3 Jun 2019 11:25:46 +0200 Subject: [PATCH 40/49] InfluxDB: Fixes single quotes are not escaped (#17398) Fixes #17397 --- 
.../datasource/influxdb/influx_query.ts | 2 +- .../influxdb/specs/influx_query.test.ts | 20 +++++++++++++++++++ 2 files changed, 21 insertions(+), 1 deletion(-) diff --git a/public/app/plugins/datasource/influxdb/influx_query.ts b/public/app/plugins/datasource/influxdb/influx_query.ts index 3ee9d703c54b..a655705ffcb6 100644 --- a/public/app/plugins/datasource/influxdb/influx_query.ts +++ b/public/app/plugins/datasource/influxdb/influx_query.ts @@ -146,7 +146,7 @@ export default class InfluxQuery { value = this.templateSrv.replace(value, this.scopedVars); } if (operator !== '>' && operator !== '<') { - value = "'" + value.replace(/\\/g, '\\\\') + "'"; + value = "'" + value.replace(/\\/g, '\\\\').replace(/\'/g, "\\'") + "'"; } } else if (interpolate) { value = this.templateSrv.replace(value, this.scopedVars, 'regex'); diff --git a/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts b/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts index f8e65c21f2d2..ad76ea5309c5 100644 --- a/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts +++ b/public/app/plugins/datasource/influxdb/specs/influx_query.test.ts @@ -139,6 +139,26 @@ describe('InfluxQuery', () => { }); }); + describe('field name with single quote should be escaped and', () => { + it('should generate correct query', () => { + const query = new InfluxQuery( + { + measurement: 'cpu', + groupBy: [{ type: 'time', params: ['auto'] }], + tags: [{ key: 'name', value: "Let's encrypt." 
}, { key: 'hostname', value: 'server2', condition: 'OR' }], + }, + templateSrv, + {} + ); + + const queryText = query.render(); + expect(queryText).toBe( + 'SELECT mean("value") FROM "cpu" WHERE ("name" = \'Let\\\'s encrypt.\' OR "hostname" = \'server2\') AND ' + + '$timeFilter GROUP BY time($__interval)' + ); + }); + }); + describe('query with value condition', () => { it('should not quote value', () => { const query = new InfluxQuery( From 95012271ace105215f07df29cdb6015b93afd99b Mon Sep 17 00:00:00 2001 From: Dieter Plaetinck Date: Mon, 3 Jun 2019 11:32:35 +0200 Subject: [PATCH 41/49] Annotations: Improve annotation option tooltips (#17384) * should be 'Tags' not 'All' * fix poor wording in 'Match any' tooltip * add tooltip for 'Tags' input (untested! and can probably be worded better, i just don't know enough about the implementation) --- .../grafana/partials/annotations.editor.html | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/public/app/plugins/datasource/grafana/partials/annotations.editor.html b/public/app/plugins/datasource/grafana/partials/annotations.editor.html index e5a67d6a7dc7..c1164f7f8c74 100644 --- a/public/app/plugins/datasource/grafana/partials/annotations.editor.html +++ b/public/app/plugins/datasource/grafana/partials/annotations.editor.html @@ -7,7 +7,7 @@
  • Dashboard: This will fetch annotation and alert state changes for whole dashboard and show them only on the event's originating panel.
  • -
  • All: This will fetch any annotation events that match the tags filter.
  • +
  • Tags: This will fetch any annotation events that match the tags filter.
@@ -32,10 +32,19 @@ label-class="width-9" checked="ctrl.annotation.matchAny" on-change="ctrl.refresh()" - tooltip="By default Grafana will only show annotation that matches all tags in the query. Enabling this will make Grafana return any annotation with the tags you specify."> + tooltip="By default Grafana only shows annotations that match all tags in the query. Enabling this returns annotations that match any of the tags in the query.">
- Tags + + Tags + + A tag entered here as 'foo' will match +
    +
  • annotation tags 'foo'
  • +
  • annotation key-value tags formatted as 'foo:bar'
  • +
+
+
From 4fe6eaa0f7a64c6d6c6386c56dd155157ed79eac Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 3 Jun 2019 11:41:41 +0200 Subject: [PATCH 42/49] Explore: Handle datasources with long names better in ds picker (#17393) --- public/sass/pages/_explore.scss | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/public/sass/pages/_explore.scss b/public/sass/pages/_explore.scss index fba4a7333462..c06af5864c76 100644 --- a/public/sass/pages/_explore.scss +++ b/public/sass/pages/_explore.scss @@ -22,6 +22,11 @@ .ds-picker { min-width: 200px; max-width: 200px; + + .gf-form-select-box__img-value { + max-width: 150px; + overflow: hidden; + } } } From 20229a40ebb54ae5be3de10a0380e52b12ab435b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20H=C3=A4ggmark?= Date: Mon, 3 Jun 2019 13:39:36 +0200 Subject: [PATCH 43/49] Build: Adds e2e tests back to master workflow with better error messages and with artifacts (#17374) * Refactor: Adds better error message and removes chromium download progess * Test: Adds e2e tests to pr builds for testing purpose * Tests: Changes path to screenshots * Tests: Adds failing test just to test message and artifacts * Tests: Removes failing test --- .circleci/config.yml | 10 +++++ public/e2e-test/core/images.ts | 39 ++++++++++++++++-- public/e2e-test/install/install.ts | 4 +- .../theTruth/smoke-test-scenario.png | Bin 29750 -> 28317 bytes 4 files changed, 49 insertions(+), 4 deletions(-) diff --git a/.circleci/config.yml b/.circleci/config.yml index 42a4d1bb2b91..1d74e6e64316 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -91,6 +91,12 @@ jobs: name: run end-to-end tests command: 'env BASE_URL=http://127.0.0.1:3000 yarn e2e-tests' no_output_timeout: 5m + - store_artifacts: + path: public/e2e-test/screenShots/theTruth + destination: expected-screenshots + - store_artifacts: + path: public/e2e-test/screenShots/theOutput + destination: output-screenshots codespell: docker: @@ -662,6 +668,10 @@ workflows: - mysql-integration-test - 
postgres-integration-test filters: *filter-only-master + - end-to-end-test: + requires: + - grafana-docker-master + filters: *filter-only-master release: jobs: - build-all: diff --git a/public/e2e-test/core/images.ts b/public/e2e-test/core/images.ts index eb4ca3538d23..2897ba8aa2df 100644 --- a/public/e2e-test/core/images.ts +++ b/public/e2e-test/core/images.ts @@ -23,8 +23,21 @@ export const compareScreenShots = async (fileName: string) => return; } - expect(screenShotFromTest.width).toEqual(screenShotFromTruth.width); - expect(screenShotFromTest.height).toEqual(screenShotFromTruth.height); + if (screenShotFromTest.width !== screenShotFromTruth.width) { + throw new Error( + `The screenshot:[${fileName}] taken during the test has a width:[${ + screenShotFromTest.width + }] that differs from the expected: [${screenShotFromTruth.width}].` + ); + } + + if (screenShotFromTest.height !== screenShotFromTruth.height) { + throw new Error( + `The screenshot:[${fileName}] taken during the test has a width:[${ + screenShotFromTest.height + }] that differs from the expected: [${screenShotFromTruth.height}].` + ); + } const diff = new PNG({ width: screenShotFromTest.width, height: screenShotFromTruth.height }); const numDiffPixels = pixelmatch( @@ -36,7 +49,27 @@ export const compareScreenShots = async (fileName: string) => { threshold: 0.1 } ); - expect(numDiffPixels).toBe(0); + if (numDiffPixels !== 0) { + const localMessage = `\nCompare the output from expected:[${constants.screenShotsTruthDir}] with outcome:[${ + constants.screenShotsOutputDir + }]`; + const circleCIMessage = '\nCheck the Artifacts tab in the CircleCi build output for the actual screenshots.'; + const checkMessage = process.env.CIRCLE_SHA1 ? 
circleCIMessage : localMessage; + let msg = `\nThe screenshot:[${ + constants.screenShotsOutputDir + }/${fileName}.png] taken during the test differs by:[${numDiffPixels}] pixels from the expected.`; + msg += '\n'; + msg += checkMessage; + msg += '\n'; + msg += '\n If the difference between expected and outcome is NOT acceptable then do the following:'; + msg += '\n - Check the code for changes that causes this difference, fix that and retry.'; + msg += '\n'; + msg += '\n If the difference between expected and outcome is acceptable then do the following:'; + msg += '\n - Replace the expected image with the outcome and retry.'; + msg += '\n'; + throw new Error(msg); + } + resolve(); }; diff --git a/public/e2e-test/install/install.ts b/public/e2e-test/install/install.ts index fa71acfb7851..61bfca0bfca1 100644 --- a/public/e2e-test/install/install.ts +++ b/public/e2e-test/install/install.ts @@ -11,7 +11,9 @@ export const downloadBrowserIfNeeded = async (): Promise => { console.log('Did not find any local revisions for browser, downloading latest this might take a while.'); await browserFetcher.download(constants.chromiumRevision, (downloaded, total) => { - console.log(`Downloaded ${downloaded}bytes of ${total}bytes.`); + if (downloaded === total) { + console.log('Chromium successfully downloaded'); + } }); }; diff --git a/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png b/public/e2e-test/screenShots/theTruth/smoke-test-scenario.png index 8ea1294d4d6c1f03d12959ed0b0c80b587ff8ae9..8321635023004b56394ec42d8ebf0adf531bff71 100644 GIT binary patch literal 28317 zcmeFZXIPV4)HWDIUqO+bot zr39o)?r$tj7@u#ECn% z6&@iFR9FP!u*4rn!C$7^zuf{qDC{38-$Z0Jv(F$9+=x30Hy*o0&kvfrbZx{;typt& z|Dhmu_s`omjyyJ-h_`#_7o(E-t@mBQqqg+SD>)^Z9N9RZ!NrcgMX$1gh5IJiDwZV+ ze>fC4)i$51W|?!9&RiE`?|69X^QAxS9-cnIJ!QZ7?Tkj+?aTE<)$*;mjp|BLGwEQv zZf?_)h;deY^5lsQ5D9v4T^9JdgV#Nc)N=6~=0)4_j!@Lw#zaPS`<{D+CK zBm9Sn|6$^PnD`$i{>Kjgw@8?fJ57OT9u;&jygWvo=z5DT< 
zCs=MEbCDX1#QZ_Q41NrGSAyT_s(&WkBQ#s1=a$>Q1Kq9Si80;F#Y*$jQtOV z_jKL*wpEAvFr1nv4pyqn`J)p2U`Qw~ma-~c5zYDd@{WSS{*H;Yf6~2(nxQgx6duQ6 zyG6T*3lJ<`eo(QtV2$B@9~Fh#S{xKv8gdme9QUp2o=xe(lXjb(QH0u{9M8SYNLh0y zU8mlp3qWp5|;q~1DaXCZXLwe7Fh7c1Epzx0W zh;#B-T`0tyUHm$wyb_k}am{JIhmJSa#jvrgFlTVf^5siEp~002Q!A^*)U=8zEs;V7 zNUV_si9@Ww2?al>#>wT_Pc`?HxjWN*Vqp*;wp)|lCHioB@L82l2pLY7cqj$z6Fk z&YeU=;=C*^S0;oo{AT9ns4DLhK&|MW4{U5xVzt9)0$W=)rlQc}@2~$z_W1FWCp%Zq zAtbzKXKyH~g6Xlj-FTXdt8;>^!v?E+WZ zGS?UC!3;MpMob|aa!iLRbKxLGY@uFo6KkA?ndoSmPa1hBSs!u1e@iQvV33`WQQ-u=I#jdmwS{##4|*(RR))3h301bY2XEg5U$#<*UF zjY1Vcx6@!i>j@Z)^;#!R5DdRXGt$xJY;J!FCD!PLlFcA)3Z=k#R?`{)%Iw06pd{i9MofWEK5_+M(f#fIoKy) zjJz{9lw=&vI*QiTB-BnQmmO?3CP%vUe>^P++!;|!4~rY%(tYHbn(NILtu2QJHG@18 ze5}@LrM>M=N^OJ<3mDg`N0u7eEJ}^=166KS^+OZ2)g>#k>uD19j1d|WLmpFU+#T$f zaOcpPF8VmG!{JJsNoP^1a^g&sH7ZqWOc@0rs|(Q zalw>(ea`D^q5AH&_r7EdF%S+M9#aWHu~p0i?~~h0`H6DHZ25{|djsAvZunpZl}k&NV;uh2Ile7%)H%$q}Pjr2i;_Pz{b%6WbP^R zv`#6*({IVrqmprf2c4eM3jn+j*sMEHrsNB4_ofZJVRAC?33a#guDh&UI{&P7OvlU; zG?~K{SbV>ZYk@(E_8Q^iWlq+^^!K=)GDZvwm!DsPw|D{`b1aZFBAH{>=H0WioAr0u z)Gp(d9N-!fOJyeE^Rhr+SVPzVF&S>APomMyFINV#yTbR&BII%}H5a?weoQNyHogQU zvA40%4qEGHS!?kLUmK|1NjrI-To6lpOAT9wK8Q%mLrA)iUA@`$lY{8-(TnP2wa}72 zE7$fw_4n&wAR_53?8<+?@}jD4-Rh_nMPhQM`B!G&H?7PD=orq&1zvW8THdXGuH5Y3$5PMItGJk>u<$(I%y+n z-g?`r^_G~lse^G?6>OD2AkW`t_>ob{|+jKqJv2a7%^Mi zI-pavl$|~$&S0GZjEVPxRmCyAFVKved=iCf5a{=hT`kZ+=A7MKpJO!9_i!Vxc=9~F z&ya(Z^&qu~lR*=t&6{oNea0)LVIl{r&3@>V^(*9!-helH65eRDw)T3U^Y}{d1CfpJ zJBlq}NraRytaS>dilq!wy*)RJp=GefP^g}cGybQtfo}s_?LXQ;K-#FZm0I~oZ@A8N zBG@=M^OD;SE+gXVWKW*GLG;$psG=$qAhPBtnnqaOXK=t@HvR4fj(WBQcY#(`bN%f}+h?S2 z+Z(bCWBySW+4pL|;_?wdee!+-n{n0w??g>%zN8~h=b3mSt;!aj=l%>T68GDt)-i0nzrRHv{M@d{6MbO3?K%v>N5ck;m` z7gb}~*A~l!3M~82tW6FwMd*%gKfq>LbtK(FO@-^V<+|CosY?xru60+xG>KtPiE}q2 zTh=)hs>dD*mLGIu=rL=^3QpqQR`5c9A%2R~Fhy&So|T20oSgLaH67|<@9gYUY&smz z;kJ<$xWXkPzDVM#;B&j1va>-OQR`Fg;o4Y6NXfx0y#j?BcrA<{gBVei)$Ur+9u+c> z)^oXPg_g7&Y}@`ooMR*3C-(Hwc=J!@%uE3XlHF}B*2V|($*C2_kH{YETuEPvcz7p-mYEg>qR%W5ks=ar-*Dwl$17P78fi-Dd=~ 
znA_#-96d0b!}u>RHSvEQp~ks!ad}htx94dF7j27bR#)tA?I^19zq@9B?xOD6VvlA% zkG{uj7+DGUkv~j<5~P>*HSU+0nSr`MWHv>gWw>}v$0z&R_2!_y_kG)K$U%BQz}h5C ztbj{Nyf5}%$w?IhfMG(y+yB$2O9of}K;R$h$)*Ax#$d7i z+6%QE`7LL9XqCE>j?OcEw|Y-bR&J4K9n1tC;zMO-mWLlg;A((I5#TvBzy$P=$rc1_ zh3c%#5>zirNMSHAha68s-f%q8I081%7QGr&;gjKBEV;QLd1|Wbgb+EL7~tUSXTDx= z{gRtT^)4fhEzozn=~hckUA;*)zsX~A5W7&5$3?Ez=j5cewO3e+VVu-;UDrM)m_Qz4 zeFI7k68^=ixPY294W%#njIv4hdMlRXFK@TC+txgw_m72A5<5K3h5A+t7ekn*0BSII zG9ks`PLq&@f0v^Us<9*VAJOHZ`CO>gUjzo#;LvEDmPP}5FQw?yAjv|CKp?g`AZ#D` zBLY4Dr>WcWSBcLg`8M_9QiDNa)OMn(0fVgjjtkUA(0ipi4M`|GUfMoZ{bsNF-h*rg z*`)qR(dY@qmT8Ua6{S}Id#H_E4sC;l!sY^6jzH`;kIywj zAciBlvlI7K%08Rwt;?pV(p1>kz@ZqQ0i!XUnslzzQJPsU^H^Pj^|w~$D+7$SZB#7* z^i-Stu=3wtfwsVX4&YsF!dC(R*tP{(xf1^5p{r@r5HWgK0AXGH{DaaL4jJ3M0Xfe2 zA)qaH>B4#c6ilvEvWKAsdBw}nit^fQ4qg1Vt1P@Vvp{Eyd~>l@ILbZRT1je zAcuIH4^oNY%0QT6%Wac-ua?@>2JhPX6wQCe_`_o|1LQocWD1{o&i56UO!M>0%H{EA zt$TuH5Dye225ZFn^oW;0>BtsVamEIEB&&K20mILWHENULXh zdg@$bTql_zgU1mcFG3bHMm0X+K`rD?_6!y0P4gSSZ1>L0vFfcYa_O5+NCCs-pTavl zbF^zS_&MPy9-uAon2gM)MRL0_KgGR+(#c8kBb-f*KV&`Ez3jAZAMx3=c{Nhl(qTPQ zimZVUSm7U?zlHG_R#_Va0FK5a`+9h%Hv{nGxeIV6#X3M*;?Ac?iDfo)^y$U}&_0|Hve->CguSZs=7(7?z+Av0= zwD^U+1=>}oOu=XP}AGp6MLR0HP@wrc|d;CLQ{xz|V0-LfjwT7wR%eYZOx z_NKI@NBOk^ivdn1UCD%L{z{TD-xV%Es>yc&mQGwzUVX{O6NVu6e5 zktjf&4-tM5s&)R%S$S;FjrnM*n3ej;+sq$^J?GG6v5MknmXyC@gAaz0pwZDuQ;9%S zVy`A}BNtsh%M^H+IL|Uz{lqc#8?HwCsaY-RBTA*t)m#bt`J3$h&Q~3e7mm~jU8~`V)A?Hp{1cJ;Hx%8> zr&g6u1ifC$^c6497X^c+jqo!PMMTp9;)kzk5DVmMn0)YOVoB^)8vW5h@aLM~+EQuzIVMbU_ayak0>hTT?g%m8ZuQVQuET(24f!K-!^ z6ud_HXRD6l2f5xe1)1wSO0i)p-8M?xq5wiwJ`jck*S3V#(#n%{JX%=4LqFJ7iZ-}R z8vot_MQsQpq$l~V_l`>)MQ$@@>_d(+K<>5!@ss$VBD%1KyXvqGYDM9xUbc?h$Qehi zp$Oek>M5K}?1!%L)P@DBz%sEk!7?Q=rkfj9wW}bf{@zf%t~5AxCZ!pyh7EiH zd3A1cpfoQ&h)sO#;aW}y|9YYTQ?^o-Xp=3;WbvPA8sKSS`V0&hPPDk}iZgNaI?6P& zmz<*J{B)bVQawB*EDT<$|B*>Z>*i8n?OW-KO13+HkvC^c-W;Ev)@d)FlQ^$_T(Zj9 z94@r6z07*AJCBf%5Lw|2u)?ot$dvP4&#>Knhgta<@!qg=D*Pi^YC~A+n+w@j{DTav zacte$DJo}gMN7CyNjb3xl_14<$@9>_^QKt9?I%HSv?yOSn1BEFiiU 
zaiqT1B^b6Y7IL^c-8bpSQIAJ`dn$Gw&N5sb*vX=9+pi+#%+q(0{f!?MDMv#_pMJx< zAgP)A-u$qK#Oms*;pWeuWN`xFnQjMg>7D8?A3md59D`2YVvRHRbKPq3&8bNwN5N?* zP`C*Un)iKe)s+QFv+ktt88wowj>$naUqqGL5tm@=^ zX~My9u{6A~b!k{2wZ)yf0aLUqMpp{M?|58$-lic(qTliCBvLC@3d;DqVX#!pilTVM zw!oFP;?ZZb3`en+qyg6UGxz=P9#Ocu(fn=0L~dyXn`ud$|spGH{7GfhOp)9TJF1b)*@@aDcAYsc4)_bTS>VURPm!RctSJn<0N>o{$|MM&KKG_KCp0kp)*T!^s`ld$|^nIN`IWo)W^NzyC;G zW(>}}G|yn6-T?rEw2gl&R}W?|FW=)jvhe@5=AbYCG?P#(S+I3^xg znC~cUeSm*WJ2`se;OHf0J^Ps&>AE#;ihgN5%V5`$oPR}x^g&xq_~m0*rfGF(#rWY& zT|IiX*r8A*X+DuLLGSJk)0|IMkwNoiP`t5284Cd zcgAzn?uNVH|K@uyA_x1+z*Wd!T7?#iFdFPgn~N$A(EuWlD582$&W}ByY6LR^lB&{D z+-_hwaxaTzWpb@VJ*Be6na7YA5fupjhcJ=_ldLChVMd56g(RXiL3)_zm?|YmS0@nGjgP1lN2NC zPYl}no7|s2wTeuid!QD&3E8AMj9+h_z|d~S=g-1Y($b(NUAt|7+EJdr!ur~_`#1q$sVij{qZCMt82Tzy|O*DrSE$$La=5l z7ECj%o<6*rk)AH#9}tjORwnCz&!153M{m97iQcb&j=g`))Kq_9hVcg8uPl~FtcTfji%n;0(PzJGI;9VlZul)e?wsv4`GcM%*S#cHiJwy%s((pDP~J4tI@Cgxo-^cQL_ ztk4?l?eXyo`dw2+zWK~B()Cs-mNlZ;0M{&AYjD&Xq{iA$$Yr@_V8Yy6`ZsR8il8|p zPkWr2H%SFicN|Jt`0{Oe4?>GR@Oe+OAEkFhh`0nT(nK;)W=MN+G00~isZJdOGsL%K zKw;Z8e>;Z4bFmed*yg!uH(*Q3fc3si4a1V0$l4;m?ZS_Ii2*y1Xa>$$%T-AKSiJ0) zz(OEbQk7j!=U9O;n1!0t+h5_Jyuz>Z__V9nS^~RNeqLt(})R- zUpl!U)Ij{t7}(uw5ZFt!=_`tq%I1UpcmFUH|I4_}gWZHw%nSGGppFZzB>laTD{6b& z3CvO+w_)gV34mmBQBD@(!+ogoQP}Hh?%)q;yk}q*s4Q0~<_OgTr8N+Gm+}a38Zd0M zKzh0pBk3l$+2LIMcW@J#Q_($#(#g;(qZ1`R&0R8VtPX`o)!9OLI+C2>;-OZ0unr(R#I_6B z$?T|l8BUn;I2f193dT{?h7D_(whODBU>;k+*b;VY4|C0o{f-#&M9*Ph*1UU(w>B~% zG{;$8d6uD0i$fY$Q)3^28Y(eKhVm1)GHhgh?Ho6?4kW;X_3@@DXMA8ua&>iX4tb^< z@Jz~Eo(}pJq;Eq-9;0``RLAGy%>19IUcLk6aru+54^*^%TwM`C&-R#_OGHW?$*m2V z>n8Y+mL^@c^8(Fn5GEnZSH7lOHs5YS{n90eX6oH7N^;{NhQ;=SE z0UpQ;6slo{b~RuK3kv494}kqHga@9gA$uA?Qb|qjU!-(5j7h!V63FW1=vYl53$OIIRbhK2tA*UKX!mR4%>9>$ni2Ihx-*E#+7)o!7>WYuB8gt4$r z<359&Ut0dceU^qfgf(`ME?`K9C6PA}=AxAdW7unQm&?L(NI^Wd<#V9AVE){N>}5!o zn}W0X04Jn)?sMmA!vk@-uP?Oiua$CvFGIS=vgg}vB)AODnA9<6?{8hn0U8EojwX+Q3I!~?*p{9s zq57MsZ7#uKmSF^6y52GAGVXT)rgn9Pkf!QHc#f|EMj?u7aE?KH-?!hb>opy8;9K0A 
z-iH-x1XJMgNVrg7O^G|JzA)HB$5Tra@XlG4?4)NPhnGK@_UTH1C|s>}P`b#OiC29;;`#8QQ{JG8d-PcGdoPAdQ9H z=z2}h?)O+N!1ZCE$|ajbzUJplo9E;sc1u|>b#Cx39 z&s|P}m$f?|@ZF(TSB85zCbk{~e4jZ&j^{&=4wy$Yva$dcOA~2&Er{xXvF!~}0fdro zf!stG+f;0AraM!9TGZnR(L97Wg7-BQWR-j_A<0usSLEzPO-3>X-Gbuf8RAm8QRbg)!fIsn(}>35I-=H(!3Kp575{N3~bRBP)ETbsSM4*p8Zt}Yv4Zzh_6 zTxs4E7#qDH*L;S63;npESSiYB-V-hJz9mRuaU~oZTy$V*AoRylBy4t{sj1Bt#{!N`X-18kYMOd7?-sWi=YMYnM-2NCIS=~O`25!pI?T*RRy|TSWCJ0c-lgDT+5CRgOL)C)3cM*ER%wikA z0Ch8>VEK(yD<{pxZJtak%9KjZ(#r&(d+Q!lL2+i-f*PBq&6i&^KY zRNr+c;CokFaZFoE_4CKn9!ZM8=t6`zFhV=k$?-u4@Vxy)+4~D@E0rdT{x@#zcX9!= zi`drT2JHKtJKEJstl|q&Fs1^8cb$!pY$rpoOwM^2fQWVQG#^cCw2l1Ae>`Z@w|7NK?Hg5}rE_Vcsp=UX`pYUq9oZN%(gpI;tYEeHO z>8g2W-sPpiEW7cY0)$ zmV??JSem?%QGbcjHHGL!8Q?{A6v+KqHpr#ue>KQjso=S8C^6vz{V%i|?$JI4dY2qe z0u3_vgJS&W(+qc+(02;`KNQsW5u$wAxbPuNqb@xzG%1ZMv-wXw~J zv%6%iQv;D&gD(jP5F&=QhFm7XG|^*o!E&2z)ARFzsvi4qY}cpTT-I}x=@}2D1+t6F zOlCyVS);90fen2+k!4pFiFF(zpm_*;))Ew4yY<^E1VUcLR>?|@(7qx=Apf>sOZa8& z(=7+>-Guqxyp{PpgW#llWnQ#)P5zzN)GDCb5$iIOmaMh$7cu~NsF47)L9Y+Dd}_H! z^;|UuqzDo#xG@)ya7Pij;`;pga|b6UB?X14&}P2Nm+yxjJAV}!Pmmu+&#O|pu=QTHWUOH85Yc4FCmDaBSqv^xz60~2zo%J7 zpbn@^ln=t%+C0bXK)qyFy*TLp2;6n*Y7pS#TlS?xtjB-0c@&4P;NrsYoRcHY4EwgB zl<*O8yLb1&g*ZlVk1t3Fxe+Eulzr8x{?)_n{$&oXs0yg3TG?{#JjSMU3mmT_5899b z+_ApC)3Z|Vr6>?rV?k9GLeZ7sdhxFRy)mxx%`LS@x~I)!(H9+0+eDoJMX?z=?Z-B? 
z(U9K>BffyJY^;ZuA+uSrjQqU|z?cYqcleM9;dSFLx4tUB5+nN1|p4~t5 zq7fe_jy*$6zvBhzBt9-oDE3T5v)vQ-PyDzCwK6M(AWn-#e;HemPCR(W0m#izC{slTL$HSxnNy%4 zD}yQ?5Eei4i0oR>lmZb}%EK$Ig8#;WC&bb=-GOK7CofsY_LiwZPL1P4i`y8D*w0ib|_xm9H_d5qEi zk)yeu*6cXn(o|iI_pr)Ia0|Ea&PzvIK>SWG=tzTI4P1ZFl3yL|+yvTciQsnx;uM5@ zMV4kj!Cfi&W|%N?J>3-mg`4wMb-n<|NMC0rvc(h!O_-F&lLUc!>K;IXPxHJH zphk66hvsDG@PGv$3CZ;`gX9O-r1C1{iPAhh)Wl?<7QMV3Z=b5Nazjqhp4Sty+&Eb7 z$}#07UrUgIB7}5-u|w#bDxA2F`{Na6{%I>qk`0zx-*uA{RYvwPIdyh0O(0ndF z2NUaSpd*aX{$Oi)uhIS4i@+qSHacrhd+(F0zS_UWVdi;3H%DQsAyhL2@&q z?yk>M9MpsrA$`+^)p_(hJmxROr5KLA>s&Z{Jz%<0bsd8q`u6mD38s_Va4w>RweI$E zU|d9`toZy>{h7>;F?YVKa;%6pYI?1avaH7Z7xA6FoVMBnWkerJe4309q3| zBtaZ&8{44K2XVt~tO#jfJ!<>Z8w?w_gZFaaJkO1j(|U1hvzMWi;qU zIaLgCqAue6spRZ6X;@|Wq_(1O#~hJI0-TUS-e2;tEp&}NQK^uHBDdg<#8_tOv&cW zQCbf6PDe(yt0ltMGY9FY3LLl^+j-6bh6a;{Khw2so=ZW4mp)t)wN~4lRs|XH6Jg*lZKnQtELqkKrL`AXiEa{~?^k5z_ind0) zV%uz>$caa8M>E}Uh?w_?7ieTaLq(oaUPyxC1ZBey)j<~MWaHaHGoE7n@6ysztz{e6 z`vzL<59XYdpo6kIh=KFG@f7hUvB1T-Q{G;=Ob7(|E&zakqtBa}F)J~o{ijf3 zK~D&A`OiLKOnU?T#_Ih)4sgp!XVY(XdRBsdFX|+KBJO~JmCB4%EtxCOq2yq8eo8Y1 zcQp1Wc4!$;HUwo~$5%xWJCE`eCTvszk;-yqfwZ0&DQ55CF->yOd@BgLn!vwWc_8Ho zxg287?ZzLhe%Ri`xLwK8FAfJ0boqL*jlyM@)N|nYKH!0Q6nuQ~24VL%AbIg700yUz zPzoERT;Pi0N(pwt4?Um)`V^MGA1QYo*kEj|_a1P|3(f4ZaZ<;)Qve@{H}P6qTO)}% z9!3}yhw>gR_V)U0i{qN2&;Vaan%Pt9+_M z_4Ly65JzEjYdmHOqI(|%ERI#=A`J? 
zTTVrLx9rDOYs|@~tq5+=81*~iI6>EK4Ni=kK*!Hw_B=KNbPBPtCz@MYNbMbTh>#g2 z9DA4)#q>AO&{q5Ep(K`N3DHdOF`$`&35m}Wx50Vx1yE29@bL=;LdOkZ_do-m!YFfb zaV?D=oL~Qf?j4ZD#q{@l#0fuY(n0u zg1<<2HFy#|FTtT)ilW2PI3=%n%pa(^2MTa<&=S+#3AG!(iI%*NOeo-c9P1VjXMs|S z80nK4tG~D^=BzO;5sz?7&!fv^V~I!u|NTFouk;wmF*DvvPJuA{Xh`Xwe;%UpLBvzN ze*Fd%P1Xc2(w@J|!ct+)S3%W7h!9T*{{d!;fL=M+KW@`fS)G@{=u4I$33&`53;4FY z5Ht3y=oJqmCmK#E3E0Mm5QRn#qjg#jHR-CnY5i&)n`|wX(f~5CK-@|e3fk!-O9<~q z(JUi}%O2LoAP0lcKv?xoEDG4#n43h;6`yi0&%Oj*ZXi&p;K84Pn_l$?pFR6GrPayM zVm8ZD4f1%%ogT#Q{jHgMP`_%xw-h@57D2D3P7l~$%`zy9l{&%A1Bc45_zIsb_LsoS zK4Kr}dIzl>0FJZqu_vfd)jf18i^HX&-ZUKS;1m|*!;okgv-lhlh;jeEzP{}!A!4Cc z$s*ed7m0xJWfA=`v}@5ADY8;iv3*l!tI2WYwGdiPDVQ_nGsD&XNIxD_muI^nKEM*P zd_jLn8c!E-hnB~BIexnulNIoF>C)T|5*Wh{vi$Cy1^)?9^LvPJy04>hK9tKHOb3&; z|L;zDUpIn!2wd)g>LJ9m<`to(fc!Yjg<8g7zu^5na2z9@@)0uh)wj^x^-OZ4nw~;y zjBc?5zy9_RwVIgim`>ak?cDRR#FT7&Z=MOE*QA0IoRi+sI{wZa9G#E#l&gdAxDP;N z+POPhKL9H1A(~`-t!8BilP3d?i8lASz!GptaIAF6vw|VJd~;TZR01FPk|y+$W0a*5 z!7oo(jWW~-$_h^`wJkiKgYF07_U!kW0H__?v)>dBoh(yf_J)|nUGGB~32uJI$7Mc! 
z!W6(Hgsa%GHgM}Vum?#DefiHuae$%&q<1B#D$r)97{N%oPFs|&7Ft)w?rx@n3g2Ia!kZ&@-M*cLbS6Kzo4hI7&HQLvTnDYn z2_+@c$B!Q$8zz<@{iG*{>G}&TgIh2M)V7=Ry*dZ7XQQC|HlHSp)gE0t`;_fu&5$>4 zTF#x{@eIN!a|fjI#*!2TLSHywfRR#E@v8sE6SU5&xUh%_rm%*4iJuBCgIbB*l1)0n zGab`Pfi1skG2RN1w`1+KN`yWR&^6BiSWs9EVF5%^0f2n4eh{f~Dy$(zxn^vpO=FlY z^8|GZf1*2Nl>LAOKeWzkzI(w7A=MU!Y;uu!e>)=rFr@VeXiYrKlUsX>*=n0dk{A^+f+ zZpj!|1O@k-x5LucCXu>yO+pJnSB62cNfoaQndTj|{x<|onJA>2r~0f<8txIsKiIc* zwbChXIwUn8LbwTob{wkN|EH;kmC5}6=mD^=HB}LS>2yGio0Uh}Dl;rUA74G7h z5p_;Hc%1JM$2`OFBD%IOS(2cv2nCN{`CW8U#fmy!VPf@XWSNoMmriE|^IOW%l1_49 z`BwbJ`&R&0kHCioXeQd~-HVp96LbV0jUsOvQT7bs?F;pnKK11?zeh?y=b!~a9KbMG zwD4Cu!$wSz3@BV-rvLM6L)sfGJCqQAl|tj6+US+Q%DJgeUa7gBuHNU#v`K+2M2QfA{?LMc@7%wfxe{fB^V+dEaX5Yo+TJG|&T^RzHEpT6?`!J zE1vErvw=$*kg(BxKP6nZJ?6pbtB*5>DzNWz_>EgW)!&O(!I=YdRM(;}rH&M!GTP9I z(65lHfBfbLUK!lIjdW91n@?NvE!h1PQkDIzl4ym;-A}50MXDeua43kq9dp=-?sr|a zKNb`Uk_6%Obl~>;V?@H1-=*r%|8^0^M_@3R~%%r-N?iSRV_;Sqw zO8nFI(BC_0H9@zfAGKyLtv-cd`SIJjGuDZ0EDmyvV#Cm(RIgU60ZDaeh zJ{5?8RwZXtTt8#K0XSBqH2U2&k}K}VXfi+mg#Hm}YB}u-;t;ym3q6a5(gE`2*snJ@z#%tO%VGo7g6&b}1vTWb2Wo1I*+`u*;j zo@sX4FB`Sh&O`|52Lb45J9uFN3Jq+A@XM}kP*E#__K4v1OiMrNl>KG)an|4E)(DD{ zxp>L^UZR=yOohxse=D5{F7_Vqu#hn5M$i$U>{ppSi6B!Ip*P%S&&S98zVcE>V97;% zGypy+LsPJ21#-;}u6);cwkNthakU^$b-AqV$fnt2(V$oVFA59Om|=0n|I5Xbv$Xq22~%w>d7(@$Dxmrc?n{*4+MdkyxRI~ zgyDLW8eC(8`r*`^g27{}$Dq;@K4d;tbON-|{>8pRQHSjiC|B5<-6J?mfiIw#_T?$T zr2y=!(@wk?=sW8pa1PV)*;Kdtv~s@+aJ^mV80%LZv)1na0v8M!%Mq>irOR1Cr|LtI z4Y;KX?)s71|C&LezEwe|9$^3g*?~4yV-BywhfV*0*d!)RS59`aquEyg9HsQvo=iCc z4qyh}N-9DG2)8In7uA(tJwdGr8WV-Thn0Olkicb{6N73a^#DF zYu6wrgBp>O){A#zC`)hs7E{qtF9swfCp!T0h2Bd1pO_js(*%%QnlqX)Xzpo(E3v#o zvho4kGJ`ZmeRO0%1l$fSjNO#Y-BW;20Gu5{Ng|$h;sh&0C&;ROf8K#Ga~|9raXj66 z@3%E&mLJ53^M5jHFx-DD0+9g-tw9ong2IOF1Xm0fNb1lXS_rLuoSl+X4!QmAB$VwT z`)8q0whxb}ok?VZpn!eIRR&Tt(ZZrW64Ydzwgjk@^`ob10d^vHWd5l)X)i-`DVot( z#1c)$zaXEn?`eyGh9S;t$e7=}oGK6uu9V`zHS4kOGxtDdj{ChyNqK7`cLpqfMl)MC z7|;=@NQUb7tF7y^V^Fs6jG|Qr6tc*65E*wP`8U`fnZH(G{cM~LmZ7)d(D%$JG#nz 
z-OvSFImYf2Ht{DwSfsP8Ij_!35DzP~09H0mc zL7+o4ze1h%hMV^{7_ph;doL*n1wYUzw?lnfdVWX zVfjgFUo*tWl+9;sJv0wmbHuVqPm*V{++9gbuKVRz=wa0b>cDw#Y<=tlr3=VqSeO zsh6~diHNoahrN|Q4}<*p3Q<}r*yE-WxWpXr{auyFSNlsBomCV|7P5hzGESOP9UU7J zFkqc3U!CvS@ETlM7TVl&KY#lcFn<#G=vyl##de24wt%43+J280)z}BW8JDrqcj(|F z2t2|k`I6WNr=skMi-BgE`>#s@3oFZVp zNY)kx*L)c6M%hg{++RW`45KIzmHhdm#&d*gntIInX&<#Oy8u~_oUlLHS ztu|c8Z*M-xvB}_gL*KqqmklWiftW72(tjOqo>eT2-bkbx8|vR$cW2jP1bWwc4@KE2 zEXk%b$fR}~*OW7n+qBY}H4_;fiR&A>1#e#?TC-x~yG#9Cv!5QaNN96_Xj*}P#R$GP z=a>45oS7_SdBMgNZH@SV)Hqq`lyep@LI0drOqbI?Alpql16jf9jF}orZMmK(*B_Xe zi6U~I&|m<`CmehvPoDPh2)%$Nb&m1gWKXW8{d~VD{Tq7ql|3JGvIsGuc|TTG#iO}Q z42tCL@>Jz~ZpGDF2uzyCaLZug*YefO1x3URk?Inif1pu=l9m-5pw1B2DTc3 zOGVsD;!g~LMdshRboi)(Pm!NK!AO;;dqwHx$5Sb;*R6?-KcJ9{o{?_$&tHO>P9z__OdSodRoIK!L$2 zAGASmy^felJGDYNH zwW~}a_JP;bY;WdhX=(Wa;a$cff;S+q8DZd$?bTM)%{Wv1+{MMVVdjyuz-*#2PLWU_ zR+X>RVe&p8bi4kU$4A#M!@)P9kAz;f{5%XU_6n#v65atzCNmFTKSR6F$5w*=EN$ja zy8($k!eHK~(2Z8Q##l6TX#j1)1)pgC^Iq4+}GA zpk4l{iO`!2-Hrv3fwPmYU}d)iy{ikt~vk~{@DWA&|k7*Ng^I@!eM85TX7&f;9G9yN4MpRvo&`@UU& zN;g|JzjJ*oyBG49#Ig~d=JwbB1}F>V@>ix9MuK$hSVoSSjLVk8eH9s2pE>1SS&;QB zMc<~^@ChCrRpCQlVrEJx&0@~14NYzNk|5F___eQHlddp9-(117fJIpc51cUiFr;uO z204w6ReDuX<{0BAA%**#>>vR6i#%AMheK!N2!FH)`-It9|G~`UhR$Y|nwrp2zH9*n z;Q7?l3#N^niy7x*EvxrmshqcJypAeo3nZrWg#8PSUY(rS7FHc5rUmAJ2N4#&^)c!> zCWcazv>kh}+_@~5PAa86nm)KB=so%<2}G00>y^G&`E4>iPY2B0%Dbks&Y{irW+u;0 zo!)}J-+>HQ>DLey^FdK#2^^?4E1~Mx81t-OP+GL$AlhfiRT6vrUp-_bBXkFClq`D7VGORS=wWkrQc<*A z`T(0Pr0V^(L5YDuC%2Dldyn$%FWRO(@_m$ls_CnSm|3P3pVeg`=x%sB9(Q?t@1+Lx z>2fkOY1~3^mN=lG)C7hDPso{CoG}&J76xa2@m&5UP8xJf0#$0WRw8<=GY4PU0)jau zH@|QX`0m~}F}6h&d}R&{77O0oQzOIO^`0g0VX&#}$Nbgd3n-Ctl)a3cBz*~KD%54{ z136{c^!=ZLgq7b( zj9|lBj)SE7?XC`m$D*Q14Oqu^^GIip{T2j(Vkf(UVF*E}g42c7S-fo{IdL-Rl3P32%u?YCj*AAgpRcEsU)+r0H+v~ zV+Rb~B>V@nlR^#OetMnS{k~12C(v!drm@MP`9fBAaZXWJuYYmb=Uk-VnO~tf@|bEQ z9l8CAnT5qK%DVVM*1-tX*vPkAwhc{%>|1?#f?7;#RIDiuoWPG(`85O7B=Sy8n3+u2 zr2UkAQ&e)o(N$06X%J6aYCZ_3THVY%|Dv`*v*P~=txX!pp_*6##2+eM39=4s@!VXq 
zoo@Y5w9&OxVy=7#dgvhNL>m!zr08h>>;hr3RJ9JFKKX09VC^&6v(+0822N!A$yGivV7n;_kr&AD7gz8Oc; z08JpUoko@wfJ>Y!UyE6)tv$g;&>21m?kReX@|Z$a^e=@%gAiJ;yB=R+^r1Oad0I)? zU|edm42eMGq(ErM0C0!LA^rwmyYm0dch4A$*MHt4@D>s!3amUAatGJITbJHN zKw^Pf*LZ+2FRh0c>eA67_ivZ(_zQ$D#9CwerNbBW;SI?ky@6r@s@=|v2*7>b((qX6Sx8bguY5_F7z`Qd>u`<9roN(R4~b33KB<)4sB#P*8&$V zB`jdrkzW_^0d(66OR{ay7dV0M1IdTjj1p$mSsGBDj`n=@3fFUs`?azyZl3)GmMe1x z{r|@bE&#p9XAHU{;J=z&<&%j&zUJ4-PLC~%yi@sn?(x5p*MTQc{P^>HzrgN#;FQ+- z`oKNU_r8zp*;rqBw*684`(4`mejL>Y9?lmh1XlUMR6eriJ@Dwq$LsArU7Rs@Ztb=6 zwim1YOMtCDAn*enxiKwp{YP`9MFqz`uHXBp%lx>1{VmQ_<#!6xV@jV+?PmP(^7)s2 ze{O#(e@gFe*T&x|z5f4bc;1g!kG^luyDK$4zOJ(7k#N1NpddFl_xT;? zea-KRd@lZceC|UG|FbXSJNE0d*526PFAk>f-B$Pa*N!h+rysipJPyde?$u;L!Hap@ zlmGtuTKM2VWBT07zt2kVmy|Zo>oMQ+@axBY-}md^`*m&mk+0YPd%L)-iQKFPT)zDM z_`kou`>X#y@7nLZ_q)q?7Ley2TmcT=CrJK%@oRV7h2QtTJTpJ~cKiK*J)v8HL9?sk z@@&2S=={I;Hm}?N?NwcGDW+5dUi|LEWE_xu;&M1Vm+GHi5%Gmxh=Qf*7bvoUB0!fdf|O# zh6AgB$7DG0_sLmbf)*%cUv69ksabs}0azwATz&B2p&x^sj6{!V_MiWty;>1^Z@)Ro z*p`)iy#I{hKZzZbC~ z0h81PS56m}BeBmC&v5SHJMm(>?E*(0YvTC0P|rbPI0u6^wLwDn26-7#o-E2U0=*Sy*ayF)@BbI0)>$#Q0JD#J@lNs{;h$Umg6b z0|*iR^@D$P@UI{In;ZXS1^*)9zexE1jf5|?jv;~z)AYyQPNl6u<+W z&hJY~v~*^k$@_0yWd)j!CY?JA}mgx*qcXKnG(d4xP4h{U7=WdWeZ2)2HCeOG}8q4#tw4w4M~ckQQo z_}~BEpKhO00SklYZ8k!`QY$(F*2-nS7I`c6ZYbFU}&ffuKP10oQf zac?}{Soh|e;+%&{`zt)`SLTM~?AnvK4xcjT=};~{ySr4kbKkU3$!9*PzhtJ3jNwyo zKf85jH9|z|wvo{lXJ=<=Sy@7Fftk>mbf1M$)Lh8|w#IK=Q?p_9a1oYE%B~?>byHhd zMHpR8k&%hu+i&07H(Q>ccsX(hP3$g6Dw<1o^g?bqho}S zHYRD2H(+}$iiw$}nvA}G|30u(^3s=vD(|^)$$+UAR#xP_`rwVP0cxRm`g{4^uL89m zvodNy-zK|;hy4fD`lc*-z{bs5@WwZzqM~r#bAxdzenv+)F9}L;dj0IbRxC%zF~~^2 zeF49P(djS}wzFh4dgjDbeLq%X0V^ zqK4@uB{DHUQMIcDLwcNH`h9Y>)K!yeG|F6hB0Uaib7f`fi*W6BO%TQ ziPWyiDDEhMzH75w&>5$vr^|PiTFCEOB%+=?dD36vXzc%8EQpo1y^dWNT^%W39~Ef8 zb6D{TY%M;8#CD#d7*-$VxW?y7<18#-^_C8#8Vc&a3X(zIM9sOtjzaHSs-50!BmE= zEP`nEq^%h>kJ*`tiHY){E+&HuFG)nTEmXc)nW|+FZMFRM+Ml7Jl(9NGu1~_UT7JHq zh6xSjRB*Sw5wCb|RPJj4@?m!ldWQo`9Q2wwb0)oX?N49<@7u=4sjH*Hsp!k+^{U0& 
z`P^NGea6zswLWhg7e=bq^NWrFIkI*Ba+#`~0rU!v2p-SP6`eNj)7H_^0S-tQ2M*K` zL`{X9PuPEVUW`YMZ<}7Dt7~BJCiLs4C)%0~J$>eU##SDyxP>wh=XdVhF%60_v@nBT zEX0U{@|Sr=_%9Kufkf(*R2K2~{e1+DV=Cn5<<5HVqMLuIjFg(AQ~UR&{}>-OWk7!eo-i=k=bVLM(wm*vQZiQpA*&XldoL_eoZ(%DN8n78EQTA=r}~ zX1XYSMOOafOm!S}n{BR&D}z8UZj~3p`N6;H>!`)?59Hl#@0J)zY^+tF!YT1`+1lNx zB79C)hV}A9n}L}bPpqZy16~F9Z{>Xgik_EP_!T)Jo13}P>}JR6BzJc@>W-(DmZ&dt*bX z_W%StonKtfguL*5X(?fTq$(pl{mjFM53MKLle>Dor`3MV%gZBEx-x|Q1Klc4K5|A- zSh#sqEcYPvKzs&&5kvr#Rj2N|hme;k3x--W;J>TCKz91S?5Fg1ls$;(HgHsO9UWU2 zRT&b*>$f%=78+Vv8G>}(-O0(HWz~rlk5Zc7@P7aPebS0Ne8i-KdpHq;*K~DtcfJyL zFHN*3^FYWD9)3$x)BMr*msG00zcLi+4lXVj9+&FVrA`AUhRPP?zfze6b__nKL5T=7 zj$R^;Am+XSEba1Mamc@0av2!bXLjTL14ElnPmZG2M<9H@&n8Uh2EQS$@d+zSK)MHR zm8HxCNnmJb=s&6CLtt80Ll0B;Fdfj;(D;NavT}3?{f&t^-wI9v1V;;Tm7frl*@q9Gq^v5u zdGjV6_#{X=dNYv5-Kq(MIW2ueuwI97@Se2Cvp|;?e9X|p!9FcgPzXc+{dxS~pZ?G2 zKruH@RY_)RZNXv%a)GqNtdOP^Urh<{3HW)>V)vJ|O87$U+s+V}Hzl`5A7;i^aIl2S z)D3e=s#b^=Zj`Y_qyR#L@MMKbAa!A>Y?oW&X14`cSrd(&qcXIINPmi5Iav+8g>ndh z3$V8BwDv=CraiYcJ>@YGqe2hr#p8`JnBRo9ILaY8{$7`E*Im!4nswfCOZSsf?RvL` znDvLY;zK4-ht~5PV>@J-i@SJ5-R>V_jtbo;bQbckV>J(anTz&<;&wjh zSd)v3@VgewdRqz3l5&KUf;QB^?>Iije%E2Sem}}@$|p3Ybhh_m;OdK_f*@;~$nw|c zpJ4N*g71UZsT&JHVao2*{;>&X+ZE2NX7rv<+jvUYCoYz@udpy;?S8 zP0%LB{>lZ`b>4yV<5an?sZ4j%TOb^ZZZ~F*@as=X#g*&aUy_JL^p=4m8_#8gU*pQ-Peem-I zx^upxPdG+HV}uF}lqNrVt4KoYN?@Pidu^pVR=%-x*m{-B)9k+cQq4?&Y=@2fAe7hQ z9thT@EyKxLv2nPv3Fq0B@XnH)TV>QxLLXg*yfLln1niXc+l7@P-nEr-qr>mA^ZOns zeCF;L-u}gWIa{jPmk)Z62fVO(HtDy3bnp2P>QHK3_vt=An@@>d{cq*YTgnw`6hh$Q z$q&0ud|PfJuMS7Se5YgWMz$LyW0+P{0k^P3PN3r_EN_!Ryi@ zVXyX<%?1%t<*M5yHTtzl$+*F*49yP>Gi=X8-uylT7i)E#d5t$*PPW!`<%N&@+x2_- zTbobf%;YL4qF|kRJZ#6g*YAAp-=JT0R$l*7qNT5p+hU9O8;bZ@%S zcD$7=2Tm%<&S9PM#mLuejvD;bgLQw{>gdkMwM?4q^X+R)a`uy`Fh% zr|D?W2|ySD-3T|Sb$T=$xXp*!Sp1rkXU4fa(SdDl=Er$9eJ&?sTp{w^^?Z-~Q4ppW z=GtA&E~&vD!uKNLiW;YYja`xePmHsuFAF5c0U)VJ@mK}Phm;{ed%Q8~Q^%o=<&T2c 
zi>y=saZt%&ut6j;ssoS`4-b!FH}X;3{gxQ?;s=3~DqO*aam8-8#B1tQ(+eo)=3F^dVT3^~}S{;x{N6!Je7<_qVB+pn=Q+cORh+3Sg!mEwp4XX{qI0sc*6n;}g9=Xv2M77J%06yx>+$;EwA%KKOIuU`X)WGoQMm4Vz1?&J zp%KtZBa1Rz?=LwM8o488P}cZ#2q$6sksza9$`QQi13hK=cs$(f}})`brZ4uT3V zc}1B_h?)znK3s;qNP~`Kjrqzx z1Z4?Ud9GnLq0(!HR668>4Gjg%Cr&otuG8pyg(pV^0%yGd4~zf>-SZTpzy}0gkzS*b ze(nIVxDNe_BFZ=M(%3RAPgPE)SA2*|Np3-cVN34ez1)aqr)>-$s!NLlAIT+UJ=>2% zqDc3t1fC(Awge8FosGe`pq?FQuhv?B@BG}303CziLXfepQCkC6wK^zn`u_G>;)}}n zfQBiTulMxyIBQ`M!R>`vb`-8UpU$+OsU<~0G7lXch0a)i$!epvnbmO(mAPVz0_O4{ zJ{THmQs!dezba-Gk(HI@J#a*QyJE*M7N2OAV72)w7 zQus6ki%Ip1C>i8E^c^Sj+DwXQa`SVa!q6F)6b4ZmKMTK&EzVF*G)DZc;Yy@l_5)(A z3BA=3wJ9ul5r#vzy$~GM54G;}`(^Bg>7vkMU(>TauP3Gqolx;LYKV&$ihyOSj)GFi z;wcSDZr;aNSVE&(EX91UXRpob_Y&^+VZwGb-Y^bP9l}`XF?}BYP7AW;iIj2=Epwli z{B(P8NQ5ga`%ZDL0GGf_5bYtagW~O#7ZP6=Yd=wk_51KUJlUsIAojFj4@0$Pl9KPe zBLYeyfMIEL8CJgNvl?dI_U4N`Xl!*G#zaB*prN548;HgXmw~{t_3hQel!r%U$?Ce-j<&|?=NwRk!;xYpxjnjl5qG9xmu?^YE{4PE|6@Y zNhvh~015*&JslybG@Iy72n6FUKxY9+(ty=jS*%2|b6btDne{D1iVD=ehBH&AGsHyH z%Veh3iTH`hvMU}LW@=@z;N9FzPuH=BCHRcsLr`G6PBY_{PY*J8n_5IujT6ID{%n_0 zL!Y(_a{pzeaQY5op*MUGvjixENxYHty>eH7wBk%*6`nywG8p?wQ7chhH|taqyk~)- z?gEBqGx8;oyrgdd?-xvO8BHb{2aNgD(tkR*FVrIcz*%*{Q^ig9m}E~_TE}>j&gv#z z`z#yM8Z-LZu*zeoLh8$uu06lh^BYRZ+vQSaHDM}qy0NdL&xf3SE*lYLqlEnF?4<)| zxy~Bl4&ttdYSq)oYn9g?YF`!%=veaKeacT-+|o~4?p@dpskVmnywu!y@Ep>)>9s?@ zuZY+6b|v$m%Lk5#gqj|U&C~;s9bEk@W7!*h)#F3EAGPoaacF5&1g&AF_aT&bnrrmSLAMS<9an&~q|$RphoAR62F zXn9OZXpkityDTh`t=Ht}R~K|CYgkQ{U>vE(cvTDhs`q~)HLTZ~m(b)YX2a~`tZEw> zTPC#l`v#ZN1iheI7b*W2$M2BG3}z9m6^YWq97C zdy{v&xrRgL9Y=5hzbY#LLM^(Wg^vOrTflWp7X!pWO}7>28d8`fRg8h;7S9-xhr_SA&UA>m zAspL|<+F0Vn*57^LZ7cFW=`Scu|UdhH^g$IWnje{y&1T9a!7VwC;4XqrSduSi%1E~ z67zXR)a2iTqUN36qn_900oz7N4h}lWzo_-sKCn{W_^krz**(s96%~G!w`ge2=6Afo zb`LFAhoWtkT7>oL5N8Aq7aS0_FYoa1hdT4DXSJuLT2yqNboDPf_eKpT4|}k!H0;4L z&8Kuj_h>PZHfp7E4X=*(^JJ6l)(he+EK*=SU5tT!=(ZhMXW z=mmyTalr^%c)|Gl5q*M?g5^_gbB zJRI^omVA?zYqGXNV|cj&ybg8#PRy02{#U5-LK&|^rs9>SR8jgeC3o2 zxP2o 
zW?yQEu*$)%?`|=`urnJzTOE(oea##3auNxa3JHoK%I;G)w59v_u>f0BVZYppghGJg z^EXs{q2KjR3jJ9lBAxWA2{wP4J`9(EC6vhvWXeo-dIs$}*sdE>B-ikKD)%9!Z{gERq%uly7bTLXfIoxLT4mkELT_{IBdo4ItoU_?=pI6Z z*oGBmVd5^w-CNo9`u)pt%+`Z{Zd*HTGR*A8IDo2gaUjZR?7gJdf$D;SjvoA=z0qri zP_SiIayRvj8m1mkl0P=J^I}7-AnlZPP!U^mSDLoDZ`ullTon$DR z_`H5wUcOPBD})gIa+L3`J2A-T3O;fJOQ)(eUh-6lr*9+!go9AG#!ZfC@oDO=+YzqIX9MZ$bVu_F_jBI!{ajD)-X(;XPHk6zXI8w%8Qjgk5vf}>H61J#0YQLK03Ed*z zYrvj6?7*p4TKVb`oy7BFK2|ScV~NJQq!#KJy@-CQZc{Qy=t<0rhQ?{GQ;2AipNoCk%Esw}!Ox*SEmqP*Q& zq-|zq=0Cqo7geQ(>1%2ezl&VhsS?1KxsG9e|NVC#|70-JUDKARw@7@C_8gw)JuVbZ zXb7FMxiwjztiXYMINFHb$YoA1_+iogT|%Q#w=YUyT?2P}x<%dkSojp#0de4W zt?p%y9paS(LlJdvw|&xx72MXAg@LIlmza?_mg}UHb%qiGO(2#Sn8f=OLqd)}v(=P-wO zXb{bH*Ngq+NiERA0ONUj=vwg{&S^k{npn5V1H!Jmn>}bV)=J0xu-l|vkvLYzzNlf~ z69))26_1)_-)`pz;l$mhKl``LEv9*)_Zxf&WfgMC#vp=k7~fUgEwr)$l`)v&r`7qi zdR)794W3_H)%y3*52#I4r+3(sfzit6V0>XMU_308tLJ^4l9&WUT8?ZK+c1fOGsZY?&O;rCsPD71%3s?Ysi zAwzOA-Vv3xX+F(PF$4^U!*PTeXhQu79M3*ZbD{#d+$$!~Nd2yFPbH|ubc0b}u)@T8Gz1DEXEF$(XoKleX+kwI z8{ZiA2}a~#I7K8cy}4#^^LSU~SXam5mr~UqC@!387P|ATc!!`Yxrb}&iZK}4-PKUu z?TE>O_`6^StRsHNVsnYSv?FRQ|D%ctRicwF2 z?W$(jwuQ&Fb#-(){;&GwQ6rROsxzkb#W8@hh7Ogv01f3Y{aC|9KZS&@A4+himvi2} zEP|;Gyx?`nHhH;#gS@d@OdV&QggJ{~2b}%N?$p#?02Dwsd5`H#{V^=zE%j5nQ`a9b zkNCz4<>SS5mM z&2<@VwNEvk-&UWaW_su1YTk*%JV60YwqRb`V48{a-+3$lei;o^dabH|7PO?Ka2%E7$IKWPX6yZy)EiKs zRO&j&q3=Kaf$<&@=sn*ms3$XQR=z#!GM}N?Q;ub`yeHxf`EYJ?t&CNIj8)v)Y8rJz z>()9q5-hmc>|Zkcgb1ly5POlfS<$u9j05c8qSv8s$v@UF6CenNl}7LHMX0B6%Q@eU zOhqcP)Ov$6e=CSh$-|a(6kdf&Y;_hW0@PF90QOeuI-IJ=v@}@JtAldt$VD$|EW@rH zEd;qi^izH`eOA7_MyAkw7O=HP3*)6^gUh~dFVrrHGE@m`{E=O2_cN5$_j2@dPe!g` za9v5jms<29lLwUC;23XohYH6=sXW=+_|l?z;4V8a3Of3Tm46W+!4@bE1hKbkiY~MC zrG~b9U;>}q@Bn~UO{K-;>^3kQ#TE(cD6DHeDpYits7-Fs2I4M^_osY86>l507D~Vr zNIeFM1@)91sPK{lh1Z^ydwpdI=}XUYk-kd|Vq zCix!3+x*8ot3f_2?aMg>`L~x~&6~oSS9oLhiOHbqf$Q{jiU=tS(nLV-84jZkI}9tFUqGp@p(w;m886B9dba`G^ZJQ4%Ta)34rT3Bf46k%mNxaiHb~D$Bow8(5w1ZSYfqd z25NA@Y}nLkt@#~AzVD>-({|8-Iic{yU3Cy^9;&5g{MT+!K_4tG!)Sdp_pi2Pb&IN7 
zgL;dXoQ(}{@*u*+m>}%Qwz(j^7*wHxrjH0}RBs6vX}1Y;Xd9pmV0E5=z4+7{6V-Et zUX~Gt%tJtKu5WnBZlYgMfELVJ7;9ogFGxf_4GeUd-N^&FTK!3dXO>WcBbK;?ZDmBn z8Cac{|1M{kT#N1#!$c&wIHmZLlAwwwcpvO|T6W6P3YA4(#9X-!fIOE?=K9u%z=!`S zXDIgYn@;>!^JA~=Z?P#w5i)=EBGlC&2B97U#~;-A*kc=($Jb}GUN#zeC)J<9KWT6_ zGEJ*~cx$*eKuk~iJy6WMt24W$Q0X(FA`4nQz{<1+2-D*W1=A;DIke}D*Rv(|yz5M< zQ#k%5W|+gKMH|M)GyxbNKNSovs)GoW^EwcX|BM)Na~^ID@*pr zPmaKBL|xz#q^SdMOzY%JgS;R=(5_j*0|@%7dAG6$iWl3smv!YL)3sgfWtFbr!0z3d zrS0;IgH-f}&0!c`+^{f!KE9D6r+NS@f)T3HgVqgyt?U)yl^47aqYD)ipop}E-1EpS zaKOLxB<$_t5*gxn!Fr?oPxyetLa>F89}DvTwR(4s3AOwrBlWl4WfHh=H$U(_R|;6A zTpRY5J?GmbqW=7R8dbilIn6HEVZKn!?`4)i>I$kp3tMj;Mxo2)(Kk5+*p>Ro9-+J?dBQWzWw4Y%hmgv z+7ralg|Y_F4F+D$?*}@yHq+^0NS8qhL-hSXTPx3r{6XR={!2}q=H|R0d-wEEp9O6^ zg9ep6RwFAtCN*|m)pVNvxx#&0Lk*@j*56+L+Vw62bZ^wFSIjN)y?{{=>PEna)z#wU zQP%zwQmGhF8^>~C6;MA&tXj7b&vQ(yS|aTjfXh`qWb=fhZk-w z{v<yQe z1AJN`LF*YCz%31K-dw~KNjdhms?P1(>RY^~VSW93zr#+du+7%WRIUdi@MBbO=sRJ8 z_SXcG{MhGsMdn~-_wo7<-3JlTtB_5A7I$#a`FEO914Y9lA~N2-y|lZ$5UAQ-njH1N z%eK>H{u{CU5gc_U0v54_!28fjYrXrY9#AwpeWc@I>9 zE`F`su76)qaqhf+JTEk{elDy}cz6F8-gS4vRZ~+FT#?)PZWVa(&iPJ#LJT&NdNLEY z8leQtu#y9sJJL12XZd$GK5lfyuD@;vL@JHr(3MC|l#`;vQ-2`-{>%8sByv_EG0$%+`b!1TR~(ffHdw%9>Mpxj{($-q~ty|gU`sfHUK zh?U=1$vW0spReKQcnu)NR>Nj*1j?U2fL?s;0RzXP4J??mKzZ>=;1wyD&)-TwrWhK@a2V3jFpG$Bvq89X z=i5Z1!iuj0Q-LbxB&?}d13llnx_SW%>Hvyryb3i0!Pq8-5d4dP8d$c%u>eju&cYsh zBoe=nd~$W{A*dgsyDWVRL8G1Fr0S!vM)`6~txgVs(zi3iqac>lpJGTIe>wq^B*wfD z9eY6_p4(W$GD50t5}4jLGW!R<_K+f3SNt1%5n+4$)Z!E6FqRjCY11y2LjCG-DjS46Wh zCMogU8N$b4S{CgA`|#tbW1}2$uEUYfqnv?Z+zC1zJ;cA;u#F1PZW~;O4zM=}30QHK zSS_ze*8lG!L@*noK;!m*kuFeTML*Y*OO03~$69w)n!;}G{sbcEqd$XblZZ&D!^i@~ zKDhSt+tr(qsS4-890TT3jR&C((abV49E_?L@vYjp?!6NKcD7Es+r3|5*UX1z3BhFs z#kChoUENot)Pn{X)dx&VgBkxj^;W5aHJH0Z+PEeGXo;4SNUG+2It@TKdx3HO9_ajw zh7WrsG0p%oC_WcUh&GF%q_|q!Ta10+5D+k=h2e4JnWL!f-U8s_k8;=Ywn@KbPiQt# zPQxc-i~x6gUu+ZcX@CBVT3Iy&zh4^kq5C=CF0x`T=8uPGfPypE^ETxX2*zCiSYxht 
zii*Mqb^X8$_gf8^`BA51*BUmaGRTkXXbvwfCd4U%cB7ypdn6~41*$BL2%fL9+3b8jh1)MjH<4ApUm65+^An(YRQXI>vX1TO7ZK(X%g7vb zYzc}^mbxVjVuUe7dz9zdezXwh^&MztsJczmiZ-yTG~3#IeV?t}Fz@8_-ZLyhUFBV7SBJK%m>^(E~yD9?^5N~Pz;nCc*x zVYw>lcK#Ypxynkz>^*L=m6eLX5!4CG;Qso`@4~^SeZ*;<)#cuscX@}sArzq5Ht$ez zZmX^Gr3;?jt`C+Dt0PQX*#NsL-UE3IH&6&;6bKjZ$D@7v@Ue#R7utNL>j+ zpE*^K_!3f38r*GbZNIaB{Njk5%N1H#0x45%knf8?67K@dN+1l%1optf!*IV`61|u6-4Juob{G1) zz9qKZ!x;E<^ee&*e%57UFhy}JO9$pMmw%+-bpqk~2|BsdU$%N@3?@UYazbF9^QYqe zVDU>RqX(^47Oaz4i|}MKZu0bZMOv&O=e18^r2=f(ve*Wb# zL)H?jAuzHF7Vkn8iUDsSq`lHISZ;~1S!^W9_jIQy9uBSp6+pb=9>nDorQkgPy%}00 zSRqDhK$(P>Q=>oh^cjh?+9-0g-u1fx`|nD)P#TC8pvnRJE%-%MH&2Hb*Xod+s5S;R8FDJOwA@z{-R-`Dw7AP=fV7q2wqn%!%GM^|V ztGZ|=gS=mBbsQ9$m!CslCvX6jgMupP4qr)7g_=alJYY%cEZjzSPaN*M-BAA_#GtGK zImlvlHF;mO9Aq99s3wu*3_k5AZmuMC;3ya8wxt6=R&@-Ind*g8`MIHDlSPE~wy3VI zz;j+F;KZoj@!ym?mo3AHOyE7eYQTdadodT9S93p|(Id=>gg#ZP6}+NoEp9B<1u17;1dS>XDvd(> zg|0-FCqos6#e=C05Z4`xqd`=++1<4d@?+DYVu;Hv(O>a6x3*SJRTqcZ;o;-LiGzm1 zdu&Vo=Hh_G!7y1dp`t4EB$g$NLC_GRU7+=E^C2M|w9YdCm?S9$iUKVg0eYq0F%0Ze zGtP3a@rH)>Q4BO@gXZCYchq-y7La{;18BF+tNevx0^MA0#3`Hy?W*H)h2GhEOpOdxTjhFBb!C`X}a zY6Y6AxrcbtD&^Dj9%OdV(FXN60^J9O#F0#Dil?Aiu-|Shb4FU4pq_wotm0uoGPEHB z*z2og(EVv9Z|&@Wl70i~bu(shQR26v&*~sSA8Ug$nHt&r(h%qfO!ErwHH5OZbM6i1 ze-VSxPe$y-_0&}=b6cshJ3o~Gy~<#}EHIEim0uWo@T3iZTn*k44@ICQ!UI>e=WrDM z?F$+1_e~=&@|>)y?Sbg?mL}kIxO>1XVB!PvXG8qNG5~aMG>aiI7 z)Px7fUTI@D5~L(f<)^SZU*rHFrC~l({Y>$oAQvbC*S3C^K1XRO-BW3|Zn`T6dq=x26?aVIQdi@1XJ zG%5-643TVY%w8Cea0qM^+(F$NqUMZ!j#E4gob~2^Cn2TXDt(pJ+6@X(mD_4}SyKB; zqANjLZR;4B6ml5(>&HJfx{LfT$kKZz`fD70lxfehGxGApxVgFKe^vPQB>m?QYKp#O zM1fU4RAw|m)lPz?3UqHLvL`iA#AUP`5cc6?N4~`tN)W#?sp|vw)p^)Q3PFFoUL!f! 
zo)jkO|23lPZQW@jqoPPKQzUlh8lE|HMs>ZS1AIYi*qS6G*4&&0RqE(tEvMy_x3ICG zRw~of-iC}3ZNA+Vfum9*kgi9}(F7AV654jRt|w z=T06g3SC-SLNC|<27O@4tK>afu%Hw;YZA#VbK%Z;+x&9^No*s=Rpf7p_8^$lC(8N2 z#TRC>AH6_!K2c6KWy@Fw`Dk>b`wk!|2i%xJG~n9X+eJi0!#Z}R?MXEY793;SqwD1D zvF$+RmCnZow>Ny$!d=H|`S$PMzp$ZLY}Yo6HE;asu{KNO!DVeiLr4axVTOa$J%1P( zu(bkYkA61jV-eA=F=^A}H%a&$pmsB;aDSMj;w#Eh?JKTVmZX!s zA7Pq1*BOGQMkv`d)5tcfYF4L&*UIqi7o)lY=IkIMnwI)s6xb&)b&#WWBS_oSbU@$V z`3pgpY9F*VNgb-Ax!O!U)&XCSgYSBG=Ui#t=C%Htg?CSz-FPD&1?t!otu1Fh^4f@(}c5skdBeo#gq>4 zqt(=upO2%E@QSYhx(MTS3KT2gCQJl$#`UNy2Vc+$;tf^;ZQZ{RcFP^{#Q!F^Y&3B@ zRtwZY4u#H96X4+ikTAi+FF&S#G%3$I1J%6AiHQ)Y1{gFd^CQYCNqKC(0MkU}GiMTf zN(s+=1*DnOA8Kf`gza~<(`?E)ZBUkp)Z=r|Pu|z*_+$2~k+b%{26!4V>E(SCx-ZvZ zpo@FbOcG>CL|`+eNeB%2xd8Pv>UMUCJS=Q+ZLh8BkNc*)luY3)2KOu}IV6W%=cva+ z`VHWtW#BCcl{oog>4~D;&}E{e)<_HiBCK8~>XqQk1V{OSduTt6E9~0eo0oFT6=Vik zEJ9-jdmWTKRl%{{n43BD!}sTjk#1TzLN^zN%KUBj+4BFmd#dNyFb>=VeH3|7fU9jt zUvnALTORn`cUl}YSaSSdi$jn622J*p99O!(UhYg1T z6wrARm=795$EMvIk4`r!w=)81gv%?DhtzNR>2Nf3d(juNxaB3pbF7_;`Ke-Ky(qIX z7j2!nF!^`ew$o=3g3o@<)BveG9_ip_08R{MWE_C_J!*zH$6qX~i6i4YOGNYZJLq&Q zbIPn2mGm0GEkKC=a;3j_vW2KLU{0r?otI3re@D=uhRi>Ydwt%agJ;}-C-d_hM?rfCw2UH}fwxSn z`|%E{?lR*1uXHXE=#~hu{C~eeU^W6c$q2R_#})w+)6t zeh--uyIS9EpPzgwwjX(e3;x|gUKiB3sFNa)MCq{`KMXS;2@OPaf~I^?4h*|II1?S_FV`1~@rzl;Apha8U(!50r3Q zgr=OKVQ2&c3axk1KYH1N>x3hlbj+hdi3ql%_8E_~9p4MoKq)>=TPjvR3L390tS69H z>p-KAx$*Lk8q12i9OU0>Y03{d8$cqIo1f!w^=LA9c-$g!y&*Ek9C^+=Farr;wHFIK zApX0CihFV3C?99Baf}vF8|tgA_WN0fC`0B!$U}!qKK`tQbMw-I*d|1$y8869_4Zy) z`cdwWpj?I;RuJ4x46>mdw87Oiv3jPgXvX@Fht7w*3tzMFpt+IF;n&uILmzHxv~3QCmK5%5879=1N4Eie(cjY+}isAU3A& z`N5CMG%B{&F9J? 
z6cx0OjZ6?IwSiJzCpwvb6u!(5=j09!kNy4kJzdoqGT@d6C~FbX>lgSF zctNWm@=3u{(rTS*v?6mk@y7@0V~ePB2Afw+A}LjR7Cair=QJ#2Fg$ zdyP3nf+7;wY96|C^f!cNftyHBo__L@B@84xc@w`gE?!DT#}R_hPo6p&3i?Te01qNm zFM|Ol$O_{UT)~?`Lsetr2~JIlk3!vi`0DLUc~s(hhh^pLxpW!xXR;xJ_}eOeSsF=v*Wtp zv*0vY5aZK=2rCnbDKj=2Qw?q{{0>stB>d}#Tdg-b>{L#H@4C9)=qQo-%3Q-XpD*L= z*LOMT_^^EKy`zHQ*}wL8+e)~bknWl$Tsl7+m5aL`c_fNJVGDc<=6LB|W=6zP3p49# z&ogVYqXI9R;=#iJFPYSx!53HEKO^+|v}!E6)H-=;7R&`cVP9C=>WgM!?!4naj0j#% zn;cxl1@y&m4k&SBRXqhyP0-?w#3=o+wQuQgIdaQqjqvf~FUm@{!yl_{YV`_%zrpdW zHc^%Ub|&Fm)!4Rc={I`Gh`K*O-Yo%NQr%<*pRiiY3}(36>#r=#Mm@&vLP|r*K3o7_ zR!0HPsiZ^7W=afyWZT^Ipe;jY~&;EKbmr{4xsiWT5AE8Xd%2 zJM1BawcOOHRVZB;Ji*~QjZvvO*6=DoyO{qVdjEffUTij`G;57vQYbu>D2^vsV&hFw zOMh?W85{y(a(b|j-Fka2F{7mOgdcrZy&CN%KJzRX`XCkhcpPZVQbD3-I-MfW&+aID z$_DKwedP%Y(P`7R*Jcmm?Jorb{U?X!`%m;V#rGF)t0$pRiZjo~e+OS&|E+P$(FC)^ zboxXJWeg{(SA84v?A5JEel!x={%T{Uu?6ptk25AiUjPeXv6tz}Moqrr2*iofsgYRxno?nCHh-XM;8I;`voCM?_>WJ=*7EUW z1lhYLB_v{jpDEM(p8=0RjPEV>N|P(Bm7&qA>QYbH;5@C+ZoE1ZN6u(Lf`iP<*6IP51Qn( z$JF(mMh4i}v|FGE71FP`5B>^}*cidt(Pn7QnDB~-Z5SP4CUrM$=4=0cQXfF_;J$l^ zoX3crH8Vf_G^Dte!V38(0;2fFafcVe7fiAHL3mfS$vK72Rh2e$&=;;MuO(*}^D`Ss z{~-L(>(lA3+WJm>l&FDe^T;DHq|Vw5Q+@F4B)+H2oZon~n+-a*^6KKUDV9m>&mROY zF!g$X-11)29Mk3D(BGaH#W!^2LZ}qHT$r0F3Ps6GkvW^6z@R_}ZRKjO(1-9d|FKx0 zheX}(XCQb~jK3j-DUJ6;XPzMt_e@3dA4WKrxnh1na`*`-9@kD)NLju5_RJ4%Hq(cz zya;x_LT`^kR?>mB#ql@|Hu%gfpWn1pT5lJMk!-yr*5*}@leLFMnFG20b7(hbo>83a z7PD`LqlMp*V;I8XJyE^3_WMwL=l?XbEfMZqB}>FTCQ}>K1R~?721KyNz!_rr{_~2( zf$xK6>A=rd=K48pG7OQIfzdj6Tc9X0!TA_ei!u#@gnvN z>c0Q^EWOn8s(fL>cC+Wsd;I6!4T$PZ6`u53H&JtMuv2I zwQn3F69R=0NV2GnNy&yqp$w!^LL+3&dt9QP7w6Bph^5r7da5qrhpo@f-;Hb#&hs$f zPYqjYXeC*aGR^c-RdtnBjG`(A3{C6UjsBB8bD76e9Q(BC?J1PI_$G$Xm50hY@gZ-6l#ue<5eGFfeuL4@x5J1jv#Ex(COuQGJ}fx3qtLZ$rA0_9*$q) zukO6Lsf=R}k9E;*3ju!+gaIHL*Y}`lFCDFC+7hitCg#z0Z`p+&XXfa&!StZsF$u`# zt;p^(k>1(E?VMcE$gv!e9j4?F@$*K0b{L1NiV8S&$Ro7_y1)G+<4gWvoKCMd?nPaLcz=aM`cW6>&lKKhZ&>LkxsaM)*ECFq|L6F} 
zgCiV=7~F#rHW}nC4Uz(?8srTV(7e9(;Gs)oDe%dQ*~>cHVII#GOe(*7`baGC0h}zdLgCXfs&{?7|Q$;|LP$CdP zg%D{VfyC-K5D_BvML|Pdj#8_FQRHDD(LzXRmG~kNgg_Hs5<|E=LLMZUeZyM)-{o@6 zANH@Co2+}zJ?A^$+57BszVuTj6D1O9cY&FUb4ge?H|iQ8n~6tWgsTuY7WYrb73q)? z8@?MGjv>DguQfRYNF-$sQP1W-d7nt%O&qrOV0sY>wY{-~(3~JA+{27we9gA~-nge| zi(dxrOxd$GPH+iW=Ve$0cXvnFJ4m+9P|n4z7^`B+nzFMnGU4Fe#n;T_CrtFH%|-5w zfg4oKzB7-HNTS|-Iud(Y0+f?ky)6-?t`3giQXO;c@~VF~Hh%xO41;lgg8oN9^Y0L8 zi^N#9Jyr&COP8DG7Z@m6W78EIUXm>LOUlYNYaZZ^3>UpsU!eOyP8@S6l}e3nXg?@kJS>MZ-0kxaUEeV7vA8!=PkU>vn>lRK(W#RnIbngSJ(U=Q>}Oj z+19C`q@;XCH0el7uYbADJ!e$c#%wfz5Amq%!QHzH`nfw`R`=I08ibK(%GfaSi*$It ztNHfrh6gX3+3}A}{{m-h??}Vu0#au2GO}Bd+f5MIg7K z0bMmqXR5keAvR}1qy*z9)%T^Zw{NgB<(g;QRO!VWIuL%bF4jqMY9#hpTs89$9!c;z z3ebgOcJiWdq@u`+?&9TWPdH(IMRg|J;4Iwft$CkZFxoTVc=Y!*=i2K_ zeh3HWKq0%hDct^9{xs{{o7fHvr%}*vINI$91JQu~d8((QVeW?{Txv6d7!6o_BETvBBa_8uFe6&sba~xK zx8lk<8Aow|A;~3MZqdi;p>GZ_zI|-9$RhD*95B8)D)e`8aZ!oPqNN_j zpYtf~KSp{71n5gOCX-3!%-|4E%^}b5osyGS!U8POz@R1eTtEaoOI$45*iRcWZbfh$ zc)blX^zxx134tZ_>=T$ z#KS^sFsKUBjJfvO_C7_GVm3tt$*V&}zPQ3&g_&>);=jI2my(5fQ$CWHMD zp`)W9RiRC`Fg;99InKsdA`wA^>T=HLNyF%N;__2lRuRn|EcNR!sfyG+?m(;CbDE{I z?Ci^^kLI+R>c-nPtFPvXmQW~CwW%ohXxo34QcKNy;8X*>dtUcYiq0FSsL4mnWt~I^fZuAAzzND zXSKbKRy1gCJEy+hn)S7gT3yr5a2(>U+3n3q`7Kh`Z4^yO`uYx@|FwUp`_;0DV9TBc z%>mtMO{H)nY;49gHG!Zo(swRGLYD>O9LiZ6D@DUU)*X`WLLOt;pNl50qD7Z8!BCZ^R-!pj zY|jgpeK0=S5BhwMvV8*SpSfE#QsqQ>nvTl*rp!ULtjCZvtyS0pR=dp6Dn!;mM6_9# zFXN0jRz0+0Z9|UFpFLYZHmNRAY}PqQhmD!GXMKAKzqClN*Jlk~O_SWIRW7RS$_UGu z6xo17D#EWY+XpvgjrPV=*$^IkIl38MMYn0kkwHkS2~r{%w-40ncZmb+EvQ7-3~nz7 zRAx~?ZM2!hwt2`1fh3ZLZ6bn{kIm_|6w3y$?YqM_reO)A@m+M8r@G2Ez=1weHnJ#G zs+YAdLgZRrXI7(lZ+ZPX8kHt?gFVo+vA$Mc&e!_B!M zbX^6AIKgyfHD9txvzeau+GGXKmn6`x>Ju^PU+ur2l*m7NG z(jGY$#$fFwp|o$;v_n6=H`#r$0;uJlJ4#v`sg8~i=fY%g0xi5`bEc1GA>8jh5dd=# zT*ik|+n5mJeVmkxo0Cr8P51LNOrdfv>7orAo=CfH*8BF9&B2Icw+f<8BZ1$GkTm&ph$H-+AUSA7sXut>J7h&tTvWIU2LDnT5?PY%u>1AAfe}%?`cUp*K79 o>|r}BJ$%>|g%8g*`8Mp*Z(n?xmpLD{HNcW Date: Mon, 3 Jun 2019 13:40:33 +0200 Subject: [PATCH 44/49] Feature: Adds 
redux action logging toggle from url params (#17368) With live tailing introduced in Explore we now have a lot of actions dispatching and the Redux Dev Tools doesn't cope with the amount and rate of actions and crashes. This PR turns on redux action logging when you add logActions=true in the url and turns it off if you refresh the page or add logActions=false in the url. --- public/app/core/actions/application.ts | 3 +++ public/app/core/middlewares/application.ts | 27 ++++++++++++++++++++++ public/app/core/reducers/application.ts | 17 ++++++++++++++ public/app/core/reducers/index.ts | 2 ++ public/app/store/configureStore.ts | 22 ++++++++++-------- public/app/types/application.ts | 3 +++ public/app/types/store.ts | 2 ++ 7 files changed, 67 insertions(+), 9 deletions(-) create mode 100644 public/app/core/actions/application.ts create mode 100644 public/app/core/middlewares/application.ts create mode 100644 public/app/core/reducers/application.ts create mode 100644 public/app/types/application.ts diff --git a/public/app/core/actions/application.ts b/public/app/core/actions/application.ts new file mode 100644 index 000000000000..9bde989e8ca6 --- /dev/null +++ b/public/app/core/actions/application.ts @@ -0,0 +1,3 @@ +import { noPayloadActionCreatorFactory } from 'app/core/redux'; + +export const toggleLogActions = noPayloadActionCreatorFactory('TOGGLE_LOG_ACTIONS').create(); diff --git a/public/app/core/middlewares/application.ts b/public/app/core/middlewares/application.ts new file mode 100644 index 000000000000..3ca9768d626f --- /dev/null +++ b/public/app/core/middlewares/application.ts @@ -0,0 +1,27 @@ +import { Store, Dispatch } from 'redux'; +import { StoreState } from 'app/types/store'; +import { ActionOf } from '../redux/actionCreatorFactory'; +import { toggleLogActions } from '../actions/application'; + +export const toggleLogActionsMiddleware = (store: Store) => (next: Dispatch) => (action: ActionOf) => { + const isLogActionsAction = action.type === 
toggleLogActions.type; + if (isLogActionsAction) { + return next(action); + } + + const logActionsTrue = + window && window.location && window.location.search && window.location.search.indexOf('logActions=true') !== -1; + const logActionsFalse = + window && window.location && window.location.search && window.location.search.indexOf('logActions=false') !== -1; + const logActions = store.getState().application.logActions; + + if (logActionsTrue && !logActions) { + store.dispatch(toggleLogActions()); + } + + if (logActionsFalse && logActions) { + store.dispatch(toggleLogActions()); + } + + return next(action); +}; diff --git a/public/app/core/reducers/application.ts b/public/app/core/reducers/application.ts new file mode 100644 index 000000000000..458f49316191 --- /dev/null +++ b/public/app/core/reducers/application.ts @@ -0,0 +1,17 @@ +import { ApplicationState } from 'app/types/application'; +import { reducerFactory } from 'app/core/redux'; +import { toggleLogActions } from '../actions/application'; + +export const initialState: ApplicationState = { + logActions: false, +}; + +export const applicationReducer = reducerFactory(initialState) + .addMapper({ + filter: toggleLogActions, + mapper: (state): ApplicationState => ({ + ...state, + logActions: !state.logActions, + }), + }) + .create(); diff --git a/public/app/core/reducers/index.ts b/public/app/core/reducers/index.ts index 1c8670ed0d6c..cc0c950ec4a0 100644 --- a/public/app/core/reducers/index.ts +++ b/public/app/core/reducers/index.ts @@ -1,9 +1,11 @@ import { navIndexReducer as navIndex } from './navModel'; import { locationReducer as location } from './location'; import { appNotificationsReducer as appNotifications } from './appNotification'; +import { applicationReducer as application } from './application'; export default { navIndex, location, appNotifications, + application, }; diff --git a/public/app/store/configureStore.ts b/public/app/store/configureStore.ts index e561a7f5e592..2d7d3288d3b9 100644 --- 
a/public/app/store/configureStore.ts +++ b/public/app/store/configureStore.ts @@ -1,7 +1,7 @@ import { createStore, applyMiddleware, compose, combineReducers } from 'redux'; import thunk from 'redux-thunk'; import { combineEpics, createEpicMiddleware } from 'redux-observable'; -// import { createLogger } from 'redux-logger'; +import { createLogger } from 'redux-logger'; import sharedReducers from 'app/core/reducers'; import alertingReducers from 'app/features/alerting/state/reducers'; import teamsReducers from 'app/features/teams/state/reducers'; @@ -17,6 +17,8 @@ import organizationReducers from 'app/features/org/state/reducers'; import { setStore } from './store'; import { startSubscriptionsEpic, startSubscriptionEpic, limitMessageRateEpic } from 'app/features/explore/state/epics'; import { WebSocketSubject, webSocket } from 'rxjs/webSocket'; +import { StoreState } from 'app/types/store'; +import { toggleLogActionsMiddleware } from 'app/core/middlewares/application'; const rootReducers = { ...sharedReducers, @@ -51,15 +53,17 @@ const epicMiddleware = createEpicMiddleware({ dependencies }); export function configureStore() { const composeEnhancers = (window as any).__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose; - const rootReducer = combineReducers(rootReducers); + const logger = createLogger({ + predicate: (getState: () => StoreState) => { + return getState().application.logActions; + }, + }); + const storeEnhancers = + process.env.NODE_ENV !== 'production' + ? 
applyMiddleware(toggleLogActionsMiddleware, thunk, epicMiddleware, logger) + : applyMiddleware(thunk, epicMiddleware); - if (process.env.NODE_ENV !== 'production') { - // DEV builds we had the logger middleware - setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk, epicMiddleware)))); - } else { - setStore(createStore(rootReducer, {}, composeEnhancers(applyMiddleware(thunk, epicMiddleware)))); - } - + setStore(createStore(rootReducer, {}, composeEnhancers(storeEnhancers))); epicMiddleware.run(rootEpic); } diff --git a/public/app/types/application.ts b/public/app/types/application.ts new file mode 100644 index 000000000000..d4562d68c448 --- /dev/null +++ b/public/app/types/application.ts @@ -0,0 +1,3 @@ +export interface ApplicationState { + logActions: boolean; +} diff --git a/public/app/types/store.ts b/public/app/types/store.ts index 975cd40ae71c..66a3db1a3cb9 100644 --- a/public/app/types/store.ts +++ b/public/app/types/store.ts @@ -13,6 +13,7 @@ import { OrganizationState } from './organization'; import { AppNotificationsState } from './appNotifications'; import { PluginsState } from './plugins'; import { NavIndex } from '@grafana/ui'; +import { ApplicationState } from './application'; export interface StoreState { navIndex: NavIndex; @@ -29,6 +30,7 @@ export interface StoreState { appNotifications: AppNotificationsState; user: UserState; plugins: PluginsState; + application: ApplicationState; } /* From fb39831df243386377d9188eedeb951c3e1c3698 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Hugo=20H=C3=A4ggmark?= Date: Mon, 3 Jun 2019 14:54:32 +0200 Subject: [PATCH 45/49] Explore: Queries the datasource once per run query and uses DataStreamObserver (#17263) * Refactor: Removes replaceUrl from actions * Refactor: Moves saveState thunk to epic * Refactor: Moves thunks to epics * Wip: removes resulttype and queries once * Refactor: LiveTailing uses observer in query * Refactor: Creates epics folder for epics and move back actioncreators * Tests: 
Adds tests for epics and reducer * Fix: Checks for undefined as well * Refactor: Cleans up previous live tailing implementation * Chore: merge with master * Fix: Fixes url issuses and prom graph in Panels * Refactor: Removes supportsStreaming and adds sockets to DataSourcePluginMeta instead * Refactor: Changes the way we create TimeSeries * Refactor: Renames sockets to streaming * Refactor: Changes the way Explore does incremental updates * Refactor: Removes unused method * Refactor: Adds back Loading indication --- .../components/SetInterval/SetInterval.tsx | 2 +- packages/grafana-ui/src/types/datasource.ts | 6 +- .../grafana-ui/src/utils/processSeriesData.ts | 1 + pkg/plugins/datasource_plugin.go | 1 + public/app/core/time_series2.ts | 2 +- public/app/core/utils/explore.ts | 99 +--- public/app/features/explore/Explore.tsx | 2 +- .../app/features/explore/ExploreToolbar.tsx | 10 +- .../app/features/explore/GraphContainer.tsx | 6 +- public/app/features/explore/LogsContainer.tsx | 5 +- public/app/features/explore/QueryRow.tsx | 13 +- .../app/features/explore/TableContainer.tsx | 8 +- .../app/features/explore/state/actionTypes.ts | 80 ++- public/app/features/explore/state/actions.ts | 302 +--------- .../app/features/explore/state/epics.test.ts | 550 ------------------ public/app/features/explore/state/epics.ts | 159 ----- .../state/epics/limitMessageRateEpic.ts | 25 + .../epics/processQueryErrorsEpic.test.ts | 67 +++ .../state/epics/processQueryErrorsEpic.ts | 40 ++ .../epics/processQueryResultsEpic.test.ts | 119 ++++ .../state/epics/processQueryResultsEpic.ts | 76 +++ .../state/epics/runQueriesBatchEpic.test.ts | 421 ++++++++++++++ .../state/epics/runQueriesBatchEpic.ts | 220 +++++++ .../state/epics/runQueriesEpic.test.ts | 71 +++ .../explore/state/epics/runQueriesEpic.ts | 39 ++ .../explore/state/epics/stateSaveEpic.test.ts | 61 ++ .../explore/state/epics/stateSaveEpic.ts | 72 +++ .../features/explore/state/reducers.test.ts | 8 +- 
public/app/features/explore/state/reducers.ts | 144 ++--- .../explore/utils/ResultProcessor.test.ts | 453 +++++++++++++++ .../features/explore/utils/ResultProcessor.ts | 176 ++++++ .../app/plugins/datasource/loki/datasource.ts | 200 ++++--- .../datasource/loki/language_provider.ts | 4 +- .../app/plugins/datasource/loki/plugin.json | 1 + public/app/plugins/datasource/loki/types.ts | 3 + .../prometheus/components/PromQueryField.tsx | 2 +- .../datasource/prometheus/datasource.ts | 157 ++++- .../plugins/datasource/prometheus/types.ts | 15 + public/app/store/configureStore.ts | 35 +- public/app/types/explore.ts | 23 +- public/test/core/redux/epicTester.ts | 57 +- public/test/mocks/mockExploreState.ts | 86 +++ 42 files changed, 2469 insertions(+), 1352 deletions(-) delete mode 100644 public/app/features/explore/state/epics.test.ts delete mode 100644 public/app/features/explore/state/epics.ts create mode 100644 public/app/features/explore/state/epics/limitMessageRateEpic.ts create mode 100644 public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts create mode 100644 public/app/features/explore/state/epics/processQueryErrorsEpic.ts create mode 100644 public/app/features/explore/state/epics/processQueryResultsEpic.test.ts create mode 100644 public/app/features/explore/state/epics/processQueryResultsEpic.ts create mode 100644 public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts create mode 100644 public/app/features/explore/state/epics/runQueriesBatchEpic.ts create mode 100644 public/app/features/explore/state/epics/runQueriesEpic.test.ts create mode 100644 public/app/features/explore/state/epics/runQueriesEpic.ts create mode 100644 public/app/features/explore/state/epics/stateSaveEpic.test.ts create mode 100644 public/app/features/explore/state/epics/stateSaveEpic.ts create mode 100644 public/app/features/explore/utils/ResultProcessor.test.ts create mode 100644 public/app/features/explore/utils/ResultProcessor.ts create mode 100644 
public/test/mocks/mockExploreState.ts diff --git a/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx b/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx index cdcc1f406bbb..026aa5600a15 100644 --- a/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx +++ b/packages/grafana-ui/src/components/SetInterval/SetInterval.tsx @@ -38,7 +38,7 @@ export class SetInterval extends PureComponent { } componentDidUpdate(prevProps: Props) { - if (_.isEqual(prevProps, this.props)) { + if ((isLive(prevProps.interval) && isLive(this.props.interval)) || _.isEqual(prevProps, this.props)) { return; } diff --git a/packages/grafana-ui/src/types/datasource.ts b/packages/grafana-ui/src/types/datasource.ts index dbb80e2fdf59..a2629ec6f6de 100644 --- a/packages/grafana-ui/src/types/datasource.ts +++ b/packages/grafana-ui/src/types/datasource.ts @@ -83,7 +83,7 @@ export interface DataSourcePluginMeta extends PluginMeta { category?: string; queryOptions?: PluginMetaQueryOptions; sort?: number; - supportsStreaming?: boolean; + streaming?: boolean; /** * By default, hidden queries are not passed to the datasource @@ -164,10 +164,6 @@ export abstract class DataSourceApi< */ abstract query(options: DataQueryRequest, observer?: DataStreamObserver): Promise; - convertToStreamTargets?(options: DataQueryRequest): Array<{ url: string; refId: string }>; - - resultToSeriesData?(data: any, refId: string): SeriesData[]; - /** * Test & verify datasource settings & connection details */ diff --git a/packages/grafana-ui/src/utils/processSeriesData.ts b/packages/grafana-ui/src/utils/processSeriesData.ts index 84aadcc9f655..38e9abf91358 100644 --- a/packages/grafana-ui/src/utils/processSeriesData.ts +++ b/packages/grafana-ui/src/utils/processSeriesData.ts @@ -160,6 +160,7 @@ export const toLegacyResponseData = (series: SeriesData): TimeSeries | TableData const type = guessFieldTypeFromSeries(series, 1); if (type === FieldType.time) { return { + alias: fields[0].name || 
series.name, target: fields[0].name || series.name, datapoints: rows, unit: fields[0].unit, diff --git a/pkg/plugins/datasource_plugin.go b/pkg/plugins/datasource_plugin.go index 8c846839edaf..1379daf5a6da 100644 --- a/pkg/plugins/datasource_plugin.go +++ b/pkg/plugins/datasource_plugin.go @@ -29,6 +29,7 @@ type DataSourcePlugin struct { BuiltIn bool `json:"builtIn,omitempty"` Mixed bool `json:"mixed,omitempty"` Routes []*AppPluginRoute `json:"routes"` + Streaming bool `json:"streaming"` Backend bool `json:"backend,omitempty"` Executable string `json:"executable,omitempty"` diff --git a/public/app/core/time_series2.ts b/public/app/core/time_series2.ts index 05815ab7ab38..d7a57b77afc9 100644 --- a/public/app/core/time_series2.ts +++ b/public/app/core/time_series2.ts @@ -329,7 +329,7 @@ export default class TimeSeries { isMsResolutionNeeded() { for (let i = 0; i < this.datapoints.length; i++) { - if (this.datapoints[i][1] !== null) { + if (this.datapoints[i][1] !== null && this.datapoints[i][1] !== undefined) { const timestamp = this.datapoints[i][1].toString(); if (timestamp.length === 13 && timestamp % 1000 !== 0) { return true; diff --git a/public/app/core/utils/explore.ts b/public/app/core/utils/explore.ts index 99e168b8590f..4a4697d7d0a2 100644 --- a/public/app/core/utils/explore.ts +++ b/public/app/core/utils/explore.ts @@ -1,44 +1,35 @@ // Libraries import _ from 'lodash'; +import { from } from 'rxjs'; +import { toUtc } from '@grafana/ui/src/utils/moment_wrapper'; +import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; // Services & Utils import * as dateMath from '@grafana/ui/src/utils/datemath'; import { renderUrl } from 'app/core/utils/url'; import kbn from 'app/core/utils/kbn'; import store from 'app/core/store'; -import TableModel, { mergeTablesIntoModel } from 'app/core/table_model'; import { getNextRefIdChar } from './query'; // Types import { - colors, TimeRange, RawTimeRange, TimeZone, IntervalValues, DataQuery, 
DataSourceApi, - toSeriesData, - guessFieldTypes, TimeFragment, DataQueryError, LogRowModel, LogsModel, LogsDedupStrategy, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, } from '@grafana/ui'; -import TimeSeries from 'app/core/time_series2'; -import { - ExploreUrlState, - HistoryItem, - QueryTransaction, - ResultType, - QueryIntervals, - QueryOptions, - ResultGetter, -} from 'app/types/explore'; -import { seriesDataToLogsModel } from 'app/core/logs_model'; -import { toUtc } from '@grafana/ui/src/utils/moment_wrapper'; -import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; +import { ExploreUrlState, HistoryItem, QueryTransaction, QueryIntervals, QueryOptions } from 'app/types/explore'; +import { config } from '../config'; export const DEFAULT_RANGE = { from: 'now-6h', @@ -116,7 +107,6 @@ export async function getExploreUrl( export function buildQueryTransaction( queries: DataQuery[], - resultType: ResultType, queryOptions: QueryOptions, range: TimeRange, queryIntervals: QueryIntervals, @@ -137,7 +127,7 @@ export function buildQueryTransaction( // Using `format` here because it relates to the view panel that the request is for. // However, some datasources don't use `panelId + query.refId`, but only `panelId`. // Therefore panel id has to be unique. - const panelId = `${queryOptions.format}-${key}`; + const panelId = `${key}`; const options = { interval, @@ -156,7 +146,6 @@ export function buildQueryTransaction( return { queries, options, - resultType, scanning, id: generateKey(), // reusing for unique ID done: false, @@ -328,28 +317,6 @@ export function hasNonEmptyQuery(queries: TQuery ); } -export function calculateResultsFromQueryTransactions(result: any, resultType: ResultType, graphInterval: number) { - const flattenedResult: any[] = _.flatten(result); - const graphResult = resultType === 'Graph' && result ? result : null; - const tableResult = - resultType === 'Table' && result - ? 
mergeTablesIntoModel( - new TableModel(), - ...flattenedResult.filter((r: any) => r.columns && r.rows).map((r: any) => r as TableModel) - ) - : mergeTablesIntoModel(new TableModel()); - const logsResult = - resultType === 'Logs' && result - ? seriesDataToLogsModel(flattenedResult.map(r => guessFieldTypes(toSeriesData(r))), graphInterval) - : null; - - return { - graphResult, - tableResult, - logsResult, - }; -} - export function getIntervals(range: TimeRange, lowLimit: string, resolution: number): IntervalValues { if (!resolution) { return { interval: '1s', intervalMs: 1000 }; @@ -358,37 +325,6 @@ export function getIntervals(range: TimeRange, lowLimit: string, resolution: num return kbn.calculateInterval(range, resolution, lowLimit); } -export const makeTimeSeriesList: ResultGetter = (dataList, transaction, allTransactions) => { - // Prevent multiple Graph transactions to have the same colors - let colorIndexOffset = 0; - for (const other of allTransactions) { - // Only need to consider transactions that came before the current one - if (other === transaction) { - break; - } - // Count timeseries of previous query results - if (other.resultType === 'Graph' && other.done) { - colorIndexOffset += other.result.length; - } - } - - return dataList.map((seriesData, index: number) => { - const datapoints = seriesData.datapoints || []; - const alias = seriesData.target; - const colorIndex = (colorIndexOffset + index) % colors.length; - const color = colors[colorIndex]; - - const series = new TimeSeries({ - datapoints, - alias, - color, - unit: seriesData.unit, - }); - - return series; - }); -}; - /** * Update the query history. Side-effect: store history in local storage */ @@ -566,3 +502,20 @@ export const sortLogsResult = (logsResult: LogsModel, refreshInterval: string) = return result; }; + +export const convertToWebSocketUrl = (url: string) => { + const protocol = window.location.protocol === 'https:' ? 
'wss://' : 'ws://'; + let backend = `${protocol}${window.location.host}${config.appSubUrl}`; + if (backend.endsWith('/')) { + backend = backend.slice(0, backend.length - 1); + } + return `${backend}${url}`; +}; + +export const getQueryResponse = ( + datasourceInstance: DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver +) => { + return from(datasourceInstance.query(options, observer)); +}; diff --git a/public/app/features/explore/Explore.tsx b/public/app/features/explore/Explore.tsx index eef4b8b21dc9..8028e8362d79 100644 --- a/public/app/features/explore/Explore.tsx +++ b/public/app/features/explore/Explore.tsx @@ -51,11 +51,11 @@ import { } from 'app/core/utils/explore'; import { Emitter } from 'app/core/utils/emitter'; import { ExploreToolbar } from './ExploreToolbar'; -import { scanStopAction } from './state/actionTypes'; import { NoDataSourceCallToAction } from './NoDataSourceCallToAction'; import { FadeIn } from 'app/core/components/Animations/FadeIn'; import { getTimeZone } from '../profile/state/selectors'; import { ErrorContainer } from './ErrorContainer'; +import { scanStopAction } from './state/actionTypes'; interface ExploreProps { StartPage?: ComponentClass; diff --git a/public/app/features/explore/ExploreToolbar.tsx b/public/app/features/explore/ExploreToolbar.tsx index 9d6c4a1d3d96..9d3cb9841208 100644 --- a/public/app/features/explore/ExploreToolbar.tsx +++ b/public/app/features/explore/ExploreToolbar.tsx @@ -10,6 +10,7 @@ import { TimeZone, TimeRange, SelectOptionItem, + LoadingState, } from '@grafana/ui'; import { DataSourcePicker } from 'app/core/components/Select/DataSourcePicker'; import { StoreState } from 'app/types/store'; @@ -261,9 +262,7 @@ const mapStateToProps = (state: StoreState, { exploreId }: OwnProps): StateProps exploreDatasources, range, refreshInterval, - graphIsLoading, - logIsLoading, - tableIsLoading, + loadingState, supportedModes, mode, isLive, @@ -271,8 +270,9 @@ const mapStateToProps = (state: 
StoreState, { exploreId }: OwnProps): StateProps const selectedDatasource = datasourceInstance ? exploreDatasources.find(datasource => datasource.name === datasourceInstance.name) : undefined; - const loading = graphIsLoading || logIsLoading || tableIsLoading; - const hasLiveOption = datasourceInstance && datasourceInstance.convertToStreamTargets ? true : false; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; + const hasLiveOption = + datasourceInstance && datasourceInstance.meta && datasourceInstance.meta.streaming ? true : false; const supportedModeOptions: Array> = []; let selectedModeOption = null; diff --git a/public/app/features/explore/GraphContainer.tsx b/public/app/features/explore/GraphContainer.tsx index 0fba2ae6ded4..6d1bb6c4e387 100644 --- a/public/app/features/explore/GraphContainer.tsx +++ b/public/app/features/explore/GraphContainer.tsx @@ -1,7 +1,7 @@ import React, { PureComponent } from 'react'; import { hot } from 'react-hot-loader'; import { connect } from 'react-redux'; -import { TimeRange, TimeZone, AbsoluteTimeRange } from '@grafana/ui'; +import { TimeRange, TimeZone, AbsoluteTimeRange, LoadingState } from '@grafana/ui'; import { ExploreId, ExploreItemState } from 'app/types/explore'; import { StoreState } from 'app/types'; @@ -69,8 +69,8 @@ function mapStateToProps(state: StoreState, { exploreId }) { const explore = state.explore; const { split } = explore; const item: ExploreItemState = explore[exploreId]; - const { graphResult, graphIsLoading, range, showingGraph, showingTable } = item; - const loading = graphIsLoading; + const { graphResult, loadingState, range, showingGraph, showingTable } = item; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; return { graphResult, loading, range, showingGraph, showingTable, split, timeZone: getTimeZone(state.user) }; } diff --git a/public/app/features/explore/LogsContainer.tsx 
b/public/app/features/explore/LogsContainer.tsx index d8d85efcc135..79846e1d4bc9 100644 --- a/public/app/features/explore/LogsContainer.tsx +++ b/public/app/features/explore/LogsContainer.tsx @@ -13,6 +13,7 @@ import { LogsModel, LogRowModel, LogsDedupStrategy, + LoadingState, } from '@grafana/ui'; import { ExploreId, ExploreItemState } from 'app/types/explore'; @@ -151,14 +152,14 @@ function mapStateToProps(state: StoreState, { exploreId }) { const { logsHighlighterExpressions, logsResult, - logIsLoading, + loadingState, scanning, scanRange, range, datasourceInstance, isLive, } = item; - const loading = logIsLoading; + const loading = loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; const { dedupStrategy } = exploreItemUIStateSelector(item); const hiddenLogLevels = new Set(item.hiddenLogLevels); const dedupedResult = deduplicatedLogsSelector(item); diff --git a/public/app/features/explore/QueryRow.tsx b/public/app/features/explore/QueryRow.tsx index 2a0429dbd971..49880c11230a 100644 --- a/public/app/features/explore/QueryRow.tsx +++ b/public/app/features/explore/QueryRow.tsx @@ -20,7 +20,6 @@ import { QueryFixAction, DataSourceStatus, PanelData, - LoadingState, DataQueryError, } from '@grafana/ui'; import { HistoryItem, ExploreItemState, ExploreId } from 'app/types/explore'; @@ -180,9 +179,7 @@ function mapStateToProps(state: StoreState, { exploreId, index }: QueryRowProps) range, datasourceError, graphResult, - graphIsLoading, - tableIsLoading, - logIsLoading, + loadingState, latency, queryErrors, } = item; @@ -190,15 +187,9 @@ function mapStateToProps(state: StoreState, { exploreId, index }: QueryRowProps) const datasourceStatus = datasourceError ? DataSourceStatus.Disconnected : DataSourceStatus.Connected; const error = queryErrors.filter(queryError => queryError.refId === query.refId)[0]; const series = graphResult ? 
graphResult : []; // TODO: use SeriesData - const queryResponseState = - graphIsLoading || tableIsLoading || logIsLoading - ? LoadingState.Loading - : error - ? LoadingState.Error - : LoadingState.Done; const queryResponse: PanelData = { series, - state: queryResponseState, + state: loadingState, error, }; diff --git a/public/app/features/explore/TableContainer.tsx b/public/app/features/explore/TableContainer.tsx index 18ee70d8ee20..ea227e78b976 100644 --- a/public/app/features/explore/TableContainer.tsx +++ b/public/app/features/explore/TableContainer.tsx @@ -9,6 +9,7 @@ import { toggleTable } from './state/actions'; import Table from './Table'; import Panel from './Panel'; import TableModel from 'app/core/table_model'; +import { LoadingState } from '@grafana/ui'; interface TableContainerProps { exploreId: ExploreId; @@ -38,8 +39,11 @@ export class TableContainer extends PureComponent { function mapStateToProps(state: StoreState, { exploreId }) { const explore = state.explore; const item: ExploreItemState = explore[exploreId]; - const { tableIsLoading, showingTable, tableResult } = item; - const loading = tableIsLoading; + const { loadingState, showingTable, tableResult } = item; + const loading = + tableResult && tableResult.rows.length > 0 + ? 
false + : loadingState === LoadingState.Loading || loadingState === LoadingState.Streaming; return { loading, showingTable, tableResult }; } diff --git a/public/app/features/explore/state/actionTypes.ts b/public/app/features/explore/state/actionTypes.ts index b572b6ca041b..68b9ac604eb1 100644 --- a/public/app/features/explore/state/actionTypes.ts +++ b/public/app/features/explore/state/actionTypes.ts @@ -9,18 +9,23 @@ import { LogLevel, TimeRange, DataQueryError, + SeriesData, + LogsModel, + TimeSeries, + DataQueryResponseData, + LoadingState, } from '@grafana/ui/src/types'; import { ExploreId, ExploreItemState, HistoryItem, RangeScanner, - ResultType, - QueryTransaction, ExploreUIState, ExploreMode, + QueryOptions, } from 'app/types/explore'; import { actionCreatorFactory, noPayloadActionCreatorFactory, ActionOf } from 'app/core/redux/actionCreatorFactory'; +import TableModel from 'app/core/table_model'; /** Higher order actions * @@ -142,21 +147,19 @@ export interface ModifyQueriesPayload { export interface QueryFailurePayload { exploreId: ExploreId; response: DataQueryError; - resultType: ResultType; } export interface QueryStartPayload { exploreId: ExploreId; - resultType: ResultType; - rowIndex: number; - transaction: QueryTransaction; } export interface QuerySuccessPayload { exploreId: ExploreId; - result: any; - resultType: ResultType; latency: number; + loadingState: LoadingState; + graphResult: TimeSeries[]; + tableResult: TableModel; + logsResult: LogsModel; } export interface HistoryUpdatedPayload { @@ -238,6 +241,41 @@ export interface ResetQueryErrorPayload { refIds: string[]; } +export interface SetUrlReplacedPayload { + exploreId: ExploreId; +} + +export interface ProcessQueryErrorsPayload { + exploreId: ExploreId; + response: any; + datasourceId: string; +} + +export interface ProcessQueryResultsPayload { + exploreId: ExploreId; + latency: number; + datasourceId: string; + loadingState: LoadingState; + series?: DataQueryResponseData[]; + delta?: 
SeriesData[]; +} + +export interface RunQueriesBatchPayload { + exploreId: ExploreId; + queryOptions: QueryOptions; +} + +export interface LimitMessageRatePayload { + series: SeriesData[]; + exploreId: ExploreId; + datasourceId: string; +} + +export interface ChangeRangePayload { + exploreId: ExploreId; + range: TimeRange; +} + /** * Adds a query row after the row with the given index. */ @@ -333,13 +371,6 @@ export const modifyQueriesAction = actionCreatorFactory('e */ export const queryFailureAction = actionCreatorFactory('explore/QUERY_FAILURE').create(); -/** - * Start a query transaction for the given result type. - * @param exploreId Explore area - * @param transaction Query options and `done` status. - * @param resultType Associate the transaction with a result viewer, e.g., Graph - * @param rowIndex Index is used to associate latency for this transaction with a query row - */ export const queryStartAction = actionCreatorFactory('explore/QUERY_START').create(); /** @@ -392,6 +423,7 @@ export const splitCloseAction = actionCreatorFactory('e * The copy keeps all query modifications but wipes the query results. 
*/ export const splitOpenAction = actionCreatorFactory('explore/SPLIT_OPEN').create(); + export const stateSaveAction = noPayloadActionCreatorFactory('explore/STATE_SAVE').create(); /** @@ -440,6 +472,24 @@ export const historyUpdatedAction = actionCreatorFactory( export const resetQueryErrorAction = actionCreatorFactory('explore/RESET_QUERY_ERROR').create(); +export const setUrlReplacedAction = actionCreatorFactory('explore/SET_URL_REPLACED').create(); + +export const processQueryErrorsAction = actionCreatorFactory( + 'explore/PROCESS_QUERY_ERRORS' +).create(); + +export const processQueryResultsAction = actionCreatorFactory( + 'explore/PROCESS_QUERY_RESULTS' +).create(); + +export const runQueriesBatchAction = actionCreatorFactory('explore/RUN_QUERIES_BATCH').create(); + +export const limitMessageRatePayloadAction = actionCreatorFactory( + 'explore/LIMIT_MESSAGE_RATE_PAYLOAD' +).create(); + +export const changeRangeAction = actionCreatorFactory('explore/CHANGE_RANGE').create(); + export type HigherOrderAction = | ActionOf | SplitOpenAction diff --git a/public/app/features/explore/state/actions.ts b/public/app/features/explore/state/actions.ts index bfeb96aef35b..4f95744eb479 100644 --- a/public/app/features/explore/state/actions.ts +++ b/public/app/features/explore/state/actions.ts @@ -7,25 +7,14 @@ import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { Emitter } from 'app/core/core'; import { LAST_USED_DATASOURCE_KEY, - clearQueryKeys, ensureQueries, generateEmptyQuery, - hasNonEmptyQuery, - makeTimeSeriesList, - updateHistory, - buildQueryTransaction, - serializeStateToUrlParam, parseUrlState, getTimeRange, getTimeRangeFromUrl, generateNewKeyAndAddRefIdIfMissing, - instanceOfDataQueryError, - getRefIds, } from 'app/core/utils/explore'; -// Actions -import { updateLocation } from 'app/core/actions'; - // Types import { ThunkResult } from 'app/types'; import { @@ -34,19 +23,9 @@ import { DataQuery, DataSourceSelectItem, QueryFixAction, - 
TimeRange, LogsDedupStrategy, } from '@grafana/ui'; -import { - ExploreId, - ExploreUrlState, - RangeScanner, - ResultType, - QueryOptions, - ExploreUIState, - QueryTransaction, - ExploreMode, -} from 'app/types/explore'; +import { ExploreId, RangeScanner, ExploreUIState, QueryTransaction, ExploreMode } from 'app/types/explore'; import { updateDatasourceInstanceAction, changeQueryAction, @@ -55,7 +34,6 @@ import { changeSizeAction, ChangeSizePayload, changeTimeAction, - scanStopAction, clearQueriesAction, initializeExploreAction, loadDatasourceMissingAction, @@ -64,9 +42,6 @@ import { LoadDatasourceReadyPayload, loadDatasourceReadyAction, modifyQueriesAction, - queryFailureAction, - querySuccessAction, - scanRangeAction, scanStartAction, setQueriesAction, splitCloseAction, @@ -77,21 +52,19 @@ import { ToggleGraphPayload, ToggleTablePayload, updateUIStateAction, - runQueriesAction, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, loadExploreDatasources, - queryStartAction, - historyUpdatedAction, - resetQueryErrorAction, changeModeAction, + scanStopAction, + scanRangeAction, + runQueriesAction, + stateSaveAction, } from './actionTypes'; import { ActionOf, ActionCreator } from 'app/core/redux/actionCreatorFactory'; import { getTimeZone } from 'app/features/profile/state/selectors'; -import { isDateTime } from '@grafana/ui/src/utils/moment_wrapper'; -import { toDataQueryError } from 'app/features/dashboard/state/PanelQueryState'; -import { startSubscriptionsAction, subscriptionDataReceivedAction } from 'app/features/explore/state/epics'; +import { offOption } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; /** * Updates UI state and save it to the URL @@ -99,7 +72,7 @@ import { startSubscriptionsAction, subscriptionDataReceivedAction } from 'app/fe const updateExploreUIState = (exploreId: ExploreId, uiStateFragment: Partial): ThunkResult => { return dispatch => { dispatch(updateUIStateAction({ exploreId, 
...uiStateFragment })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; }; @@ -118,7 +91,7 @@ export function addQueryRow(exploreId: ExploreId, index: number): ThunkResult { +export function changeDatasource(exploreId: ExploreId, datasource: string): ThunkResult { return async (dispatch, getState) => { let newDataSourceInstance: DataSourceApi = null; @@ -135,8 +108,12 @@ export function changeDatasource(exploreId: ExploreId, datasource: string, repla dispatch(updateDatasourceInstanceAction({ exploreId, datasourceInstance: newDataSourceInstance })); + if (getState().explore[exploreId].isLive) { + dispatch(changeRefreshInterval(exploreId, offOption.value)); + } + await dispatch(loadDatasource(exploreId, newDataSourceInstance)); - dispatch(runQueries(exploreId, false, replaceUrl)); + dispatch(runQueries(exploreId)); }; } @@ -215,7 +192,7 @@ export function clearQueries(exploreId: ExploreId): ThunkResult { return dispatch => { dispatch(scanStopAction({ exploreId })); dispatch(clearQueriesAction({ exploreId })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; } @@ -242,7 +219,7 @@ export function loadExploreDatasourcesAndSetDatasource( dispatch(loadExploreDatasources({ exploreId, exploreDatasources })); if (exploreDatasources.length >= 1) { - dispatch(changeDatasource(exploreId, datasourceName, true)); + dispatch(changeDatasource(exploreId, datasourceName)); } else { dispatch(loadDatasourceMissingAction({ exploreId })); } @@ -419,201 +396,17 @@ export function modifyQueries( }; } -export function processQueryErrors( - exploreId: ExploreId, - response: any, - resultType: ResultType, - datasourceId: string -): ThunkResult { - return (dispatch, getState) => { - const { datasourceInstance } = getState().explore[exploreId]; - - if (datasourceInstance.meta.id !== datasourceId || response.cancelled) { - // Navigated away, queries did not matter - return; - } - - console.error(response); // To help finding problems with query syntax - - if 
(!instanceOfDataQueryError(response)) { - response = toDataQueryError(response); - } - - dispatch( - queryFailureAction({ - exploreId, - response, - resultType, - }) - ); - }; -} - -/** - * @param exploreId Explore area - * @param response Response from `datasourceInstance.query()` - * @param latency Duration between request and response - * @param resultType The type of result - * @param datasourceId Origin datasource instance, used to discard results if current datasource is different - */ -export function processQueryResults( - exploreId: ExploreId, - response: any, - latency: number, - resultType: ResultType, - datasourceId: string -): ThunkResult { - return (dispatch, getState) => { - const { datasourceInstance, scanning, scanner } = getState().explore[exploreId]; - - // If datasource already changed, results do not matter - if (datasourceInstance.meta.id !== datasourceId) { - return; - } - - const series: any[] = response.data; - const refIds = getRefIds(series); - - // Clears any previous errors that now have a successful query, important so Angular editors are updated correctly - dispatch( - resetQueryErrorAction({ - exploreId, - refIds, - }) - ); - - const resultGetter = - resultType === 'Graph' ? makeTimeSeriesList : resultType === 'Table' ? (data: any[]) => data : null; - const result = resultGetter ? 
resultGetter(series, null, []) : series; - - dispatch( - querySuccessAction({ - exploreId, - result, - resultType, - latency, - }) - ); - - // Keep scanning for results if this was the last scanning transaction - if (scanning) { - if (_.size(result) === 0) { - const range = scanner(); - dispatch(scanRangeAction({ exploreId, range })); - } else { - // We can stop scanning if we have a result - dispatch(scanStopAction({ exploreId })); - } - } - }; -} - /** * Main action to run queries and dispatches sub-actions based on which result viewers are active */ -export function runQueries(exploreId: ExploreId, ignoreUIState = false, replaceUrl = false): ThunkResult { +export function runQueries(exploreId: ExploreId): ThunkResult { return (dispatch, getState) => { - const { - datasourceInstance, - queries, - showingGraph, - showingTable, - datasourceError, - containerWidth, - mode, - range, - } = getState().explore[exploreId]; - - if (datasourceError) { - // let's not run any queries if data source is in a faulty state - return; - } - - if (!hasNonEmptyQuery(queries)) { - dispatch(clearQueriesAction({ exploreId })); - dispatch(stateSave(replaceUrl)); // Remember to save to state and update location - return; - } - - // Some datasource's query builders allow per-query interval limits, - // but we're using the datasource interval limit for now - const interval = datasourceInstance.interval; + const { range } = getState().explore[exploreId]; const timeZone = getTimeZone(getState().user); const updatedRange = getTimeRange(timeZone, range.raw); dispatch(runQueriesAction({ exploreId, range: updatedRange })); - // Keep table queries first since they need to return quickly - if ((ignoreUIState || showingTable) && mode === ExploreMode.Metrics) { - dispatch( - runQueriesForType(exploreId, 'Table', { - interval, - format: 'table', - instant: true, - valueWithRefId: true, - }) - ); - } - if ((ignoreUIState || showingGraph) && mode === ExploreMode.Metrics) { - dispatch( - 
runQueriesForType(exploreId, 'Graph', { - interval, - format: 'time_series', - instant: false, - maxDataPoints: containerWidth, - }) - ); - } - if (mode === ExploreMode.Logs) { - dispatch(runQueriesForType(exploreId, 'Logs', { interval, format: 'logs' })); - } - - dispatch(stateSave(replaceUrl)); - }; -} - -/** - * Helper action to build a query transaction object and handing the query to the datasource. - * @param exploreId Explore area - * @param resultType Result viewer that will be associated with this query result - * @param queryOptions Query options as required by the datasource's `query()` function. - * @param resultGetter Optional result extractor, e.g., if the result is a list and you only need the first element. - */ -function runQueriesForType( - exploreId: ExploreId, - resultType: ResultType, - queryOptions: QueryOptions -): ThunkResult { - return async (dispatch, getState) => { - const { datasourceInstance, eventBridge, queries, queryIntervals, range, scanning, history } = getState().explore[ - exploreId - ]; - - if (resultType === 'Logs' && datasourceInstance.convertToStreamTargets) { - dispatch( - startSubscriptionsAction({ - exploreId, - dataReceivedActionCreator: subscriptionDataReceivedAction, - }) - ); - } - - const datasourceId = datasourceInstance.meta.id; - const transaction = buildQueryTransaction(queries, resultType, queryOptions, range, queryIntervals, scanning); - dispatch(queryStartAction({ exploreId, resultType, rowIndex: 0, transaction })); - try { - const now = Date.now(); - const response = await datasourceInstance.query(transaction.options); - eventBridge.emit('data-received', response.data || []); - const latency = Date.now() - now; - // Side-effect: Saving history in localstorage - const nextHistory = updateHistory(history, datasourceId, queries); - dispatch(historyUpdatedAction({ exploreId, history: nextHistory })); - dispatch(processQueryResults(exploreId, response, latency, resultType, datasourceId)); - } catch (err) { - 
eventBridge.emit('data-error', err); - dispatch(processQueryErrors(exploreId, err, resultType, datasourceId)); - } }; } @@ -653,7 +446,7 @@ export function setQueries(exploreId: ExploreId, rawQueries: DataQuery[]): Thunk export function splitClose(itemId: ExploreId): ThunkResult { return dispatch => { dispatch(splitCloseAction({ itemId })); - dispatch(stateSave()); + dispatch(stateSaveAction()); }; } @@ -677,64 +470,7 @@ export function splitOpen(): ThunkResult { urlState, }; dispatch(splitOpenAction({ itemState })); - dispatch(stateSave()); - }; -} - -const toRawTimeRange = (range: TimeRange): RawTimeRange => { - let from = range.raw.from; - if (isDateTime(from)) { - from = from.valueOf().toString(10); - } - - let to = range.raw.to; - if (isDateTime(to)) { - to = to.valueOf().toString(10); - } - - return { - from, - to, - }; -}; - -/** - * Saves Explore state to URL using the `left` and `right` parameters. - * If split view is not active, `right` will not be set. - */ -export function stateSave(replaceUrl = false): ThunkResult { - return (dispatch, getState) => { - const { left, right, split } = getState().explore; - const urlStates: { [index: string]: string } = {}; - const leftUrlState: ExploreUrlState = { - datasource: left.datasourceInstance.name, - queries: left.queries.map(clearQueryKeys), - range: toRawTimeRange(left.range), - ui: { - showingGraph: left.showingGraph, - showingLogs: true, - showingTable: left.showingTable, - dedupStrategy: left.dedupStrategy, - }, - }; - urlStates.left = serializeStateToUrlParam(leftUrlState, true); - if (split) { - const rightUrlState: ExploreUrlState = { - datasource: right.datasourceInstance.name, - queries: right.queries.map(clearQueryKeys), - range: toRawTimeRange(right.range), - ui: { - showingGraph: right.showingGraph, - showingLogs: true, - showingTable: right.showingTable, - dedupStrategy: right.dedupStrategy, - }, - }; - - urlStates.right = serializeStateToUrlParam(rightUrlState, true); - } - - 
dispatch(updateLocation({ query: urlStates, replace: replaceUrl })); + dispatch(stateSaveAction()); }; } diff --git a/public/app/features/explore/state/epics.test.ts b/public/app/features/explore/state/epics.test.ts deleted file mode 100644 index fbfb934a43ae..000000000000 --- a/public/app/features/explore/state/epics.test.ts +++ /dev/null @@ -1,550 +0,0 @@ -import { liveOption } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; -import { DataSourceApi, DataQuery } from '@grafana/ui/src/types/datasource'; - -import { ExploreId, ExploreState } from 'app/types'; -import { actionCreatorFactory } from 'app/core/redux/actionCreatorFactory'; -import { - startSubscriptionsEpic, - startSubscriptionsAction, - SubscriptionDataReceivedPayload, - startSubscriptionAction, - startSubscriptionEpic, - limitMessageRatePayloadAction, -} from './epics'; -import { makeExploreItemState } from './reducers'; -import { epicTester } from 'test/core/redux/epicTester'; -import { - resetExploreAction, - updateDatasourceInstanceAction, - changeRefreshIntervalAction, - clearQueriesAction, -} from './actionTypes'; - -const setup = (options: any = {}) => { - const url = '/api/datasources/proxy/20/api/prom/tail?query=%7Bfilename%3D%22%2Fvar%2Flog%2Fdocker.log%22%7D'; - const webSocketUrl = 'ws://localhost' + url; - const refId = options.refId || 'A'; - const exploreId = ExploreId.left; - const datasourceInstance: DataSourceApi = options.datasourceInstance || { - id: 1337, - query: jest.fn(), - name: 'test', - testDatasource: jest.fn(), - convertToStreamTargets: () => [ - { - url, - refId, - }, - ], - resultToSeriesData: data => [data], - }; - const itemState = makeExploreItemState(); - const explore: Partial = { - [exploreId]: { - ...itemState, - datasourceInstance, - refreshInterval: options.refreshInterval || liveOption.value, - queries: [{} as DataQuery], - }, - }; - const state: any = { - explore, - }; - - return { url, state, refId, webSocketUrl, exploreId }; -}; - -const 
dataReceivedActionCreator = actionCreatorFactory('test').create(); - -describe('startSubscriptionsEpic', () => { - describe('when startSubscriptionsAction is dispatched', () => { - describe('and datasource supports convertToStreamTargets', () => { - describe('and explore is Live', () => { - it('then correct actions should be dispatched', () => { - const { state, refId, webSocketUrl, exploreId } = setup(); - - epicTester(startSubscriptionsEpic, state) - .whenActionIsDispatched(startSubscriptionsAction({ exploreId, dataReceivedActionCreator })) - .thenResultingActionsEqual( - startSubscriptionAction({ - exploreId, - refId, - url: webSocketUrl, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and explore is not Live', () => { - it('then no actions should be dispatched', () => { - const { state, exploreId } = setup({ refreshInterval: '10s' }); - - epicTester(startSubscriptionsEpic, state) - .whenActionIsDispatched(startSubscriptionsAction({ exploreId, dataReceivedActionCreator })) - .thenNoActionsWhereDispatched(); - }); - }); - }); - - describe('and datasource does not support streaming', () => { - it('then no actions should be dispatched', () => { - const { state, exploreId } = setup({ datasourceInstance: {} }); - - epicTester(startSubscriptionsEpic, state) - .whenActionIsDispatched(startSubscriptionsAction({ exploreId, dataReceivedActionCreator })) - .thenNoActionsWhereDispatched(); - }); - }); - }); -}); - -describe('startSubscriptionEpic', () => { - describe('when startSubscriptionAction is dispatched', () => { - describe('and datasource supports resultToSeriesData', () => { - it('then correct actions should be dispatched', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ url: webSocketUrl, refId, exploreId, dataReceivedActionCreator }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - 
.thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and datasource does not support resultToSeriesData', () => { - it('then no actions should be dispatched', () => { - const { state, webSocketUrl, refId, exploreId } = setup({ datasourceInstance: {} }); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ url: webSocketUrl, refId, exploreId, dataReceivedActionCreator }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenNoActionsWhereDispatched(); - }); - }); - }); - - describe('when an subscription is active', () => { - describe('and resetExploreAction is dispatched', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ url: webSocketUrl, refId, exploreId, dataReceivedActionCreator }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(resetExploreAction()) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and updateDatasourceInstanceAction is dispatched', () => { - describe('and 
exploreId matches the websockets', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(updateDatasourceInstanceAction({ exploreId, datasourceInstance: null })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and exploreId does not match the websockets', () => { - it('then subscription should not be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched( - updateDatasourceInstanceAction({ exploreId: ExploreId.right, datasourceInstance: null }) - ) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - - describe('and 
changeRefreshIntervalAction is dispatched', () => { - describe('and exploreId matches the websockets', () => { - describe('and refreshinterval is not "Live"', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId, refreshInterval: '10s' })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and refreshinterval is "Live"', () => { - it('then subscription should not be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId, refreshInterval: liveOption.value })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - 
dataReceivedActionCreator, - }) - ); - }); - }); - }); - - describe('and exploreId does not match the websockets', () => { - it('then subscription should not be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId: ExploreId.right, refreshInterval: '10s' })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - - describe('and clearQueriesAction is dispatched', () => { - describe('and exploreId matches the websockets', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(clearQueriesAction({ exploreId })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - 
dataReceivedActionCreator, - }) - ); - }); - }); - - describe('and exploreId does not match the websockets', () => { - it('then subscription should not be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched(clearQueriesAction({ exploreId: ExploreId.right })) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - - describe('and startSubscriptionAction is dispatched', () => { - describe('and exploreId and refId matches the websockets', () => { - it('then subscription should be unsubscribed', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - 
exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - // This looks like we haven't stopped the subscription but we actually started the same again - ); - }); - - describe('and exploreId or refId does not match the websockets', () => { - it('then subscription should not be unsubscribed and another websocket is started', () => { - const { state, webSocketUrl, refId, exploreId } = setup(); - - epicTester(startSubscriptionEpic, state) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId, - exploreId, - dataReceivedActionCreator, - }) - ) - .thenNoActionsWhereDispatched() - .whenWebSocketReceivesData({ data: [1, 2, 3] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }) - ) - .whenActionIsDispatched( - startSubscriptionAction({ - url: webSocketUrl, - refId: 'B', - exploreId, - dataReceivedActionCreator, - }) - ) - .whenWebSocketReceivesData({ data: [4, 5, 6] }) - .thenResultingActionsEqual( - limitMessageRatePayloadAction({ - exploreId, - data: { data: [1, 2, 3] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }), - limitMessageRatePayloadAction({ - exploreId, - data: { data: [4, 5, 6] } as any, - dataReceivedActionCreator, - }) - ); - }); - }); - }); - }); - }); -}); diff --git a/public/app/features/explore/state/epics.ts b/public/app/features/explore/state/epics.ts deleted file mode 100644 index a31474f81ccf..000000000000 --- a/public/app/features/explore/state/epics.ts +++ /dev/null @@ -1,159 +0,0 @@ -import { Epic } from 'redux-observable'; -import { NEVER } from 'rxjs'; -import { takeUntil, mergeMap, tap, filter, map, throttleTime } from 'rxjs/operators'; - -import { StoreState, 
ExploreId } from 'app/types'; -import { ActionOf, ActionCreator, actionCreatorFactory } from '../../../core/redux/actionCreatorFactory'; -import { config } from '../../../core/config'; -import { - updateDatasourceInstanceAction, - resetExploreAction, - changeRefreshIntervalAction, - clearQueriesAction, -} from './actionTypes'; -import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; -import { SeriesData } from '@grafana/ui/src/types/data'; -import { EpicDependencies } from 'app/store/configureStore'; - -const convertToWebSocketUrl = (url: string) => { - const protocol = window.location.protocol === 'https:' ? 'wss://' : 'ws://'; - let backend = `${protocol}${window.location.host}${config.appSubUrl}`; - if (backend.endsWith('/')) { - backend = backend.slice(0, backend.length - 1); - } - return `${backend}${url}`; -}; - -export interface StartSubscriptionsPayload { - exploreId: ExploreId; - dataReceivedActionCreator: ActionCreator; -} - -export const startSubscriptionsAction = actionCreatorFactory( - 'explore/START_SUBSCRIPTIONS' -).create(); - -export interface StartSubscriptionPayload { - url: string; - refId: string; - exploreId: ExploreId; - dataReceivedActionCreator: ActionCreator; -} - -export const startSubscriptionAction = actionCreatorFactory( - 'explore/START_SUBSCRIPTION' -).create(); - -export interface SubscriptionDataReceivedPayload { - data: SeriesData; - exploreId: ExploreId; -} - -export const subscriptionDataReceivedAction = actionCreatorFactory( - 'explore/SUBSCRIPTION_DATA_RECEIVED' -).create(); - -export interface LimitMessageRatePayload { - data: SeriesData; - exploreId: ExploreId; - dataReceivedActionCreator: ActionCreator; -} - -export const limitMessageRatePayloadAction = actionCreatorFactory( - 'explore/LIMIT_MESSAGE_RATE_PAYLOAD' -).create(); - -export const startSubscriptionsEpic: Epic, ActionOf, StoreState> = (action$, state$) => { - return action$.ofType(startSubscriptionsAction.type).pipe( - mergeMap((action: 
ActionOf) => { - const { exploreId, dataReceivedActionCreator } = action.payload; - const { datasourceInstance, queries, refreshInterval } = state$.value.explore[exploreId]; - - if (!datasourceInstance || !datasourceInstance.convertToStreamTargets) { - return NEVER; //do nothing if datasource does not support streaming - } - - if (!refreshInterval || !isLive(refreshInterval)) { - return NEVER; //do nothing if refresh interval is not 'LIVE' - } - - const request: any = { targets: queries }; - return datasourceInstance.convertToStreamTargets(request).map(target => - startSubscriptionAction({ - url: convertToWebSocketUrl(target.url), - refId: target.refId, - exploreId, - dataReceivedActionCreator, - }) - ); - }) - ); -}; - -export const startSubscriptionEpic: Epic, ActionOf, StoreState, EpicDependencies> = ( - action$, - state$, - { getWebSocket } -) => { - return action$.ofType(startSubscriptionAction.type).pipe( - mergeMap((action: ActionOf) => { - const { url, exploreId, refId, dataReceivedActionCreator } = action.payload; - return getWebSocket(url).pipe( - takeUntil( - action$ - .ofType( - startSubscriptionAction.type, - resetExploreAction.type, - updateDatasourceInstanceAction.type, - changeRefreshIntervalAction.type, - clearQueriesAction.type - ) - .pipe( - filter(action => { - if (action.type === resetExploreAction.type) { - return true; // stops all subscriptions if user navigates away - } - - if (action.type === updateDatasourceInstanceAction.type && action.payload.exploreId === exploreId) { - return true; // stops subscriptions if user changes data source - } - - if (action.type === changeRefreshIntervalAction.type && action.payload.exploreId === exploreId) { - return !isLive(action.payload.refreshInterval); // stops subscriptions if user changes refresh interval away from 'Live' - } - - if (action.type === clearQueriesAction.type && action.payload.exploreId === exploreId) { - return true; // stops subscriptions if user clears all queries - } - - return 
action.payload.exploreId === exploreId && action.payload.refId === refId; - }), - tap(value => console.log('Stopping subscription', value)) - ) - ), - mergeMap((result: any) => { - const { datasourceInstance } = state$.value.explore[exploreId]; - - if (!datasourceInstance || !datasourceInstance.resultToSeriesData) { - return [null]; //do nothing if datasource does not support streaming - } - - return datasourceInstance - .resultToSeriesData(result, refId) - .map(data => limitMessageRatePayloadAction({ exploreId, data, dataReceivedActionCreator })); - }), - filter(action => action !== null) - ); - }) - ); -}; - -export const limitMessageRateEpic: Epic, ActionOf, StoreState, EpicDependencies> = action$ => { - return action$.ofType(limitMessageRatePayloadAction.type).pipe( - throttleTime(1), - map((action: ActionOf) => { - const { exploreId, data, dataReceivedActionCreator } = action.payload; - return dataReceivedActionCreator({ exploreId, data }); - }) - ); -}; diff --git a/public/app/features/explore/state/epics/limitMessageRateEpic.ts b/public/app/features/explore/state/epics/limitMessageRateEpic.ts new file mode 100644 index 000000000000..620137069687 --- /dev/null +++ b/public/app/features/explore/state/epics/limitMessageRateEpic.ts @@ -0,0 +1,25 @@ +import { Epic } from 'redux-observable'; +import { map, throttleTime } from 'rxjs/operators'; +import { LoadingState } from '@grafana/ui'; + +import { StoreState } from 'app/types'; +import { ActionOf } from '../../../../core/redux/actionCreatorFactory'; +import { limitMessageRatePayloadAction, LimitMessageRatePayload, processQueryResultsAction } from '../actionTypes'; +import { EpicDependencies } from 'app/store/configureStore'; + +export const limitMessageRateEpic: Epic, ActionOf, StoreState, EpicDependencies> = action$ => { + return action$.ofType(limitMessageRatePayloadAction.type).pipe( + throttleTime(1), + map((action: ActionOf) => { + const { exploreId, series, datasourceId } = action.payload; + return 
processQueryResultsAction({ + exploreId, + latency: 0, + datasourceId, + loadingState: LoadingState.Streaming, + series: null, + delta: series, + }); + }) + ); +}; diff --git a/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts b/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts new file mode 100644 index 000000000000..7cdaca78f7d0 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryErrorsEpic.test.ts @@ -0,0 +1,67 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import { processQueryErrorsAction, queryFailureAction } from '../actionTypes'; +import { processQueryErrorsEpic } from './processQueryErrorsEpic'; + +describe('processQueryErrorsEpic', () => { + let originalConsoleError = console.error; + + beforeEach(() => { + originalConsoleError = console.error; + console.error = jest.fn(); + }); + + afterEach(() => { + console.error = originalConsoleError; + }); + + describe('when processQueryErrorsAction is dispatched', () => { + describe('and datasourceInstance is the same', () => { + describe('and the response is not cancelled', () => { + it('then queryFailureAction is dispatched', () => { + const { datasourceId, exploreId, state, eventBridge } = mockExploreState(); + const response = { message: 'Something went terribly wrong!' 
}; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId, response })) + .thenResultingActionsEqual(queryFailureAction({ exploreId, response })); + + expect(console.error).toBeCalledTimes(1); + expect(console.error).toBeCalledWith(response); + expect(eventBridge.emit).toBeCalledTimes(1); + expect(eventBridge.emit).toBeCalledWith('data-error', response); + }); + }); + + describe('and the response is cancelled', () => { + it('then no actions are dispatched', () => { + const { datasourceId, exploreId, state, eventBridge } = mockExploreState(); + const response = { cancelled: true, message: 'Something went terribly wrong!' }; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId, response })) + .thenNoActionsWhereDispatched(); + + expect(console.error).not.toBeCalled(); + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); + + describe('and datasourceInstance is not the same', () => { + describe('and the response is not cancelled', () => { + it('then no actions are dispatched', () => { + const { exploreId, state, eventBridge } = mockExploreState(); + const response = { message: 'Something went terribly wrong!' 
}; + + epicTester(processQueryErrorsEpic, state) + .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId: 'other id', response })) + .thenNoActionsWhereDispatched(); + + expect(console.error).not.toBeCalled(); + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/processQueryErrorsEpic.ts b/public/app/features/explore/state/epics/processQueryErrorsEpic.ts new file mode 100644 index 000000000000..ea029186dc89 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryErrorsEpic.ts @@ -0,0 +1,40 @@ +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { NEVER, of } from 'rxjs'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { instanceOfDataQueryError } from 'app/core/utils/explore'; +import { toDataQueryError } from 'app/features/dashboard/state/PanelQueryState'; +import { processQueryErrorsAction, ProcessQueryErrorsPayload, queryFailureAction } from '../actionTypes'; + +export const processQueryErrorsEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(processQueryErrorsAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, datasourceId } = action.payload; + let { response } = action.payload; + const { datasourceInstance, eventBridge } = state$.value.explore[exploreId]; + + if (datasourceInstance.meta.id !== datasourceId || response.cancelled) { + // Navigated away, queries did not matter + return NEVER; + } + + // For Angular editors + eventBridge.emit('data-error', response); + + console.error(response); // To help finding problems with query syntax + + if (!instanceOfDataQueryError(response)) { + response = toDataQueryError(response); + } + + return of( + queryFailureAction({ + exploreId, + response, + }) + ); + }) + ); +}; diff --git 
a/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts b/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts new file mode 100644 index 000000000000..c5da93081aa5 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryResultsEpic.test.ts @@ -0,0 +1,119 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import { + processQueryResultsAction, + resetQueryErrorAction, + querySuccessAction, + scanStopAction, + scanRangeAction, +} from '../actionTypes'; +import { SeriesData, LoadingState } from '@grafana/ui'; +import { processQueryResultsEpic } from './processQueryResultsEpic'; +import TableModel from 'app/core/table_model'; + +const testContext = () => { + const serieA: SeriesData = { + fields: [], + refId: 'A', + rows: [], + }; + const serieB: SeriesData = { + fields: [], + refId: 'B', + rows: [], + }; + const series = [serieA, serieB]; + const latency = 0; + const loadingState = LoadingState.Done; + + return { + latency, + series, + loadingState, + }; +}; + +describe('processQueryResultsEpic', () => { + describe('when processQueryResultsAction is dispatched', () => { + describe('and datasourceInstance is the same', () => { + describe('and explore is not scanning', () => { + it('then resetQueryErrorAction and querySuccessAction are dispatched and eventBridge emits correct message', () => { + const { datasourceId, exploreId, state, eventBridge } = mockExploreState(); + const { latency, series, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series, latency }) + ) + .thenResultingActionsEqual( + resetQueryErrorAction({ exploreId, refIds: ['A', 'B'] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, 
logsResult, latency }) + ); + + expect(eventBridge.emit).toBeCalledTimes(1); + expect(eventBridge.emit).toBeCalledWith('data-received', series); + }); + }); + + describe('and explore is scanning', () => { + describe('and we have a result', () => { + it('then correct actions are dispatched', () => { + const { datasourceId, exploreId, state } = mockExploreState({ scanning: true }); + const { latency, series, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series, latency }) + ) + .thenResultingActionsEqual( + resetQueryErrorAction({ exploreId, refIds: ['A', 'B'] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }), + scanStopAction({ exploreId }) + ); + }); + }); + + describe('and we do not have a result', () => { + it('then correct actions are dispatched', () => { + const { datasourceId, exploreId, state, scanner } = mockExploreState({ scanning: true }); + const { latency, loadingState } = testContext(); + const graphResult = []; + const tableResult = new TableModel(); + const logsResult = null; + + epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId, loadingState, series: [], latency }) + ) + .thenResultingActionsEqual( + resetQueryErrorAction({ exploreId, refIds: [] }), + querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }), + scanRangeAction({ exploreId, range: scanner() }) + ); + }); + }); + }); + }); + + describe('and datasourceInstance is not the same', () => { + it('then no actions are dispatched and eventBridge does not emit message', () => { + const { exploreId, state, eventBridge } = mockExploreState(); + const { series, loadingState } = testContext(); + + 
epicTester(processQueryResultsEpic, state) + .whenActionIsDispatched( + processQueryResultsAction({ exploreId, datasourceId: 'other id', loadingState, series, latency: 0 }) + ) + .thenNoActionsWhereDispatched(); + + expect(eventBridge.emit).not.toBeCalled(); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/processQueryResultsEpic.ts b/public/app/features/explore/state/epics/processQueryResultsEpic.ts new file mode 100644 index 000000000000..76e767c36a09 --- /dev/null +++ b/public/app/features/explore/state/epics/processQueryResultsEpic.ts @@ -0,0 +1,76 @@ +import _ from 'lodash'; +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { NEVER } from 'rxjs'; +import { LoadingState } from '@grafana/ui'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { getRefIds } from 'app/core/utils/explore'; +import { + processQueryResultsAction, + ProcessQueryResultsPayload, + querySuccessAction, + scanRangeAction, + resetQueryErrorAction, + scanStopAction, +} from '../actionTypes'; +import { ResultProcessor } from '../../utils/ResultProcessor'; + +export const processQueryResultsEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(processQueryResultsAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, datasourceId, latency, loadingState, series, delta } = action.payload; + const { datasourceInstance, scanning, scanner, eventBridge } = state$.value.explore[exploreId]; + + // If datasource already changed, results do not matter + if (datasourceInstance.meta.id !== datasourceId) { + return NEVER; + } + + const result = series || delta || []; + const replacePreviousResults = loadingState === LoadingState.Done && series && !delta ? 
true : false; + const resultProcessor = new ResultProcessor(state$.value.explore[exploreId], replacePreviousResults, result); + const graphResult = resultProcessor.getGraphResult(); + const tableResult = resultProcessor.getTableResult(); + const logsResult = resultProcessor.getLogsResult(); + const refIds = getRefIds(result); + const actions: Array> = []; + + // For Angular editors + eventBridge.emit('data-received', resultProcessor.getRawData()); + + // Clears any previous errors that now have a successful query, important so Angular editors are updated correctly + actions.push( + resetQueryErrorAction({ + exploreId, + refIds, + }) + ); + + actions.push( + querySuccessAction({ + exploreId, + latency, + loadingState, + graphResult, + tableResult, + logsResult, + }) + ); + + // Keep scanning for results if this was the last scanning transaction + if (scanning) { + if (_.size(result) === 0) { + const range = scanner(); + actions.push(scanRangeAction({ exploreId, range })); + } else { + // We can stop scanning if we have a result + actions.push(scanStopAction({ exploreId })); + } + } + + return actions; + }) + ); +}; diff --git a/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts b/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts new file mode 100644 index 000000000000..6ddada2bc32a --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesBatchEpic.test.ts @@ -0,0 +1,421 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 'test/core/redux/epicTester'; +import { runQueriesBatchEpic } from './runQueriesBatchEpic'; +import { + runQueriesBatchAction, + queryStartAction, + historyUpdatedAction, + processQueryResultsAction, + processQueryErrorsAction, + limitMessageRatePayloadAction, + resetExploreAction, + updateDatasourceInstanceAction, + changeRefreshIntervalAction, + clearQueriesAction, + stateSaveAction, +} from '../actionTypes'; +import { LoadingState, DataQueryRequest, 
SeriesData, FieldType } from '@grafana/ui'; + +const testContext = () => { + const series: SeriesData[] = [ + { + fields: [ + { + name: 'Value', + }, + { + name: 'Time', + type: FieldType.time, + unit: 'dateTimeAsIso', + }, + ], + rows: [], + refId: 'A', + }, + ]; + const response = { data: series }; + + return { + response, + series, + }; +}; + +describe('runQueriesBatchEpic', () => { + let originalDateNow = Date.now; + beforeEach(() => { + originalDateNow = Date.now; + Date.now = () => 1337; + }); + + afterEach(() => { + Date.now = originalDateNow; + }); + + describe('when runQueriesBatchAction is dispatched', () => { + describe('and query targets are not live', () => { + describe('and query is successful', () => { + it('then correct actions are dispatched', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + }); + }); + + describe('and query is not successful', () => { + it('then correct actions are dispatched', () => { + const error = { + message: 'Error parsing line x', + }; + const { exploreId, state, datasourceId } = mockExploreState(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryThrowsError(error) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + processQueryErrorsAction({ exploreId, response: error, datasourceId }) + ); + }); + }); + 
}); + + describe('and query targets are live', () => { + describe('and state equals Streaming', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + const serieA = { + fields: [], + rows: [], + refId: 'A', + }; + const serieB = { + fields: [], + rows: [], + refId: 'B', + }; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Streaming, + delta: [serieA], + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Streaming, + delta: [serieB], + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + limitMessageRatePayloadAction({ exploreId, series: [serieA], datasourceId }), + limitMessageRatePayloadAction({ exploreId, series: [serieB], datasourceId }) + ); + }); + }); + + describe('and state equals Error', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + const error = { message: 'Something went really wrong!' 
}; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Error, + error, + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + processQueryErrorsAction({ exploreId, response: error, datasourceId }) + ); + }); + }); + + describe('and state equals Done', () => { + it('then correct actions are dispatched', () => { + const { exploreId, state, datasourceId, history } = mockExploreState(); + const unsubscribe = jest.fn(); + const serieA = { + fields: [], + rows: [], + refId: 'A', + }; + const serieB = { + fields: [], + rows: [], + refId: 'B', + }; + const delta = [serieA, serieB]; + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryObserverReceivesEvent({ + state: LoadingState.Done, + series: null, + delta, + key: 'some key', + request: {} as DataQueryRequest, + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta, + series: null, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + }); + }); + }); + + describe('and another runQueriesBatchAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) // first observable + ) + 
.whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched( + // second observable and unsubscribes the first observable + runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 800 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .thenResultingActionsEqual( + queryStartAction({ exploreId }), // output from first observable + historyUpdatedAction({ exploreId, history }), // output from first observable + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction(), + // output from first observable + queryStartAction({ exploreId }), // output from second observable + historyUpdatedAction({ exploreId, history }), // output from second observable + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + // output from second observable + ); + + expect(unsubscribe).toBeCalledTimes(1); // first unsubscribe should be called but not second as that isn't unsubscribed + }); + }); + + describe('and resetExploreAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + 
.whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(resetExploreAction()) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and updateDatasourceInstanceAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId, datasourceInstance } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(updateDatasourceInstanceAction({ exploreId, datasourceInstance })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and changeRefreshIntervalAction is dispatched', () => { + it('then the 
observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId, refreshInterval: '' })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + .thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + + describe('and clearQueriesAction is dispatched', () => { + it('then the observable should be unsubscribed', () => { + const { response, series } = testContext(); + const { exploreId, state, history, datasourceId } = mockExploreState(); + const unsubscribe = jest.fn(); + + epicTester(runQueriesBatchEpic, state) + .whenActionIsDispatched( + runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) + ) + .whenQueryReceivesResponse(response) + .whenQueryObserverReceivesEvent({ + key: 'some key', + request: {} as DataQueryRequest, + state: LoadingState.Loading, // fake just to setup and test unsubscribe + unsubscribe, + }) + .whenActionIsDispatched(clearQueriesAction({ exploreId })) // unsubscribes the observable + .whenQueryReceivesResponse(response) // new updates will not reach anywhere + 
.thenResultingActionsEqual( + queryStartAction({ exploreId }), + historyUpdatedAction({ exploreId, history }), + processQueryResultsAction({ + exploreId, + delta: null, + series, + latency: 0, + datasourceId, + loadingState: LoadingState.Done, + }), + stateSaveAction() + ); + + expect(unsubscribe).toBeCalledTimes(1); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/runQueriesBatchEpic.ts b/public/app/features/explore/state/epics/runQueriesBatchEpic.ts new file mode 100644 index 000000000000..8e2642f193f8 --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesBatchEpic.ts @@ -0,0 +1,220 @@ +import { Epic } from 'redux-observable'; +import { Observable, Subject } from 'rxjs'; +import { mergeMap, catchError, takeUntil, filter } from 'rxjs/operators'; +import _, { isString } from 'lodash'; +import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; +import { DataStreamState, LoadingState, DataQueryResponse, SeriesData, DataQueryResponseData } from '@grafana/ui'; +import * as dateMath from '@grafana/ui/src/utils/datemath'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { buildQueryTransaction, updateHistory } from 'app/core/utils/explore'; +import { + clearQueriesAction, + historyUpdatedAction, + resetExploreAction, + updateDatasourceInstanceAction, + changeRefreshIntervalAction, + processQueryErrorsAction, + processQueryResultsAction, + runQueriesBatchAction, + RunQueriesBatchPayload, + queryStartAction, + limitMessageRatePayloadAction, + stateSaveAction, + changeRangeAction, +} from '../actionTypes'; +import { ExploreId, ExploreItemState } from 'app/types'; + +const publishActions = (outerObservable: Subject, actions: Array>) => { + for (const action of actions) { + outerObservable.next(action); + } +}; + +interface ProcessResponseConfig { + exploreId: ExploreId; + exploreItemState: ExploreItemState; + datasourceId: string; + now: 
number; + loadingState: LoadingState; + series?: DataQueryResponseData[]; + delta?: SeriesData[]; +} + +const processResponse = (config: ProcessResponseConfig) => { + const { exploreId, exploreItemState, datasourceId, now, loadingState, series, delta } = config; + const { queries, history } = exploreItemState; + const latency = Date.now() - now; + + // Side-effect: Saving history in localstorage + const nextHistory = updateHistory(history, datasourceId, queries); + return [ + historyUpdatedAction({ exploreId, history: nextHistory }), + processQueryResultsAction({ exploreId, latency, datasourceId, loadingState, series, delta }), + stateSaveAction(), + ]; +}; + +interface ProcessErrorConfig { + exploreId: ExploreId; + datasourceId: string; + error: any; +} + +const processError = (config: ProcessErrorConfig) => { + const { exploreId, datasourceId, error } = config; + + return [processQueryErrorsAction({ exploreId, response: error, datasourceId })]; +}; + +export const runQueriesBatchEpic: Epic, ActionOf, StoreState> = ( + action$, + state$, + { getQueryResponse } +) => { + return action$.ofType(runQueriesBatchAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId, queryOptions } = action.payload; + const exploreItemState = state$.value.explore[exploreId]; + const { datasourceInstance, queries, queryIntervals, range, scanning } = exploreItemState; + + // Create an observable per run queries action + // Within the observable create two subscriptions + // First subscription: 'querySubscription' subscribes to the call to query method on datasourceinstance + // Second subscription: 'streamSubscription' subscribes to events from the query methods observer callback + const observable: Observable> = Observable.create((outerObservable: Subject) => { + const datasourceId = datasourceInstance.meta.id; + const transaction = buildQueryTransaction(queries, queryOptions, range, queryIntervals, scanning); + outerObservable.next(queryStartAction({ exploreId })); + 
+ const now = Date.now(); + let datasourceUnsubscribe: Function = null; + const streamHandler = new Subject(); + const observer = (event: DataStreamState) => { + datasourceUnsubscribe = event.unsubscribe; + if (!streamHandler.closed) { + // their might be a race condition when unsubscribing + streamHandler.next(event); + } + }; + + // observer subscription, handles datasourceInstance.query observer events and pushes that forward + const streamSubscription = streamHandler.subscribe({ + next: event => { + const { state, error, series, delta } = event; + if (!series && !delta && !error) { + return; + } + + if (state === LoadingState.Error) { + const actions = processError({ exploreId, datasourceId, error }); + publishActions(outerObservable, actions); + } + + if (state === LoadingState.Streaming) { + if (event.request && event.request.range) { + let newRange = event.request.range; + if (isString(newRange.raw.from)) { + newRange = { + from: dateMath.parse(newRange.raw.from, false), + to: dateMath.parse(newRange.raw.to, true), + raw: newRange.raw, + }; + } + outerObservable.next(changeRangeAction({ exploreId, range: newRange })); + } + outerObservable.next( + limitMessageRatePayloadAction({ + exploreId, + series: delta, + datasourceId, + }) + ); + } + + if (state === LoadingState.Done || state === LoadingState.Loading) { + const actions = processResponse({ + exploreId, + exploreItemState, + datasourceId, + now, + loadingState: state, + series: null, + delta, + }); + publishActions(outerObservable, actions); + } + }, + }); + + // query subscription, handles datasourceInstance.query response and pushes that forward + const querySubscription = getQueryResponse(datasourceInstance, transaction.options, observer) + .pipe( + mergeMap((response: DataQueryResponse) => { + return processResponse({ + exploreId, + exploreItemState, + datasourceId, + now, + loadingState: LoadingState.Done, + series: response && response.data ? 
response.data : [], + delta: null, + }); + }), + catchError(error => { + return processError({ exploreId, datasourceId, error }); + }) + ) + .subscribe({ next: (action: ActionOf) => outerObservable.next(action) }); + + // this unsubscribe method will be called when any of the takeUntil actions below happen + const unsubscribe = () => { + if (datasourceUnsubscribe) { + datasourceUnsubscribe(); + } + querySubscription.unsubscribe(); + streamSubscription.unsubscribe(); + streamHandler.unsubscribe(); + outerObservable.unsubscribe(); + }; + + return unsubscribe; + }); + + return observable.pipe( + takeUntil( + action$ + .ofType( + runQueriesBatchAction.type, + resetExploreAction.type, + updateDatasourceInstanceAction.type, + changeRefreshIntervalAction.type, + clearQueriesAction.type + ) + .pipe( + filter(action => { + if (action.type === resetExploreAction.type) { + return true; // stops all subscriptions if user navigates away + } + + if (action.type === updateDatasourceInstanceAction.type && action.payload.exploreId === exploreId) { + return true; // stops subscriptions if user changes data source + } + + if (action.type === changeRefreshIntervalAction.type && action.payload.exploreId === exploreId) { + return !isLive(action.payload.refreshInterval); // stops subscriptions if user changes refresh interval away from 'Live' + } + + if (action.type === clearQueriesAction.type && action.payload.exploreId === exploreId) { + return true; // stops subscriptions if user clears all queries + } + + return action.payload.exploreId === exploreId; + }) + ) + ) + ); + }) + ); +}; diff --git a/public/app/features/explore/state/epics/runQueriesEpic.test.ts b/public/app/features/explore/state/epics/runQueriesEpic.test.ts new file mode 100644 index 000000000000..87b1f86513f1 --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesEpic.test.ts @@ -0,0 +1,71 @@ +import { mockExploreState } from 'test/mocks/mockExploreState'; +import { epicTester } from 
'test/core/redux/epicTester'; +import { runQueriesAction, stateSaveAction, runQueriesBatchAction, clearQueriesAction } from '../actionTypes'; +import { runQueriesEpic } from './runQueriesEpic'; + +describe('runQueriesEpic', () => { + describe('when runQueriesAction is dispatched', () => { + describe('and there is no datasourceError', () => { + describe('and we have non empty queries', () => { + describe('and explore is not live', () => { + it('then runQueriesBatchAction and stateSaveAction are dispatched', () => { + const queries = [{ refId: 'A', key: '123456', expr: '{__filename__="some.log"}' }]; + const { exploreId, state, datasourceInterval, containerWidth } = mockExploreState({ queries }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenResultingActionsEqual( + runQueriesBatchAction({ + exploreId, + queryOptions: { interval: datasourceInterval, maxDataPoints: containerWidth, live: false }, + }) + ); + }); + }); + + describe('and explore is live', () => { + it('then runQueriesBatchAction and stateSaveAction are dispatched', () => { + const queries = [{ refId: 'A', key: '123456', expr: '{__filename__="some.log"}' }]; + const { exploreId, state, datasourceInterval, containerWidth } = mockExploreState({ + queries, + isLive: true, + streaming: true, + }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenResultingActionsEqual( + runQueriesBatchAction({ + exploreId, + queryOptions: { interval: datasourceInterval, maxDataPoints: containerWidth, live: true }, + }) + ); + }); + }); + }); + + describe('and we have no queries', () => { + it('then clearQueriesAction and stateSaveAction are dispatched', () => { + const queries = []; + const { exploreId, state } = mockExploreState({ queries }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + 
.thenResultingActionsEqual(clearQueriesAction({ exploreId }), stateSaveAction()); + }); + }); + }); + + describe('and there is a datasourceError', () => { + it('then no actions are dispatched', () => { + const { exploreId, state } = mockExploreState({ + datasourceError: { message: 'Some error' }, + }); + + epicTester(runQueriesEpic, state) + .whenActionIsDispatched(runQueriesAction({ exploreId, range: null })) + .thenNoActionsWhereDispatched(); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/runQueriesEpic.ts b/public/app/features/explore/state/epics/runQueriesEpic.ts new file mode 100644 index 000000000000..2102c11b103c --- /dev/null +++ b/public/app/features/explore/state/epics/runQueriesEpic.ts @@ -0,0 +1,39 @@ +import { Epic } from 'redux-observable'; +import { NEVER } from 'rxjs'; +import { mergeMap } from 'rxjs/operators'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { hasNonEmptyQuery } from 'app/core/utils/explore'; +import { + clearQueriesAction, + runQueriesAction, + RunQueriesPayload, + runQueriesBatchAction, + stateSaveAction, +} from '../actionTypes'; + +export const runQueriesEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(runQueriesAction.type).pipe( + mergeMap((action: ActionOf) => { + const { exploreId } = action.payload; + const { datasourceInstance, queries, datasourceError, containerWidth, isLive } = state$.value.explore[exploreId]; + + if (datasourceError) { + // let's not run any queries if data source is in a faulty state + return NEVER; + } + + if (!hasNonEmptyQuery(queries)) { + return [clearQueriesAction({ exploreId }), stateSaveAction()]; // Remember to save to state and update location + } + + // Some datasource's query builders allow per-query interval limits, + // but we're using the datasource interval limit for now + const interval = datasourceInstance.interval; + const live = isLive; + + return 
[runQueriesBatchAction({ exploreId, queryOptions: { interval, maxDataPoints: containerWidth, live } })]; + }) + ); +}; diff --git a/public/app/features/explore/state/epics/stateSaveEpic.test.ts b/public/app/features/explore/state/epics/stateSaveEpic.test.ts new file mode 100644 index 000000000000..bee12ad92a9e --- /dev/null +++ b/public/app/features/explore/state/epics/stateSaveEpic.test.ts @@ -0,0 +1,61 @@ +import { epicTester } from 'test/core/redux/epicTester'; +import { stateSaveEpic } from './stateSaveEpic'; +import { stateSaveAction, setUrlReplacedAction } from '../actionTypes'; +import { updateLocation } from 'app/core/actions/location'; +import { mockExploreState } from 'test/mocks/mockExploreState'; + +describe('stateSaveEpic', () => { + describe('when stateSaveAction is dispatched', () => { + describe('and there is a left state', () => { + describe('and no split', () => { + it('then the correct actions are dispatched', () => { + const { exploreId, state } = mockExploreState(); + + epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { left: '["now-6h","now","test",{"ui":[true,true,true,null]}]' }, + replace: true, + }), + setUrlReplacedAction({ exploreId }) + ); + }); + }); + + describe('and explore is splitted', () => { + it('then the correct actions are dispatched', () => { + const { exploreId, state } = mockExploreState({ split: true }); + + epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { + left: '["now-6h","now","test",{"ui":[true,true,true,null]}]', + right: '["now-6h","now","test",{"ui":[true,true,true,null]}]', + }, + replace: true, + }), + setUrlReplacedAction({ exploreId }) + ); + }); + }); + }); + + describe('and urlReplaced is true', () => { + it('then setUrlReplacedAction should not be dispatched', () => { + const { state } = mockExploreState({ urlReplaced: true }); + + 
epicTester(stateSaveEpic, state) + .whenActionIsDispatched(stateSaveAction()) + .thenResultingActionsEqual( + updateLocation({ + query: { left: '["now-6h","now","test",{"ui":[true,true,true,null]}]' }, + replace: false, + }) + ); + }); + }); + }); +}); diff --git a/public/app/features/explore/state/epics/stateSaveEpic.ts b/public/app/features/explore/state/epics/stateSaveEpic.ts new file mode 100644 index 000000000000..107f1de547b4 --- /dev/null +++ b/public/app/features/explore/state/epics/stateSaveEpic.ts @@ -0,0 +1,72 @@ +import { Epic } from 'redux-observable'; +import { mergeMap } from 'rxjs/operators'; +import { RawTimeRange, TimeRange } from '@grafana/ui/src/types/time'; +import { isDateTime } from '@grafana/ui/src/utils/moment_wrapper'; + +import { ActionOf } from 'app/core/redux/actionCreatorFactory'; +import { StoreState } from 'app/types/store'; +import { ExploreUrlState, ExploreId } from 'app/types/explore'; +import { clearQueryKeys, serializeStateToUrlParam } from 'app/core/utils/explore'; +import { updateLocation } from 'app/core/actions/location'; +import { setUrlReplacedAction, stateSaveAction } from '../actionTypes'; + +const toRawTimeRange = (range: TimeRange): RawTimeRange => { + let from = range.raw.from; + if (isDateTime(from)) { + from = from.valueOf().toString(10); + } + + let to = range.raw.to; + if (isDateTime(to)) { + to = to.valueOf().toString(10); + } + + return { + from, + to, + }; +}; + +export const stateSaveEpic: Epic, ActionOf, StoreState> = (action$, state$) => { + return action$.ofType(stateSaveAction.type).pipe( + mergeMap(() => { + const { left, right, split } = state$.value.explore; + const replace = left && left.urlReplaced === false; + const urlStates: { [index: string]: string } = {}; + const leftUrlState: ExploreUrlState = { + datasource: left.datasourceInstance.name, + queries: left.queries.map(clearQueryKeys), + range: toRawTimeRange(left.range), + ui: { + showingGraph: left.showingGraph, + showingLogs: true, + 
showingTable: left.showingTable, + dedupStrategy: left.dedupStrategy, + }, + }; + urlStates.left = serializeStateToUrlParam(leftUrlState, true); + if (split) { + const rightUrlState: ExploreUrlState = { + datasource: right.datasourceInstance.name, + queries: right.queries.map(clearQueryKeys), + range: toRawTimeRange(right.range), + ui: { + showingGraph: right.showingGraph, + showingLogs: true, + showingTable: right.showingTable, + dedupStrategy: right.dedupStrategy, + }, + }; + + urlStates.right = serializeStateToUrlParam(rightUrlState, true); + } + + const actions: Array> = [updateLocation({ query: urlStates, replace })]; + if (replace) { + actions.push(setUrlReplacedAction({ exploreId: ExploreId.left })); + } + + return actions; + }) + ); +}; diff --git a/public/app/features/explore/state/reducers.test.ts b/public/app/features/explore/state/reducers.test.ts index 0c37a4b388ea..1f553313f807 100644 --- a/public/app/features/explore/state/reducers.test.ts +++ b/public/app/features/explore/state/reducers.test.ts @@ -17,7 +17,6 @@ import { import { reducerTester } from 'test/core/redux/reducerTester'; import { scanStartAction, - scanStopAction, testDataSourcePendingAction, testDataSourceSuccessAction, testDataSourceFailureAction, @@ -25,6 +24,7 @@ import { splitOpenAction, splitCloseAction, changeModeAction, + scanStopAction, runQueriesAction, } from './actionTypes'; import { Reducer } from 'redux'; @@ -32,7 +32,7 @@ import { ActionOf } from 'app/core/redux/actionCreatorFactory'; import { updateLocation } from 'app/core/actions/location'; import { serializeStateToUrlParam } from 'app/core/utils/explore'; import TableModel from 'app/core/table_model'; -import { DataSourceApi, DataQuery, LogsModel, LogsDedupStrategy, dateTime } from '@grafana/ui'; +import { DataSourceApi, DataQuery, LogsModel, LogsDedupStrategy, LoadingState, dateTime } from '@grafana/ui'; describe('Explore item reducer', () => { describe('scanning', () => { @@ -166,9 +166,7 @@ describe('Explore item 
reducer', () => { queryKeys, supportedModes: [ExploreMode.Metrics, ExploreMode.Logs], mode: ExploreMode.Metrics, - graphIsLoading: false, - tableIsLoading: false, - logIsLoading: false, + loadingState: LoadingState.NotStarted, latency: 0, queryErrors: [], }; diff --git a/public/app/features/explore/state/reducers.ts b/public/app/features/explore/state/reducers.ts index 969ecd020667..67775b9626bd 100644 --- a/public/app/features/explore/state/reducers.ts +++ b/public/app/features/explore/state/reducers.ts @@ -1,6 +1,5 @@ import _ from 'lodash'; import { - calculateResultsFromQueryTransactions, getIntervals, ensureQueries, getQueryKeys, @@ -10,7 +9,7 @@ import { sortLogsResult, } from 'app/core/utils/explore'; import { ExploreItemState, ExploreState, ExploreId, ExploreUpdateState, ExploreMode } from 'app/types/explore'; -import { DataQuery, LogsModel } from '@grafana/ui'; +import { DataQuery, LoadingState } from '@grafana/ui'; import { HigherOrderAction, ActionTypes, @@ -20,10 +19,17 @@ import { splitCloseAction, SplitCloseActionPayload, loadExploreDatasources, - runQueriesAction, historyUpdatedAction, - resetQueryErrorAction, changeModeAction, + queryFailureAction, + setUrlReplacedAction, + querySuccessAction, + scanRangeAction, + scanStopAction, + resetQueryErrorAction, + queryStartAction, + runQueriesAction, + changeRangeAction, } from './actionTypes'; import { reducerFactory } from 'app/core/redux'; import { @@ -40,13 +46,8 @@ import { loadDatasourcePendingAction, loadDatasourceReadyAction, modifyQueriesAction, - queryFailureAction, - queryStartAction, - querySuccessAction, removeQueryRowAction, - scanRangeAction, scanStartAction, - scanStopAction, setQueriesAction, toggleTableAction, queriesImportedAction, @@ -57,8 +58,6 @@ import { updateLocation } from 'app/core/actions/location'; import { LocationUpdate } from 'app/types'; import TableModel from 'app/core/table_model'; import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker'; -import { 
subscriptionDataReceivedAction, startSubscriptionAction } from './epics'; -import { seriesDataToLogsModel } from 'app/core/logs_model'; export const DEFAULT_RANGE = { from: 'now-6h', @@ -100,9 +99,7 @@ export const makeExploreItemState = (): ExploreItemState => ({ scanRange: null, showingGraph: true, showingTable: true, - graphIsLoading: false, - logIsLoading: false, - tableIsLoading: false, + loadingState: LoadingState.NotStarted, queryKeys: [], urlState: null, update: makeInitialUpdateState(), @@ -111,6 +108,7 @@ export const makeExploreItemState = (): ExploreItemState => ({ supportedModes: [], mode: null, isLive: false, + urlReplaced: false, }); /** @@ -191,10 +189,8 @@ export const itemReducer = reducerFactory({} as ExploreItemSta return { ...state, - refreshInterval: refreshInterval, - graphIsLoading: live ? true : false, - tableIsLoading: live ? true : false, - logIsLoading: live ? true : false, + refreshInterval, + loadingState: live ? LoadingState.Streaming : LoadingState.NotStarted, isLive: live, logsResult, }; @@ -267,9 +263,7 @@ export const itemReducer = reducerFactory({} as ExploreItemSta datasourceInstance, queryErrors: [], latency: 0, - graphIsLoading: false, - logIsLoading: false, - tableIsLoading: false, + loadingState: LoadingState.NotStarted, StartPage, showingStartPage: Boolean(StartPage), queryKeys: getQueryKeys(state.queries, datasourceInstance), @@ -346,35 +340,29 @@ export const itemReducer = reducerFactory({} as ExploreItemSta .addMapper({ filter: queryFailureAction, mapper: (state, action): ExploreItemState => { - const { resultType, response } = action.payload; + const { response } = action.payload; const queryErrors = state.queryErrors.concat(response); return { ...state, - graphResult: resultType === 'Graph' ? null : state.graphResult, - tableResult: resultType === 'Table' ? null : state.tableResult, - logsResult: resultType === 'Logs' ? 
null : state.logsResult, + graphResult: null, + tableResult: null, + logsResult: null, latency: 0, queryErrors, - graphIsLoading: resultType === 'Graph' ? false : state.graphIsLoading, - logIsLoading: resultType === 'Logs' ? false : state.logIsLoading, - tableIsLoading: resultType === 'Table' ? false : state.tableIsLoading, + loadingState: LoadingState.Error, update: makeInitialUpdateState(), }; }, }) .addMapper({ filter: queryStartAction, - mapper: (state, action): ExploreItemState => { - const { resultType } = action.payload; - + mapper: (state): ExploreItemState => { return { ...state, queryErrors: [], latency: 0, - graphIsLoading: resultType === 'Graph' ? true : state.graphIsLoading, - logIsLoading: resultType === 'Logs' ? true : state.logIsLoading, - tableIsLoading: resultType === 'Table' ? true : state.tableIsLoading, + loadingState: LoadingState.Loading, update: makeInitialUpdateState(), }; }, @@ -382,80 +370,20 @@ export const itemReducer = reducerFactory({} as ExploreItemSta .addMapper({ filter: querySuccessAction, mapper: (state, action): ExploreItemState => { - const { queryIntervals, refreshInterval } = state; - const { result, resultType, latency } = action.payload; - const results = calculateResultsFromQueryTransactions(result, resultType, queryIntervals.intervalMs); - const live = isLive(refreshInterval); - - if (live) { - return state; - } - - return { - ...state, - graphResult: resultType === 'Graph' ? results.graphResult : state.graphResult, - tableResult: resultType === 'Table' ? results.tableResult : state.tableResult, - logsResult: - resultType === 'Logs' - ? sortLogsResult(results.logsResult, refreshInterval) - : sortLogsResult(state.logsResult, refreshInterval), - latency, - graphIsLoading: live ? true : false, - logIsLoading: live ? true : false, - tableIsLoading: live ? 
true : false, - showingStartPage: false, - update: makeInitialUpdateState(), - }; - }, - }) - .addMapper({ - filter: startSubscriptionAction, - mapper: (state): ExploreItemState => { - const logsResult = sortLogsResult(state.logsResult, state.refreshInterval); + const { latency, loadingState, graphResult, tableResult, logsResult } = action.payload; return { ...state, + loadingState, + graphResult, + tableResult, logsResult, - graphIsLoading: true, - logIsLoading: true, - tableIsLoading: true, + latency, showingStartPage: false, update: makeInitialUpdateState(), }; }, }) - .addMapper({ - filter: subscriptionDataReceivedAction, - mapper: (state, action): ExploreItemState => { - const { queryIntervals, refreshInterval } = state; - const { data } = action.payload; - const live = isLive(refreshInterval); - - if (!live) { - return state; - } - - const newResults = seriesDataToLogsModel([data], queryIntervals.intervalMs); - const rowsInState = sortLogsResult(state.logsResult, state.refreshInterval).rows; - - const processedRows = []; - for (const row of rowsInState) { - processedRows.push({ ...row, fresh: false }); - } - for (const row of newResults.rows) { - processedRows.push({ ...row, fresh: true }); - } - - const rows = processedRows.slice(processedRows.length - 1000, 1000); - - const logsResult: LogsModel = state.logsResult ? 
{ ...state.logsResult, rows } : { hasUniqueLabels: false, rows }; - - return { - ...state, - logsResult, - }; - }, - }) .addMapper({ filter: removeQueryRowAction, mapper: (state, action): ExploreItemState => { @@ -635,6 +563,24 @@ export const itemReducer = reducerFactory({} as ExploreItemSta }; }, }) + .addMapper({ + filter: setUrlReplacedAction, + mapper: (state): ExploreItemState => { + return { + ...state, + urlReplaced: true, + }; + }, + }) + .addMapper({ + filter: changeRangeAction, + mapper: (state, action): ExploreItemState => { + return { + ...state, + range: action.payload.range, + }; + }, + }) .create(); export const updateChildRefreshState = ( diff --git a/public/app/features/explore/utils/ResultProcessor.test.ts b/public/app/features/explore/utils/ResultProcessor.test.ts new file mode 100644 index 000000000000..4979afa538cb --- /dev/null +++ b/public/app/features/explore/utils/ResultProcessor.test.ts @@ -0,0 +1,453 @@ +jest.mock('@grafana/ui/src/utils/moment_wrapper', () => ({ + dateTime: (ts: any) => { + return { + valueOf: () => ts, + fromNow: () => 'fromNow() jest mocked', + format: (fmt: string) => 'format() jest mocked', + }; + }, +})); + +import { ResultProcessor } from './ResultProcessor'; +import { ExploreItemState, ExploreMode } from 'app/types/explore'; +import TableModel from 'app/core/table_model'; +import { toFixed } from '@grafana/ui'; + +const testContext = (options: any = {}) => { + const response = [ + { + target: 'A-series', + alias: 'A-series', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + refId: 'A', + }, + { + columns: [ + { + text: 'Time', + }, + { + text: 'Message', + }, + { + text: 'Description', + }, + { + text: 'Value', + }, + ], + rows: [ + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + refId: 'B', + }, + ]; + const defaultOptions = { + mode: ExploreMode.Metrics, + replacePreviousResults: true, + result: 
{ data: response }, + graphResult: [], + tableResult: new TableModel(), + logsResult: { hasUniqueLabels: false, rows: [] }, + }; + const combinedOptions = { ...defaultOptions, ...options }; + const state = ({ + mode: combinedOptions.mode, + graphResult: combinedOptions.graphResult, + tableResult: combinedOptions.tableResult, + logsResult: combinedOptions.logsResult, + queryIntervals: { intervalMs: 10 }, + } as any) as ExploreItemState; + const resultProcessor = new ResultProcessor(state, combinedOptions.replacePreviousResults, combinedOptions.result); + + return { + result: combinedOptions.result, + resultProcessor, + }; +}; + +describe('ResultProcessor', () => { + describe('constructed without result', () => { + describe('when calling getRawData', () => { + it('then it should return an empty array', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual([]); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return an empty array', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([]); + }); + }); + + describe('when calling getTableResult', () => { + it('then it should return an empty TableModel', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual(new TableModel()); + }); + }); + + describe('when calling getLogsResult', () => { + it('then it should return null', () => { + const { resultProcessor } = testContext({ result: null }); + const theResult = resultProcessor.getLogsResult(); + + expect(theResult).toBeNull(); + }); + }); + }); + + describe('constructed with a result that is a DataQueryResponse', () => { + describe('when calling getRawData', () => { + it('then it should return result.data', () => { + const { result, resultProcessor 
} = testContext(); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual(result.data); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return correct graph result', () => { + const { resultProcessor } = testContext(); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ]); + }); + }); + + describe('when calling getTableResult', () => { + it('then it should return correct table result', () => { + const { resultProcessor } = testContext(); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual({ + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + type: 'table', + }); + }); + }); + + describe('when calling getLogsResult', () => { + it('then it should return correct logs result', () => { + const { resultProcessor } = testContext({ mode: ExploreMode.Logs, observerResponse: null }); + const theResult = resultProcessor.getLogsResult(); + + expect(theResult).toEqual({ + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 'This is a message', + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 
'This is a message', + searchWords: [], + timeEpochMs: 1559038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 'A-series', + datapoints: [[39.91264531864214, 1559038518831], [40.35179822906545, 1559038519831]], + meta: undefined, + refId: 'A', + target: 'A-series', + unit: undefined, + }, + ], + }); + }); + }); + }); + + describe('constructed with result that is a DataQueryResponse and merging with previous results', () => { + describe('when calling getRawData', () => { + it('then it should return result.data', () => { + const { result, resultProcessor } = testContext(); + const theResult = resultProcessor.getRawData(); + + expect(theResult).toEqual(result.data); + }); + }); + + describe('when calling getGraphResult', () => { + it('then it should return correct graph result', () => { + const { resultProcessor } = testContext({ + replacePreviousResults: false, + graphResult: [ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[19.91264531864214, 1558038518831], [20.35179822906545, 1558038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ], + }); + const theResult = resultProcessor.getGraphResult(); + + expect(theResult).toEqual([ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [ + [19.91264531864214, 1558038518831], + [20.35179822906545, 1558038519831], + [39.91264531864214, 1559038518831], + [40.35179822906545, 1559038519831], + ], + unit: undefined, + valueFormater: toFixed, + }, + ]); + }); + }); + + describe('when calling getTableResult', () => { + it('then it should return correct table result', () 
=> { + const { resultProcessor } = testContext({ + replacePreviousResults: false, + tableResult: { + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1558038518831, 'This is a previous message 1', 'Previous Description 1', 21.1], + [1558038519831, 'This is a previous message 2', 'Previous Description 2', 22.1], + ], + type: 'table', + }, + }); + const theResult = resultProcessor.getTableResult(); + + expect(theResult).toEqual({ + columnMap: {}, + columns: [{ text: 'Time' }, { text: 'Message' }, { text: 'Description' }, { text: 'Value' }], + rows: [ + [1558038518831, 'This is a previous message 1', 'Previous Description 1', 21.1], + [1558038519831, 'This is a previous message 2', 'Previous Description 2', 22.1], + [1559038518831, 'This is a message', 'Description', 23.1], + [1559038519831, 'This is a message', 'Description', 23.1], + ], + type: 'table', + }); + }); + }); + + describe('when calling getLogsResult', () => { + it('then it should return correct logs result', () => { + const { resultProcessor } = testContext({ + mode: ExploreMode.Logs, + replacePreviousResults: false, + logsResult: { + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 'This is a previous message 1', + fresh: true, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 1', + searchWords: [], + timeEpochMs: 1558038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a previous message 2', + fresh: true, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 2', + searchWords: [], + timeEpochMs: 1558038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 'A-series', + 
aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + stats: {}, + color: '#7EB26D', + datapoints: [[37.91264531864214, 1558038518831], [38.35179822906545, 1558038519831]], + unit: undefined, + valueFormater: toFixed, + }, + ], + }, + }); + const theResult = resultProcessor.getLogsResult(); + const expected = { + hasUniqueLabels: false, + meta: [], + rows: [ + { + entry: 'This is a previous message 1', + fresh: false, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 1', + searchWords: [], + timeEpochMs: 1558038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a previous message 2', + fresh: false, + hasAnsi: false, + labels: { cluster: 'some-cluster' }, + logLevel: 'unknown', + raw: 'This is a previous message 2', + searchWords: [], + timeEpochMs: 1558038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1558038518831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + fresh: true, + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038519831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038519831, + uniqueLabels: {}, + }, + { + entry: 'This is a message', + fresh: true, + hasAnsi: false, + labels: undefined, + logLevel: 'unknown', + raw: 'This is a message', + searchWords: [], + timeEpochMs: 1559038518831, + timeFromNow: 'fromNow() jest mocked', + timeLocal: 'format() jest mocked', + timestamp: 1559038518831, + uniqueLabels: {}, + }, + ], + series: [ + { + alias: 'A-series', + aliasEscaped: 'A-series', + bars: { + fillColor: '#7EB26D', + }, + hasMsResolution: true, + id: 'A-series', + label: 'A-series', + legend: true, + 
stats: {}, + color: '#7EB26D', + datapoints: [ + [37.91264531864214, 1558038518831], + [38.35179822906545, 1558038519831], + [39.91264531864214, 1559038518831], + [40.35179822906545, 1559038519831], + ], + unit: undefined, + valueFormater: toFixed, + }, + ], + }; + + expect(theResult).toEqual(expected); + }); + }); + }); +}); diff --git a/public/app/features/explore/utils/ResultProcessor.ts b/public/app/features/explore/utils/ResultProcessor.ts new file mode 100644 index 000000000000..2521c4914f8e --- /dev/null +++ b/public/app/features/explore/utils/ResultProcessor.ts @@ -0,0 +1,176 @@ +import { + DataQueryResponse, + TableData, + isTableData, + LogsModel, + toSeriesData, + guessFieldTypes, + DataQueryResponseData, + TimeSeries, +} from '@grafana/ui'; + +import { ExploreItemState, ExploreMode } from 'app/types/explore'; +import { getProcessedSeriesData } from 'app/features/dashboard/state/PanelQueryState'; +import TableModel, { mergeTablesIntoModel } from 'app/core/table_model'; +import { sortLogsResult } from 'app/core/utils/explore'; +import { seriesDataToLogsModel } from 'app/core/logs_model'; +import { default as TimeSeries2 } from 'app/core/time_series2'; +import { DataProcessor } from 'app/plugins/panel/graph/data_processor'; + +export class ResultProcessor { + private rawData: DataQueryResponseData[] = []; + private metrics: TimeSeries[] = []; + private tables: TableData[] = []; + + constructor( + private state: ExploreItemState, + private replacePreviousResults: boolean, + result?: DataQueryResponse | DataQueryResponseData[] + ) { + if (result && result.hasOwnProperty('data')) { + this.rawData = (result as DataQueryResponse).data; + } else { + this.rawData = (result as DataQueryResponseData[]) || []; + } + + if (this.state.mode !== ExploreMode.Metrics) { + return; + } + + for (let index = 0; index < this.rawData.length; index++) { + const res: any = this.rawData[index]; + const isTable = isTableData(res); + if (isTable) { + this.tables.push(res); + } else 
{ + this.metrics.push(res); + } + } + } + + getRawData = (): any[] => { + return this.rawData; + }; + + getGraphResult = (): TimeSeries[] => { + if (this.state.mode !== ExploreMode.Metrics) { + return []; + } + + const newResults = this.makeTimeSeriesList(this.metrics); + return this.mergeGraphResults(newResults, this.state.graphResult); + }; + + getTableResult = (): TableModel => { + if (this.state.mode !== ExploreMode.Metrics) { + return new TableModel(); + } + + const prevTableResults = this.state.tableResult || []; + const tablesToMerge = this.replacePreviousResults ? this.tables : [].concat(prevTableResults, this.tables); + + return mergeTablesIntoModel(new TableModel(), ...tablesToMerge); + }; + + getLogsResult = (): LogsModel => { + if (this.state.mode !== ExploreMode.Logs) { + return null; + } + const graphInterval = this.state.queryIntervals.intervalMs; + const seriesData = this.rawData.map(result => guessFieldTypes(toSeriesData(result))); + const newResults = this.rawData ? seriesDataToLogsModel(seriesData, graphInterval) : null; + + if (this.replacePreviousResults) { + return newResults; + } + + const prevLogsResult: LogsModel = this.state.logsResult || { hasUniqueLabels: false, rows: [] }; + const sortedLogResult = sortLogsResult(prevLogsResult, this.state.refreshInterval); + const rowsInState = sortedLogResult.rows; + const seriesInState = sortedLogResult.series || []; + + const processedRows = []; + for (const row of rowsInState) { + processedRows.push({ ...row, fresh: false }); + } + for (const row of newResults.rows) { + processedRows.push({ ...row, fresh: true }); + } + + const processedSeries = this.mergeGraphResults(newResults.series, seriesInState); + + const slice = -1000; + const rows = processedRows.slice(slice); + const series = processedSeries.slice(slice); + + return { ...newResults, rows, series }; + }; + + private makeTimeSeriesList = (rawData: any[]) => { + const dataList = getProcessedSeriesData(rawData); + const dataProcessor = new 
DataProcessor({ xaxis: {}, aliasColors: [] }); // Hack before we use GraphSeriesXY instead + const timeSeries = dataProcessor.getSeriesList({ dataList }); + + return (timeSeries as any) as TimeSeries[]; // Hack before we use GraphSeriesXY instead + }; + + private isSameTimeSeries = (a: TimeSeries | TimeSeries2, b: TimeSeries | TimeSeries2) => { + if (a.hasOwnProperty('id') && b.hasOwnProperty('id')) { + if (a['id'] !== undefined && b['id'] !== undefined && a['id'] === b['id']) { + return true; + } + } + + if (a.hasOwnProperty('alias') && b.hasOwnProperty('alias')) { + if (a['alias'] !== undefined && b['alias'] !== undefined && a['alias'] === b['alias']) { + return true; + } + } + + return false; + }; + + private mergeGraphResults = ( + newResults: TimeSeries[] | TimeSeries2[], + prevResults: TimeSeries[] | TimeSeries2[] + ): TimeSeries[] => { + if (!prevResults || prevResults.length === 0 || this.replacePreviousResults) { + return (newResults as any) as TimeSeries[]; // Hack before we use GraphSeriesXY instead + } + + const results: TimeSeries[] = prevResults.slice() as TimeSeries[]; + + // update existing results + for (let index = 0; index < results.length; index++) { + const prevResult = results[index]; + for (const newResult of newResults) { + const isSame = this.isSameTimeSeries(prevResult, newResult); + + if (isSame) { + prevResult.datapoints = prevResult.datapoints.concat(newResult.datapoints); + break; + } + } + } + + // add new results + for (const newResult of newResults) { + let isNew = true; + for (const prevResult of results) { + const isSame = this.isSameTimeSeries(prevResult, newResult); + if (isSame) { + isNew = false; + break; + } + } + + if (isNew) { + const timeSeries2Result = new TimeSeries2({ ...newResult }); + + const result = (timeSeries2Result as any) as TimeSeries; // Hack before we use GraphSeriesXY instead + results.push(result); + } + } + return results; + }; +} diff --git a/public/app/plugins/datasource/loki/datasource.ts 
b/public/app/plugins/datasource/loki/datasource.ts index d86e5fe1922a..b689d02ba135 100644 --- a/public/app/plugins/datasource/loki/datasource.ts +++ b/public/app/plugins/datasource/loki/datasource.ts @@ -1,5 +1,8 @@ // Libraries import _ from 'lodash'; +import { Subscription, of } from 'rxjs'; +import { webSocket } from 'rxjs/webSocket'; +import { catchError, map } from 'rxjs/operators'; // Services & Utils import * as dateMath from '@grafana/ui/src/utils/datemath'; @@ -17,11 +20,14 @@ import { DataSourceInstanceSettings, DataQueryError, LogRowModel, + DataStreamObserver, + LoadingState, + DataStreamState, } from '@grafana/ui'; import { LokiQuery, LokiOptions } from './types'; import { BackendSrv } from 'app/core/services/backend_srv'; import { TemplateSrv } from 'app/features/templating/template_srv'; -import { safeStringifyValue } from 'app/core/utils/explore'; +import { safeStringifyValue, convertToWebSocketUrl } from 'app/core/utils/explore'; export const DEFAULT_MAX_LINES = 1000; @@ -47,6 +53,7 @@ interface LokiContextQueryOptions { } export class LokiDatasource extends DataSourceApi { + private subscriptions: { [key: string]: Subscription } = null; languageProvider: LanguageProvider; maxLines: number; @@ -60,6 +67,7 @@ export class LokiDatasource extends DataSourceApi { this.languageProvider = new LanguageProvider(this); const settingsData = instanceSettings.jsonData || {}; this.maxLines = parseInt(settingsData.maxLines, 10) || DEFAULT_MAX_LINES; + this.subscriptions = {}; } _request(apiUrl: string, data?, options?: any) { @@ -73,41 +81,20 @@ export class LokiDatasource extends DataSourceApi { return this.backendSrv.datasourceRequest(req); } - convertToStreamTargets = (options: DataQueryRequest): Array<{ url: string; refId: string }> => { - return options.targets - .filter(target => target.expr && !target.hide) - .map(target => { - const interpolated = this.templateSrv.replace(target.expr); - const { query, regexp } = parseQuery(interpolated); - const refId 
= target.refId; - const baseUrl = this.instanceSettings.url; - const params = serializeParams({ query, regexp }); - const url = `${baseUrl}/api/prom/tail?${params}`; - - return { - url, - refId, - }; - }); - }; - - resultToSeriesData = (data: any, refId: string): SeriesData[] => { - const toSeriesData = (stream: any, refId: string) => ({ - ...logStreamToSeriesData(stream), + prepareLiveTarget(target: LokiQuery, options: DataQueryRequest) { + const interpolated = this.templateSrv.replace(target.expr); + const { query, regexp } = parseQuery(interpolated); + const refId = target.refId; + const baseUrl = this.instanceSettings.url; + const params = serializeParams({ query, regexp }); + const url = convertToWebSocketUrl(`${baseUrl}/api/prom/tail?${params}`); + return { + query, + regexp, + url, refId, - }); - - if (data.streams) { - // new Loki API purposed in https://github.com/grafana/loki/pull/590 - const series: SeriesData[] = []; - for (const stream of data.streams || []) { - series.push(toSeriesData(stream, refId)); - } - return series; - } - - return [toSeriesData(data, refId)]; - }; + }; + } prepareQueryTarget(target: LokiQuery, options: DataQueryRequest) { const interpolated = this.templateSrv.replace(target.expr); @@ -126,9 +113,106 @@ export class LokiDatasource extends DataSourceApi { }; } - async query(options: DataQueryRequest) { + unsubscribe = (refId: string) => { + const subscription = this.subscriptions[refId]; + if (subscription && !subscription.closed) { + subscription.unsubscribe(); + delete this.subscriptions[refId]; + } + }; + + processError = (err: any, target: any): DataQueryError => { + const error: DataQueryError = { + message: 'Unknown error during query transaction. 
Please check JS console logs.', + refId: target.refId, + }; + + if (err.data) { + if (typeof err.data === 'string') { + error.message = err.data; + } else if (err.data.error) { + error.message = safeStringifyValue(err.data.error); + } + } else if (err.message) { + error.message = err.message; + } else if (typeof err === 'string') { + error.message = err; + } + + error.status = err.status; + error.statusText = err.statusText; + + return error; + }; + + processResult = (data: any, target: any): SeriesData[] => { + const series: SeriesData[] = []; + + if (Object.keys(data).length === 0) { + return series; + } + + if (!data.streams) { + return [{ ...logStreamToSeriesData(data), refId: target.refId }]; + } + + for (const stream of data.streams || []) { + const seriesData = logStreamToSeriesData(stream); + seriesData.refId = target.refId; + seriesData.meta = { + searchWords: getHighlighterExpressionsFromQuery(formatQuery(target.query, target.regexp)), + limit: this.maxLines, + }; + series.push(seriesData); + } + + return series; + }; + + runLiveQueries = (options: DataQueryRequest, observer?: DataStreamObserver) => { + const liveTargets = options.targets + .filter(target => target.expr && !target.hide && target.live) + .map(target => this.prepareLiveTarget(target, options)); + + for (const liveTarget of liveTargets) { + const subscription = webSocket(liveTarget.url) + .pipe( + map((results: any[]) => { + const delta = this.processResult(results, liveTarget); + const state: DataStreamState = { + key: `loki-${liveTarget.refId}`, + request: options, + state: LoadingState.Streaming, + delta, + unsubscribe: () => this.unsubscribe(liveTarget.refId), + }; + + return state; + }), + catchError(err => { + const error = this.processError(err, liveTarget); + const state: DataStreamState = { + key: `loki-${liveTarget.refId}`, + request: options, + state: LoadingState.Error, + error, + unsubscribe: () => this.unsubscribe(liveTarget.refId), + }; + + return of(state); + }) + ) + 
.subscribe({ + next: state => observer(state), + }); + + this.subscriptions[liveTarget.refId] = subscription; + } + }; + + runQueries = async (options: DataQueryRequest) => { const queryTargets = options.targets - .filter(target => target.expr && !target.hide) + .filter(target => target.expr && !target.hide && !target.live) .map(target => this.prepareQueryTarget(target, options)); if (queryTargets.length === 0) { @@ -141,53 +225,29 @@ export class LokiDatasource extends DataSourceApi { return err; } - const error: DataQueryError = { - message: 'Unknown error during query transaction. Please check JS console logs.', - refId: target.refId, - }; - - if (err.data) { - if (typeof err.data === 'string') { - error.message = err.data; - } else if (err.data.error) { - error.message = safeStringifyValue(err.data.error); - } - } else if (err.message) { - error.message = err.message; - } else if (typeof err === 'string') { - error.message = err; - } - - error.status = err.status; - error.statusText = err.statusText; - + const error: DataQueryError = this.processError(err, target); throw error; }) ); return Promise.all(queries).then((results: any[]) => { - const series: Array = []; + let series: SeriesData[] = []; for (let i = 0; i < results.length; i++) { const result = results[i]; if (result.data) { - const refId = queryTargets[i].refId; - for (const stream of result.data.streams || []) { - const seriesData = logStreamToSeriesData(stream); - seriesData.refId = refId; - seriesData.meta = { - searchWords: getHighlighterExpressionsFromQuery( - formatQuery(queryTargets[i].query, queryTargets[i].regexp) - ), - limit: this.maxLines, - }; - series.push(seriesData); - } + series = series.concat(this.processResult(result.data, queryTargets[i])); } } return { data: series }; }); + }; + + async query(options: DataQueryRequest, observer?: DataStreamObserver) { + this.runLiveQueries(options, observer); + + return this.runQueries(options); } async importQueries(queries: LokiQuery[], 
originMeta: PluginMeta): Promise { diff --git a/public/app/plugins/datasource/loki/language_provider.ts b/public/app/plugins/datasource/loki/language_provider.ts index 64bf876f2c77..ff187bd88420 100644 --- a/public/app/plugins/datasource/loki/language_provider.ts +++ b/public/app/plugins/datasource/loki/language_provider.ts @@ -16,6 +16,7 @@ import { } from 'app/types/explore'; import { LokiQuery } from './types'; import { dateTime } from '@grafana/ui/src/utils/moment_wrapper'; +import { PromQuery } from '../prometheus/types'; const DEFAULT_KEYS = ['job', 'namespace']; const EMPTY_SELECTOR = '{}'; @@ -168,8 +169,9 @@ export default class LokiLanguageProvider extends LanguageProvider { return Promise.all( queries.map(async query => { const expr = await this.importPrometheusQuery(query.expr); + const { context, ...rest } = query as PromQuery; return { - ...query, + ...rest, expr, }; }) diff --git a/public/app/plugins/datasource/loki/plugin.json b/public/app/plugins/datasource/loki/plugin.json index 1c880bce8111..ca630b56bc73 100644 --- a/public/app/plugins/datasource/loki/plugin.json +++ b/public/app/plugins/datasource/loki/plugin.json @@ -8,6 +8,7 @@ "alerting": false, "annotations": false, "logs": true, + "streaming": true, "info": { "description": "Like Prometheus but for logs. 
OSS logging solution from Grafana Labs", diff --git a/public/app/plugins/datasource/loki/types.ts b/public/app/plugins/datasource/loki/types.ts index 4c973f8a79ed..e733c3b47cb6 100644 --- a/public/app/plugins/datasource/loki/types.ts +++ b/public/app/plugins/datasource/loki/types.ts @@ -2,6 +2,9 @@ import { DataQuery, Labels, DataSourceJsonData } from '@grafana/ui/src/types'; export interface LokiQuery extends DataQuery { expr: string; + live?: boolean; + query?: string; + regexp?: string; } export interface LokiOptions extends DataSourceJsonData { diff --git a/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx b/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx index 14d03df6d388..c432e9d58b4b 100644 --- a/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx +++ b/public/app/plugins/datasource/prometheus/components/PromQueryField.tsx @@ -223,7 +223,7 @@ class PromQueryField extends React.PureComponent { type: string; @@ -83,7 +87,7 @@ export class PrometheusDatasource extends DataSourceApi } } - _request(url, data?, options?: any) { + _request(url: string, data?: any, options?: any) { options = _.defaults(options || {}, { url: this.url + url, method: this.httpMethod, @@ -119,11 +123,11 @@ export class PrometheusDatasource extends DataSourceApi } // Use this for tab completion features, wont publish response to other components - metadataRequest(url) { + metadataRequest(url: string) { return this._request(url, null, { method: 'GET', silent: true }); } - interpolateQueryExpr(value, variable, defaultFormatFn) { + interpolateQueryExpr(value: any, variable: any, defaultFormatFn: any) { // if no multi or include all do not regexEscape if (!variable.multi && !variable.includeAll) { return prometheusRegularEscape(value); @@ -141,34 +145,132 @@ export class PrometheusDatasource extends DataSourceApi return this.templateSrv.variableExists(target.expr); } - query(options: DataQueryRequest): Promise<{ data: any }> { - 
const start = this.getPrometheusTime(options.range.from, false); - const end = this.getPrometheusTime(options.range.to, true); + processResult = (response: any, query: PromQueryRequest, target: PromQuery, responseListLength: number) => { + // Keeping original start/end for transformers + const transformerOptions = { + format: target.format, + step: query.step, + legendFormat: target.legendFormat, + start: query.start, + end: query.end, + query: query.expr, + responseListLength, + refId: target.refId, + valueWithRefId: target.valueWithRefId, + }; + const series = this.resultTransformer.transform(response, transformerOptions); - const queries = []; - const activeTargets = []; + return series; + }; - options = _.clone(options); + runObserverQueries = ( + options: DataQueryRequest, + observer: DataStreamObserver, + queries: PromQueryRequest[], + activeTargets: PromQuery[], + end: number + ) => { + for (let index = 0; index < queries.length; index++) { + const query = queries[index]; + const target = activeTargets[index]; + let observable: Observable = null; + + if (query.instant) { + observable = from(this.performInstantQuery(query, end)); + } else { + observable = from(this.performTimeSeriesQuery(query, query.start, query.end)); + } + + observable + .pipe( + single(), // unsubscribes automatically after first result + filter((response: any) => (response.cancelled ? 
false : true)), + map((response: any) => { + return this.processResult(response, query, target, queries.length); + }) + ) + .subscribe({ + next: series => { + if (query.instant) { + observer({ + key: `prometheus-${target.refId}`, + state: LoadingState.Loading, + request: options, + series: null, + delta: series, + unsubscribe: () => undefined, + }); + } else { + observer({ + key: `prometheus-${target.refId}`, + state: LoadingState.Done, + request: options, + series: null, + delta: series, + unsubscribe: () => undefined, + }); + } + }, + }); + } + }; + + prepareTargets = (options: DataQueryRequest, start: number, end: number) => { + const queries: PromQueryRequest[] = []; + const activeTargets: PromQuery[] = []; for (const target of options.targets) { if (!target.expr || target.hide) { continue; } + if (target.context === 'explore') { + target.format = 'time_series'; + target.instant = false; + const instantTarget: any = _.cloneDeep(target); + instantTarget.format = 'table'; + instantTarget.instant = true; + instantTarget.valueWithRefId = true; + delete instantTarget.maxDataPoints; + instantTarget.requestId += '_instant'; + instantTarget.refId += '_instant'; + activeTargets.push(instantTarget); + queries.push(this.createQuery(instantTarget, options, start, end)); + } + activeTargets.push(target); queries.push(this.createQuery(target, options, start, end)); } + return { + queries, + activeTargets, + }; + }; + + query(options: DataQueryRequest, observer?: DataStreamObserver): Promise<{ data: any }> { + const start = this.getPrometheusTime(options.range.from, false); + const end = this.getPrometheusTime(options.range.to, true); + + options = _.clone(options); + const { queries, activeTargets } = this.prepareTargets(options, start, end); + // No valid targets, return the empty result to save a round trip. 
if (_.isEmpty(queries)) { return this.$q.when({ data: [] }) as Promise<{ data: any }>; } + if (observer && options.targets.filter(target => target.context === 'explore').length === options.targets.length) { + // using observer to make the instant query return immediately + this.runObserverQueries(options, observer, queries, activeTargets, end); + return this.$q.when({ data: [] }) as Promise<{ data: any }>; + } + const allQueryPromise = _.map(queries, query => { - if (!query.instant) { - return this.performTimeSeriesQuery(query, query.start, query.end); - } else { + if (query.instant) { return this.performInstantQuery(query, end); + } else { + return this.performTimeSeriesQuery(query, query.start, query.end); } }); @@ -180,19 +282,10 @@ export class PrometheusDatasource extends DataSourceApi return; } - // Keeping original start/end for transformers - const transformerOptions = { - format: activeTargets[index].format, - step: queries[index].step, - legendFormat: activeTargets[index].legendFormat, - start: queries[index].start, - end: queries[index].end, - query: queries[index].expr, - responseListLength: responseList.length, - refId: activeTargets[index].refId, - valueWithRefId: activeTargets[index].valueWithRefId, - }; - const series = this.resultTransformer.transform(response, transformerOptions); + const target = activeTargets[index]; + const query = queries[index]; + const series = this.processResult(response, query, target, queries.length); + result = [...result, ...series]; }); @@ -202,10 +295,16 @@ export class PrometheusDatasource extends DataSourceApi return allPromise as Promise<{ data: any }>; } - createQuery(target, options, start, end) { - const query: any = { + createQuery(target: PromQuery, options: DataQueryRequest, start: number, end: number) { + const query: PromQueryRequest = { hinting: target.hinting, instant: target.instant, + step: 0, + expr: '', + requestId: '', + refId: '', + start: 0, + end: 0, }; const range = Math.ceil(end - start); @@ 
-398,7 +497,7 @@ export class PrometheusDatasource extends DataSourceApi }; // Unsetting min interval for accurate event resolution const minStep = '1s'; - const query = this.createQuery({ expr, interval: minStep }, queryOptions, start, end); + const query = this.createQuery({ expr, interval: minStep, refId: 'X' }, queryOptions, start, end); const self = this; return this.performTimeSeriesQuery(query, query.start, query.end).then(results => { diff --git a/public/app/plugins/datasource/prometheus/types.ts b/public/app/plugins/datasource/prometheus/types.ts index e83029df8356..a256f289cfe2 100644 --- a/public/app/plugins/datasource/prometheus/types.ts +++ b/public/app/plugins/datasource/prometheus/types.ts @@ -2,6 +2,14 @@ import { DataQuery, DataSourceJsonData } from '@grafana/ui/src/types'; export interface PromQuery extends DataQuery { expr: string; + context?: 'explore' | 'panel'; + format?: string; + instant?: boolean; + hinting?: boolean; + interval?: string; + intervalFactor?: number; + legendFormat?: string; + valueWithRefId?: boolean; } export interface PromOptions extends DataSourceJsonData { @@ -10,3 +18,10 @@ export interface PromOptions extends DataSourceJsonData { httpMethod: string; directUrl: string; } + +export interface PromQueryRequest extends PromQuery { + step?: number; + requestId?: string; + start: number; + end: number; +} diff --git a/public/app/store/configureStore.ts b/public/app/store/configureStore.ts index 2d7d3288d3b9..63d8eaaf718d 100644 --- a/public/app/store/configureStore.ts +++ b/public/app/store/configureStore.ts @@ -15,8 +15,22 @@ import usersReducers from 'app/features/users/state/reducers'; import userReducers from 'app/features/profile/state/reducers'; import organizationReducers from 'app/features/org/state/reducers'; import { setStore } from './store'; -import { startSubscriptionsEpic, startSubscriptionEpic, limitMessageRateEpic } from 'app/features/explore/state/epics'; -import { WebSocketSubject, webSocket } from 
'rxjs/webSocket'; +import { limitMessageRateEpic } from 'app/features/explore/state/epics/limitMessageRateEpic'; +import { stateSaveEpic } from 'app/features/explore/state/epics/stateSaveEpic'; +import { processQueryResultsEpic } from 'app/features/explore/state/epics/processQueryResultsEpic'; +import { processQueryErrorsEpic } from 'app/features/explore/state/epics/processQueryErrorsEpic'; +import { runQueriesEpic } from 'app/features/explore/state/epics/runQueriesEpic'; +import { runQueriesBatchEpic } from 'app/features/explore/state/epics/runQueriesBatchEpic'; +import { + DataSourceApi, + DataQueryResponse, + DataQuery, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, +} from '@grafana/ui'; +import { Observable } from 'rxjs'; +import { getQueryResponse } from 'app/core/utils/explore'; import { StoreState } from 'app/types/store'; import { toggleLogActionsMiddleware } from 'app/core/middlewares/application'; @@ -39,14 +53,25 @@ export function addRootReducer(reducers) { Object.assign(rootReducers, ...reducers); } -export const rootEpic: any = combineEpics(startSubscriptionsEpic, startSubscriptionEpic, limitMessageRateEpic); +export const rootEpic: any = combineEpics( + limitMessageRateEpic, + stateSaveEpic, + runQueriesEpic, + runQueriesBatchEpic, + processQueryResultsEpic, + processQueryErrorsEpic +); export interface EpicDependencies { - getWebSocket: (urlConfigOrSource: string) => WebSocketSubject; + getQueryResponse: ( + datasourceInstance: DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver + ) => Observable; } const dependencies: EpicDependencies = { - getWebSocket: webSocket, + getQueryResponse, }; const epicMiddleware = createEpicMiddleware({ dependencies }); diff --git a/public/app/types/explore.ts b/public/app/types/explore.ts index 289ae02b0d71..98d137f1e7a5 100644 --- a/public/app/types/explore.ts +++ b/public/app/types/explore.ts @@ -3,7 +3,6 @@ import { Value } from 'slate'; import { RawTimeRange, DataQuery, - 
DataQueryResponseData, DataSourceSelectItem, DataSourceApi, QueryHint, @@ -13,9 +12,10 @@ import { DataQueryError, LogsModel, LogsDedupStrategy, + LoadingState, } from '@grafana/ui'; -import { Emitter, TimeSeries } from 'app/core/core'; +import { Emitter } from 'app/core/core'; import TableModel from 'app/core/table_model'; export enum ExploreMode { @@ -215,9 +215,7 @@ export interface ExploreItemState { */ showingTable: boolean; - graphIsLoading: boolean; - logIsLoading: boolean; - tableIsLoading: boolean; + loadingState: LoadingState; /** * Table model that combines all query table results into a single table. */ @@ -254,6 +252,7 @@ export interface ExploreItemState { mode: ExploreMode; isLive: boolean; + urlReplaced: boolean; } export interface ExploreUpdateState { @@ -314,11 +313,8 @@ export interface QueryIntervals { export interface QueryOptions { interval: string; - format: string; - hinting?: boolean; - instant?: boolean; - valueWithRefId?: boolean; maxDataPoints?: number; + live?: boolean; } export interface QueryTransaction { @@ -330,23 +326,14 @@ export interface QueryTransaction { options: any; queries: DataQuery[]; result?: any; // Table model / Timeseries[] / Logs - resultType: ResultType; scanning?: boolean; } export type RangeScanner = () => RawTimeRange; -export type ResultGetter = ( - result: DataQueryResponseData, - transaction: QueryTransaction, - allTransactions: QueryTransaction[] -) => TimeSeries; - export interface TextMatch { text: string; start: number; length: number; end: number; } - -export type ResultType = 'Graph' | 'Logs' | 'Table'; diff --git a/public/test/core/redux/epicTester.ts b/public/test/core/redux/epicTester.ts index 5c2a42469435..88638f556c68 100644 --- a/public/test/core/redux/epicTester.ts +++ b/public/test/core/redux/epicTester.ts @@ -1,6 +1,14 @@ import { Epic, ActionsObservable, StateObservable } from 'redux-observable'; import { Subject } from 'rxjs'; -import { WebSocketSubject } from 'rxjs/webSocket'; +import { + 
DataSourceApi, + DataQuery, + DataSourceJsonData, + DataQueryRequest, + DataStreamObserver, + DataQueryResponse, + DataStreamState, +} from '@grafana/ui'; import { ActionOf } from 'app/core/redux/actionCreatorFactory'; import { StoreState } from 'app/types/store'; @@ -8,21 +16,30 @@ import { EpicDependencies } from 'app/store/configureStore'; export const epicTester = ( epic: Epic, ActionOf, StoreState, EpicDependencies>, - state?: StoreState + state?: Partial ) => { const resultingActions: Array> = []; const action$ = new Subject>(); const state$ = new Subject(); const actionObservable$ = new ActionsObservable(action$); - const stateObservable$ = new StateObservable(state$, state || ({} as StoreState)); - const websockets$: Array> = []; + const stateObservable$ = new StateObservable(state$, (state as StoreState) || ({} as StoreState)); + const queryResponse$ = new Subject(); + const observer$ = new Subject(); + const getQueryResponse = ( + datasourceInstance: DataSourceApi, + options: DataQueryRequest, + observer?: DataStreamObserver + ) => { + if (observer) { + observer$.subscribe({ next: event => observer(event) }); + } + return queryResponse$; + }; + const dependencies: EpicDependencies = { - getWebSocket: () => { - const webSocket$ = new Subject(); - websockets$.push(webSocket$); - return webSocket$ as WebSocketSubject; - }, + getQueryResponse, }; + epic(actionObservable$, stateObservable$, dependencies).subscribe({ next: action => resultingActions.push(action) }); const whenActionIsDispatched = (action: ActionOf) => { @@ -31,14 +48,26 @@ export const epicTester = ( return instance; }; - const whenWebSocketReceivesData = (data: any) => { - websockets$.forEach(websocket$ => websocket$.next(data)); + const whenQueryReceivesResponse = (response: DataQueryResponse) => { + queryResponse$.next(response); + + return instance; + }; + + const whenQueryThrowsError = (error: any) => { + queryResponse$.error(error); + + return instance; + }; + + const 
whenQueryObserverReceivesEvent = (event: DataStreamState) => { + observer$.next(event); return instance; }; const thenResultingActionsEqual = (...actions: Array>) => { - expect(resultingActions).toEqual(actions); + expect(actions).toEqual(resultingActions); return instance; }; @@ -51,7 +80,9 @@ export const epicTester = ( const instance = { whenActionIsDispatched, - whenWebSocketReceivesData, + whenQueryReceivesResponse, + whenQueryThrowsError, + whenQueryObserverReceivesEvent, thenResultingActionsEqual, thenNoActionsWhereDispatched, }; diff --git a/public/test/mocks/mockExploreState.ts b/public/test/mocks/mockExploreState.ts new file mode 100644 index 000000000000..981f1fb2dbe4 --- /dev/null +++ b/public/test/mocks/mockExploreState.ts @@ -0,0 +1,86 @@ +import { DataSourceApi } from '@grafana/ui/src/types/datasource'; + +import { ExploreId, ExploreItemState, ExploreState } from 'app/types/explore'; +import { makeExploreItemState } from 'app/features/explore/state/reducers'; +import { StoreState } from 'app/types'; + +export const mockExploreState = (options: any = {}) => { + const isLive = options.isLive || false; + const history = []; + const eventBridge = { + emit: jest.fn(), + }; + const streaming = options.streaming || undefined; + const datasourceInterval = options.datasourceInterval || ''; + const refreshInterval = options.refreshInterval || ''; + const containerWidth = options.containerWidth || 1980; + const queries = options.queries || []; + const datasourceError = options.datasourceError || null; + const scanner = options.scanner || jest.fn(); + const scanning = options.scanning || false; + const datasourceId = options.datasourceId || '1337'; + const exploreId = ExploreId.left; + const datasourceInstance: DataSourceApi = options.datasourceInstance || { + id: 1337, + query: jest.fn(), + name: 'test', + testDatasource: jest.fn(), + meta: { + id: datasourceId, + streaming, + }, + interval: datasourceInterval, + }; + const urlReplaced = options.urlReplaced || 
false; + const left: ExploreItemState = options.left || { + ...makeExploreItemState(), + containerWidth, + datasourceError, + datasourceInstance, + eventBridge, + history, + isLive, + queries, + refreshInterval, + scanner, + scanning, + urlReplaced, + }; + const right: ExploreItemState = options.right || { + ...makeExploreItemState(), + containerWidth, + datasourceError, + datasourceInstance, + eventBridge, + history, + isLive, + queries, + refreshInterval, + scanner, + scanning, + urlReplaced, + }; + const split: boolean = options.split || false; + const explore: ExploreState = { + left, + right, + split, + }; + const state: Partial = { + explore, + }; + + return { + containerWidth, + datasourceId, + datasourceInstance, + datasourceInterval, + eventBridge, + exploreId, + history, + queries, + refreshInterval, + state, + scanner, + }; +}; From fbf37eb40281a5705d0824fc84df7344eadb309f Mon Sep 17 00:00:00 2001 From: Carl Bergquist Date: Mon, 3 Jun 2019 15:22:59 +0200 Subject: [PATCH 46/49] docs: configuring custom headers in the dataproxy (#17367) closes #17348 --- docs/sources/administration/provisioning.md | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/docs/sources/administration/provisioning.md b/docs/sources/administration/provisioning.md index 9b1f8a6c70f6..d09fb0bbc51b 100644 --- a/docs/sources/administration/provisioning.md +++ b/docs/sources/administration/provisioning.md @@ -179,6 +179,24 @@ Secure json data is a map of settings that will be encrypted with [secret key](/ | accessKey | string | Cloudwatch | Access key for connecting to Cloudwatch | | secretKey | string | Cloudwatch | Secret key for connecting to Cloudwatch | +#### Custom HTTP headers for datasources +Datasources managed by Grafanas provisioning can be configured to add HTTP headers to all requests +going to that datasource. The header name is configured in the `jsonData` field and the header value should be +configured in `secureJsonData`. 
+ +```yaml +apiVersion: 1 + +datasources: +- name: Graphite + jsonData: + httpHeaderName1: "HeaderName" + httpHeaderName2: "Authorization" + secureJsonData: + httpHeaderValue1: "HeaderValue" + httpHeaderValue2: "Bearer XXXXXXXXX" +``` + ### Dashboards It's possible to manage dashboards in Grafana by adding one or more yaml config files in the [`provisioning/dashboards`](/installation/configuration/#provisioning) directory. Each config file can contain a list of `dashboards providers` that will load dashboards into Grafana from the local filesystem. @@ -204,7 +222,7 @@ providers: # enable dashboard editing editable: true # how often Grafana will scan for changed dashboards - updateIntervalSeconds: 10 + updateIntervalSeconds: 10 options: # path to dashboard files on disk. Required path: /var/lib/grafana/dashboards From b7a9533476633df8a902e347932d6cd5d71a58f1 Mon Sep 17 00:00:00 2001 From: Marcus Efraimsson Date: Mon, 3 Jun 2019 16:45:03 +0200 Subject: [PATCH 47/49] Database: Initialize xorm with an empty schema for postgres (#17357) xorm introduced some changes in https://github.com/go-xorm/xorm/pull/824 and https://github.com/go-xorm/xorm/pull/876 which by default will use public as the postgres schema and this was a breaking change compared to before. Grafana has implemented a custom postgres dialect so above changes wasn't a problem here. However, Grafana's custom database migration was using xorm dialect to check if the migration table exists or not. For those using a custom search_path (schema) in postgres configured on server, database or user level the migration table check would not find the migration table since it was looking in public schema due to xorm changes above. This had the consequence that Grafana's database migration failed the second time since migration had already run migrations in another schema. 
This change will make xorm use an empty default schema for postgres and by that mimic the functionality of how it was functioning before xorm's changes above. Fixes #16720 Co-Authored-By: Carl Bergquist --- pkg/services/sqlstore/sqlstore.go | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pkg/services/sqlstore/sqlstore.go b/pkg/services/sqlstore/sqlstore.go index 58bcc5578593..675af5f02bb3 100644 --- a/pkg/services/sqlstore/sqlstore.go +++ b/pkg/services/sqlstore/sqlstore.go @@ -37,6 +37,11 @@ var ( const ContextSessionName = "db-session" func init() { + // This change will make xorm use an empty default schema for postgres and + // by that mimic the functionality of how it was functioning before + // xorm's changes above. + xorm.DefaultPostgresSchema = "" + registry.Register(®istry.Descriptor{ Name: "SqlStore", Instance: &SqlStore{}, From 96ba32d0c87198adc32019b6f27925f9a59c3cde Mon Sep 17 00:00:00 2001 From: Ryan McKinley Date: Mon, 3 Jun 2019 17:55:59 +0200 Subject: [PATCH 48/49] Add a @grafana/runtime package with backendSrv interface (#16533) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit grafana-runtime/tsconfig.json imports query to avoid a build error ¯\_(ツ)_/¯ --- packages/grafana-runtime/README.md | 3 ++ packages/grafana-runtime/index.js | 7 +++ packages/grafana-runtime/package.json | 37 ++++++++++++++ packages/grafana-runtime/rollup.config.ts | 50 +++++++++++++++++++ packages/grafana-runtime/src/index.ts | 1 + .../src/services/AngularLoader.ts | 19 +++++++ .../src/services/backendSrv.ts | 42 ++++++++++++++++ .../src/services/dataSourceSrv.ts | 15 ++++++ .../grafana-runtime/src/services/index.ts | 3 ++ packages/grafana-runtime/tsconfig.build.json | 4 ++ packages/grafana-runtime/tsconfig.json | 19 +++++++ packages/grafana-runtime/tslint.json | 6 +++ packages/grafana-ui/tslint.json | 2 +- .../core/components/PluginHelp/PluginHelp.tsx | 2 +- .../SharedPreferences/SharedPreferences.tsx | 4 +- 
public/app/core/services/AngularLoader.ts | 21 ++------ public/app/core/services/backend_srv.ts | 18 ++----- public/app/features/admin/state/apis.ts | 2 +- public/app/features/alerting/AlertTab.tsx | 2 +- public/app/features/alerting/StateHistory.tsx | 2 +- .../app/features/alerting/TestRuleResult.tsx | 2 +- public/app/features/alerting/state/actions.ts | 2 +- .../dashboard/components/DashNav/DashNav.tsx | 2 +- .../DashboardSettings/DashboardSettings.tsx | 2 +- .../dashboard/components/SubMenu/SubMenu.tsx | 2 +- .../dashboard/dashgrid/DashboardPanel.tsx | 2 +- .../dashboard/panel_editor/GeneralTab.tsx | 2 +- .../dashboard/panel_editor/PanelEditor.tsx | 2 +- .../dashboard/panel_editor/QueryEditorRow.tsx | 2 +- .../panel_editor/VisualizationTab.tsx | 2 +- .../app/features/dashboard/state/actions.ts | 2 +- .../datasources/settings/PluginSettings.tsx | 2 +- .../app/features/datasources/state/actions.ts | 2 +- public/app/features/explore/QueryEditor.tsx | 2 +- public/app/features/org/state/actions.ts | 2 +- public/app/features/plugins/datasource_srv.ts | 11 ++-- public/app/features/plugins/plugin_loader.ts | 2 + public/app/features/plugins/state/actions.ts | 2 +- .../plugins/wrappers/AppConfigWrapper.tsx | 2 +- public/app/features/teams/state/actions.ts | 2 +- public/app/features/users/state/actions.ts | 2 +- .../prometheus/specs/completer.test.ts | 2 +- .../stackdriver/components/Filter.tsx | 2 +- .../datasource/testdata/QueryEditor.tsx | 4 +- public/app/routes/GrafanaCtrl.ts | 8 +-- scripts/grunt/default_task.js | 3 +- 46 files changed, 255 insertions(+), 74 deletions(-) create mode 100644 packages/grafana-runtime/README.md create mode 100644 packages/grafana-runtime/index.js create mode 100644 packages/grafana-runtime/package.json create mode 100644 packages/grafana-runtime/rollup.config.ts create mode 100644 packages/grafana-runtime/src/index.ts create mode 100644 packages/grafana-runtime/src/services/AngularLoader.ts create mode 100644 
packages/grafana-runtime/src/services/backendSrv.ts create mode 100644 packages/grafana-runtime/src/services/dataSourceSrv.ts create mode 100644 packages/grafana-runtime/src/services/index.ts create mode 100644 packages/grafana-runtime/tsconfig.build.json create mode 100644 packages/grafana-runtime/tsconfig.json create mode 100644 packages/grafana-runtime/tslint.json diff --git a/packages/grafana-runtime/README.md b/packages/grafana-runtime/README.md new file mode 100644 index 000000000000..f01cd35537c7 --- /dev/null +++ b/packages/grafana-runtime/README.md @@ -0,0 +1,3 @@ +# Grafana Runtime library + +Interfaces that let you use the runtime... \ No newline at end of file diff --git a/packages/grafana-runtime/index.js b/packages/grafana-runtime/index.js new file mode 100644 index 000000000000..d1a4363350e9 --- /dev/null +++ b/packages/grafana-runtime/index.js @@ -0,0 +1,7 @@ +'use strict' + +if (process.env.NODE_ENV === 'production') { + module.exports = require('./index.production.js'); +} else { + module.exports = require('./index.development.js'); +} diff --git a/packages/grafana-runtime/package.json b/packages/grafana-runtime/package.json new file mode 100644 index 000000000000..ed390d63b6af --- /dev/null +++ b/packages/grafana-runtime/package.json @@ -0,0 +1,37 @@ +{ + "name": "@grafana/runtime", + "version": "6.0.1-alpha.0", + "description": "Grafana Runtime Library", + "keywords": [ + "typescript", + "react", + "react-component" + ], + "main": "src/index.ts", + "scripts": { + "tslint": "tslint -c tslint.json --project tsconfig.json", + "typecheck": "tsc --noEmit", + "clean": "rimraf ./dist ./compiled", + "build": "rollup -c rollup.config.ts" + }, + "author": "Grafana Labs", + "license": "Apache-2.0", + "dependencies": { + }, + "devDependencies": { + "awesome-typescript-loader": "^5.2.1", + "lodash": "^4.17.10", + "pretty-format": "^24.5.0", + "rollup": "1.6.0", + "rollup-plugin-commonjs": "9.2.1", + "rollup-plugin-node-resolve": "4.0.1", + 
"rollup-plugin-sourcemaps": "0.4.2", + "rollup-plugin-terser": "4.0.4", + "rollup-plugin-typescript2": "0.19.3", + "rollup-plugin-visualizer": "0.9.2", + "typescript": "3.4.1" + }, + "resolutions": { + "@types/lodash": "4.14.119" + } +} diff --git a/packages/grafana-runtime/rollup.config.ts b/packages/grafana-runtime/rollup.config.ts new file mode 100644 index 000000000000..a2d6da109d96 --- /dev/null +++ b/packages/grafana-runtime/rollup.config.ts @@ -0,0 +1,50 @@ +import resolve from 'rollup-plugin-node-resolve'; +import commonjs from 'rollup-plugin-commonjs'; +import sourceMaps from 'rollup-plugin-sourcemaps'; +import { terser } from 'rollup-plugin-terser'; + +const pkg = require('./package.json'); + +const libraryName = pkg.name; + +const buildCjsPackage = ({ env }) => { + return { + input: `compiled/index.js`, + output: [ + { + file: `dist/index.${env}.js`, + name: libraryName, + format: 'cjs', + sourcemap: true, + exports: 'named', + globals: {}, + }, + ], + external: ['lodash'], // Use Lodash from grafana + plugins: [ + commonjs({ + include: /node_modules/, + namedExports: { + '../../node_modules/lodash/lodash.js': [ + 'flatten', + 'find', + 'upperFirst', + 'debounce', + 'isNil', + 'isNumber', + 'flattenDeep', + 'map', + 'chunk', + 'sortBy', + 'uniqueId', + 'zip', + ], + }, + }), + resolve(), + sourceMaps(), + env === 'production' && terser(), + ], + }; +}; +export default [buildCjsPackage({ env: 'development' }), buildCjsPackage({ env: 'production' })]; diff --git a/packages/grafana-runtime/src/index.ts b/packages/grafana-runtime/src/index.ts new file mode 100644 index 000000000000..e371345e62d8 --- /dev/null +++ b/packages/grafana-runtime/src/index.ts @@ -0,0 +1 @@ +export * from './services'; diff --git a/packages/grafana-runtime/src/services/AngularLoader.ts b/packages/grafana-runtime/src/services/AngularLoader.ts new file mode 100644 index 000000000000..9565a6d41f43 --- /dev/null +++ b/packages/grafana-runtime/src/services/AngularLoader.ts @@ -0,0 +1,19 
@@ +export interface AngularComponent { + destroy(): void; + digest(): void; + getScope(): any; +} + +export interface AngularLoader { + load(elem: any, scopeProps: any, template: string): AngularComponent; +} + +let instance: AngularLoader; + +export function setAngularLoader(v: AngularLoader) { + instance = v; +} + +export function getAngularLoader(): AngularLoader { + return instance; +} diff --git a/packages/grafana-runtime/src/services/backendSrv.ts b/packages/grafana-runtime/src/services/backendSrv.ts new file mode 100644 index 000000000000..a30296eca8cc --- /dev/null +++ b/packages/grafana-runtime/src/services/backendSrv.ts @@ -0,0 +1,42 @@ +/** + * Currently implemented with: + * https://docs.angularjs.org/api/ng/service/$http#usage + * but that will likely change in the future + */ +export type BackendSrvRequest = { + url: string; + retry?: number; + headers?: any; + method?: string; + + // Show a message with the result + showSuccessAlert?: boolean; + + [key: string]: any; +}; + +export interface BackendSrv { + get(url: string, params?: any): Promise; + + delete(url: string): Promise; + + post(url: string, data: any): Promise; + + patch(url: string, data: any): Promise; + + put(url: string, data: any): Promise; + + // If there is an error, set: err.isHandled = true + // otherwise the backend will show a message for you + request(options: BackendSrvRequest): Promise; +} + +let singletonInstance: BackendSrv; + +export function setBackendSrv(instance: BackendSrv) { + singletonInstance = instance; +} + +export function getBackendSrv(): BackendSrv { + return singletonInstance; +} diff --git a/packages/grafana-runtime/src/services/dataSourceSrv.ts b/packages/grafana-runtime/src/services/dataSourceSrv.ts new file mode 100644 index 000000000000..1f3bbbb8436b --- /dev/null +++ b/packages/grafana-runtime/src/services/dataSourceSrv.ts @@ -0,0 +1,15 @@ +import { ScopedVars, DataSourceApi } from '@grafana/ui'; + +export interface DataSourceSrv { + get(name?: string, 
scopedVars?: ScopedVars): Promise; +} + +let singletonInstance: DataSourceSrv; + +export function setDataSourceSrv(instance: DataSourceSrv) { + singletonInstance = instance; +} + +export function getDataSourceSrv(): DataSourceSrv { + return singletonInstance; +} diff --git a/packages/grafana-runtime/src/services/index.ts b/packages/grafana-runtime/src/services/index.ts new file mode 100644 index 000000000000..08517c0650b5 --- /dev/null +++ b/packages/grafana-runtime/src/services/index.ts @@ -0,0 +1,3 @@ +export * from './backendSrv'; +export * from './AngularLoader'; +export * from './dataSourceSrv'; diff --git a/packages/grafana-runtime/tsconfig.build.json b/packages/grafana-runtime/tsconfig.build.json new file mode 100644 index 000000000000..34e37b5d0b84 --- /dev/null +++ b/packages/grafana-runtime/tsconfig.build.json @@ -0,0 +1,4 @@ +{ + "extends": "./tsconfig.json", + "exclude": ["dist", "node_modules", "**/*.test.ts", "**/*.test.tsx"] +} diff --git a/packages/grafana-runtime/tsconfig.json b/packages/grafana-runtime/tsconfig.json new file mode 100644 index 000000000000..dcc4fd974360 --- /dev/null +++ b/packages/grafana-runtime/tsconfig.json @@ -0,0 +1,19 @@ +{ + "extends": "../../tsconfig.json", + "include": ["src/**/*.ts", "src/**/*.tsx", "../../public/app/types/jquery/*.ts"], + "exclude": ["dist", "node_modules"], + "compilerOptions": { + "rootDirs": ["."], + "module": "esnext", + "outDir": "compiled", + "declaration": true, + "declarationDir": "dist", + "strict": true, + "alwaysStrict": true, + "noImplicitAny": true, + "strictNullChecks": true, + "typeRoots": ["./node_modules/@types", "types"], + "skipLibCheck": true, // Temp workaround for Duplicate identifier tsc errors, + "removeComments": false + } +} diff --git a/packages/grafana-runtime/tslint.json b/packages/grafana-runtime/tslint.json new file mode 100644 index 000000000000..f51293736244 --- /dev/null +++ b/packages/grafana-runtime/tslint.json @@ -0,0 +1,6 @@ +{ + "extends": "../../tslint.json", + 
"rules": { + "import-blacklist": [true, ["^@grafana/runtime.*"]] + } +} diff --git a/packages/grafana-ui/tslint.json b/packages/grafana-ui/tslint.json index 937aa29800e5..1033e1962fc7 100644 --- a/packages/grafana-ui/tslint.json +++ b/packages/grafana-ui/tslint.json @@ -1,6 +1,6 @@ { "extends": "../../tslint.json", "rules": { - "import-blacklist": [true, "moment", ["^@grafana/ui.*"]] + "import-blacklist": [true, "moment", ["^@grafana/ui.*"], ["^@grafana/runtime.*"]] } } diff --git a/public/app/core/components/PluginHelp/PluginHelp.tsx b/public/app/core/components/PluginHelp/PluginHelp.tsx index 677fb254314e..40aed4a6c0c8 100644 --- a/public/app/core/components/PluginHelp/PluginHelp.tsx +++ b/public/app/core/components/PluginHelp/PluginHelp.tsx @@ -1,7 +1,7 @@ import React, { PureComponent } from 'react'; // @ts-ignore import Remarkable from 'remarkable'; -import { getBackendSrv } from '../../services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; interface Props { plugin: { diff --git a/public/app/core/components/SharedPreferences/SharedPreferences.tsx b/public/app/core/components/SharedPreferences/SharedPreferences.tsx index 3b804ba47051..b6d19f1f8af2 100644 --- a/public/app/core/components/SharedPreferences/SharedPreferences.tsx +++ b/public/app/core/components/SharedPreferences/SharedPreferences.tsx @@ -1,9 +1,9 @@ import React, { PureComponent } from 'react'; import { FormLabel, Select } from '@grafana/ui'; -import { getBackendSrv, BackendSrv } from 'app/core/services/backend_srv'; import { DashboardSearchHit, DashboardSearchHitType } from 'app/types'; +import { getBackendSrv } from 'app/core/services/backend_srv'; export interface Props { resourceUri: string; @@ -25,7 +25,7 @@ const timezones = [ ]; export class SharedPreferences extends PureComponent { - backendSrv: BackendSrv = getBackendSrv(); + backendSrv = getBackendSrv(); constructor(props: Props) { super(props); diff --git a/public/app/core/services/AngularLoader.ts 
b/public/app/core/services/AngularLoader.ts index 817e9c9f3985..ea4487ca2967 100644 --- a/public/app/core/services/AngularLoader.ts +++ b/public/app/core/services/AngularLoader.ts @@ -2,13 +2,9 @@ import angular from 'angular'; import coreModule from 'app/core/core_module'; import _ from 'lodash'; -export interface AngularComponent { - destroy(): void; - digest(): void; - getScope(): any; -} +import { AngularComponent, AngularLoader } from '@grafana/runtime'; -export class AngularLoader { +export class AngularLoaderClass implements AngularLoader { /** @ngInject */ constructor(private $compile: any, private $rootScope: any) {} @@ -38,15 +34,4 @@ export class AngularLoader { } } -coreModule.service('angularLoader', AngularLoader); - -let angularLoaderInstance: AngularLoader; - -export function setAngularLoader(pl: AngularLoader) { - angularLoaderInstance = pl; -} - -// away to access it from react -export function getAngularLoader(): AngularLoader { - return angularLoaderInstance; -} +coreModule.service('angularLoader', AngularLoaderClass); diff --git a/public/app/core/services/backend_srv.ts b/public/app/core/services/backend_srv.ts index e14b5f57b288..0f099c93d767 100644 --- a/public/app/core/services/backend_srv.ts +++ b/public/app/core/services/backend_srv.ts @@ -7,8 +7,9 @@ import { DashboardModel } from 'app/features/dashboard/state/DashboardModel'; import { DashboardSearchHit } from 'app/types/search'; import { ContextSrv } from './context_srv'; import { FolderInfo, DashboardDTO } from 'app/types'; +import { BackendSrv as BackendService, getBackendSrv as getBackendService, BackendSrvRequest } from '@grafana/runtime'; -export class BackendSrv { +export class BackendSrv implements BackendService { private inFlightRequests: { [key: string]: Array> } = {}; private HTTP_REQUEST_CANCELED = -1; private noBackendCache: boolean; @@ -83,7 +84,7 @@ export class BackendSrv { throw data; } - request(options: any) { + request(options: BackendSrvRequest) { options.retry = 
options.retry || 0; const requestIsLocal = !options.url.match(/^http/); const firstAttempt = options.retry === 0; @@ -385,16 +386,7 @@ export class BackendSrv { coreModule.service('backendSrv', BackendSrv); -// -// Code below is to expore the service to react components -// - -let singletonInstance: BackendSrv; - -export function setBackendSrv(instance: BackendSrv) { - singletonInstance = instance; -} - +// Used for testing and things that really need BackendSrv export function getBackendSrv(): BackendSrv { - return singletonInstance; + return getBackendService() as BackendSrv; } diff --git a/public/app/features/admin/state/apis.ts b/public/app/features/admin/state/apis.ts index 05321c6e7148..1166fa4dc011 100644 --- a/public/app/features/admin/state/apis.ts +++ b/public/app/features/admin/state/apis.ts @@ -1,4 +1,4 @@ -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; export interface ServerStat { name: string; diff --git a/public/app/features/alerting/AlertTab.tsx b/public/app/features/alerting/AlertTab.tsx index c7d1a8e058d9..2f293010b907 100644 --- a/public/app/features/alerting/AlertTab.tsx +++ b/public/app/features/alerting/AlertTab.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Services & Utils -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import appEvents from 'app/core/app_events'; // Components diff --git a/public/app/features/alerting/StateHistory.tsx b/public/app/features/alerting/StateHistory.tsx index c0c804c8bd1e..2a114ec00d10 100644 --- a/public/app/features/alerting/StateHistory.tsx +++ b/public/app/features/alerting/StateHistory.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; import alertDef from './state/alertDef'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import 
{ DashboardModel } from '../dashboard/state/DashboardModel'; import appEvents from '../../core/app_events'; diff --git a/public/app/features/alerting/TestRuleResult.tsx b/public/app/features/alerting/TestRuleResult.tsx index e8f0551d7073..509ea1721cbd 100644 --- a/public/app/features/alerting/TestRuleResult.tsx +++ b/public/app/features/alerting/TestRuleResult.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; import { JSONFormatter } from 'app/core/components/JSONFormatter/JSONFormatter'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { DashboardModel } from '../dashboard/state/DashboardModel'; import { LoadingPlaceholder } from '@grafana/ui/src'; diff --git a/public/app/features/alerting/state/actions.ts b/public/app/features/alerting/state/actions.ts index 5ec84fe051d4..3ca51d521344 100644 --- a/public/app/features/alerting/state/actions.ts +++ b/public/app/features/alerting/state/actions.ts @@ -1,4 +1,4 @@ -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { AlertRuleDTO, StoreState } from 'app/types'; import { ThunkAction } from 'redux-thunk'; diff --git a/public/app/features/dashboard/components/DashNav/DashNav.tsx b/public/app/features/dashboard/components/DashNav/DashNav.tsx index f95e34d2d2e1..8db88e9ba55e 100644 --- a/public/app/features/dashboard/components/DashNav/DashNav.tsx +++ b/public/app/features/dashboard/components/DashNav/DashNav.tsx @@ -3,7 +3,7 @@ import React, { PureComponent } from 'react'; import { connect } from 'react-redux'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { appEvents } from 'app/core/app_events'; import { PlaylistSrv } from 'app/features/playlist/playlist_srv'; diff --git 
a/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx b/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx index a043bc3e0daf..b724b89d9425 100644 --- a/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx +++ b/public/app/features/dashboard/components/DashboardSettings/DashboardSettings.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; // Types import { DashboardModel } from '../../state/DashboardModel'; diff --git a/public/app/features/dashboard/components/SubMenu/SubMenu.tsx b/public/app/features/dashboard/components/SubMenu/SubMenu.tsx index bb18481d51a7..6f2a60f624ef 100644 --- a/public/app/features/dashboard/components/SubMenu/SubMenu.tsx +++ b/public/app/features/dashboard/components/SubMenu/SubMenu.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; // Types import { DashboardModel } from '../../state/DashboardModel'; diff --git a/public/app/features/dashboard/dashgrid/DashboardPanel.tsx b/public/app/features/dashboard/dashgrid/DashboardPanel.tsx index e076ee5093cd..72977e7ebc15 100644 --- a/public/app/features/dashboard/dashgrid/DashboardPanel.tsx +++ b/public/app/features/dashboard/dashgrid/DashboardPanel.tsx @@ -3,7 +3,7 @@ import React, { PureComponent } from 'react'; import classNames from 'classnames'; // Utils & Services -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { importPanelPlugin } from 'app/features/plugins/plugin_loader'; // Components diff --git 
a/public/app/features/dashboard/panel_editor/GeneralTab.tsx b/public/app/features/dashboard/panel_editor/GeneralTab.tsx index 01a6e39cedba..ddbbb0d88798 100644 --- a/public/app/features/dashboard/panel_editor/GeneralTab.tsx +++ b/public/app/features/dashboard/panel_editor/GeneralTab.tsx @@ -1,6 +1,6 @@ import React, { PureComponent } from 'react'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { EditorTabBody } from './EditorTabBody'; import { PanelModel } from '../state/PanelModel'; diff --git a/public/app/features/dashboard/panel_editor/PanelEditor.tsx b/public/app/features/dashboard/panel_editor/PanelEditor.tsx index 722b211e4ef1..dde5f8440c17 100644 --- a/public/app/features/dashboard/panel_editor/PanelEditor.tsx +++ b/public/app/features/dashboard/panel_editor/PanelEditor.tsx @@ -9,7 +9,7 @@ import { AlertTab } from '../../alerting/AlertTab'; import config from 'app/core/config'; import { store } from 'app/store/store'; import { updateLocation } from 'app/core/actions'; -import { AngularComponent } from 'app/core/services/AngularLoader'; +import { AngularComponent } from '@grafana/runtime'; import { PanelModel } from '../state/PanelModel'; import { DashboardModel } from '../state/DashboardModel'; diff --git a/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx b/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx index 8b5f6b964f24..ca66d84ad784 100644 --- a/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx +++ b/public/app/features/dashboard/panel_editor/QueryEditorRow.tsx @@ -5,7 +5,7 @@ import _ from 'lodash'; // Utils & Services import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { Emitter } from 'app/core/utils/emitter'; 
import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv'; diff --git a/public/app/features/dashboard/panel_editor/VisualizationTab.tsx b/public/app/features/dashboard/panel_editor/VisualizationTab.tsx index 0eb352ca8061..f67532dd3980 100644 --- a/public/app/features/dashboard/panel_editor/VisualizationTab.tsx +++ b/public/app/features/dashboard/panel_editor/VisualizationTab.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Utils & Services -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { connectWithStore } from 'app/core/utils/connectWithReduxStore'; import { StoreState } from 'app/types'; import { updateLocation } from 'app/core/actions'; diff --git a/public/app/features/dashboard/state/actions.ts b/public/app/features/dashboard/state/actions.ts index 50f645095755..7b01975e29d3 100644 --- a/public/app/features/dashboard/state/actions.ts +++ b/public/app/features/dashboard/state/actions.ts @@ -1,5 +1,5 @@ // Services & Utils -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { actionCreatorFactory, noPayloadActionCreatorFactory } from 'app/core/redux'; import { createSuccessNotification } from 'app/core/copy/appNotification'; diff --git a/public/app/features/datasources/settings/PluginSettings.tsx b/public/app/features/datasources/settings/PluginSettings.tsx index a7462cbb45c7..58da3cc55f49 100644 --- a/public/app/features/datasources/settings/PluginSettings.tsx +++ b/public/app/features/datasources/settings/PluginSettings.tsx @@ -8,7 +8,7 @@ import { DataQuery, DataSourceJsonData, } from '@grafana/ui'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; export type GenericDataSourcePlugin = DataSourcePlugin>; diff --git 
a/public/app/features/datasources/state/actions.ts b/public/app/features/datasources/state/actions.ts index a09289500693..9fb003bc0c4c 100644 --- a/public/app/features/datasources/state/actions.ts +++ b/public/app/features/datasources/state/actions.ts @@ -1,6 +1,6 @@ import { ThunkAction } from 'redux-thunk'; import config from '../../../core/config'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { getDatasourceSrv } from 'app/features/plugins/datasource_srv'; import { LayoutMode } from 'app/core/components/LayoutSelector/LayoutSelector'; import { updateLocation, updateNavIndex, UpdateNavIndexAction } from 'app/core/actions'; diff --git a/public/app/features/explore/QueryEditor.tsx b/public/app/features/explore/QueryEditor.tsx index 5689f67ee13b..d29e8a0e8925 100644 --- a/public/app/features/explore/QueryEditor.tsx +++ b/public/app/features/explore/QueryEditor.tsx @@ -2,7 +2,7 @@ import React, { PureComponent } from 'react'; // Services -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { getTimeSrv } from 'app/features/dashboard/services/TimeSrv'; // Types diff --git a/public/app/features/org/state/actions.ts b/public/app/features/org/state/actions.ts index fc8742d12226..214674783cef 100644 --- a/public/app/features/org/state/actions.ts +++ b/public/app/features/org/state/actions.ts @@ -1,5 +1,5 @@ import { Organization, ThunkResult } from 'app/types'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; export enum ActionTypes { LoadOrganization = 'LOAD_ORGANIZATION', diff --git a/public/app/features/plugins/datasource_srv.ts b/public/app/features/plugins/datasource_srv.ts index 1a84355ac028..c1c45f8acc40 100644 --- a/public/app/features/plugins/datasource_srv.ts +++ b/public/app/features/plugins/datasource_srv.ts 
@@ -5,11 +5,12 @@ import coreModule from 'app/core/core_module'; // Services & Utils import config from 'app/core/config'; import { importDataSourcePlugin } from './plugin_loader'; +import { DataSourceSrv as DataSourceService, getDataSourceSrv as getDataSourceService } from '@grafana/runtime'; // Types import { DataSourceApi, DataSourceSelectItem, ScopedVars } from '@grafana/ui/src/types'; -export class DatasourceSrv { +export class DatasourceSrv implements DataSourceService { datasources: { [name: string]: DataSourceApi }; /** @ngInject */ @@ -175,14 +176,8 @@ export class DatasourceSrv { } } -let singleton: DatasourceSrv; - -export function setDatasourceSrv(srv: DatasourceSrv) { - singleton = srv; -} - export function getDatasourceSrv(): DatasourceSrv { - return singleton; + return getDataSourceService() as DatasourceSrv; } coreModule.service('datasourceSrv', DatasourceSrv); diff --git a/public/app/features/plugins/plugin_loader.ts b/public/app/features/plugins/plugin_loader.ts index 9c3bc4ca553b..74986466a49a 100644 --- a/public/app/features/plugins/plugin_loader.ts +++ b/public/app/features/plugins/plugin_loader.ts @@ -29,6 +29,7 @@ import impressionSrv from 'app/core/services/impression_srv'; import builtInPlugins from './built_in_plugins'; import * as d3 from 'd3'; import * as grafanaUI from '@grafana/ui'; +import * as grafanaRT from '@grafana/runtime'; // rxjs import { Observable, Subject } from 'rxjs'; @@ -68,6 +69,7 @@ function exposeToPlugin(name: string, component: any) { } exposeToPlugin('@grafana/ui', grafanaUI); +exposeToPlugin('@grafana/runtime', grafanaRT); exposeToPlugin('lodash', _); exposeToPlugin('moment', moment); exposeToPlugin('jquery', jquery); diff --git a/public/app/features/plugins/state/actions.ts b/public/app/features/plugins/state/actions.ts index 9a1dbde7bffc..da0e14717633 100644 --- a/public/app/features/plugins/state/actions.ts +++ b/public/app/features/plugins/state/actions.ts @@ -1,6 +1,6 @@ import { StoreState } from 'app/types'; 
import { ThunkAction } from 'redux-thunk'; -import { getBackendSrv } from '../../../core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { LayoutMode } from '../../../core/components/LayoutSelector/LayoutSelector'; import { PluginDashboard } from '../../../types/plugins'; import { PluginMeta } from '@grafana/ui'; diff --git a/public/app/features/plugins/wrappers/AppConfigWrapper.tsx b/public/app/features/plugins/wrappers/AppConfigWrapper.tsx index de6c670679d6..eb9afa9cf679 100644 --- a/public/app/features/plugins/wrappers/AppConfigWrapper.tsx +++ b/public/app/features/plugins/wrappers/AppConfigWrapper.tsx @@ -5,7 +5,7 @@ import extend from 'lodash/extend'; import { PluginMeta, AppPlugin, Button } from '@grafana/ui'; -import { AngularComponent, getAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularComponent, getAngularLoader } from '@grafana/runtime'; import { getBackendSrv } from 'app/core/services/backend_srv'; import { ButtonVariant } from '@grafana/ui/src/components/Button/AbstractButton'; import { css } from 'emotion'; diff --git a/public/app/features/teams/state/actions.ts b/public/app/features/teams/state/actions.ts index e2582839233f..cd369b86e922 100644 --- a/public/app/features/teams/state/actions.ts +++ b/public/app/features/teams/state/actions.ts @@ -1,5 +1,5 @@ import { ThunkAction } from 'redux-thunk'; -import { getBackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { StoreState, Team, TeamGroup, TeamMember } from 'app/types'; import { updateNavIndex, UpdateNavIndexAction } from 'app/core/actions'; import { buildNavModel } from './navModel'; diff --git a/public/app/features/users/state/actions.ts b/public/app/features/users/state/actions.ts index 5c50aa290965..3d69e6638596 100644 --- a/public/app/features/users/state/actions.ts +++ b/public/app/features/users/state/actions.ts @@ -1,6 +1,6 @@ import { ThunkAction } from 'redux-thunk'; import 
{ StoreState } from '../../../types'; -import { getBackendSrv } from '../../../core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; import { Invitee, OrgUser } from 'app/types'; export enum ActionTypes { diff --git a/public/app/plugins/datasource/prometheus/specs/completer.test.ts b/public/app/plugins/datasource/prometheus/specs/completer.test.ts index 2580b87f6d7f..8a7b3b8c7c33 100644 --- a/public/app/plugins/datasource/prometheus/specs/completer.test.ts +++ b/public/app/plugins/datasource/prometheus/specs/completer.test.ts @@ -7,7 +7,7 @@ import { TemplateSrv } from 'app/features/templating/template_srv'; import { TimeSrv } from 'app/features/dashboard/services/TimeSrv'; import { IQService } from 'angular'; jest.mock('../datasource'); -jest.mock('app/core/services/backend_srv'); +jest.mock('@grafana/ui'); describe('Prometheus editor completer', () => { function getSessionStub(data) { diff --git a/public/app/plugins/datasource/stackdriver/components/Filter.tsx b/public/app/plugins/datasource/stackdriver/components/Filter.tsx index 6c63f1ed8913..08134789d3da 100644 --- a/public/app/plugins/datasource/stackdriver/components/Filter.tsx +++ b/public/app/plugins/datasource/stackdriver/components/Filter.tsx @@ -3,7 +3,7 @@ import _ from 'lodash'; import appEvents from 'app/core/app_events'; import { QueryMeta } from '../types'; -import { getAngularLoader, AngularComponent } from 'app/core/services/AngularLoader'; +import { getAngularLoader, AngularComponent } from '@grafana/runtime'; import { TemplateSrv } from 'app/features/templating/template_srv'; import StackdriverDatasource from '../datasource'; import '../query_filter_ctrl'; diff --git a/public/app/plugins/datasource/testdata/QueryEditor.tsx b/public/app/plugins/datasource/testdata/QueryEditor.tsx index f14d976ca384..324848400ffa 100644 --- a/public/app/plugins/datasource/testdata/QueryEditor.tsx +++ b/public/app/plugins/datasource/testdata/QueryEditor.tsx @@ -3,7 +3,7 @@ import React, { 
PureComponent } from 'react'; import _ from 'lodash'; // Services & Utils -import { getBackendSrv, BackendSrv } from 'app/core/services/backend_srv'; +import { getBackendSrv } from '@grafana/runtime'; // Components import { FormLabel, Select, SelectOptionItem } from '@grafana/ui'; @@ -21,7 +21,7 @@ interface State { type Props = QueryEditorProps; export class QueryEditor extends PureComponent { - backendSrv: BackendSrv = getBackendSrv(); + backendSrv = getBackendSrv(); state: State = { scenarioList: [], diff --git a/public/app/routes/GrafanaCtrl.ts b/public/app/routes/GrafanaCtrl.ts index a37222091d05..c3c5b71ca68e 100644 --- a/public/app/routes/GrafanaCtrl.ts +++ b/public/app/routes/GrafanaCtrl.ts @@ -5,15 +5,15 @@ import Drop from 'tether-drop'; // Utils and servies import { colors } from '@grafana/ui'; +import { setBackendSrv, BackendSrv, setDataSourceSrv } from '@grafana/runtime'; import config from 'app/core/config'; import coreModule from 'app/core/core_module'; import { profiler } from 'app/core/profiler'; import appEvents from 'app/core/app_events'; -import { BackendSrv, setBackendSrv } from 'app/core/services/backend_srv'; import { TimeSrv, setTimeSrv } from 'app/features/dashboard/services/TimeSrv'; -import { DatasourceSrv, setDatasourceSrv } from 'app/features/plugins/datasource_srv'; +import { DatasourceSrv } from 'app/features/plugins/datasource_srv'; import { KeybindingSrv, setKeybindingSrv } from 'app/core/services/keybindingSrv'; -import { AngularLoader, setAngularLoader } from 'app/core/services/AngularLoader'; +import { AngularLoader, setAngularLoader } from '@grafana/runtime'; import { configureStore } from 'app/store/configureStore'; // Types @@ -37,7 +37,7 @@ export class GrafanaCtrl { // make angular loader service available to react components setAngularLoader(angularLoader); setBackendSrv(backendSrv); - setDatasourceSrv(datasourceSrv); + setDataSourceSrv(datasourceSrv); setTimeSrv(timeSrv); setKeybindingSrv(keybindingSrv); configureStore(); 
diff --git a/scripts/grunt/default_task.js b/scripts/grunt/default_task.js index 95a2522ccfc5..f910941d630d 100644 --- a/scripts/grunt/default_task.js +++ b/scripts/grunt/default_task.js @@ -34,7 +34,8 @@ module.exports = function(grunt) { grunt.registerTask('no-only-tests', function() { var files = grunt.file.expand( 'public/**/*@(_specs|.test).@(ts|js|tsx|jsx)', - 'packages/grafana-ui/**/*@(_specs|.test).@(ts|js|tsx|jsx)' + 'packages/grafana-ui/**/*@(_specs|.test).@(ts|js|tsx|jsx)', + 'packages/grafana-runtime/**/*@(_specs|.test).@(ts|js|tsx|jsx)' ); grepFiles(files, '.only(', 'found only statement in test: '); }); From f43af0e64447b515dc5592713d87d4301f8697da Mon Sep 17 00:00:00 2001 From: Oleg Gaidarenko Date: Mon, 3 Jun 2019 21:09:29 +0300 Subject: [PATCH 49/49] Build: specify build flag for `docker-compose up` (#17411) Otherwise changed data will not get refreshed to the previous state. Which I think is not evident (unless you are familiar with docker-compose). and therefore error-prone (at least it did for me) --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 7025dff42aa2..1d71675dbaba 100644 --- a/Makefile +++ b/Makefile @@ -103,7 +103,7 @@ devenv: devenv-down (rm -rf docker-compose.yaml; exit 1) @cd devenv; \ - docker-compose up -d + docker-compose up -d --build endif # drop down the envs