Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Request to reintroduce Key/Keys interfaces #93

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
vendor/
coverage.txt
23 changes: 16 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,18 +1,18 @@
# DataLoader
[![GoDoc](https://godoc.org/gopkg.in/graph-gophers/dataloader.v3?status.svg)](https://godoc.org/github.com/graph-gophers/dataloader)
[![GoDoc](https://godoc.org/gopkg.in/graph-gophers/dataloader.v8?status.svg)](https://godoc.org/github.com/graph-gophers/dataloader)
[![Build Status](https://travis-ci.org/graph-gophers/dataloader.svg?branch=master)](https://travis-ci.org/graph-gophers/dataloader)

This is an implementation of [Facebook's DataLoader](https://github.com/facebook/dataloader) in Golang.

## Install
`go get -u github.com/graph-gophers/dataloader`
`go get -u github.com/graph-gophers/dataloader/v8`

## Usage
```go
// setup batch function - the first Context passed to the Loader's Load
// function will be provided when the batch function is called.
batchFn := func(ctx context.Context, keys dataloader.Keys) []*dataloader.Result {
var results []*dataloader.Result
batchFn := func(ctx context.Context, keys dataloader.Keys[string]) []*dataloader.Result[any] {
var results []*dataloader.Result[any]
// do some async work to get data for specified keys
// append to this list resolved values
return results
Expand All @@ -32,7 +32,7 @@ loader := dataloader.NewBatchedLoader(batchFn)
* The first context passed to Load is the object that will be passed
* to the batch function.
*/
thunk := loader.Load(context.TODO(), dataloader.StringKey("key1")) // StringKey is a convenience method that make wraps string to implement `Key` interface
thunk := loader.Load(context.TODO(), dataloader.KeyOf("key1")) // KeyOf is a convenience method that wraps any comparable type to implement `Key` interface
result, err := thunk()
if err != nil {
// handle data error
Expand All @@ -42,10 +42,19 @@ log.Printf("value: %#v", result)
```

### Don't need/want to use context?
You're welcome to install the v1 version of this library.
You're welcome to install the `v1` version of this library.

### Don't need/want to use type parameters?
Please feel free to use `v6` version of this library.

### Don't need/want to use Key/Keys interface?
Just use the `v7` version of this library. This completely removes the need for the `Key` interface, but it limits
the key type parameter to `comparable` types only, whereas `v8` allows `any` type, as long as it is wrapped as `Key`,
and exports itself as `string`.

## Cache
This implementation contains a very basic cache that is intended only to be used for short lived DataLoaders (i.e. DataLoaders that only exist for the life of an http request). You may use your own implementation if you want.
This implementation contains a very basic cache that is intended only to be used for short-lived DataLoaders
(i.e. DataLoaders that only exist for the life of an HTTP request). You may use your own implementation if you want.

> It also has a `NoCache` type that implements the cache interface, but all of its methods are no-ops — use it if you do not wish to cache anything.

Expand Down
18 changes: 10 additions & 8 deletions cache.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,26 +3,28 @@ package dataloader
import "context"

// The Cache interface. If a custom cache is provided, it must implement this interface.
type Cache[K comparable, V any] interface {
Get(context.Context, K) (Thunk[V], bool)
Set(context.Context, K, Thunk[V])
Delete(context.Context, K) bool
type Cache[K any, V any] interface {
Get(context.Context, Key[K]) (Thunk[V], bool)
Set(context.Context, Key[K], Thunk[V])
Delete(context.Context, Key[K]) bool
Clear()
}

var _ Cache[any, any] = (*NoCache[any, any])(nil)

// NoCache implements Cache interface where all methods are noops.
// This is useful for when you don't want to cache items but still
// want to use a data loader
type NoCache[K comparable, V any] struct{}
type NoCache[K any, V any] struct{}

// Get is a NOOP
func (c *NoCache[K, V]) Get(context.Context, K) (Thunk[V], bool) { return nil, false }
func (c *NoCache[K, V]) Get(context.Context, Key[K]) (Thunk[V], bool) { return nil, false }

// Set is a NOOP
func (c *NoCache[K, V]) Set(context.Context, K, Thunk[V]) { return }
func (c *NoCache[K, V]) Set(context.Context, Key[K], Thunk[V]) { return }

// Delete is a NOOP
func (c *NoCache[K, V]) Delete(context.Context, K) bool { return false }
func (c *NoCache[K, V]) Delete(context.Context, Key[K]) bool { return false }

// Clear is a NOOP
func (c *NoCache[K, V]) Clear() { return }
59 changes: 29 additions & 30 deletions dataloader.go
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
// Package dataloader is an implimentation of facebook's dataloader in go.
// Package dataloader is an implementation of facebook's dataloader in go.
// See https://github.com/facebook/dataloader for more information
package dataloader

Expand All @@ -20,19 +20,19 @@ import (
// used in long-lived applications or those which serve many users with
// different access permissions and consider creating a new instance per
// web request.
type Interface[K comparable, V any] interface {
Load(context.Context, K) Thunk[V]
LoadMany(context.Context, []K) ThunkMany[V]
Clear(context.Context, K) Interface[K, V]
type Interface[K any, V any] interface {
Load(context.Context, Key[K]) Thunk[V]
LoadMany(context.Context, Keys[K]) ThunkMany[V]
Clear(context.Context, Key[K]) Interface[K, V]
ClearAll() Interface[K, V]
Prime(ctx context.Context, key K, value V) Interface[K, V]
Prime(ctx context.Context, key Key[K], value V) Interface[K, V]
}

// BatchFunc is a function, which when given a slice of keys (string), returns a slice of `results`.
// It's important that the length of the input keys matches the length of the output results.
//
// The keys passed to this function are guaranteed to be unique
type BatchFunc[K comparable, V any] func(context.Context, []K) []*Result[V]
type BatchFunc[K any, V any] func(context.Context, Keys[K]) []*Result[V]

// Result is the data structure that a BatchFunc returns.
// It contains the resolved data, and any errors that may have occurred while fetching the data.
Expand Down Expand Up @@ -61,7 +61,7 @@ func (p *PanicErrorWrapper) Error() string {
}

// Loader implements the dataloader.Interface.
type Loader[K comparable, V any] struct {
type Loader[K any, V any] struct {
// the batch function to be used by this loader
batchFn BatchFunc[K, V]

Expand Down Expand Up @@ -111,46 +111,46 @@ type Thunk[V any] func() (V, error)
type ThunkMany[V any] func() ([]V, []error)

// type used to on input channel
type batchRequest[K comparable, V any] struct {
key K
type batchRequest[K any, V any] struct {
key Key[K]
channel chan *Result[V]
}

// Option allows for configuration of Loader fields.
type Option[K comparable, V any] func(*Loader[K, V])
type Option[K any, V any] func(*Loader[K, V])

// WithCache sets the BatchedLoader cache. Defaults to InMemoryCache if a Cache is not set.
func WithCache[K comparable, V any](c Cache[K, V]) Option[K, V] {
func WithCache[K any, V any](c Cache[K, V]) Option[K, V] {
return func(l *Loader[K, V]) {
l.cache = c
}
}

// WithBatchCapacity sets the batch capacity. Default is 0 (unbounded).
func WithBatchCapacity[K comparable, V any](c int) Option[K, V] {
func WithBatchCapacity[K any, V any](c int) Option[K, V] {
return func(l *Loader[K, V]) {
l.batchCap = c
}
}

// WithInputCapacity sets the input capacity. Default is 1000.
func WithInputCapacity[K comparable, V any](c int) Option[K, V] {
func WithInputCapacity[K any, V any](c int) Option[K, V] {
return func(l *Loader[K, V]) {
l.inputCap = c
}
}

// WithWait sets the amount of time to wait before triggering a batch.
// Default duration is 16 milliseconds.
func WithWait[K comparable, V any](d time.Duration) Option[K, V] {
func WithWait[K any, V any](d time.Duration) Option[K, V] {
return func(l *Loader[K, V]) {
l.wait = d
}
}

// WithClearCacheOnBatch allows batching of items but no long term caching.
// It accomplishes this by clearing the cache after each batch operation.
func WithClearCacheOnBatch[K comparable, V any]() Option[K, V] {
func WithClearCacheOnBatch[K any, V any]() Option[K, V] {
return func(l *Loader[K, V]) {
l.cacheLock.Lock()
l.clearCacheOnBatch = true
Expand All @@ -159,21 +159,21 @@ func WithClearCacheOnBatch[K comparable, V any]() Option[K, V] {
}

// withSilentLogger turns of log messages. It's used by the tests
func withSilentLogger[K comparable, V any]() Option[K, V] {
func withSilentLogger[K any, V any]() Option[K, V] {
return func(l *Loader[K, V]) {
l.silent = true
}
}

// WithTracer allows tracing of calls to Load and LoadMany
func WithTracer[K comparable, V any](tracer Tracer[K, V]) Option[K, V] {
func WithTracer[K any, V any](tracer Tracer[K, V]) Option[K, V] {
return func(l *Loader[K, V]) {
l.tracer = tracer
}
}

// NewBatchedLoader constructs a new Loader with given options.
func NewBatchedLoader[K comparable, V any](batchFn BatchFunc[K, V], opts ...Option[K, V]) *Loader[K, V] {
func NewBatchedLoader[K any, V any](batchFn BatchFunc[K, V], opts ...Option[K, V]) *Loader[K, V] {
loader := &Loader[K, V]{
batchFn: batchFn,
inputCap: 1000,
Expand All @@ -197,10 +197,10 @@ func NewBatchedLoader[K comparable, V any](batchFn BatchFunc[K, V], opts ...Opti
return loader
}

// Load load/resolves the given key, returning a channel that will contain the value and error.
// Load loads/resolves the given key, returning a channel that will contain the value and error.
// The first context passed to this function within a given batch window will be provided to
// the registered BatchFunc.
func (l *Loader[K, V]) Load(originalContext context.Context, key K) Thunk[V] {
func (l *Loader[K, V]) Load(originalContext context.Context, key Key[K]) Thunk[V] {
ctx, finish := l.tracer.TraceLoad(originalContext, key)

c := make(chan *Result[V], 1)
Expand Down Expand Up @@ -242,8 +242,7 @@ func (l *Loader[K, V]) Load(originalContext context.Context, key K) Thunk[V] {
l.cache.Set(ctx, key, thunk)
l.cacheLock.Unlock()

// this is sent to batch fn. It contains the key and the channel to return the
// the result on
// this is sent to batch fn. It contains the key and the channel to return the result on
req := &batchRequest[K, V]{key, c}

l.batchLock.Lock()
Expand Down Expand Up @@ -279,8 +278,8 @@ func (l *Loader[K, V]) Load(originalContext context.Context, key K) Thunk[V] {
return thunk
}

// LoadMany loads mulitiple keys, returning a thunk (type: ThunkMany) that will resolve the keys passed in.
func (l *Loader[K, V]) LoadMany(originalContext context.Context, keys []K) ThunkMany[V] {
// LoadMany loads multiple keys, returning a thunk (type: ThunkMany) that will resolve the keys passed in.
func (l *Loader[K, V]) LoadMany(originalContext context.Context, keys Keys[K]) ThunkMany[V] {
ctx, finish := l.tracer.TraceLoadMany(originalContext, keys)

var (
Expand Down Expand Up @@ -347,8 +346,8 @@ func (l *Loader[K, V]) LoadMany(originalContext context.Context, keys []K) Thunk
return thunkMany
}

// Clear clears the value at `key` from the cache, it it exsits. Returs self for method chaining
func (l *Loader[K, V]) Clear(ctx context.Context, key K) Interface[K, V] {
// Clear clears the value at `key` from the cache, if it exists. Returns self for method chaining
func (l *Loader[K, V]) Clear(ctx context.Context, key Key[K]) Interface[K, V] {
l.cacheLock.Lock()
l.cache.Delete(ctx, key)
l.cacheLock.Unlock()
Expand All @@ -366,7 +365,7 @@ func (l *Loader[K, V]) ClearAll() Interface[K, V] {

// Prime adds the provided key and value to the cache. If the key already exists, no change is made.
// Returns self for method chaining
func (l *Loader[K, V]) Prime(ctx context.Context, key K, value V) Interface[K, V] {
func (l *Loader[K, V]) Prime(ctx context.Context, key Key[K], value V) Interface[K, V] {
if _, ok := l.cache.Get(ctx, key); !ok {
thunk := func() (V, error) {
return value, nil
Expand All @@ -385,7 +384,7 @@ func (l *Loader[K, V]) reset() {
}
}

type batcher[K comparable, V any] struct {
type batcher[K any, V any] struct {
input chan *batchRequest[K, V]
batchFn BatchFunc[K, V]
finished bool
Expand Down Expand Up @@ -415,7 +414,7 @@ func (b *batcher[K, V]) end() {
// execute the batch of all items in queue
func (b *batcher[K, V]) batch(originalContext context.Context) {
var (
keys = make([]K, 0)
keys = make(Keys[K], 0)
reqs = make([]*batchRequest[K, V], 0)
items = make([]*Result[V], 0)
panicErr interface{}
Expand Down