graphql: update gqlgen to 0.5.1

fix #6
Author: Michael Muré
Date:   2018-09-14 12:40:31 +02:00
Parent: a3fc9abb92
Commit: b478cd1bcb
Signature: no known key found for this signature in database (GPG Key ID: A4457C029293126F)
171 changed files with 15679 additions and 6375 deletions

Gopkg.lock (generated)

@@ -1,6 +1,30 @@
 # This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
 
+[[projects]]
+  digest = "1:2af7a7a1fcb231f9cac066e51c629370834819ac3a776dc0a3f577d28cb1fc28"
+  name = "github.com/99designs/gqlgen"
+  packages = [
+    "codegen",
+    "codegen/templates",
+    "complexity",
+    "graphql",
+    "graphql/introspection",
+    "handler",
+    "internal/gopath",
+  ]
+  pruneopts = "UT"
+  revision = "636435b68700211441303f1a5ed92f3768ba5774"
+  version = "v0.5.1"
+
+[[projects]]
+  digest = "1:897d91c431ce469d35a5e6030e60e617dccd9a0e95bdffa6a80594f5c5800d29"
+  name = "github.com/agnivade/levenshtein"
+  packages = ["."]
+  pruneopts = "UT"
+  revision = "3d21ba515fe27b856f230847e856431ae1724adc"
+  version = "v1.0.0"
+
 [[projects]]
   branch = "master"
   digest = "1:f438d91be142877c3ad83157992c91de787ddfbddcc2a7da1ef6ef61606cadc4"
@@ -65,6 +89,17 @@
   revision = "ea4d1f681babbce9545c9c5f3d5194a789c89f5b"
   version = "v1.2.0"
 
+[[projects]]
+  digest = "1:8ec8d88c248041a6df5f6574b87bc00e7e0b493881dad2e7ef47b11dc69093b5"
+  name = "github.com/hashicorp/golang-lru"
+  packages = [
+    ".",
+    "simplelru",
+  ]
+  pruneopts = "UT"
+  revision = "20f1fb78b0740ba8c3cb143a61e86ba5c8669768"
+  version = "v0.5.0"
+
 [[projects]]
   branch = "master"
   digest = "1:22725c01ecd8ed0c0f0078944305a57053340d92878b02db925c660cc4accf64"
@@ -192,23 +227,20 @@
   version = "v1.0.1"
 
 [[projects]]
-  digest = "1:d1ef15a6bc267ffb9e3ac790591103e99d4662f5867a99f9182c43a2016884e2"
-  name = "github.com/vektah/gqlgen"
+  branch = "master"
+  digest = "1:8150271279cc160a41e9aabfee8118c20a0e88894a25b2577f93e7c868e5259c"
+  name = "github.com/vektah/gqlparser"
   packages = [
-    "codegen",
-    "codegen/templates",
-    "graphql",
-    "handler",
-    "neelance/common",
-    "neelance/errors",
-    "neelance/introspection",
-    "neelance/query",
-    "neelance/schema",
-    "neelance/validation",
+    ".",
+    "ast",
+    "gqlerror",
+    "lexer",
+    "parser",
+    "validator",
+    "validator/rules",
   ]
   pruneopts = "UT"
-  revision = "381b34691fd93829e50ba8821412dc3467ec4821"
-  version = "0.3.0"
+  revision = "14e83ae06ec152e6d0afb9766a00e0c0918aa8fc"
 
 [[projects]]
   branch = "master"
@@ -245,6 +277,10 @@
   analyzer-name = "dep"
   analyzer-version = 1
   input-imports = [
+    "github.com/99designs/gqlgen/codegen",
+    "github.com/99designs/gqlgen/graphql",
+    "github.com/99designs/gqlgen/graphql/introspection",
+    "github.com/99designs/gqlgen/handler",
     "github.com/cheekybits/genny/generic",
     "github.com/dustin/go-humanize",
     "github.com/fatih/color",
@@ -258,12 +294,8 @@
     "github.com/skratchdot/open-golang/open",
     "github.com/spf13/cobra",
     "github.com/spf13/cobra/doc",
-    "github.com/vektah/gqlgen/codegen",
-    "github.com/vektah/gqlgen/graphql",
-    "github.com/vektah/gqlgen/handler",
-    "github.com/vektah/gqlgen/neelance/introspection",
-    "github.com/vektah/gqlgen/neelance/query",
-    "github.com/vektah/gqlgen/neelance/schema",
+    "github.com/vektah/gqlparser",
+    "github.com/vektah/gqlparser/ast",
   ]
   solver-name = "gps-cdcl"
   solver-version = 1


@@ -57,8 +57,8 @@
   branch = "master"
 
 [[constraint]]
-  name = "github.com/vektah/gqlgen"
-  version = "0.3.0"
+  name = "github.com/99designs/gqlgen"
+  version = "0.5.1"
 
 [[constraint]]
   name = "github.com/jroimartin/gocui"


@@ -12,6 +12,7 @@ import (
 	"os/signal"
 	"time"
 
+	"github.com/99designs/gqlgen/handler"
 	"github.com/MichaelMure/git-bug/graphql"
 	"github.com/MichaelMure/git-bug/repository"
 	"github.com/MichaelMure/git-bug/util/git"
@@ -20,7 +21,6 @@ import (
 	"github.com/phayes/freeport"
 	"github.com/skratchdot/open-golang/open"
 	"github.com/spf13/cobra"
-	"github.com/vektah/gqlgen/handler"
 )
 
 var port int


@@ -9,7 +9,7 @@ import (
 	"os"
 	"path"
 
-	"github.com/vektah/gqlgen/codegen"
+	"github.com/99designs/gqlgen/codegen"
 )
 
 func main() {
@@ -22,9 +22,9 @@ func main() {
 	fmt.Println("Generating graphql code ...")
 
-	log.SetOutput(ioutil.Discard)
+	log.SetOutput(os.Stdout)
 
-	config, err := codegen.LoadDefaultConfig()
+	config, err := codegen.LoadConfigFromDefaultLocations()
 	if err != nil {
 		log.Fatal(err)
 	}
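For reference, a minimal sketch of a go:generate-style entry point driving the gqlgen 0.5.1 codegen API adopted here. The explicit schema read is an assumption based on the vendored Config fields (SchemaFilename, SchemaStr) shown further down in this commit; it is not a copy of this repository's actual gen file.

package main

import (
	"io/ioutil"
	"log"

	"github.com/99designs/gqlgen/codegen"
)

func main() {
	// Locate .gqlgen.yml / gqlgen.yml / gqlgen.yaml in the current or a parent directory.
	config, err := codegen.LoadConfigFromDefaultLocations()
	if err != nil {
		log.Fatal(err)
	}

	// The generator expects the schema source as a string (Config.SchemaStr).
	schema, err := ioutil.ReadFile(config.SchemaFilename)
	if err != nil {
		log.Fatal(err)
	}
	config.SchemaStr = string(schema)

	// Emit the executable schema, the models and, if configured, resolver stubs.
	if err := codegen.Generate(*config); err != nil {
		log.Fatal(err)
	}
}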

File diff suppressed because it is too large.


@@ -3,10 +3,10 @@
 package graphql
 
 import (
+	"github.com/99designs/gqlgen/handler"
 	"github.com/MichaelMure/git-bug/graphql/graph"
 	"github.com/MichaelMure/git-bug/graphql/resolvers"
 	"github.com/MichaelMure/git-bug/repository"
-	"github.com/vektah/gqlgen/handler"
 	"net/http"
 )
 
@@ -25,7 +25,11 @@ func NewHandler(repo repository.Repo) (Handler, error) {
 		return Handler{}, err
 	}
 
-	h.HandlerFunc = handler.GraphQL(graph.NewExecutableSchema(h.RootResolver))
+	config := graph.Config{
+		Resolvers: h.RootResolver,
+	}
+
+	h.HandlerFunc = handler.GraphQL(graph.NewExecutableSchema(config))
 
 	return h, nil
 }
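For illustration, the resulting handler can be mounted like any http.Handler. The package name, route names and the use of handler.Playground below are assumptions made for the example; they are not part of this change.

package webui // hypothetical package name for the example

import (
	"log"
	"net/http"

	"github.com/99designs/gqlgen/handler"
	"github.com/MichaelMure/git-bug/graphql"
	"github.com/MichaelMure/git-bug/repository"
)

func serve(repo repository.Repo) {
	// NewHandler builds the executable schema from the root resolver (see the diff above).
	h, err := graphql.NewHandler(repo)
	if err != nil {
		log.Fatal(err)
	}

	// Hypothetical routes: an interactive playground plus the query endpoint.
	http.Handle("/playground", handler.Playground("git-bug", "/graphql"))
	http.Handle("/graphql", h)

	log.Fatal(http.ListenAndServe("127.0.0.1:8080", nil))
}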


@@ -1,4 +1,4 @@
-// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.
+// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
 
 package models
 
@@ -10,37 +10,48 @@ import (
 	bug "github.com/MichaelMure/git-bug/bug"
 )
 
+// An object that has an author.
 type Authored interface{}
 
+// The connection type for Bug.
 type BugConnection struct {
 	Edges      []BugEdge      `json:"edges"`
 	Nodes      []bug.Snapshot `json:"nodes"`
 	PageInfo   PageInfo       `json:"pageInfo"`
 	TotalCount int            `json:"totalCount"`
 }
 
+// An edge in a connection.
 type BugEdge struct {
 	Cursor string       `json:"cursor"`
 	Node   bug.Snapshot `json:"node"`
 }
 
 type CommentConnection struct {
 	Edges      []CommentEdge `json:"edges"`
 	Nodes      []bug.Comment `json:"nodes"`
 	PageInfo   PageInfo      `json:"pageInfo"`
 	TotalCount int           `json:"totalCount"`
 }
 
 type CommentEdge struct {
 	Cursor string      `json:"cursor"`
 	Node   bug.Comment `json:"node"`
 }
 
 type OperationConnection struct {
 	Edges      []OperationEdge `json:"edges"`
 	Nodes      []bug.Operation `json:"nodes"`
 	PageInfo   PageInfo        `json:"pageInfo"`
 	TotalCount int             `json:"totalCount"`
 }
 
 type OperationEdge struct {
 	Cursor string        `json:"cursor"`
 	Node   bug.Operation `json:"node"`
 }
 
+// Information about pagination in a connection.
 type PageInfo struct {
 	HasNextPage     bool `json:"hasNextPage"`
 	HasPreviousPage bool `json:"hasPreviousPage"`


@@ -2,24 +2,24 @@ scalar Time
 scalar Label
 scalar Hash
 
-# Information about pagination in a connection.
+"""Information about pagination in a connection."""
 type PageInfo {
-  # When paginating forwards, are there more items?
+  """When paginating forwards, are there more items?"""
   hasNextPage: Boolean!
-  # When paginating backwards, are there more items?
+  """When paginating backwards, are there more items?"""
   hasPreviousPage: Boolean!
-  # When paginating backwards, the cursor to continue.
+  """When paginating backwards, the cursor to continue."""
  startCursor: String!
-  # When paginating forwards, the cursor to continue.
+  """When paginating forwards, the cursor to continue."""
  endCursor: String!
 }
 
-# Represents an person in a git object.
+"""Represents an person in a git object."""
 type Person {
-  # The email of the person.
+  """The email of the person."""
  email: String
-  # The name of the person.
+  """The name of the person."""
  name: String
 }
@@ -35,15 +35,15 @@ type CommentEdge {
   node: Comment!
 }
 
-# Represents a comment on a bug.
+"""Represents a comment on a bug."""
 type Comment implements Authored {
-  # The author of this comment.
+  """The author of this comment."""
  author: Person!
-  # The message of this comment.
+  """The message of this comment."""
  message: String!
-  # All media's hash referenced in this comment
+  """All media's hash referenced in this comment"""
  files: [Hash!]!
 }
@@ -52,9 +52,9 @@ enum Status {
   CLOSED
 }
 
-# An object that has an author.
+"""An object that has an author."""
 interface Authored {
-  # The author of this object.
+  """The author of this object."""
  author: Person!
 }
@@ -70,15 +70,15 @@ type OperationEdge {
   node: Operation!
 }
 
-# An operation applied to a bug.
+"""An operation applied to a bug."""
 interface Operation {
-  # The operations author.
+  """The operations author."""
  author: Person!
-  # The datetime when this operation was issued.
+  """The datetime when this operation was issued."""
  date: Time!
 }
 
-type CreateOperation implements Operation, Authored {
+type CreateOperation implements Operation & Authored {
   author: Person!
   date: Time!
@@ -87,7 +87,7 @@ type CreateOperation implements Operation, Authored {
   files: [Hash!]!
 }
 
-type SetTitleOperation implements Operation, Authored {
+type SetTitleOperation implements Operation & Authored {
   author: Person!
   date: Time!
@@ -95,7 +95,7 @@ type SetTitleOperation implements Operation, Authored {
   was: String!
 }
 
-type AddCommentOperation implements Operation, Authored {
+type AddCommentOperation implements Operation & Authored {
   author: Person!
   date: Time!
@@ -103,14 +103,14 @@ type AddCommentOperation implements Operation, Authored {
   files: [Hash!]!
 }
 
-type SetStatusOperation implements Operation, Authored {
+type SetStatusOperation implements Operation & Authored {
   author: Person!
   date: Time!
   status: Status!
 }
 
-type LabelChangeOperation implements Operation, Authored {
+type LabelChangeOperation implements Operation & Authored {
   author: Person!
   date: Time!
@@ -118,22 +118,22 @@ type LabelChangeOperation implements Operation, Authored {
   removed: [Label!]!
 }
 
-# The connection type for Bug.
+"""The connection type for Bug."""
 type BugConnection {
-  # A list of edges.
+  """A list of edges."""
  edges: [BugEdge!]!
   nodes: [Bug!]!
-  # Information to aid in pagination.
+  """Information to aid in pagination."""
  pageInfo: PageInfo!
-  # Identifies the total count of items in the connection.
+  """Identifies the total count of items in the connection."""
  totalCount: Int!
 }
 
-# An edge in a connection.
+"""An edge in a connection."""
 type BugEdge {
-  # A cursor for use in pagination.
+  """A cursor for use in pagination."""
  cursor: String!
-  # The item at the end of the edge.
+  """The item at the end of the edge."""
  node: Bug!
 }
@@ -148,39 +148,39 @@ type Bug {
   lastEdit: Time!
 
   comments(
-    # Returns the elements in the list that come after the specified cursor.
+    """Returns the elements in the list that come after the specified cursor."""
    after: String
-    # Returns the elements in the list that come before the specified cursor.
+    """Returns the elements in the list that come before the specified cursor."""
    before: String
-    # Returns the first _n_ elements from the list.
+    """Returns the first _n_ elements from the list."""
    first: Int
-    # Returns the last _n_ elements from the list.
+    """Returns the last _n_ elements from the list."""
    last: Int
   ): CommentConnection!
 
   operations(
-    # Returns the elements in the list that come after the specified cursor.
+    """Returns the elements in the list that come after the specified cursor."""
    after: String
-    # Returns the elements in the list that come before the specified cursor.
+    """Returns the elements in the list that come before the specified cursor."""
    before: String
-    # Returns the first _n_ elements from the list.
+    """Returns the first _n_ elements from the list."""
    first: Int
-    # Returns the last _n_ elements from the list.
+    """Returns the last _n_ elements from the list."""
    last: Int
   ): OperationConnection!
 }
 
 type Repository {
   allBugs(
-    # Returns the elements in the list that come after the specified cursor.
+    """Returns the elements in the list that come after the specified cursor."""
    after: String
-    # Returns the elements in the list that come before the specified cursor.
+    """Returns the elements in the list that come before the specified cursor."""
    before: String
-    # Returns the first _n_ elements from the list.
+    """Returns the first _n_ elements from the list."""
    first: Int
-    # Returns the last _n_ elements from the list.
+    """Returns the last _n_ elements from the list."""
    last: Int
-    # A query to select and order bugs
+    """A query to select and order bugs"""
    query: String
   ): BugConnection!
 
   bug(prefix: String!): Bug

vendor/github.com/99designs/gqlgen/codegen/build.go (generated, vendored, new file, 214 lines)

@@ -0,0 +1,214 @@
package codegen
import (
"fmt"
"go/build"
"go/types"
"os"
"github.com/pkg/errors"
"golang.org/x/tools/go/loader"
)
type Build struct {
PackageName string
Objects Objects
Inputs Objects
Interfaces []*Interface
Imports []*Import
QueryRoot *Object
MutationRoot *Object
SubscriptionRoot *Object
SchemaRaw string
SchemaFilename string
Directives []*Directive
}
type ModelBuild struct {
PackageName string
Imports []*Import
Models []Model
Enums []Enum
}
type ResolverBuild struct {
PackageName string
Imports []*Import
ResolverType string
Objects Objects
ResolverFound bool
}
type ServerBuild struct {
PackageName string
Imports []*Import
ExecPackageName string
ResolverPackageName string
}
// Create a list of models that need to be generated
func (cfg *Config) models() (*ModelBuild, error) {
namedTypes := cfg.buildNamedTypes()
progLoader := newLoader(namedTypes, true)
prog, err := progLoader.Load()
if err != nil {
return nil, errors.Wrap(err, "loading failed")
}
imports := buildImports(namedTypes, cfg.Model.Dir())
cfg.bindTypes(imports, namedTypes, cfg.Model.Dir(), prog)
models, err := cfg.buildModels(namedTypes, prog, imports)
if err != nil {
return nil, err
}
return &ModelBuild{
PackageName: cfg.Model.Package,
Models: models,
Enums: cfg.buildEnums(namedTypes),
Imports: imports.finalize(),
}, nil
}
// bind a schema together with some code to generate a Build
func (cfg *Config) resolver() (*ResolverBuild, error) {
progLoader := newLoader(cfg.buildNamedTypes(), true)
progLoader.Import(cfg.Resolver.ImportPath())
prog, err := progLoader.Load()
if err != nil {
return nil, err
}
destDir := cfg.Resolver.Dir()
namedTypes := cfg.buildNamedTypes()
imports := buildImports(namedTypes, destDir)
imports.add(cfg.Exec.ImportPath())
imports.add("github.com/99designs/gqlgen/handler") // avoid import github.com/vektah/gqlgen/handler
cfg.bindTypes(imports, namedTypes, destDir, prog)
objects, err := cfg.buildObjects(namedTypes, prog, imports)
if err != nil {
return nil, err
}
def, _ := findGoType(prog, cfg.Resolver.ImportPath(), cfg.Resolver.Type)
resolverFound := def != nil
return &ResolverBuild{
PackageName: cfg.Resolver.Package,
Imports: imports.finalize(),
Objects: objects,
ResolverType: cfg.Resolver.Type,
ResolverFound: resolverFound,
}, nil
}
func (cfg *Config) server(destDir string) *ServerBuild {
imports := buildImports(NamedTypes{}, destDir)
imports.add(cfg.Exec.ImportPath())
imports.add(cfg.Resolver.ImportPath())
return &ServerBuild{
PackageName: cfg.Resolver.Package,
Imports: imports.finalize(),
ExecPackageName: cfg.Exec.Package,
ResolverPackageName: cfg.Resolver.Package,
}
}
// bind a schema together with some code to generate a Build
func (cfg *Config) bind() (*Build, error) {
namedTypes := cfg.buildNamedTypes()
progLoader := newLoader(namedTypes, true)
prog, err := progLoader.Load()
if err != nil {
return nil, errors.Wrap(err, "loading failed")
}
imports := buildImports(namedTypes, cfg.Exec.Dir())
cfg.bindTypes(imports, namedTypes, cfg.Exec.Dir(), prog)
objects, err := cfg.buildObjects(namedTypes, prog, imports)
if err != nil {
return nil, err
}
inputs, err := cfg.buildInputs(namedTypes, prog, imports)
if err != nil {
return nil, err
}
directives, err := cfg.buildDirectives(namedTypes)
if err != nil {
return nil, err
}
b := &Build{
PackageName: cfg.Exec.Package,
Objects: objects,
Interfaces: cfg.buildInterfaces(namedTypes, prog),
Inputs: inputs,
Imports: imports.finalize(),
SchemaRaw: cfg.SchemaStr,
SchemaFilename: cfg.SchemaFilename,
Directives: directives,
}
if cfg.schema.Query != nil {
b.QueryRoot = b.Objects.ByName(cfg.schema.Query.Name)
} else {
return b, fmt.Errorf("query entry point missing")
}
if cfg.schema.Mutation != nil {
b.MutationRoot = b.Objects.ByName(cfg.schema.Mutation.Name)
}
if cfg.schema.Subscription != nil {
b.SubscriptionRoot = b.Objects.ByName(cfg.schema.Subscription.Name)
}
return b, nil
}
func (cfg *Config) validate() error {
progLoader := newLoader(cfg.buildNamedTypes(), false)
_, err := progLoader.Load()
return err
}
func newLoader(namedTypes NamedTypes, allowErrors bool) loader.Config {
conf := loader.Config{}
if allowErrors {
conf = loader.Config{
AllowErrors: true,
TypeChecker: types.Config{
Error: func(e error) {},
},
}
}
for _, imp := range ambientImports {
conf.Import(imp)
}
for _, imp := range namedTypes {
if imp.Package != "" {
conf.Import(imp.Package)
}
}
return conf
}
func resolvePkg(pkgName string) (string, error) {
cwd, _ := os.Getwd()
pkg, err := build.Default.Import(pkgName, cwd, build.FindOnly)
if err != nil {
return "", err
}
return pkg.ImportPath, nil
}

vendor/github.com/99designs/gqlgen/codegen/codegen.go (generated, vendored, new file, 174 lines)

@@ -0,0 +1,174 @@
package codegen
import (
"log"
"os"
"path/filepath"
"regexp"
"syscall"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/pkg/errors"
"github.com/vektah/gqlparser"
"github.com/vektah/gqlparser/ast"
"github.com/vektah/gqlparser/gqlerror"
)
func Generate(cfg Config) error {
if err := cfg.normalize(); err != nil {
return err
}
_ = syscall.Unlink(cfg.Exec.Filename)
_ = syscall.Unlink(cfg.Model.Filename)
modelsBuild, err := cfg.models()
if err != nil {
return errors.Wrap(err, "model plan failed")
}
if len(modelsBuild.Models) > 0 || len(modelsBuild.Enums) > 0 {
if err = templates.RenderToFile("models.gotpl", cfg.Model.Filename, modelsBuild); err != nil {
return err
}
for _, model := range modelsBuild.Models {
modelCfg := cfg.Models[model.GQLType]
modelCfg.Model = cfg.Model.ImportPath() + "." + model.GoType
cfg.Models[model.GQLType] = modelCfg
}
for _, enum := range modelsBuild.Enums {
modelCfg := cfg.Models[enum.GQLType]
modelCfg.Model = cfg.Model.ImportPath() + "." + enum.GoType
cfg.Models[enum.GQLType] = modelCfg
}
}
build, err := cfg.bind()
if err != nil {
return errors.Wrap(err, "exec plan failed")
}
if err := templates.RenderToFile("generated.gotpl", cfg.Exec.Filename, build); err != nil {
return err
}
if cfg.Resolver.IsDefined() {
if err := generateResolver(cfg); err != nil {
return errors.Wrap(err, "generating resolver failed")
}
}
if err := cfg.validate(); err != nil {
return errors.Wrap(err, "validation failed")
}
return nil
}
func GenerateServer(cfg Config, filename string) error {
if err := cfg.Exec.normalize(); err != nil {
return errors.Wrap(err, "exec")
}
if err := cfg.Resolver.normalize(); err != nil {
return errors.Wrap(err, "resolver")
}
serverFilename := abs(filename)
serverBuild := cfg.server(filepath.Dir(serverFilename))
if _, err := os.Stat(serverFilename); os.IsNotExist(errors.Cause(err)) {
err = templates.RenderToFile("server.gotpl", serverFilename, serverBuild)
if err != nil {
return errors.Wrap(err, "generate server failed")
}
} else {
log.Printf("Skipped server: %s already exists\n", serverFilename)
}
return nil
}
func generateResolver(cfg Config) error {
resolverBuild, err := cfg.resolver()
if err != nil {
return errors.Wrap(err, "resolver build failed")
}
filename := cfg.Resolver.Filename
if resolverBuild.ResolverFound {
log.Printf("Skipped resolver: %s.%s already exists\n", cfg.Resolver.ImportPath(), cfg.Resolver.Type)
return nil
}
if _, err := os.Stat(filename); os.IsNotExist(errors.Cause(err)) {
if err := templates.RenderToFile("resolver.gotpl", filename, resolverBuild); err != nil {
return err
}
} else {
log.Printf("Skipped resolver: %s already exists\n", filename)
}
return nil
}
func (cfg *Config) normalize() error {
if err := cfg.Model.normalize(); err != nil {
return errors.Wrap(err, "model")
}
if err := cfg.Exec.normalize(); err != nil {
return errors.Wrap(err, "exec")
}
if cfg.Resolver.IsDefined() {
if err := cfg.Resolver.normalize(); err != nil {
return errors.Wrap(err, "resolver")
}
}
builtins := TypeMap{
"__Directive": {Model: "github.com/99designs/gqlgen/graphql/introspection.Directive"},
"__Type": {Model: "github.com/99designs/gqlgen/graphql/introspection.Type"},
"__Field": {Model: "github.com/99designs/gqlgen/graphql/introspection.Field"},
"__EnumValue": {Model: "github.com/99designs/gqlgen/graphql/introspection.EnumValue"},
"__InputValue": {Model: "github.com/99designs/gqlgen/graphql/introspection.InputValue"},
"__Schema": {Model: "github.com/99designs/gqlgen/graphql/introspection.Schema"},
"Int": {Model: "github.com/99designs/gqlgen/graphql.Int"},
"Float": {Model: "github.com/99designs/gqlgen/graphql.Float"},
"String": {Model: "github.com/99designs/gqlgen/graphql.String"},
"Boolean": {Model: "github.com/99designs/gqlgen/graphql.Boolean"},
"ID": {Model: "github.com/99designs/gqlgen/graphql.ID"},
"Time": {Model: "github.com/99designs/gqlgen/graphql.Time"},
"Map": {Model: "github.com/99designs/gqlgen/graphql.Map"},
}
if cfg.Models == nil {
cfg.Models = TypeMap{}
}
for typeName, entry := range builtins {
if !cfg.Models.Exists(typeName) {
cfg.Models[typeName] = entry
}
}
var err *gqlerror.Error
cfg.schema, err = gqlparser.LoadSchema(&ast.Source{Name: cfg.SchemaFilename, Input: cfg.SchemaStr})
if err != nil {
return err
}
return nil
}
var invalidPackageNameChar = regexp.MustCompile(`[^\w]`)
func sanitizePackageName(pkg string) string {
return invalidPackageNameChar.ReplaceAllLiteralString(filepath.Base(pkg), "_")
}
func abs(path string) string {
absPath, err := filepath.Abs(path)
if err != nil {
panic(err)
}
return filepath.ToSlash(absPath)
}


@@ -8,26 +8,29 @@ import (
 	"path/filepath"
 	"strings"
 
+	"github.com/99designs/gqlgen/internal/gopath"
 	"github.com/pkg/errors"
-	"github.com/vektah/gqlgen/neelance/schema"
+	"github.com/vektah/gqlparser/ast"
 	"gopkg.in/yaml.v2"
 )
 
-var defaults = Config{
+var cfgFilenames = []string{".gqlgen.yml", "gqlgen.yml", "gqlgen.yaml"}
+
+// DefaultConfig creates a copy of the default config
+func DefaultConfig() *Config {
+	return &Config{
 		SchemaFilename: "schema.graphql",
 		Model:          PackageConfig{Filename: "models_gen.go"},
 		Exec:           PackageConfig{Filename: "generated.go"},
+	}
 }
 
-var cfgFilenames = []string{".gqlgen.yml", "gqlgen.yml", "gqlgen.yaml"}
-
-// LoadDefaultConfig looks for a config file in the current directory, and all parent directories
+// LoadConfigFromDefaultLocations looks for a config file in the current directory, and all parent directories
 // walking up the tree. The closest config file will be returned.
-func LoadDefaultConfig() (*Config, error) {
+func LoadConfigFromDefaultLocations() (*Config, error) {
 	cfgFile, err := findCfg()
-	if err != nil || cfgFile == "" {
-		cpy := defaults
-		return &cpy, err
+	if err != nil {
+		return nil, err
 	}
 
 	err = os.Chdir(filepath.Dir(cfgFile))
@@ -39,18 +42,20 @@ func LoadDefaultConfig() (*Config, error) {
 
 // LoadConfig reads the gqlgen.yml config file
 func LoadConfig(filename string) (*Config, error) {
-	config := defaults
+	config := DefaultConfig()
 
 	b, err := ioutil.ReadFile(filename)
 	if err != nil {
 		return nil, errors.Wrap(err, "unable to read config")
 	}
 
-	if err := yaml.UnmarshalStrict(b, &config); err != nil {
+	if err := yaml.UnmarshalStrict(b, config); err != nil {
 		return nil, errors.Wrap(err, "unable to parse config")
 	}
 
-	return &config, nil
+	config.FilePath = filename
+
+	return config, nil
 }
 
 type Config struct {
@@ -58,14 +63,19 @@ type Config struct {
 	SchemaStr string        `yaml:"-"`
 	Exec      PackageConfig `yaml:"exec"`
 	Model     PackageConfig `yaml:"model"`
+	Resolver  PackageConfig `yaml:"resolver,omitempty"`
 	Models    TypeMap       `yaml:"models,omitempty"`
+	StructTag string        `yaml:"struct_tag,omitempty"`
 
-	schema *schema.Schema `yaml:"-"`
+	FilePath string `yaml:"-"`
+
+	schema *ast.Schema `yaml:"-"`
 }
 
 type PackageConfig struct {
 	Filename string `yaml:"filename,omitempty"`
 	Package  string `yaml:"package,omitempty"`
+	Type     string `yaml:"type,omitempty"`
 }
 
 type TypeMapEntry struct {
@@ -75,6 +85,7 @@ type TypeMapEntry struct {
 
 type TypeMapField struct {
 	Resolver  bool   `yaml:"resolver"`
+	FieldName string `yaml:"fieldName"`
 }
 
 func (c *PackageConfig) normalize() error {
@@ -98,22 +109,11 @@ func (c *PackageConfig) normalize() error {
 }
 
 func (c *PackageConfig) ImportPath() string {
-	dir := filepath.ToSlash(c.Dir())
-	for _, gopath := range filepath.SplitList(build.Default.GOPATH) {
-		gopath = filepath.ToSlash(gopath) + "/src/"
-		if len(gopath) > len(dir) {
-			continue
-		}
-		if strings.EqualFold(gopath, dir[0:len(gopath)]) {
-			dir = dir[len(gopath):]
-			break
-		}
-	}
-	return dir
+	return gopath.MustDir2Import(c.Dir())
 }
 
 func (c *PackageConfig) Dir() string {
-	return filepath.ToSlash(filepath.Dir(c.Filename))
+	return filepath.Dir(c.Filename)
 }
 
 func (c *PackageConfig) Check() error {
@@ -126,6 +126,10 @@ func (c *PackageConfig) Check() error {
 	return nil
 }
 
+func (c *PackageConfig) IsDefined() bool {
+	return c.Filename != ""
+}
+
 func (cfg *Config) Check() error {
 	if err := cfg.Models.Check(); err != nil {
 		return errors.Wrap(err, "config.models")
@@ -136,6 +140,9 @@ func (cfg *Config) Check() error {
 	if err := cfg.Model.Check(); err != nil {
 		return errors.Wrap(err, "config.model")
 	}
+	if err := cfg.Resolver.Check(); err != nil {
+		return errors.Wrap(err, "config.resolver")
+	}
 	return nil
 }
 
@@ -170,6 +177,10 @@ func findCfg() (string, error) {
 		cfg = findCfgInDir(dir)
 	}
 
+	if cfg == "" {
+		return "", os.ErrNotExist
+	}
+
 	return cfg, nil
 }


@@ -0,0 +1,41 @@
package codegen
import (
"fmt"
"strconv"
"strings"
)
type Directive struct {
Name string
Args []FieldArgument
}
func (d *Directive) ArgsFunc() string {
if len(d.Args) == 0 {
return ""
}
return "dir_" + d.Name + "_args"
}
func (d *Directive) CallArgs() string {
args := []string{"ctx", "obj", "n"}
for _, arg := range d.Args {
args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
}
return strings.Join(args, ", ")
}
func (d *Directive) Declaration() string {
res := ucFirst(d.Name) + " func(ctx context.Context, obj interface{}, next graphql.Resolver"
for _, arg := range d.Args {
res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
}
res += ") (res interface{}, err error)"
return res
}


@@ -0,0 +1,49 @@
package codegen
import (
"sort"
"github.com/pkg/errors"
)
func (cfg *Config) buildDirectives(types NamedTypes) ([]*Directive, error) {
var directives []*Directive
for name, dir := range cfg.schema.Directives {
if name == "skip" || name == "include" || name == "deprecated" {
continue
}
var args []FieldArgument
for _, arg := range dir.Arguments {
newArg := FieldArgument{
GQLName: arg.Name,
Type: types.getType(arg.Type),
GoVarName: sanitizeArgName(arg.Name),
}
if !newArg.Type.IsInput && !newArg.Type.IsScalar {
return nil, errors.Errorf("%s cannot be used as argument of directive %s(%s) only input and scalar types are allowed", arg.Type, dir.Name, arg.Name)
}
if arg.DefaultValue != nil {
var err error
newArg.Default, err = arg.DefaultValue.Value(nil)
if err != nil {
return nil, errors.Errorf("default value for directive argument %s(%s) is not valid: %s", dir.Name, arg.Name, err.Error())
}
newArg.StripPtr()
}
args = append(args, newArg)
}
directives = append(directives, &Directive{
Name: name,
Args: args,
})
}
sort.Slice(directives, func(i, j int) bool { return directives[i].Name < directives[j].Name })
return directives, nil
}


@@ -2,7 +2,7 @@ package codegen
 
 type Enum struct {
 	*NamedType
+	Description string
 	Values      []EnumValue
 }


@@ -0,0 +1,39 @@
package codegen
import (
"sort"
"strings"
"github.com/99designs/gqlgen/codegen/templates"
"github.com/vektah/gqlparser/ast"
)
func (cfg *Config) buildEnums(types NamedTypes) []Enum {
var enums []Enum
for _, typ := range cfg.schema.Types {
namedType := types[typ.Name]
if typ.Kind != ast.Enum || strings.HasPrefix(typ.Name, "__") || namedType.IsUserDefined {
continue
}
var values []EnumValue
for _, v := range typ.EnumValues {
values = append(values, EnumValue{v.Name, v.Description})
}
enum := Enum{
NamedType: namedType,
Values: values,
Description: typ.Description,
}
enum.GoType = templates.ToCamel(enum.GQLType)
enums = append(enums, enum)
}
sort.Slice(enums, func(i, j int) bool {
return enums[i].GQLType < enums[j].GQLType
})
return enums
}


@@ -5,7 +5,14 @@ import (
 	"go/build"
 	"sort"
 	"strconv"
-	"strings"
 
+	// Import and ignore the ambient imports listed below so dependency managers
+	// don't prune unused code for us. Both lists should be kept in sync.
+	_ "github.com/99designs/gqlgen/graphql"
+	_ "github.com/99designs/gqlgen/graphql/introspection"
+	"github.com/99designs/gqlgen/internal/gopath"
+	_ "github.com/vektah/gqlparser"
+	_ "github.com/vektah/gqlparser/ast"
 )
 
 // These imports are referenced by the generated code, and are assumed to have the
@@ -18,12 +25,12 @@ var ambientImports = []string{
 	"strconv",
 	"time",
 	"sync",
+	"errors",
 
-	"github.com/vektah/gqlgen/neelance/introspection",
-	"github.com/vektah/gqlgen/neelance/errors",
-	"github.com/vektah/gqlgen/neelance/query",
-	"github.com/vektah/gqlgen/neelance/schema",
-	"github.com/vektah/gqlgen/neelance/validation",
-	"github.com/vektah/gqlgen/graphql",
+	"github.com/vektah/gqlparser",
+	"github.com/vektah/gqlparser/ast",
+	"github.com/99designs/gqlgen/graphql",
+	"github.com/99designs/gqlgen/graphql/introspection",
 }
 
 func buildImports(types NamedTypes, destDir string) *Imports {
@@ -48,7 +55,8 @@ func (s *Imports) add(path string) *Import {
 		return nil
 	}
 
-	if stringHasSuffixFold(s.destDir, path) {
+	// if we are referencing our own package we dont need an import
+	if gopath.MustDir2Import(s.destDir) == path {
 		return nil
 	}
 
@@ -70,10 +78,6 @@ func (s *Imports) add(path string) *Import {
 	return imp
 }
 
-func stringHasSuffixFold(s, suffix string) bool {
-	return len(s) >= len(suffix) && strings.EqualFold(s[len(s)-len(suffix):], suffix)
-}
-
 func (s Imports) finalize() []*Import {
 	// ensure stable ordering by sorting
 	sort.Slice(s.imports, func(i, j int) bool {


@@ -3,10 +3,9 @@ package codegen
 import (
 	"go/types"
 	"sort"
-	"strings"
 
 	"github.com/pkg/errors"
-	"github.com/vektah/gqlgen/neelance/schema"
+	"github.com/vektah/gqlparser/ast"
 	"golang.org/x/tools/go/loader"
 )
 
@@ -14,9 +13,9 @@ func (cfg *Config) buildInputs(namedTypes NamedTypes, prog *loader.Program, impo
 	var inputs Objects
 	for _, typ := range cfg.schema.Types {
-		switch typ := typ.(type) {
-		case *schema.InputObject:
-			input, err := buildInput(namedTypes, typ)
+		switch typ.Kind {
+		case ast.InputObject:
+			input, err := cfg.buildInput(namedTypes, typ)
 			if err != nil {
 				return nil, err
 			}
@@ -27,7 +26,7 @@ func (cfg *Config) buildInputs(namedTypes NamedTypes, prog *loader.Program, impo
 			}
 			if def != nil {
 				input.Marshaler = buildInputMarshaler(typ, def)
-				bindErrs := bindObject(def.Type(), input, imports)
+				bindErrs := bindObject(def.Type(), input, imports, cfg.StructTag)
 				if len(bindErrs) > 0 {
 					return nil, bindErrs
 				}
@@ -38,24 +37,35 @@ func (cfg *Config) buildInputs(namedTypes NamedTypes, prog *loader.Program, impo
 	}
 
 	sort.Slice(inputs, func(i, j int) bool {
-		return strings.Compare(inputs[i].GQLType, inputs[j].GQLType) == -1
+		return inputs[i].GQLType < inputs[j].GQLType
 	})
 
 	return inputs, nil
 }
 
-func buildInput(types NamedTypes, typ *schema.InputObject) (*Object, error) {
-	obj := &Object{NamedType: types[typ.TypeName()]}
+func (cfg *Config) buildInput(types NamedTypes, typ *ast.Definition) (*Object, error) {
+	obj := &Object{NamedType: types[typ.Name]}
+	typeEntry, entryExists := cfg.Models[typ.Name]
 
-	for _, field := range typ.Values {
+	for _, field := range typ.Fields {
 		newField := Field{
-			GQLName: field.Name.Name,
+			GQLName: field.Name,
 			Type:    types.getType(field.Type),
 			Object:  obj,
 		}
 
-		if field.Default != nil {
-			newField.Default = field.Default.Value(nil)
+		if entryExists {
+			if typeField, ok := typeEntry.Fields[field.Name]; ok {
+				newField.GoFieldName = typeField.FieldName
+			}
+		}
+
+		if field.DefaultValue != nil {
+			var err error
+			newField.Default, err = field.DefaultValue.Value(nil)
+			if err != nil {
+				return nil, errors.Errorf("default value for %s.%s is not valid: %s", typ.Name, field.Name, err.Error())
+			}
 		}
 
 		if !newField.Type.IsInput && !newField.Type.IsScalar {
@@ -70,7 +80,7 @@ func buildInput(types NamedTypes, typ *schema.InputObject) (*Object, error) {
 
 // if user has implemented an UnmarshalGQL method on the input type manually, use it
 // otherwise we will generate one.
-func buildInputMarshaler(typ *schema.InputObject, def types.Object) *Ref {
+func buildInputMarshaler(typ *ast.Definition, def types.Object) *Ref {
 	switch def := def.(type) {
 	case *types.TypeName:
 		namedType := def.Type().(*types.Named)


@@ -5,38 +5,31 @@ import (
 	"go/types"
 	"os"
 	"sort"
-	"strings"
 
-	"github.com/vektah/gqlgen/neelance/schema"
+	"github.com/vektah/gqlparser/ast"
 	"golang.org/x/tools/go/loader"
 )
 
 func (cfg *Config) buildInterfaces(types NamedTypes, prog *loader.Program) []*Interface {
 	var interfaces []*Interface
 	for _, typ := range cfg.schema.Types {
-		switch typ := typ.(type) {
-		case *schema.Union, *schema.Interface:
+		if typ.Kind == ast.Union || typ.Kind == ast.Interface {
 			interfaces = append(interfaces, cfg.buildInterface(types, typ, prog))
-		default:
-			continue
 		}
 	}
 
 	sort.Slice(interfaces, func(i, j int) bool {
-		return strings.Compare(interfaces[i].GQLType, interfaces[j].GQLType) == -1
+		return interfaces[i].GQLType < interfaces[j].GQLType
 	})
 
 	return interfaces
 }
 
-func (cfg *Config) buildInterface(types NamedTypes, typ schema.NamedType, prog *loader.Program) *Interface {
-	switch typ := typ.(type) {
-	case *schema.Union:
-		i := &Interface{NamedType: types[typ.TypeName()]}
-		for _, implementor := range typ.PossibleTypes {
-			t := types[implementor.TypeName()]
+func (cfg *Config) buildInterface(types NamedTypes, typ *ast.Definition, prog *loader.Program) *Interface {
+	i := &Interface{NamedType: types[typ.Name]}
+	for _, implementor := range cfg.schema.GetPossibleTypes(typ) {
+		t := types[implementor.Name]
 
 		i.Implementors = append(i.Implementors, InterfaceImplementor{
 			NamedType: t,
@@ -45,23 +38,6 @@ func (cfg *Config) buildInterface(types NamedTypes, typ schema.NamedType, prog *
 	}
 
 	return i
-	case *schema.Interface:
-		i := &Interface{NamedType: types[typ.TypeName()]}
-		for _, implementor := range typ.PossibleTypes {
-			t := types[implementor.TypeName()]
-
-			i.Implementors = append(i.Implementors, InterfaceImplementor{
-				NamedType:     t,
-				ValueReceiver: cfg.isValueReceiver(types[typ.Name], t, prog),
-			})
-		}
-		return i
-	default:
-		panic(fmt.Errorf("unknown interface %#v", typ))
-	}
 }
 
 func (cfg *Config) isValueReceiver(intf *NamedType, implementor *NamedType, prog *loader.Program) bool {

vendor/github.com/99designs/gqlgen/codegen/model.go (generated, vendored, new file, 16 lines)

@@ -0,0 +1,16 @@
package codegen
type Model struct {
*NamedType
Description string
Fields []ModelField
}
type ModelField struct {
*Type
GQLName string
GoFieldName string
GoFKName string
GoFKType string
Description string
}


@@ -2,20 +2,19 @@ package codegen
 
 import (
 	"sort"
-	"strings"
 
-	"github.com/vektah/gqlgen/neelance/schema"
+	"github.com/vektah/gqlparser/ast"
 	"golang.org/x/tools/go/loader"
 )
 
-func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model, error) {
+func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program, imports *Imports) ([]Model, error) {
 	var models []Model
 
 	for _, typ := range cfg.schema.Types {
 		var model Model
-		switch typ := typ.(type) {
-		case *schema.Object:
-			obj, err := cfg.buildObject(types, typ)
+		switch typ.Kind {
+		case ast.Object:
+			obj, err := cfg.buildObject(types, typ, imports)
 			if err != nil {
 				return nil, err
 			}
@@ -23,8 +22,8 @@ func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model,
 				continue
 			}
 			model = cfg.obj2Model(obj)
-		case *schema.InputObject:
-			obj, err := buildInput(types, typ)
+		case ast.InputObject:
+			obj, err := cfg.buildInput(types, typ)
 			if err != nil {
 				return nil, err
 			}
@@ -32,7 +31,7 @@ func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model,
 				continue
 			}
 			model = cfg.obj2Model(obj)
-		case *schema.Interface, *schema.Union:
+		case ast.Interface, ast.Union:
 			intf := cfg.buildInterface(types, typ, prog)
 			if intf.IsUserDefined {
 				continue
@@ -41,12 +40,13 @@ func (cfg *Config) buildModels(types NamedTypes, prog *loader.Program) ([]Model,
 		default:
 			continue
 		}
+		model.Description = typ.Description // It's this or change both obj2Model and buildObject
 		models = append(models, model)
 	}
 
 	sort.Slice(models, func(i, j int) bool {
-		return strings.Compare(models[i].GQLType, models[j].GQLType) == -1
+		return models[i].GQLType < models[j].GQLType
 	})
 
 	return models, nil
@@ -65,11 +65,10 @@ func (cfg *Config) obj2Model(obj *Object) Model {
 		field := &obj.Fields[i]
 		mf := ModelField{Type: field.Type, GQLName: field.GQLName}
 
-		mf.GoVarName = ucFirst(field.GQLName)
-		if mf.IsScalar {
-			if mf.GoVarName == "Id" {
-				mf.GoVarName = "ID"
-			}
+		if field.GoFieldName != "" {
+			mf.GoFieldName = field.GoFieldName
+		} else {
+			mf.GoFieldName = field.GoNameExported()
 		}
 
 		model.Fields = append(model.Fields, mf)

vendor/github.com/99designs/gqlgen/codegen/object.go (generated, vendored, new file, 464 lines)

@@ -0,0 +1,464 @@
package codegen
import (
"bytes"
"fmt"
"strconv"
"strings"
"text/template"
"unicode"
"github.com/vektah/gqlparser/ast"
)
type GoFieldType int
const (
GoFieldUndefined GoFieldType = iota
GoFieldMethod
GoFieldVariable
)
type Object struct {
*NamedType
Fields []Field
Satisfies []string
ResolverInterface *Ref
Root bool
DisableConcurrency bool
Stream bool
}
type Field struct {
*Type
Description string // Description of a field
GQLName string // The name of the field in graphql
GoFieldType GoFieldType // The field type in go, if any
GoReceiverName string // The name of method & var receiver in go, if any
GoFieldName string // The name of the method or var in go, if any
Args []FieldArgument // A list of arguments to be passed to this field
ForceResolver bool // Should be emit Resolver method
NoErr bool // If this is bound to a go method, does that method have an error as the second argument
Object *Object // A link back to the parent object
Default interface{} // The default value
}
type FieldArgument struct {
*Type
GQLName string // The name of the argument in graphql
GoVarName string // The name of the var in go
Object *Object // A link back to the parent object
Default interface{} // The default value
}
type Objects []*Object
func (o *Object) Implementors() string {
satisfiedBy := strconv.Quote(o.GQLType)
for _, s := range o.Satisfies {
satisfiedBy += ", " + strconv.Quote(s)
}
return "[]string{" + satisfiedBy + "}"
}
func (o *Object) HasResolvers() bool {
for _, f := range o.Fields {
if f.IsResolver() {
return true
}
}
return false
}
func (o *Object) IsConcurrent() bool {
for _, f := range o.Fields {
if f.IsConcurrent() {
return true
}
}
return false
}
func (o *Object) IsReserved() bool {
return strings.HasPrefix(o.GQLType, "__")
}
func (f *Field) IsResolver() bool {
return f.GoFieldName == ""
}
func (f *Field) IsReserved() bool {
return strings.HasPrefix(f.GQLName, "__")
}
func (f *Field) IsMethod() bool {
return f.GoFieldType == GoFieldMethod
}
func (f *Field) IsVariable() bool {
return f.GoFieldType == GoFieldVariable
}
func (f *Field) IsConcurrent() bool {
return f.IsResolver() && !f.Object.DisableConcurrency
}
func (f *Field) GoNameExported() string {
return lintName(ucFirst(f.GQLName))
}
func (f *Field) GoNameUnexported() string {
return lintName(f.GQLName)
}
func (f *Field) ShortInvocation() string {
if !f.IsResolver() {
return ""
}
return fmt.Sprintf("%s().%s(%s)", f.Object.GQLType, f.GoNameExported(), f.CallArgs())
}
func (f *Field) ArgsFunc() string {
if len(f.Args) == 0 {
return ""
}
return "field_" + f.Object.GQLType + "_" + f.GQLName + "_args"
}
func (f *Field) ResolverType() string {
if !f.IsResolver() {
return ""
}
return fmt.Sprintf("%s().%s(%s)", f.Object.GQLType, f.GoNameExported(), f.CallArgs())
}
func (f *Field) ShortResolverDeclaration() string {
if !f.IsResolver() {
return ""
}
res := fmt.Sprintf("%s(ctx context.Context", f.GoNameExported())
if !f.Object.Root {
res += fmt.Sprintf(", obj *%s", f.Object.FullName())
}
for _, arg := range f.Args {
res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
}
result := f.Signature()
if f.Object.Stream {
result = "<-chan " + result
}
res += fmt.Sprintf(") (%s, error)", result)
return res
}
func (f *Field) ResolverDeclaration() string {
if !f.IsResolver() {
return ""
}
res := fmt.Sprintf("%s_%s(ctx context.Context", f.Object.GQLType, f.GoNameUnexported())
if !f.Object.Root {
res += fmt.Sprintf(", obj *%s", f.Object.FullName())
}
for _, arg := range f.Args {
res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
}
result := f.Signature()
if f.Object.Stream {
result = "<-chan " + result
}
res += fmt.Sprintf(") (%s, error)", result)
return res
}
func (f *Field) ComplexitySignature() string {
res := fmt.Sprintf("func(childComplexity int")
for _, arg := range f.Args {
res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
}
res += ") int"
return res
}
func (f *Field) ComplexityArgs() string {
var args []string
for _, arg := range f.Args {
args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
}
return strings.Join(args, ", ")
}
func (f *Field) CallArgs() string {
var args []string
if f.IsResolver() {
args = append(args, "ctx")
if !f.Object.Root {
args = append(args, "obj")
}
}
for _, arg := range f.Args {
args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
}
return strings.Join(args, ", ")
}
// should be in the template, but its recursive and has a bunch of args
func (f *Field) WriteJson() string {
return f.doWriteJson("res", f.Type.Modifiers, f.ASTType, false, 1)
}
func (f *Field) doWriteJson(val string, remainingMods []string, astType *ast.Type, isPtr bool, depth int) string {
switch {
case len(remainingMods) > 0 && remainingMods[0] == modPtr:
return tpl(`
if {{.val}} == nil {
{{- if .nonNull }}
if !ec.HasError(rctx) {
ec.Errorf(ctx, "must not be null")
}
{{- end }}
return graphql.Null
}
{{.next }}`, map[string]interface{}{
"val": val,
"nonNull": astType.NonNull,
"next": f.doWriteJson(val, remainingMods[1:], astType, true, depth+1),
})
case len(remainingMods) > 0 && remainingMods[0] == modList:
if isPtr {
val = "*" + val
}
var arr = "arr" + strconv.Itoa(depth)
var index = "idx" + strconv.Itoa(depth)
var usePtr bool
if len(remainingMods) == 1 && !isPtr {
usePtr = true
}
return tpl(`
{{.arr}} := make(graphql.Array, len({{.val}}))
{{ if and .top (not .isScalar) }} var wg sync.WaitGroup {{ end }}
{{ if not .isScalar }}
isLen1 := len({{.val}}) == 1
if !isLen1 {
wg.Add(len({{.val}}))
}
{{ end }}
for {{.index}} := range {{.val}} {
{{- if not .isScalar }}
{{.index}} := {{.index}}
rctx := &graphql.ResolverContext{
Index: &{{.index}},
Result: {{ if .usePtr }}&{{end}}{{.val}}[{{.index}}],
}
ctx := graphql.WithResolverContext(ctx, rctx)
f := func({{.index}} int) {
if !isLen1 {
defer wg.Done()
}
{{.arr}}[{{.index}}] = func() graphql.Marshaler {
{{ .next }}
}()
}
if isLen1 {
f({{.index}})
} else {
go f({{.index}})
}
{{ else }}
{{.arr}}[{{.index}}] = func() graphql.Marshaler {
{{ .next }}
}()
{{- end}}
}
{{ if and .top (not .isScalar) }} wg.Wait() {{ end }}
return {{.arr}}`, map[string]interface{}{
"val": val,
"arr": arr,
"index": index,
"top": depth == 1,
"arrayLen": len(val),
"isScalar": f.IsScalar,
"usePtr": usePtr,
"next": f.doWriteJson(val+"["+index+"]", remainingMods[1:], astType.Elem, false, depth+1),
})
case f.IsScalar:
if isPtr {
val = "*" + val
}
return f.Marshal(val)
default:
if !isPtr {
val = "&" + val
}
return tpl(`
return ec._{{.type}}(ctx, field.Selections, {{.val}})`, map[string]interface{}{
"type": f.GQLType,
"val": val,
})
}
}
func (f *FieldArgument) Stream() bool {
return f.Object != nil && f.Object.Stream
}
func (os Objects) ByName(name string) *Object {
for i, o := range os {
if strings.EqualFold(o.GQLType, name) {
return os[i]
}
}
return nil
}
func tpl(tpl string, vars map[string]interface{}) string {
b := &bytes.Buffer{}
err := template.Must(template.New("inline").Parse(tpl)).Execute(b, vars)
if err != nil {
panic(err)
}
return b.String()
}
func ucFirst(s string) string {
if s == "" {
return ""
}
r := []rune(s)
r[0] = unicode.ToUpper(r[0])
return string(r)
}
// copy from https://github.com/golang/lint/blob/06c8688daad7faa9da5a0c2f163a3d14aac986ca/lint.go#L679
// lintName returns a different name if it should be different.
func lintName(name string) (should string) {
// Fast path for simple cases: "_" and all lowercase.
if name == "_" {
return name
}
allLower := true
for _, r := range name {
if !unicode.IsLower(r) {
allLower = false
break
}
}
if allLower {
return name
}
// Split camelCase at any lower->upper transition, and split on underscores.
// Check each word for common initialisms.
runes := []rune(name)
w, i := 0, 0 // index of start of word, scan
for i+1 <= len(runes) {
eow := false // whether we hit the end of a word
if i+1 == len(runes) {
eow = true
} else if runes[i+1] == '_' {
// underscore; shift the remainder forward over any run of underscores
eow = true
n := 1
for i+n+1 < len(runes) && runes[i+n+1] == '_' {
n++
}
// Leave at most one underscore if the underscore is between two digits
if i+n+1 < len(runes) && unicode.IsDigit(runes[i]) && unicode.IsDigit(runes[i+n+1]) {
n--
}
copy(runes[i+1:], runes[i+n+1:])
runes = runes[:len(runes)-n]
} else if unicode.IsLower(runes[i]) && !unicode.IsLower(runes[i+1]) {
// lower->non-lower
eow = true
}
i++
if !eow {
continue
}
// [w,i) is a word.
word := string(runes[w:i])
if u := strings.ToUpper(word); commonInitialisms[u] {
// Keep consistent case, which is lowercase only at the start.
if w == 0 && unicode.IsLower(runes[w]) {
u = strings.ToLower(u)
}
// All the common initialisms are ASCII,
// so we can replace the bytes exactly.
copy(runes[w:], []rune(u))
} else if w > 0 && strings.ToLower(word) == word {
// already all lowercase, and not the first word, so uppercase the first character.
runes[w] = unicode.ToUpper(runes[w])
}
w = i
}
return string(runes)
}
// commonInitialisms is a set of common initialisms.
// Only add entries that are highly unlikely to be non-initialisms.
// For instance, "ID" is fine (Freudian code is rare), but "AND" is not.
var commonInitialisms = map[string]bool{
"ACL": true,
"API": true,
"ASCII": true,
"CPU": true,
"CSS": true,
"DNS": true,
"EOF": true,
"GUID": true,
"HTML": true,
"HTTP": true,
"HTTPS": true,
"ID": true,
"IP": true,
"JSON": true,
"LHS": true,
"QPS": true,
"RAM": true,
"RHS": true,
"RPC": true,
"SLA": true,
"SMTP": true,
"SQL": true,
"SSH": true,
"TCP": true,
"TLS": true,
"TTL": true,
"UDP": true,
"UI": true,
"UID": true,
"UUID": true,
"URI": true,
"URL": true,
"UTF8": true,
"VM": true,
"XML": true,
"XMPP": true,
"XSRF": true,
"XSS": true,
}


@@ -0,0 +1,181 @@
package codegen
import (
"log"
"sort"
"github.com/pkg/errors"
"github.com/vektah/gqlparser/ast"
"golang.org/x/tools/go/loader"
)
func (cfg *Config) buildObjects(types NamedTypes, prog *loader.Program, imports *Imports) (Objects, error) {
var objects Objects
for _, typ := range cfg.schema.Types {
if typ.Kind != ast.Object {
continue
}
obj, err := cfg.buildObject(types, typ, imports)
if err != nil {
return nil, err
}
def, err := findGoType(prog, obj.Package, obj.GoType)
if err != nil {
return nil, err
}
if def != nil {
for _, bindErr := range bindObject(def.Type(), obj, imports, cfg.StructTag) {
log.Println(bindErr.Error())
log.Println(" Adding resolver method")
}
}
objects = append(objects, obj)
}
sort.Slice(objects, func(i, j int) bool {
return objects[i].GQLType < objects[j].GQLType
})
return objects, nil
}
var keywords = []string{
"break",
"default",
"func",
"interface",
"select",
"case",
"defer",
"go",
"map",
"struct",
"chan",
"else",
"goto",
"package",
"switch",
"const",
"fallthrough",
"if",
"range",
"type",
"continue",
"for",
"import",
"return",
"var",
}
// sanitizeArgName prevents collisions with go keywords for arguments to resolver functions
func sanitizeArgName(name string) string {
for _, k := range keywords {
if name == k {
return name + "Arg"
}
}
return name
}
func (cfg *Config) buildObject(types NamedTypes, typ *ast.Definition, imports *Imports) (*Object, error) {
obj := &Object{NamedType: types[typ.Name]}
typeEntry, entryExists := cfg.Models[typ.Name]
imp := imports.findByPath(cfg.Exec.ImportPath())
obj.ResolverInterface = &Ref{GoType: obj.GQLType + "Resolver", Import: imp}
if typ == cfg.schema.Query {
obj.Root = true
}
if typ == cfg.schema.Mutation {
obj.Root = true
obj.DisableConcurrency = true
}
if typ == cfg.schema.Subscription {
obj.Root = true
obj.Stream = true
}
obj.Satisfies = append(obj.Satisfies, typ.Interfaces...)
for _, field := range typ.Fields {
if typ == cfg.schema.Query && field.Name == "__type" {
obj.Fields = append(obj.Fields, Field{
Type: &Type{types["__Schema"], []string{modPtr}, ast.NamedType("__Schema", nil), nil},
GQLName: "__schema",
NoErr: true,
GoFieldType: GoFieldMethod,
GoReceiverName: "ec",
GoFieldName: "introspectSchema",
Object: obj,
Description: field.Description,
})
continue
}
if typ == cfg.schema.Query && field.Name == "__schema" {
obj.Fields = append(obj.Fields, Field{
Type: &Type{types["__Type"], []string{modPtr}, ast.NamedType("__Schema", nil), nil},
GQLName: "__type",
NoErr: true,
GoFieldType: GoFieldMethod,
GoReceiverName: "ec",
GoFieldName: "introspectType",
Args: []FieldArgument{
{GQLName: "name", Type: &Type{types["String"], []string{}, ast.NamedType("String", nil), nil}, Object: &Object{}},
},
Object: obj,
})
continue
}
var forceResolver bool
var goName string
if entryExists {
if typeField, ok := typeEntry.Fields[field.Name]; ok {
goName = typeField.FieldName
forceResolver = typeField.Resolver
}
}
var args []FieldArgument
for _, arg := range field.Arguments {
newArg := FieldArgument{
GQLName: arg.Name,
Type: types.getType(arg.Type),
Object: obj,
GoVarName: sanitizeArgName(arg.Name),
}
if !newArg.Type.IsInput && !newArg.Type.IsScalar {
return nil, errors.Errorf("%s cannot be used as argument of %s.%s. only input and scalar types are allowed", arg.Type, obj.GQLType, field.Name)
}
if arg.DefaultValue != nil {
var err error
newArg.Default, err = arg.DefaultValue.Value(nil)
if err != nil {
return nil, errors.Errorf("default value for %s.%s is not valid: %s", typ.Name, field.Name, err.Error())
}
newArg.StripPtr()
}
args = append(args, newArg)
}
obj.Fields = append(obj.Fields, Field{
GQLName: field.Name,
Type: types.getType(field.Type),
Args: args,
Object: obj,
GoFieldName: goName,
ForceResolver: forceResolver,
})
}
return obj, nil
}

View File

@ -0,0 +1,13 @@
args := map[string]interface{}{}
{{- range $i, $arg := . }}
var arg{{$i}} {{$arg.Signature }}
if tmp, ok := rawArgs[{{$arg.GQLName|quote}}]; ok {
var err error
{{$arg.Unmarshal (print "arg" $i) "tmp" }}
if err != nil {
return nil, err
}
}
args[{{$arg.GQLName|quote}}] = arg{{$i}}
{{- end }}
return args, nil

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,74 @@
{{ $field := . }}
{{ $object := $field.Object }}
{{- if $object.Stream }}
func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {
{{- if $field.Args }}
rawArgs := field.ArgumentMap(ec.Variables)
args, err := {{ $field.ArgsFunc }}(rawArgs)
if err != nil {
ec.Error(ctx, err)
return nil
}
{{- end }}
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
Field: field,
})
results, err := ec.resolvers.{{ $field.ShortInvocation }}
if err != nil {
ec.Error(ctx, err)
return nil
}
return func() graphql.Marshaler {
res, ok := <-results
if !ok {
return nil
}
var out graphql.OrderedMap
out.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())
return &out
}
}
{{ else }}
// nolint: vetshadow
func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {
{{- if $field.Args }}
rawArgs := field.ArgumentMap(ec.Variables)
args, err := {{ $field.ArgsFunc }}(rawArgs)
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
{{- end }}
rctx := &graphql.ResolverContext{
Object: {{$object.GQLType|quote}},
Args: {{if $field.Args }}args{{else}}nil{{end}},
Field: field,
}
ctx = graphql.WithResolverContext(ctx, rctx)
resTmp := ec.FieldMiddleware(ctx, {{if $object.Root}}nil{{else}}obj{{end}}, func(ctx context.Context) (interface{}, error) {
{{- if $field.IsResolver }}
return ec.resolvers.{{ $field.ShortInvocation }}
{{- else if $field.IsMethod }}
{{- if $field.NoErr }}
return {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }}), nil
{{- else }}
return {{$field.GoReceiverName}}.{{$field.GoFieldName}}({{ $field.CallArgs }})
{{- end }}
{{- else if $field.IsVariable }}
return {{$field.GoReceiverName}}.{{$field.GoFieldName}}, nil
{{- end }}
})
if resTmp == nil {
{{- if $field.ASTType.NonNull }}
if !ec.HasError(rctx) {
ec.Errorf(ctx, "must not be null")
}
{{- end }}
return graphql.Null
}
res := resTmp.({{$field.Signature}})
rctx.Result = res
{{ $field.WriteJson }}
}
{{ end }}

View File

@ -0,0 +1,263 @@
// Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
package {{ .PackageName }}
import (
{{- range $import := .Imports }}
{{- $import.Write }}
{{ end }}
)
// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
func NewExecutableSchema(cfg Config) graphql.ExecutableSchema {
return &executableSchema{
resolvers: cfg.Resolvers,
directives: cfg.Directives,
complexity: cfg.Complexity,
}
}
type Config struct {
Resolvers ResolverRoot
Directives DirectiveRoot
Complexity ComplexityRoot
}
type ResolverRoot interface {
{{- range $object := .Objects -}}
{{ if $object.HasResolvers -}}
{{$object.GQLType}}() {{$object.GQLType}}Resolver
{{ end }}
{{- end }}
}
type DirectiveRoot struct {
{{ range $directive := .Directives }}
{{ $directive.Declaration }}
{{ end }}
}
type ComplexityRoot struct {
{{ range $object := .Objects }}
{{ if not $object.IsReserved -}}
{{ $object.GQLType|toCamel }} struct {
{{ range $field := $object.Fields -}}
{{ if not $field.IsReserved -}}
{{ $field.GQLName|toCamel }} {{ $field.ComplexitySignature }}
{{ end }}
{{- end }}
}
{{- end }}
{{ end }}
}
{{ range $object := .Objects -}}
{{ if $object.HasResolvers }}
type {{$object.GQLType}}Resolver interface {
{{ range $field := $object.Fields -}}
{{ $field.ShortResolverDeclaration }}
{{ end }}
}
{{- end }}
{{- end }}
{{ range $object := .Objects -}}
{{ range $field := $object.Fields -}}
{{ if $field.Args }}
func {{ $field.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {
{{ template "args.gotpl" $field.Args }}
}
{{ end }}
{{ end }}
{{- end }}
{{ range $directive := .Directives }}
{{ if $directive.Args }}
func {{ $directive.ArgsFunc }}(rawArgs map[string]interface{}) (map[string]interface{}, error) {
{{ template "args.gotpl" $directive.Args }}
}
{{ end }}
{{ end }}
type executableSchema struct {
resolvers ResolverRoot
directives DirectiveRoot
complexity ComplexityRoot
}
func (e *executableSchema) Schema() *ast.Schema {
return parsedSchema
}
func (e *executableSchema) Complexity(typeName, field string, childComplexity int, rawArgs map[string]interface{}) (int, bool) {
switch typeName + "." + field {
{{ range $object := .Objects }}
{{ if not $object.IsReserved }}
{{ range $field := $object.Fields }}
{{ if not $field.IsReserved }}
case "{{$object.GQLType}}.{{$field.GQLName}}":
if e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}} == nil {
break
}
{{ if $field.Args }}
args, err := {{ $field.ArgsFunc }}(rawArgs)
if err != nil {
return 0, false
}
{{ end }}
return e.complexity.{{$object.GQLType|toCamel}}.{{$field.GQLName|toCamel}}(childComplexity{{if $field.Args}}, {{$field.ComplexityArgs}} {{end}}), true
{{ end }}
{{ end }}
{{ end }}
{{ end }}
}
return 0, false
}
func (e *executableSchema) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
{{- if .QueryRoot }}
ec := executionContext{graphql.GetRequestContext(ctx), e}
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
data := ec._{{.QueryRoot.GQLType}}(ctx, op.SelectionSet)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return buf.Bytes()
})
return &graphql.Response{
Data: buf,
Errors: ec.Errors,
}
{{- else }}
return graphql.ErrorResponse(ctx, "queries are not supported")
{{- end }}
}
func (e *executableSchema) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
{{- if .MutationRoot }}
ec := executionContext{graphql.GetRequestContext(ctx), e}
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
data := ec._{{.MutationRoot.GQLType}}(ctx, op.SelectionSet)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return buf.Bytes()
})
return &graphql.Response{
Data: buf,
Errors: ec.Errors,
}
{{- else }}
return graphql.ErrorResponse(ctx, "mutations are not supported")
{{- end }}
}
func (e *executableSchema) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {
{{- if .SubscriptionRoot }}
ec := executionContext{graphql.GetRequestContext(ctx), e}
next := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.SelectionSet)
if ec.Errors != nil {
return graphql.OneShot(&graphql.Response{Data: []byte("null"), Errors: ec.Errors})
}
var buf bytes.Buffer
return func() *graphql.Response {
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
buf.Reset()
data := next()
if data == nil {
return nil
}
data.MarshalGQL(&buf)
return buf.Bytes()
})
if buf == nil {
return nil
}
return &graphql.Response{
Data: buf,
Errors: ec.Errors,
}
}
{{- else }}
return graphql.OneShot(graphql.ErrorResponse(ctx, "subscriptions are not supported"))
{{- end }}
}
type executionContext struct {
*graphql.RequestContext
*executableSchema
}
{{- range $object := .Objects }}
{{ template "object.gotpl" $object }}
{{- range $field := $object.Fields }}
{{ template "field.gotpl" $field }}
{{ end }}
{{- end}}
{{- range $interface := .Interfaces }}
{{ template "interface.gotpl" $interface }}
{{- end }}
{{- range $input := .Inputs }}
{{ template "input.gotpl" $input }}
{{- end }}
func (ec *executionContext) FieldMiddleware(ctx context.Context, obj interface{}, next graphql.Resolver) (ret interface{}) {
defer func() {
if r := recover(); r != nil {
ec.Error(ctx, ec.Recover(ctx, r))
ret = nil
}
}()
{{- if .Directives }}
rctx := graphql.GetResolverContext(ctx)
for _, d := range rctx.Field.Definition.Directives {
switch d.Name {
{{- range $directive := .Directives }}
case "{{$directive.Name}}":
if ec.directives.{{$directive.Name|ucFirst}} != nil {
{{- if $directive.Args }}
rawArgs := d.ArgumentMap(ec.Variables)
args, err := {{ $directive.ArgsFunc }}(rawArgs)
if err != nil {
ec.Error(ctx, err)
return nil
}
{{- end }}
n := next
next = func(ctx context.Context) (interface{}, error) {
return ec.directives.{{$directive.Name|ucFirst}}({{$directive.CallArgs}})
}
}
{{- end }}
}
}
{{- end }}
res, err := ec.ResolverMiddleware(ctx, next)
if err != nil {
ec.Error(ctx, err)
return nil
}
return res
}
func (ec *executionContext) introspectSchema() *introspection.Schema {
return introspection.WrapSchema(parsedSchema)
}
func (ec *executionContext) introspectType(name string) *introspection.Type {
return introspection.WrapTypeFromDef(parsedSchema, parsedSchema.Types[name])
}
var parsedSchema = gqlparser.MustLoadSchema(
&ast.Source{Name: {{.SchemaFilename|quote}}, Input: {{.SchemaRaw|rawQuote}}},
)

View File

@ -15,7 +15,7 @@
{{- range $field := .Fields }} {{- range $field := .Fields }}
case {{$field.GQLName|quote}}: case {{$field.GQLName|quote}}:
var err error var err error
{{ $field.Unmarshal (print "it." $field.GoVarName) "v" }} {{ $field.Unmarshal (print "it." $field.GoFieldName) "v" }}
if err != nil { if err != nil {
return it, err return it, err
} }

View File

@ -1,6 +1,6 @@
{{- $interface := . }} {{- $interface := . }}
func (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel []query.Selection, obj *{{$interface.FullName}}) graphql.Marshaler { func (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel ast.SelectionSet, obj *{{$interface.FullName}}) graphql.Marshaler {
switch obj := (*obj).(type) { switch obj := (*obj).(type) {
case nil: case nil:
return graphql.Null return graphql.Null

View File

@ -1,4 +1,4 @@
// Code generated by github.com/vektah/gqlgen, DO NOT EDIT. // Code generated by github.com/99designs/gqlgen, DO NOT EDIT.
package {{ .PackageName }} package {{ .PackageName }}
@ -9,13 +9,17 @@ import (
) )
{{ range $model := .Models }} {{ range $model := .Models }}
{{with .Description}} {{.|prefixLines "// "}} {{end}}
{{- if .IsInterface }} {{- if .IsInterface }}
type {{.GoType}} interface {} type {{.GoType}} interface {}
{{- else }} {{- else }}
type {{.GoType}} struct { type {{.GoType}} struct {
{{- range $field := .Fields }} {{- range $field := .Fields }}
{{- if $field.GoVarName }} {{- with .Description}}
{{ $field.GoVarName }} {{$field.Signature}} `json:"{{$field.GQLName}}"` {{.|prefixLines "// "}}
{{- end}}
{{- if $field.GoFieldName }}
{{ $field.GoFieldName }} {{$field.Signature}} `json:"{{$field.GQLName}}"`
{{- else }} {{- else }}
{{ $field.GoFKName }} {{$field.GoFKType}} {{ $field.GoFKName }} {{$field.GoFKType}}
{{- end }} {{- end }}
@ -25,10 +29,13 @@ import (
{{- end}} {{- end}}
{{ range $enum := .Enums }} {{ range $enum := .Enums }}
{{with .Description}}{{.|prefixLines "// "}} {{end}}
type {{.GoType}} string type {{.GoType}} string
const ( const (
{{ range $value := .Values -}} {{- range $value := .Values}}
{{with .Description}} {{.|prefixLines "// "}} {{end}} {{- with .Description}}
{{.|prefixLines "// "}}
{{- end}}
{{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}} {{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}
{{- end }} {{- end }}
) )

View File

@ -4,8 +4,8 @@ var {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}
// nolint: gocyclo, errcheck, gas, goconst // nolint: gocyclo, errcheck, gas, goconst
{{- if .Stream }} {{- if .Stream }}
func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection) func() graphql.Marshaler { func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet) func() graphql.Marshaler {
fields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables) fields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{ ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
Object: {{$object.GQLType|quote}}, Object: {{$object.GQLType|quote}},
}) })
@ -24,14 +24,17 @@ func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []quer
} }
} }
{{- else }} {{- else }}
func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler { func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel ast.SelectionSet{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {
fields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables) fields := graphql.CollectFields(ctx, sel, {{$object.GQLType|lcFirst}}Implementors)
{{if $object.Root}} {{if $object.Root}}
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{ ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
Object: {{$object.GQLType|quote}}, Object: {{$object.GQLType|quote}},
}) })
{{end}} {{end}}
{{if $object.IsConcurrent}} var wg sync.WaitGroup {{end}}
out := graphql.NewOrderedMap(len(fields)) out := graphql.NewOrderedMap(len(fields))
invalid := false
for i, field := range fields { for i, field := range fields {
out.Keys[i] = field.Alias out.Keys[i] = field.Alias
@ -40,13 +43,27 @@ func (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []quer
out.Values[i] = graphql.MarshalString({{$object.GQLType|quote}}) out.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})
{{- range $field := $object.Fields }} {{- range $field := $object.Fields }}
case "{{$field.GQLName}}": case "{{$field.GQLName}}":
{{- if $field.IsConcurrent }}
wg.Add(1)
go func(i int, field graphql.CollectedField) {
{{- end }}
out.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}}) out.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})
{{- if $field.ASTType.NonNull }}
if out.Values[i] == graphql.Null {
invalid = true
}
{{- end }}
{{- if $field.IsConcurrent }}
wg.Done()
}(i, field)
{{- end }}
{{- end }} {{- end }}
default: default:
panic("unknown field " + strconv.Quote(field.Name)) panic("unknown field " + strconv.Quote(field.Name))
} }
} }
{{if $object.IsConcurrent}} wg.Wait() {{end}}
if invalid { return graphql.Null }
return out return out
} }
{{- end }} {{- end }}

View File

@ -0,0 +1,33 @@
//go:generate gorunpkg github.com/99designs/gqlgen
package {{ .PackageName }}
import (
{{- range $import := .Imports }}
{{- $import.Write }}
{{ end }}
)
type {{.ResolverType}} struct {}
{{ range $object := .Objects -}}
{{- if $object.HasResolvers -}}
func (r *{{$.ResolverType}}) {{$object.GQLType}}() {{ $object.ResolverInterface.FullName }} {
return &{{lcFirst $object.GQLType}}Resolver{r}
}
{{ end -}}
{{ end }}
{{ range $object := .Objects -}}
{{- if $object.HasResolvers -}}
type {{lcFirst $object.GQLType}}Resolver struct { *Resolver }
{{ range $field := $object.Fields -}}
{{- if $field.IsResolver -}}
func (r *{{lcFirst $object.GQLType}}Resolver) {{ $field.ShortResolverDeclaration }} {
panic("not implemented")
}
{{ end -}}
{{ end -}}
{{ end -}}
{{ end }}

View File

@ -0,0 +1,22 @@
package main
import (
{{- range $import := .Imports }}
{{- $import.Write }}
{{ end }}
)
const defaultPort = "8080"
func main() {
port := os.Getenv("PORT")
if port == "" {
port = defaultPort
}
http.Handle("/", handler.Playground("GraphQL playground", "/query"))
http.Handle("/query", handler.GraphQL({{.ExecPackageName}}.NewExecutableSchema({{.ExecPackageName}}.Config{Resolvers: &{{.ResolverPackageName}}.Resolver{}})))
log.Printf("connect to http://localhost:%s/ for GraphQL playground", port)
log.Fatal(http.ListenAndServe(":" + port, nil))
}

View File

@ -5,11 +5,19 @@ package templates
import ( import (
"bytes" "bytes"
"fmt" "fmt"
"io/ioutil"
"os"
"path/filepath"
"sort" "sort"
"strconv" "strconv"
"strings" "strings"
"text/template" "text/template"
"unicode" "unicode"
"log"
"github.com/pkg/errors"
"golang.org/x/tools/imports"
) )
func Run(name string, tpldata interface{}) (*bytes.Buffer, error) { func Run(name string, tpldata interface{}) (*bytes.Buffer, error) {
@ -96,6 +104,8 @@ func dump(val interface{}) string {
switch val := val.(type) { switch val := val.(type) {
case int: case int:
return strconv.Itoa(val) return strconv.Itoa(val)
case int64:
return fmt.Sprintf("%d", val)
case float64: case float64:
return fmt.Sprintf("%f", val) return fmt.Sprintf("%f", val)
case string: case string:
@ -137,3 +147,47 @@ func dump(val interface{}) string {
func prefixLines(prefix, s string) string { func prefixLines(prefix, s string) string {
return prefix + strings.Replace(s, "\n", "\n"+prefix, -1) return prefix + strings.Replace(s, "\n", "\n"+prefix, -1)
} }
func RenderToFile(tpl string, filename string, data interface{}) error {
var buf *bytes.Buffer
buf, err := Run(tpl, data)
if err != nil {
return errors.Wrap(err, filename+" generation failed")
}
if err := write(filename, buf.Bytes()); err != nil {
return err
}
log.Println(filename)
return nil
}
func gofmt(filename string, b []byte) ([]byte, error) {
out, err := imports.Process(filename, b, nil)
if err != nil {
return b, errors.Wrap(err, "unable to gofmt")
}
return out, nil
}
func write(filename string, b []byte) error {
err := os.MkdirAll(filepath.Dir(filename), 0755)
if err != nil {
return errors.Wrap(err, "failed to create directory")
}
formatted, err := gofmt(filename, b)
if err != nil {
fmt.Fprintf(os.Stderr, "gofmt failed: %s\n", err.Error())
formatted = b
}
err = ioutil.WriteFile(filename, formatted, 0644)
if err != nil {
return errors.Wrapf(err, "failed to write %s", filename)
}
return nil
}

View File

@ -3,6 +3,8 @@ package codegen
import ( import (
"strconv" "strconv"
"strings" "strings"
"github.com/vektah/gqlparser/ast"
) )
type NamedTypes map[string]*NamedType type NamedTypes map[string]*NamedType
@ -27,7 +29,8 @@ type Type struct {
*NamedType *NamedType
Modifiers []string Modifiers []string
CastType *Ref // the type to cast to when unmarshalling ASTType *ast.Type
AliasedType *Ref
} }
const ( const (
@ -47,6 +50,9 @@ func (t Ref) PkgDot() string {
} }
func (t Type) Signature() string { func (t Type) Signature() string {
if t.AliasedType != nil {
return strings.Join(t.Modifiers, "") + t.AliasedType.FullName()
}
return strings.Join(t.Modifiers, "") + t.FullName() return strings.Join(t.Modifiers, "") + t.FullName()
} }
@ -109,6 +115,8 @@ func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) s
if {{.raw}} != nil { if {{.raw}} != nil {
if tmp1, ok := {{.raw}}.([]interface{}); ok { if tmp1, ok := {{.raw}}.([]interface{}); ok {
{{.rawSlice}} = tmp1 {{.rawSlice}} = tmp1
} else {
{{.rawSlice}} = []interface{}{ {{.raw}} }
} }
} }
{{.result}} = make({{.type}}, len({{.rawSlice}})) {{.result}} = make({{.type}}, len({{.rawSlice}}))
@ -125,11 +133,11 @@ func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) s
} }
realResult := result realResult := result
if t.CastType != nil { if t.AliasedType != nil {
result = "castTmp" result = "castTmp"
} }
return tpl(`{{- if .t.CastType }} return tpl(`{{- if .t.AliasedType }}
var castTmp {{.t.FullName}} var castTmp {{.t.FullName}}
{{ end }} {{ end }}
{{- if eq .t.GoType "map[string]interface{}" }} {{- if eq .t.GoType "map[string]interface{}" }}
@ -139,8 +147,8 @@ func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) s
{{- else -}} {{- else -}}
err = (&{{.result}}).UnmarshalGQL({{.raw}}) err = (&{{.result}}).UnmarshalGQL({{.raw}})
{{- end }} {{- end }}
{{- if .t.CastType }} {{- if .t.AliasedType }}
{{ .realResult }} = {{.t.CastType.FullName}}(castTmp) {{ .realResult }} = {{.t.AliasedType.FullName}}(castTmp)
{{- end }}`, map[string]interface{}{ {{- end }}`, map[string]interface{}{
"realResult": realResult, "realResult": realResult,
"result": result, "result": result,
@ -150,7 +158,7 @@ func (t Type) unmarshal(result, raw string, remainingMods []string, depth int) s
} }
func (t Type) Marshal(val string) string { func (t Type) Marshal(val string) string {
if t.CastType != nil { if t.AliasedType != nil {
val = t.GoType + "(" + val + ")" val = t.GoType + "(" + val + ")"
} }

View File

@ -1,12 +1,10 @@
package codegen package codegen
import ( import (
"fmt"
"go/types" "go/types"
"strings" "strings"
"github.com/vektah/gqlgen/neelance/common" "github.com/vektah/gqlparser/ast"
"github.com/vektah/gqlgen/neelance/schema"
"golang.org/x/tools/go/loader" "golang.org/x/tools/go/loader"
) )
@ -20,7 +18,7 @@ func (cfg *Config) buildNamedTypes() NamedTypes {
t.IsUserDefined = true t.IsUserDefined = true
t.Package, t.GoType = pkgAndType(userEntry.Model) t.Package, t.GoType = pkgAndType(userEntry.Model)
} else if t.IsScalar { } else if t.IsScalar {
t.Package = "github.com/vektah/gqlgen/graphql" t.Package = "github.com/99designs/gqlgen/graphql"
t.GoType = "String" t.GoType = "String"
} }
@ -50,16 +48,16 @@ func (cfg *Config) bindTypes(imports *Imports, namedTypes NamedTypes, destDir st
// namedTypeFromSchema objects for every graphql type, including primitives. // namedTypeFromSchema objects for every graphql type, including primitives.
// don't recurse into object fields or interfaces yet, let's make sure we have collected everything first. // don't recurse into object fields or interfaces yet, let's make sure we have collected everything first.
func namedTypeFromSchema(schemaType schema.NamedType) *NamedType { func namedTypeFromSchema(schemaType *ast.Definition) *NamedType {
switch val := schemaType.(type) { switch schemaType.Kind {
case *schema.Scalar, *schema.Enum: case ast.Scalar, ast.Enum:
return &NamedType{GQLType: val.TypeName(), IsScalar: true} return &NamedType{GQLType: schemaType.Name, IsScalar: true}
case *schema.Interface, *schema.Union: case ast.Interface, ast.Union:
return &NamedType{GQLType: val.TypeName(), IsInterface: true} return &NamedType{GQLType: schemaType.Name, IsInterface: true}
case *schema.InputObject: case ast.InputObject:
return &NamedType{GQLType: val.TypeName(), IsInput: true} return &NamedType{GQLType: schemaType.Name, IsInput: true}
default: default:
return &NamedType{GQLType: val.TypeName()} return &NamedType{GQLType: schemaType.Name}
} }
} }
@ -73,40 +71,31 @@ func pkgAndType(name string) (string, string) {
return normalizeVendor(strings.Join(parts[:len(parts)-1], ".")), parts[len(parts)-1] return normalizeVendor(strings.Join(parts[:len(parts)-1], ".")), parts[len(parts)-1]
} }
func (n NamedTypes) getType(t common.Type) *Type { func (n NamedTypes) getType(t *ast.Type) *Type {
orig := t
var modifiers []string var modifiers []string
usePtr := true
for { for {
if _, nonNull := t.(*common.NonNull); nonNull { if t.Elem != nil {
usePtr = false modifiers = append(modifiers, modList)
} else if _, nonNull := t.(*common.List); nonNull { t = t.Elem
usePtr = true
} else { } else {
if usePtr { if !t.NonNull {
modifiers = append(modifiers, modPtr) modifiers = append(modifiers, modPtr)
} }
usePtr = true if n[t.NamedType] == nil {
panic("missing type " + t.NamedType)
} }
res := &Type{
switch val := t.(type) { NamedType: n[t.NamedType],
case *common.NonNull:
t = val.OfType
case *common.List:
modifiers = append(modifiers, modList)
t = val.OfType
case schema.NamedType:
t := &Type{
NamedType: n[val.TypeName()],
Modifiers: modifiers, Modifiers: modifiers,
ASTType: orig,
} }
if t.IsInterface { if res.IsInterface {
t.StripPtr() res.StripPtr()
} }
return t return res
default:
panic(fmt.Errorf("unknown type %T", t))
} }
} }
} }
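As a rough sketch of what the new getType loop produces, assuming the modList and modPtr constants (defined elsewhere) render as "[]" and "*" respectively:
// String      -> modifiers ["*"]        (nullable named type gets a pointer)
// String!     -> modifiers []           (non-null, no pointer)
// [String!]   -> modifiers ["[]"]       (list wrapper, non-null element)
// [String]!   -> modifiers ["[]", "*"]  (list wrapper, nullable element gets a pointer)
// Interface-typed results additionally have their pointer stripped via StripPtr.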

View File

@ -3,6 +3,7 @@ package codegen
import ( import (
"fmt" "fmt"
"go/types" "go/types"
"reflect"
"regexp" "regexp"
"strings" "strings"
@ -104,19 +105,50 @@ func findMethod(typ *types.Named, name string) *types.Func {
return nil return nil
} }
func findField(typ *types.Struct, name string) *types.Var { // findField attempts to match the name to a struct field with the following
// priorities:
// 1. If a struct tag is passed, the struct tag has the highest priority
// 2. Field in an embedded struct
// 3. Actual field name
func findField(typ *types.Struct, name, structTag string) (*types.Var, error) {
var foundField *types.Var
foundFieldWasTag := false
for i := 0; i < typ.NumFields(); i++ { for i := 0; i < typ.NumFields(); i++ {
field := typ.Field(i) field := typ.Field(i)
if structTag != "" {
tags := reflect.StructTag(typ.Tag(i))
if val, ok := tags.Lookup(structTag); ok {
if strings.EqualFold(val, name) {
if foundField != nil && foundFieldWasTag {
return nil, errors.Errorf("tag %s is ambigious; multiple fields have the same tag value of %s", structTag, val)
}
foundField = field
foundFieldWasTag = true
}
}
}
if field.Anonymous() { if field.Anonymous() {
if named, ok := field.Type().(*types.Struct); ok { if named, ok := field.Type().(*types.Struct); ok {
if f := findField(named, name); f != nil { f, err := findField(named, name, structTag)
return f if err != nil && !strings.HasPrefix(err.Error(), "no field named") {
return nil, err
}
if f != nil && foundField == nil {
foundField = f
} }
} }
if named, ok := field.Type().Underlying().(*types.Struct); ok { if named, ok := field.Type().Underlying().(*types.Struct); ok {
if f := findField(named, name); f != nil { f, err := findField(named, name, structTag)
return f if err != nil && !strings.HasPrefix(err.Error(), "no field named") {
return nil, err
}
if f != nil && foundField == nil {
foundField = f
} }
} }
} }
@ -125,11 +157,16 @@ func findField(typ *types.Struct, name string) *types.Var {
continue continue
} }
if strings.EqualFold(field.Name(), name) { if strings.EqualFold(field.Name(), name) && foundField == nil {
return field foundField = field
} }
} }
return nil
if foundField == nil {
return nil, fmt.Errorf("no field named %s", name)
}
return foundField, nil
} }
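A small sketch of that lookup priority (hypothetical types; it assumes the generator was configured with a "gqlgen" struct tag — the exact config key is not shown in this diff):
type base struct {
	Name string // would match "name" by field name
}

type user struct {
	base                            // embedded struct, searched recursively
	FullName string `gqlgen:"name"` // struct-tag match wins
}

// findField(userType, "name", "gqlgen") returns FullName: a tag match takes
// priority over the embedded base.Name and over any direct Name field.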
type BindError struct { type BindError struct {
@ -161,11 +198,15 @@ func (b BindErrors) Error() string {
return strings.Join(errs, "\n\n") return strings.Join(errs, "\n\n")
} }
func bindObject(t types.Type, object *Object, imports *Imports) BindErrors { func bindObject(t types.Type, object *Object, imports *Imports, structTag string) BindErrors {
var errs BindErrors var errs BindErrors
for i := range object.Fields { for i := range object.Fields {
field := &object.Fields[i] field := &object.Fields[i]
if field.ForceResolver {
continue
}
// first try binding to a method // first try binding to a method
methodErr := bindMethod(imports, t, field) methodErr := bindMethod(imports, t, field)
if methodErr == nil { if methodErr == nil {
@ -173,7 +214,7 @@ func bindObject(t types.Type, object *Object, imports *Imports) BindErrors {
} }
// otherwise try binding to a var // otherwise try binding to a var
varErr := bindVar(imports, t, field) varErr := bindVar(imports, t, field, structTag)
if varErr != nil { if varErr != nil {
errs = append(errs, BindError{ errs = append(errs, BindError{
@ -194,7 +235,11 @@ func bindMethod(imports *Imports, t types.Type, field *Field) error {
return fmt.Errorf("not a named type") return fmt.Errorf("not a named type")
} }
method := findMethod(namedType, field.GQLName) goName := field.GQLName
if field.GoFieldName != "" {
goName = field.GoFieldName
}
method := findMethod(namedType, goName)
if method == nil { if method == nil {
return fmt.Errorf("no method named %s", field.GQLName) return fmt.Errorf("no method named %s", field.GQLName)
} }
@ -216,20 +261,26 @@ func bindMethod(imports *Imports, t types.Type, field *Field) error {
} }
// success, args and return type match. Bind to method // success, args and return type match. Bind to method
field.GoMethodName = "obj." + method.Name() field.GoFieldType = GoFieldMethod
field.GoReceiverName = "obj"
field.GoFieldName = method.Name()
field.Args = newArgs field.Args = newArgs
return nil return nil
} }
func bindVar(imports *Imports, t types.Type, field *Field) error { func bindVar(imports *Imports, t types.Type, field *Field, structTag string) error {
underlying, ok := t.Underlying().(*types.Struct) underlying, ok := t.Underlying().(*types.Struct)
if !ok { if !ok {
return fmt.Errorf("not a struct") return fmt.Errorf("not a struct")
} }
structField := findField(underlying, field.GQLName) goName := field.GQLName
if structField == nil { if field.GoFieldName != "" {
return fmt.Errorf("no field named %s", field.GQLName) goName = field.GoFieldName
}
structField, err := findField(underlying, goName, structTag)
if err != nil {
return err
} }
if err := validateTypeBinding(imports, field, structField.Type()); err != nil { if err := validateTypeBinding(imports, field, structField.Type()); err != nil {
@ -237,7 +288,9 @@ func bindVar(imports *Imports, t types.Type, field *Field) error {
} }
// success, bind to var // success, bind to var
field.GoVarName = structField.Name() field.GoFieldType = GoFieldVariable
field.GoReceiverName = "obj"
field.GoFieldName = structField.Name()
return nil return nil
} }
@ -249,7 +302,9 @@ nextArg:
param := params.At(j) param := params.At(j)
for _, oldArg := range field.Args { for _, oldArg := range field.Args {
if strings.EqualFold(oldArg.GQLName, param.Name()) { if strings.EqualFold(oldArg.GQLName, param.Name()) {
if !field.ForceResolver {
oldArg.Type.Modifiers = modifiersFromGoType(param.Type()) oldArg.Type.Modifiers = modifiersFromGoType(param.Type())
}
newArgs = append(newArgs, oldArg) newArgs = append(newArgs, oldArg)
continue nextArg continue nextArg
} }
@ -276,7 +331,7 @@ func validateTypeBinding(imports *Imports, field *Field, goType types.Type) erro
field.Type.Modifiers = modifiersFromGoType(goType) field.Type.Modifiers = modifiersFromGoType(goType)
pkg, typ := pkgAndType(goType.String()) pkg, typ := pkgAndType(goType.String())
imp := imports.findByPath(pkg) imp := imports.findByPath(pkg)
field.CastType = &Ref{GoType: typ, Import: imp} field.AliasedType = &Ref{GoType: typ, Import: imp}
return nil return nil
} }
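In rough terms, the binding flow above means a schema field name: String! on type User is matched against the Go type in this order (a sketch, not taken from the diff):
// 1. A method: func (u *User) Name() string         -> GoFieldMethod, invoked as obj.Name()
// 2. A struct field: Name string (or a tag match)    -> GoFieldVariable, read as obj.Name
// 3. Neither, or the field is forced to a resolver in the models config
//    -> binding is skipped here and the field surfaces on the generated
//       UserResolver interface instead.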

View File

@ -0,0 +1,104 @@
package complexity
import (
"github.com/99designs/gqlgen/graphql"
"github.com/vektah/gqlparser/ast"
)
func Calculate(es graphql.ExecutableSchema, op *ast.OperationDefinition, vars map[string]interface{}) int {
walker := complexityWalker{
es: es,
schema: es.Schema(),
vars: vars,
}
return walker.selectionSetComplexity(op.SelectionSet)
}
type complexityWalker struct {
es graphql.ExecutableSchema
schema *ast.Schema
vars map[string]interface{}
}
func (cw complexityWalker) selectionSetComplexity(selectionSet ast.SelectionSet) int {
var complexity int
for _, selection := range selectionSet {
switch s := selection.(type) {
case *ast.Field:
fieldDefinition := cw.schema.Types[s.Definition.Type.Name()]
var childComplexity int
switch fieldDefinition.Kind {
case ast.Object, ast.Interface, ast.Union:
childComplexity = cw.selectionSetComplexity(s.SelectionSet)
}
args := s.ArgumentMap(cw.vars)
var fieldComplexity int
if s.ObjectDefinition.Kind == ast.Interface {
fieldComplexity = cw.interfaceFieldComplexity(s.ObjectDefinition, s.Name, childComplexity, args)
} else {
fieldComplexity = cw.fieldComplexity(s.ObjectDefinition.Name, s.Name, childComplexity, args)
}
complexity = safeAdd(complexity, fieldComplexity)
case *ast.FragmentSpread:
complexity = safeAdd(complexity, cw.selectionSetComplexity(s.Definition.SelectionSet))
case *ast.InlineFragment:
complexity = safeAdd(complexity, cw.selectionSetComplexity(s.SelectionSet))
}
}
return complexity
}
func (cw complexityWalker) interfaceFieldComplexity(def *ast.Definition, field string, childComplexity int, args map[string]interface{}) int {
// Interfaces don't have their own separate field costs, so they have to assume the worst case.
// We iterate over all implementors and choose the most expensive one.
maxComplexity := 0
implementors := cw.schema.GetPossibleTypes(def)
for _, t := range implementors {
fieldComplexity := cw.fieldComplexity(t.Name, field, childComplexity, args)
if fieldComplexity > maxComplexity {
maxComplexity = fieldComplexity
}
}
return maxComplexity
}
func (cw complexityWalker) fieldComplexity(object, field string, childComplexity int, args map[string]interface{}) int {
if customComplexity, ok := cw.es.Complexity(object, field, childComplexity, args); ok && customComplexity >= childComplexity {
return customComplexity
}
// default complexity calculation
return safeAdd(1, childComplexity)
}
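One concrete case of the custom-complexity hook (a sketch; the count argument and multiplier are illustrative):
// With a custom complexity of count*childComplexity registered for a list
// field, a query selecting it with count: 10 over children costing 3 is
// charged 10*3 = 30 rather than the default 1+3 = 4. A custom value smaller
// than childComplexity is ignored and the default calculation applies.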
const maxInt = int(^uint(0) >> 1)
// safeAdd is a saturating add of a and b that ignores negative operands.
// If a + b would overflow through normal Go addition,
// it returns the maximum integer value instead.
//
// Adding complexities with this function prevents attackers from intentionally
// overflowing the complexity calculation to allow overly-complex queries.
//
// It also helps mitigate the impact of custom complexities that accidentally
// return negative values.
func safeAdd(a, b int) int {
// Ignore negative operands.
if a < 0 {
if b < 0 {
return 1
}
return b
} else if b < 0 {
return a
}
c := a + b
if c < a {
// Set c to maximum integer instead of overflowing.
c = maxInt
}
return c
}
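A few concrete cases of the saturating addition, shown as a sketch (the wrapper function is hypothetical):
func safeAddExamples() {
	_ = safeAdd(2, 3)      // 5 — ordinary addition
	_ = safeAdd(-10, 7)    // 7 — a negative operand is ignored
	_ = safeAdd(-1, -2)    // 1 — both negative collapses to 1
	_ = safeAdd(maxInt, 1) // maxInt — saturates instead of wrapping around
}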

View File

@ -5,42 +5,49 @@ import (
"fmt" "fmt"
"sync" "sync"
"github.com/vektah/gqlgen/neelance/query" "github.com/vektah/gqlparser/ast"
"github.com/vektah/gqlparser/gqlerror"
) )
type Resolver func(ctx context.Context) (res interface{}, err error) type Resolver func(ctx context.Context) (res interface{}, err error)
type ResolverMiddleware func(ctx context.Context, next Resolver) (res interface{}, err error) type FieldMiddleware func(ctx context.Context, next Resolver) (res interface{}, err error)
type RequestMiddleware func(ctx context.Context, next func(ctx context.Context) []byte) []byte type RequestMiddleware func(ctx context.Context, next func(ctx context.Context) []byte) []byte
type RequestContext struct { type RequestContext struct {
RawQuery string RawQuery string
Variables map[string]interface{} Variables map[string]interface{}
Doc *query.Document Doc *ast.QueryDocument
// ErrorPresenter will be used to generate the error // ErrorPresenter will be used to generate the error
// message from errors given to Error(). // message from errors given to Error().
ErrorPresenter ErrorPresenterFunc ErrorPresenter ErrorPresenterFunc
Recover RecoverFunc Recover RecoverFunc
ResolverMiddleware ResolverMiddleware ResolverMiddleware FieldMiddleware
DirectiveMiddleware FieldMiddleware
RequestMiddleware RequestMiddleware RequestMiddleware RequestMiddleware
errorsMu sync.Mutex errorsMu sync.Mutex
Errors []*Error Errors gqlerror.List
} }
func DefaultResolverMiddleware(ctx context.Context, next Resolver) (res interface{}, err error) { func DefaultResolverMiddleware(ctx context.Context, next Resolver) (res interface{}, err error) {
return next(ctx) return next(ctx)
} }
func DefaultDirectiveMiddleware(ctx context.Context, next Resolver) (res interface{}, err error) {
return next(ctx)
}
func DefaultRequestMiddleware(ctx context.Context, next func(ctx context.Context) []byte) []byte { func DefaultRequestMiddleware(ctx context.Context, next func(ctx context.Context) []byte) []byte {
return next(ctx) return next(ctx)
} }
func NewRequestContext(doc *query.Document, query string, variables map[string]interface{}) *RequestContext { func NewRequestContext(doc *ast.QueryDocument, query string, variables map[string]interface{}) *RequestContext {
return &RequestContext{ return &RequestContext{
Doc: doc, Doc: doc,
RawQuery: query, RawQuery: query,
Variables: variables, Variables: variables,
ResolverMiddleware: DefaultResolverMiddleware, ResolverMiddleware: DefaultResolverMiddleware,
DirectiveMiddleware: DefaultDirectiveMiddleware,
RequestMiddleware: DefaultRequestMiddleware, RequestMiddleware: DefaultRequestMiddleware,
Recover: DefaultRecover, Recover: DefaultRecover,
ErrorPresenter: DefaultErrorPresenter, ErrorPresenter: DefaultErrorPresenter,
@ -68,54 +75,52 @@ func WithRequestContext(ctx context.Context, rc *RequestContext) context.Context
} }
type ResolverContext struct { type ResolverContext struct {
Parent *ResolverContext
// The name of the type this field belongs to // The name of the type this field belongs to
Object string Object string
// These are the args after processing, they can be mutated in middleware to change what the resolver will get. // These are the args after processing, they can be mutated in middleware to change what the resolver will get.
Args map[string]interface{} Args map[string]interface{}
// The raw field // The raw field
Field CollectedField Field CollectedField
// The path of fields to get to this resolver // The index of the array element in the path.
Path []interface{} Index *int
// The result object of the resolver
Result interface{}
} }
func (r *ResolverContext) PushField(alias string) { func (r *ResolverContext) Path() []interface{} {
r.Path = append(r.Path, alias) var path []interface{}
} for it := r; it != nil; it = it.Parent {
if it.Index != nil {
path = append(path, *it.Index)
} else if it.Field.Field != nil {
path = append(path, it.Field.Alias)
}
}
func (r *ResolverContext) PushIndex(index int) { // because we are walking up the chain, all the elements are backwards, do an inplace flip.
r.Path = append(r.Path, index) for i := len(path)/2 - 1; i >= 0; i-- {
} opp := len(path) - 1 - i
path[i], path[opp] = path[opp], path[i]
}
func (r *ResolverContext) Pop() { return path
r.Path = r.Path[0 : len(r.Path)-1]
} }
func GetResolverContext(ctx context.Context) *ResolverContext { func GetResolverContext(ctx context.Context) *ResolverContext {
val := ctx.Value(resolver) val, _ := ctx.Value(resolver).(*ResolverContext)
if val == nil { return val
return nil
}
return val.(*ResolverContext)
} }
func WithResolverContext(ctx context.Context, rc *ResolverContext) context.Context { func WithResolverContext(ctx context.Context, rc *ResolverContext) context.Context {
parent := GetResolverContext(ctx) rc.Parent = GetResolverContext(ctx)
rc.Path = nil
if parent != nil {
rc.Path = append(rc.Path, parent.Path...)
}
if rc.Field.Alias != "" {
rc.PushField(rc.Field.Alias)
}
return context.WithValue(ctx, resolver, rc) return context.WithValue(ctx, resolver, rc)
} }
// This is just a convenient wrapper method for CollectFields // This is just a convenient wrapper method for CollectFields
func CollectFieldsCtx(ctx context.Context, satisfies []string) []CollectedField { func CollectFieldsCtx(ctx context.Context, satisfies []string) []CollectedField {
reqctx := GetRequestContext(ctx)
resctx := GetResolverContext(ctx) resctx := GetResolverContext(ctx)
return CollectFields(reqctx.Doc, resctx.Field.Selections, satisfies, reqctx.Variables) return CollectFields(ctx, resctx.Field.Selections, satisfies)
} }
// Errorf sends an error string to the client, passing it through the formatter. // Errorf sends an error string to the client, passing it through the formatter.
@ -134,6 +139,34 @@ func (c *RequestContext) Error(ctx context.Context, err error) {
c.Errors = append(c.Errors, c.ErrorPresenter(ctx, err)) c.Errors = append(c.Errors, c.ErrorPresenter(ctx, err))
} }
// HasError returns true if the current field has already errored
func (c *RequestContext) HasError(rctx *ResolverContext) bool {
c.errorsMu.Lock()
defer c.errorsMu.Unlock()
path := rctx.Path()
for _, err := range c.Errors {
if equalPath(err.Path, path) {
return true
}
}
return false
}
func equalPath(a []interface{}, b []interface{}) bool {
if len(a) != len(b) {
return false
}
for i := 0; i < len(a); i++ {
if a[i] != b[i] {
return false
}
}
return true
}
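To make the parent-chain walk concrete (a sketch, not part of the change):
// For a query like { user { friends { name } } } where the resolver for the
// second friend's name fails, the ResolverContext chain nests roughly as
//   user -> friends -> (Index 1) -> name
// so rctx.Path() collects entries bottom-up and reverses them, producing
// []interface{}{"user", "friends", 1, "name"}. DefaultErrorPresenter stores
// that same path on the gqlerror, which is what HasError compares against to
// avoid reporting a non-null violation for a field that has already errored.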
// AddError is a convenience method for adding an error to the current response // AddError is a convenience method for adding an error to the current response
func AddError(ctx context.Context, err error) { func AddError(ctx context.Context, err error) {
GetRequestContext(ctx).Error(ctx, err) GetRequestContext(ctx).Error(ctx, err)

31
vendor/github.com/99designs/gqlgen/graphql/error.go generated vendored Normal file
View File

@ -0,0 +1,31 @@
package graphql
import (
"context"
"github.com/vektah/gqlparser/gqlerror"
)
type ErrorPresenterFunc func(context.Context, error) *gqlerror.Error
type ExtendedError interface {
Extensions() map[string]interface{}
}
func DefaultErrorPresenter(ctx context.Context, err error) *gqlerror.Error {
if gqlerr, ok := err.(*gqlerror.Error); ok {
gqlerr.Path = GetResolverContext(ctx).Path()
return gqlerr
}
var extensions map[string]interface{}
if ee, ok := err.(ExtendedError); ok {
extensions = ee.Extensions()
}
return &gqlerror.Error{
Message: err.Error(),
Path: GetResolverContext(ctx).Path(),
Extensions: extensions,
}
}

135
vendor/github.com/99designs/gqlgen/graphql/exec.go generated vendored Normal file
View File

@ -0,0 +1,135 @@
package graphql
import (
"context"
"fmt"
"github.com/vektah/gqlparser/ast"
)
type ExecutableSchema interface {
Schema() *ast.Schema
Complexity(typeName, fieldName string, childComplexity int, args map[string]interface{}) (int, bool)
Query(ctx context.Context, op *ast.OperationDefinition) *Response
Mutation(ctx context.Context, op *ast.OperationDefinition) *Response
Subscription(ctx context.Context, op *ast.OperationDefinition) func() *Response
}
func CollectFields(ctx context.Context, selSet ast.SelectionSet, satisfies []string) []CollectedField {
return collectFields(GetRequestContext(ctx), selSet, satisfies, map[string]bool{})
}
func collectFields(reqCtx *RequestContext, selSet ast.SelectionSet, satisfies []string, visited map[string]bool) []CollectedField {
var groupedFields []CollectedField
for _, sel := range selSet {
switch sel := sel.(type) {
case *ast.Field:
if !shouldIncludeNode(sel.Directives, reqCtx.Variables) {
continue
}
f := getOrCreateField(&groupedFields, sel.Alias, func() CollectedField {
return CollectedField{Field: sel}
})
f.Selections = append(f.Selections, sel.SelectionSet...)
case *ast.InlineFragment:
if !shouldIncludeNode(sel.Directives, reqCtx.Variables) || !instanceOf(sel.TypeCondition, satisfies) {
continue
}
for _, childField := range collectFields(reqCtx, sel.SelectionSet, satisfies, visited) {
f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
f.Selections = append(f.Selections, childField.Selections...)
}
case *ast.FragmentSpread:
if !shouldIncludeNode(sel.Directives, reqCtx.Variables) {
continue
}
fragmentName := sel.Name
if _, seen := visited[fragmentName]; seen {
continue
}
visited[fragmentName] = true
fragment := reqCtx.Doc.Fragments.ForName(fragmentName)
if fragment == nil {
// should never happen, validator has already run
panic(fmt.Errorf("missing fragment %s", fragmentName))
}
if !instanceOf(fragment.TypeCondition, satisfies) {
continue
}
for _, childField := range collectFields(reqCtx, fragment.SelectionSet, satisfies, visited) {
f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
f.Selections = append(f.Selections, childField.Selections...)
}
default:
panic(fmt.Errorf("unsupported %T", sel))
}
}
return groupedFields
}
type CollectedField struct {
*ast.Field
Selections ast.SelectionSet
}
func instanceOf(val string, satisfies []string) bool {
for _, s := range satisfies {
if val == s {
return true
}
}
return false
}
func getOrCreateField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField {
for i, cf := range *c {
if cf.Alias == name {
return &(*c)[i]
}
}
f := creator()
*c = append(*c, f)
return &(*c)[len(*c)-1]
}
func shouldIncludeNode(directives ast.DirectiveList, variables map[string]interface{}) bool {
skip, include := false, true
if d := directives.ForName("skip"); d != nil {
skip = resolveIfArgument(d, variables)
}
if d := directives.ForName("include"); d != nil {
include = resolveIfArgument(d, variables)
}
return !skip && include
}
func resolveIfArgument(d *ast.Directive, variables map[string]interface{}) bool {
arg := d.Arguments.ForName("if")
if arg == nil {
panic(fmt.Sprintf("%s: argument 'if' not defined", d.Name))
}
value, err := arg.Value.Value(variables)
if err != nil {
panic(err)
}
ret, ok := value.(bool)
if !ok {
panic(fmt.Sprintf("%s: argument 'if' is not a boolean", d.Name))
}
return ret
}
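The directive handling above boils down to the standard @skip/@include semantics (a sketch):
// For a selection such as
//   name @include(if: $withName) @skip(if: $hideName)
// shouldIncludeNode keeps the field only when the include argument evaluates
// to true AND the skip argument evaluates to false; with neither directive
// present the defaults are include=true, skip=false, so the field is kept.
// resolveIfArgument panics if the "if" argument is missing or not a boolean,
// which the validator is expected to have ruled out already.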

View File

@ -1,6 +1,7 @@
package graphql package graphql
import ( import (
"encoding/json"
"fmt" "fmt"
"io" "io"
"strconv" "strconv"
@ -18,8 +19,12 @@ func UnmarshalFloat(v interface{}) (float64, error) {
return strconv.ParseFloat(v, 64) return strconv.ParseFloat(v, 64)
case int: case int:
return float64(v), nil return float64(v), nil
case int64:
return float64(v), nil
case float64: case float64:
return v, nil return v, nil
case json.Number:
return strconv.ParseFloat(string(v), 64)
default: default:
return 0, fmt.Errorf("%T is not an float", v) return 0, fmt.Errorf("%T is not an float", v)
} }

View File

@ -1,6 +1,7 @@
package graphql package graphql
import ( import (
"encoding/json"
"fmt" "fmt"
"io" "io"
"strconv" "strconv"
@ -15,6 +16,8 @@ func UnmarshalID(v interface{}) (string, error) {
switch v := v.(type) { switch v := v.(type) {
case string: case string:
return v, nil return v, nil
case json.Number:
return string(v), nil
case int: case int:
return strconv.Itoa(v), nil return strconv.Itoa(v), nil
case float64: case float64:

View File

@ -1,6 +1,7 @@
package graphql package graphql
import ( import (
"encoding/json"
"fmt" "fmt"
"io" "io"
"strconv" "strconv"
@ -18,8 +19,10 @@ func UnmarshalInt(v interface{}) (int, error) {
return strconv.Atoi(v) return strconv.Atoi(v)
case int: case int:
return v, nil return v, nil
case float64: case int64:
return int(v), nil return int(v), nil
case json.Number:
return strconv.Atoi(string(v))
default: default:
return 0, fmt.Errorf("%T is not an int", v) return 0, fmt.Errorf("%T is not an int", v)
} }

View File

@ -0,0 +1,58 @@
// introspection implements the spec defined in https://github.com/facebook/graphql/blob/master/spec/Section%204%20--%20Introspection.md#schema-introspection
package introspection
import "github.com/vektah/gqlparser/ast"
type (
Directive struct {
Name string
Description string
Locations []string
Args []InputValue
}
EnumValue struct {
Name string
Description string
IsDeprecated bool
DeprecationReason string
}
Field struct {
Name string
Description string
Type *Type
Args []InputValue
IsDeprecated bool
DeprecationReason string
}
InputValue struct {
Name string
Description string
DefaultValue *string
Type *Type
}
)
func WrapSchema(schema *ast.Schema) *Schema {
return &Schema{schema: schema}
}
func isDeprecated(directives ast.DirectiveList) bool {
return directives.ForName("deprecated") != nil
}
func deprecationReason(directives ast.DirectiveList) string {
deprecation := directives.ForName("deprecated")
if deprecation == nil {
return ""
}
reason := deprecation.Arguments.ForName("reason")
if reason == nil {
return ""
}
return reason.Value.Raw
}
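A brief sketch of the deprecation helpers in use against a schema parsed with gqlparser (the schema contents and function names are illustrative, and it assumes the gqlparser package is imported alongside ast):
var exampleSchema = gqlparser.MustLoadSchema(&ast.Source{
	Name: "example.graphql",
	Input: `
		type Query {
			oldField: String @deprecated(reason: "use newField")
			newField: String
		}
	`,
})

func exampleDeprecation() {
	for _, f := range exampleSchema.Types["Query"].Fields {
		if f.Name == "oldField" {
			_ = isDeprecated(f.Directives)      // true
			_ = deprecationReason(f.Directives) // "use newField"
		}
	}
}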

View File

@ -0,0 +1,68 @@
package introspection
import (
"strings"
"github.com/vektah/gqlparser/ast"
)
type Schema struct {
schema *ast.Schema
}
func (s *Schema) Types() []Type {
var types []Type
for _, typ := range s.schema.Types {
if strings.HasPrefix(typ.Name, "__") {
continue
}
types = append(types, *WrapTypeFromDef(s.schema, typ))
}
return types
}
func (s *Schema) QueryType() *Type {
return WrapTypeFromDef(s.schema, s.schema.Query)
}
func (s *Schema) MutationType() *Type {
return WrapTypeFromDef(s.schema, s.schema.Mutation)
}
func (s *Schema) SubscriptionType() *Type {
return WrapTypeFromDef(s.schema, s.schema.Subscription)
}
func (s *Schema) Directives() []Directive {
var res []Directive
for _, d := range s.schema.Directives {
res = append(res, s.directiveFromDef(d))
}
return res
}
func (s *Schema) directiveFromDef(d *ast.DirectiveDefinition) Directive {
var locs []string
for _, loc := range d.Locations {
locs = append(locs, string(loc))
}
var args []InputValue
for _, arg := range d.Arguments {
args = append(args, InputValue{
Name: arg.Name,
Description: arg.Description,
DefaultValue: defaultValue(arg.DefaultValue),
Type: WrapTypeFromType(s.schema, arg.Type),
})
}
return Directive{
Name: d.Name,
Description: d.Description,
Locations: locs,
Args: args,
}
}

View File

@ -0,0 +1,174 @@
package introspection
import (
"strings"
"github.com/vektah/gqlparser/ast"
)
type Type struct {
schema *ast.Schema
def *ast.Definition
typ *ast.Type
}
func WrapTypeFromDef(s *ast.Schema, def *ast.Definition) *Type {
if def == nil {
return nil
}
return &Type{schema: s, def: def}
}
func WrapTypeFromType(s *ast.Schema, typ *ast.Type) *Type {
if typ == nil {
return nil
}
if !typ.NonNull && typ.NamedType != "" {
return &Type{schema: s, def: s.Types[typ.NamedType]}
}
return &Type{schema: s, typ: typ}
}
func (t *Type) Kind() string {
if t.typ != nil {
if t.typ.NonNull {
return "NON_NULL"
}
if t.typ.Elem != nil {
return "LIST"
}
} else {
return string(t.def.Kind)
}
panic("UNKNOWN")
}
func (t *Type) Name() *string {
if t.def == nil {
return nil
}
return &t.def.Name
}
func (t *Type) Description() string {
if t.def == nil {
return ""
}
return t.def.Description
}
func (t *Type) Fields(includeDeprecated bool) []Field {
if t.def == nil || (t.def.Kind != ast.Object && t.def.Kind != ast.Interface) {
return nil
}
var fields []Field
for _, f := range t.def.Fields {
if strings.HasPrefix(f.Name, "__") {
continue
}
var args []InputValue
for _, arg := range f.Arguments {
args = append(args, InputValue{
Type: WrapTypeFromType(t.schema, arg.Type),
Name: arg.Name,
Description: arg.Description,
DefaultValue: defaultValue(arg.DefaultValue),
})
}
fields = append(fields, Field{
Name: f.Name,
Description: f.Description,
Args: args,
Type: WrapTypeFromType(t.schema, f.Type),
IsDeprecated: isDeprecated(f.Directives),
DeprecationReason: deprecationReason(f.Directives),
})
}
return fields
}
func (t *Type) InputFields() []InputValue {
if t.def == nil || t.def.Kind != ast.InputObject {
return nil
}
var res []InputValue
for _, f := range t.def.Fields {
res = append(res, InputValue{
Name: f.Name,
Description: f.Description,
Type: WrapTypeFromType(t.schema, f.Type),
DefaultValue: defaultValue(f.DefaultValue),
})
}
return res
}
func defaultValue(value *ast.Value) *string {
if value == nil {
return nil
}
val := value.String()
return &val
}
func (t *Type) Interfaces() []Type {
if t.def == nil || t.def.Kind != ast.Object {
return nil
}
var res []Type
for _, intf := range t.def.Interfaces {
res = append(res, *WrapTypeFromDef(t.schema, t.schema.Types[intf]))
}
return res
}
func (t *Type) PossibleTypes() []Type {
if t.def == nil || (t.def.Kind != ast.Interface && t.def.Kind != ast.Union) {
return nil
}
var res []Type
for _, pt := range t.schema.GetPossibleTypes(t.def) {
res = append(res, *WrapTypeFromDef(t.schema, pt))
}
return res
}
func (t *Type) EnumValues(includeDeprecated bool) []EnumValue {
if t.def == nil || t.def.Kind != ast.Enum {
return nil
}
var res []EnumValue
for _, val := range t.def.EnumValues {
res = append(res, EnumValue{
Name: val.Name,
Description: val.Description,
IsDeprecated: isDeprecated(val.Directives),
DeprecationReason: deprecationReason(val.Directives),
})
}
return res
}
func (t *Type) OfType() *Type {
if t.typ == nil {
return nil
}
if t.typ.NonNull {
// fake non null nodes
cpy := *t.typ
cpy.NonNull = false
return WrapTypeFromType(t.schema, &cpy)
}
return WrapTypeFromType(t.schema, t.typ.Elem)
}

View File

@ -15,9 +15,9 @@ var closeBracket = []byte(`]`)
var colon = []byte(`:`) var colon = []byte(`:`)
var comma = []byte(`,`) var comma = []byte(`,`)
var Null = lit(nullLit) var Null = &lit{nullLit}
var True = lit(trueLit) var True = &lit{trueLit}
var False = lit(falseLit) var False = &lit{falseLit}
type Marshaler interface { type Marshaler interface {
MarshalGQL(w io.Writer) MarshalGQL(w io.Writer)
@ -76,8 +76,8 @@ func (a Array) MarshalGQL(writer io.Writer) {
writer.Write(closeBracket) writer.Write(closeBracket)
} }
func lit(b []byte) Marshaler { type lit struct{ b []byte }
return WriterFunc(func(w io.Writer) {
w.Write(b) func (l lit) MarshalGQL(w io.Writer) {
}) w.Write(l.b)
} }

View File

@ -4,15 +4,17 @@ import (
"context" "context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"github.com/vektah/gqlparser/gqlerror"
) )
type Response struct { type Response struct {
Data json.RawMessage `json:"data"` Data json.RawMessage `json:"data"`
Errors []*Error `json:"errors,omitempty"` Errors gqlerror.List `json:"errors,omitempty"`
} }
func ErrorResponse(ctx context.Context, messagef string, args ...interface{}) *Response { func ErrorResponse(ctx context.Context, messagef string, args ...interface{}) *Response {
return &Response{ return &Response{
Errors: []*Error{{Message: fmt.Sprintf(messagef, args...)}}, Errors: gqlerror.List{{Message: fmt.Sprintf(messagef, args...)}},
} }
} }

View File

@ -0,0 +1,3 @@
package graphql
const Version = "v0.5.1"

View File

@ -4,14 +4,18 @@ import (
"context" "context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io"
"net/http" "net/http"
"strings" "strings"
"github.com/99designs/gqlgen/complexity"
"github.com/99designs/gqlgen/graphql"
"github.com/gorilla/websocket" "github.com/gorilla/websocket"
"github.com/vektah/gqlgen/graphql" "github.com/hashicorp/golang-lru"
"github.com/vektah/gqlgen/neelance/errors" "github.com/vektah/gqlparser"
"github.com/vektah/gqlgen/neelance/query" "github.com/vektah/gqlparser/ast"
"github.com/vektah/gqlgen/neelance/validation" "github.com/vektah/gqlparser/gqlerror"
"github.com/vektah/gqlparser/validator"
) )
type params struct { type params struct {
@ -21,14 +25,16 @@ type params struct {
} }
type Config struct { type Config struct {
cacheSize int
upgrader websocket.Upgrader upgrader websocket.Upgrader
recover graphql.RecoverFunc recover graphql.RecoverFunc
errorPresenter graphql.ErrorPresenterFunc errorPresenter graphql.ErrorPresenterFunc
resolverHook graphql.ResolverMiddleware resolverHook graphql.FieldMiddleware
requestHook graphql.RequestMiddleware requestHook graphql.RequestMiddleware
complexityLimit int
} }
func (c *Config) newRequestContext(doc *query.Document, query string, variables map[string]interface{}) *graphql.RequestContext { func (c *Config) newRequestContext(doc *ast.QueryDocument, query string, variables map[string]interface{}) *graphql.RequestContext {
reqCtx := graphql.NewRequestContext(doc, query, variables) reqCtx := graphql.NewRequestContext(doc, query, variables)
if hook := c.recover; hook != nil { if hook := c.recover; hook != nil {
reqCtx.Recover = hook reqCtx.Recover = hook
@ -72,11 +78,17 @@ func ErrorPresenter(f graphql.ErrorPresenterFunc) Option {
} }
} }
// ComplexityLimit sets a maximum query complexity that is allowed to be executed.
// If a query is submitted that exceeds the limit, a 422 status code will be returned.
func ComplexityLimit(limit int) Option {
return func(cfg *Config) {
cfg.complexityLimit = limit
}
}
// ResolverMiddleware allows you to define a function that will be called around every resolver, // ResolverMiddleware allows you to define a function that will be called around every resolver,
// useful for tracing and logging. // useful for tracing and logging.
// It will only be called for user defined resolvers, any direct binding to models is assumed func ResolverMiddleware(middleware graphql.FieldMiddleware) Option {
// to cost nothing.
func ResolverMiddleware(middleware graphql.ResolverMiddleware) Option {
return func(cfg *Config) { return func(cfg *Config) {
if cfg.resolverHook == nil { if cfg.resolverHook == nil {
cfg.resolverHook = middleware cfg.resolverHook = middleware
@ -110,8 +122,19 @@ func RequestMiddleware(middleware graphql.RequestMiddleware) Option {
} }
} }
// CacheSize sets the maximum size of the query cache.
// If size is less than or equal to 0, the cache is disabled.
func CacheSize(size int) Option {
return func(cfg *Config) {
cfg.cacheSize = size
}
}
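Putting the new options together on the caller side when mounting the handler (a sketch; the gen package and Resolver type are placeholders for the generated code):
http.Handle("/query", handler.GraphQL(
	gen.NewExecutableSchema(gen.Config{Resolvers: &gen.Resolver{}}),
	handler.ComplexityLimit(300), // reject queries with complexity above 300 (HTTP 422)
	handler.CacheSize(2000),      // keep up to 2000 parsed queries in the LRU cache
))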
const DefaultCacheSize = 1000
func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc { func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc {
cfg := Config{ cfg := Config{
cacheSize: DefaultCacheSize,
upgrader: websocket.Upgrader{ upgrader: websocket.Upgrader{
ReadBufferSize: 1024, ReadBufferSize: 1024,
WriteBufferSize: 1024, WriteBufferSize: 1024,
@ -122,6 +145,17 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
option(&cfg) option(&cfg)
} }
var cache *lru.Cache
if cfg.cacheSize > 0 {
var err error
cache, err = lru.New(DefaultCacheSize)
if err != nil {
// An error is only returned for non-positive cache size
// and we already checked for that.
panic("unexpected error creating cache: " + err.Error())
}
}
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodOptions { if r.Method == http.MethodOptions {
w.Header().Set("Allow", "OPTIONS, GET, POST") w.Header().Set("Allow", "OPTIONS, GET, POST")
@ -141,13 +175,13 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
 			reqParams.OperationName = r.URL.Query().Get("operationName")
 			if variables := r.URL.Query().Get("variables"); variables != "" {
-				if err := json.Unmarshal([]byte(variables), &reqParams.Variables); err != nil {
+				if err := jsonDecode(strings.NewReader(variables), &reqParams.Variables); err != nil {
 					sendErrorf(w, http.StatusBadRequest, "variables could not be decoded")
 					return
 				}
 			}
 		case http.MethodPost:
-			if err := json.NewDecoder(r.Body).Decode(&reqParams); err != nil {
+			if err := jsonDecode(r.Body, &reqParams); err != nil {
 				sendErrorf(w, http.StatusBadRequest, "json body could not be decoded: "+err.Error())
 				return
 			}
@ -157,25 +191,42 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
 		}

 		w.Header().Set("Content-Type", "application/json")

-		doc, qErr := query.Parse(reqParams.Query)
-		if qErr != nil {
-			sendError(w, http.StatusUnprocessableEntity, qErr)
+		var doc *ast.QueryDocument
+		if cache != nil {
+			val, ok := cache.Get(reqParams.Query)
+			if ok {
+				doc = val.(*ast.QueryDocument)
+			}
+		}
+		if doc == nil {
+			var qErr gqlerror.List
+			doc, qErr = gqlparser.LoadQuery(exec.Schema(), reqParams.Query)
+			if len(qErr) > 0 {
+				sendError(w, http.StatusUnprocessableEntity, qErr...)
+				return
+			}
+			if cache != nil {
+				cache.Add(reqParams.Query, doc)
+			}
+		}
+
+		op := doc.Operations.ForName(reqParams.OperationName)
+		if op == nil {
+			sendErrorf(w, http.StatusUnprocessableEntity, "operation %s not found", reqParams.OperationName)
 			return
 		}

-		errs := validation.Validate(exec.Schema(), doc)
-		if len(errs) != 0 {
-			sendError(w, http.StatusUnprocessableEntity, errs...)
+		if op.Operation != ast.Query && r.Method == http.MethodGet {
+			sendErrorf(w, http.StatusUnprocessableEntity, "GET requests only allow query operations")
 			return
 		}

-		op, err := doc.GetOperation(reqParams.OperationName)
+		vars, err := validator.VariableValues(exec.Schema(), op, reqParams.Variables)
 		if err != nil {
-			sendErrorf(w, http.StatusUnprocessableEntity, err.Error())
+			sendError(w, http.StatusUnprocessableEntity, err)
 			return
 		}

-		reqCtx := cfg.newRequestContext(doc, reqParams.Query, reqParams.Variables)
+		reqCtx := cfg.newRequestContext(doc, reqParams.Query, vars)
 		ctx := graphql.WithRequestContext(r.Context(), reqCtx)
 		defer func() {
@ -185,14 +236,22 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
 			}
 		}()

-		switch op.Type {
-		case query.Query:
+		if cfg.complexityLimit > 0 {
+			queryComplexity := complexity.Calculate(exec, op, vars)
+			if queryComplexity > cfg.complexityLimit {
+				sendErrorf(w, http.StatusUnprocessableEntity, "query has complexity %d, which exceeds the limit of %d", queryComplexity, cfg.complexityLimit)
+				return
+			}
+		}
+
+		switch op.Operation {
+		case ast.Query:
 			b, err := json.Marshal(exec.Query(ctx, op))
 			if err != nil {
 				panic(err)
 			}
 			w.Write(b)
-		case query.Mutation:
+		case ast.Mutation:
 			b, err := json.Marshal(exec.Mutation(ctx, op))
 			if err != nil {
 				panic(err)
@ -204,26 +263,15 @@ func GraphQL(exec graphql.ExecutableSchema, options ...Option) http.HandlerFunc
 	})
 }

-func sendError(w http.ResponseWriter, code int, errors ...*errors.QueryError) {
-	w.WriteHeader(code)
-	var errs []*graphql.Error
-	for _, err := range errors {
-		var locations []graphql.ErrorLocation
-		for _, l := range err.Locations {
-			fmt.Println(graphql.ErrorLocation(l))
-			locations = append(locations, graphql.ErrorLocation{
-				Line:   l.Line,
-				Column: l.Column,
-			})
-		}
-		errs = append(errs, &graphql.Error{
-			Message:   err.Message,
-			Path:      err.Path,
-			Locations: locations,
-		})
-	}
-	b, err := json.Marshal(&graphql.Response{Errors: errs})
+func jsonDecode(r io.Reader, val interface{}) error {
+	dec := json.NewDecoder(r)
+	dec.UseNumber()
+	return dec.Decode(val)
+}
+
+func sendError(w http.ResponseWriter, code int, errors ...*gqlerror.Error) {
+	w.WriteHeader(code)
+	b, err := json.Marshal(&graphql.Response{Errors: errors})
 	if err != nil {
 		panic(err)
 	}
@ -231,5 +279,5 @@ func sendError(w http.ResponseWriter, code int, errors ...*errors.QueryError) {
 }

 func sendErrorf(w http.ResponseWriter, code int, format string, args ...interface{}) {
-	sendError(w, code, &errors.QueryError{Message: fmt.Sprintf(format, args...)})
+	sendError(w, code, &gqlerror.Error{Message: fmt.Sprintf(format, args...)})
 }
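For reference, a minimal sketch of how an application might wire up the new CacheSize and ComplexityLimit options; the serve helper and the port are illustrative, and es stands in for whatever ExecutableSchema your generated package exposes.

```go
package main

import (
	"log"
	"net/http"

	"github.com/99designs/gqlgen/graphql"
	"github.com/99designs/gqlgen/handler"
)

// serve is illustrative; es is whatever ExecutableSchema constructor your
// generated package provides.
func serve(es graphql.ExecutableSchema) {
	http.Handle("/query", handler.GraphQL(es,
		// Cache up to 512 parsed and validated queries; 0 or less disables the cache.
		handler.CacheSize(512),
		// Reject queries whose calculated complexity exceeds 300 with a 422 response.
		handler.ComplexityLimit(300),
	))
	log.Fatal(http.ListenAndServe(":8080", nil))
}
```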

View File

@ -30,6 +30,9 @@ var page = template.Must(template.New("graphiql").Parse(`<!DOCTYPE html>
 			GraphQLPlayground.init(root, {
 				endpoint: location.protocol + '//' + location.host + '{{.endpoint}}',
 				subscriptionsEndpoint: wsProto + '//' + location.host + '{{.endpoint }}',
+				settings: {
+					'request.credentials': 'same-origin'
+				}
 			})
 		})
 	</script>
@ -42,7 +45,7 @@ func Playground(title string, endpoint string) http.HandlerFunc {
 	err := page.Execute(w, map[string]string{
 		"title":    title,
 		"endpoint": endpoint,
-		"version":  "1.4.3",
+		"version":  "1.6.2",
 	})
 	if err != nil {
 		panic(err)
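A short sketch of serving the playground next to the query endpoint; the mux paths and title are arbitrary. With the settings block added above, the playground sends cookies on same-origin requests.

```go
package main

import (
	"net/http"

	"github.com/99designs/gqlgen/graphql"
	"github.com/99designs/gqlgen/handler"
)

// mount is illustrative; the title and paths are arbitrary.
func mount(mux *http.ServeMux, es graphql.ExecutableSchema) {
	// Playground(title, endpoint) serves the GraphQL Playground page pointed
	// at the query endpoint registered below.
	mux.Handle("/", handler.Playground("My API", "/query"))
	mux.Handle("/query", handler.GraphQL(es))
}
```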

vendor/github.com/99designs/gqlgen/handler/stub.go generated vendored Normal file
View File

@ -0,0 +1,51 @@
package handler
import (
"context"
"github.com/99designs/gqlgen/graphql"
"github.com/vektah/gqlparser"
"github.com/vektah/gqlparser/ast"
)
type executableSchemaStub struct {
NextResp chan struct{}
}
var _ graphql.ExecutableSchema = &executableSchemaStub{}
func (e *executableSchemaStub) Schema() *ast.Schema {
return gqlparser.MustLoadSchema(&ast.Source{Input: `
schema { query: Query }
type Query {
me: User!
user(id: Int): User!
}
type User { name: String! }
`})
}
func (e *executableSchemaStub) Complexity(typeName, field string, childComplexity int, args map[string]interface{}) (int, bool) {
return 0, false
}
func (e *executableSchemaStub) Query(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
return &graphql.Response{Data: []byte(`{"name":"test"}`)}
}
func (e *executableSchemaStub) Mutation(ctx context.Context, op *ast.OperationDefinition) *graphql.Response {
return graphql.ErrorResponse(ctx, "mutations are not supported")
}
func (e *executableSchemaStub) Subscription(ctx context.Context, op *ast.OperationDefinition) func() *graphql.Response {
return func() *graphql.Response {
select {
case <-ctx.Done():
return nil
case <-e.NextResp:
return &graphql.Response{
Data: []byte(`{"name":"test"}`),
}
}
}
}

View File

@ -1,6 +1,7 @@
 package handler

 import (
+	"bytes"
 	"context"
 	"encoding/json"
 	"fmt"
@ -8,11 +9,12 @@ import (
"net/http" "net/http"
"sync" "sync"
"github.com/99designs/gqlgen/graphql"
"github.com/gorilla/websocket" "github.com/gorilla/websocket"
"github.com/vektah/gqlgen/graphql" "github.com/vektah/gqlparser"
"github.com/vektah/gqlgen/neelance/errors" "github.com/vektah/gqlparser/ast"
"github.com/vektah/gqlgen/neelance/query" "github.com/vektah/gqlparser/gqlerror"
"github.com/vektah/gqlgen/neelance/validation" "github.com/vektah/gqlparser/validator"
) )
const ( const (
@ -113,7 +115,7 @@ func (c *wsConnection) run() {
 		closer := c.active[message.ID]
 		c.mu.Unlock()
 		if closer == nil {
-			c.sendError(message.ID, errors.Errorf("%s is not running, cannot stop", message.ID))
+			c.sendError(message.ID, gqlerror.Errorf("%s is not running, cannot stop", message.ID))
 			continue
 		}
@ -131,35 +133,34 @@ func (c *wsConnection) run() {
 func (c *wsConnection) subscribe(message *operationMessage) bool {
 	var reqParams params
-	if err := json.Unmarshal(message.Payload, &reqParams); err != nil {
+	if err := jsonDecode(bytes.NewReader(message.Payload), &reqParams); err != nil {
 		c.sendConnectionError("invalid json")
 		return false
 	}

-	doc, qErr := query.Parse(reqParams.Query)
+	doc, qErr := gqlparser.LoadQuery(c.exec.Schema(), reqParams.Query)
 	if qErr != nil {
-		c.sendError(message.ID, qErr)
+		c.sendError(message.ID, qErr...)
 		return true
 	}

-	errs := validation.Validate(c.exec.Schema(), doc)
-	if len(errs) != 0 {
-		c.sendError(message.ID, errs...)
+	op := doc.Operations.ForName(reqParams.OperationName)
+	if op == nil {
+		c.sendError(message.ID, gqlerror.Errorf("operation %s not found", reqParams.OperationName))
 		return true
 	}

-	op, err := doc.GetOperation(reqParams.OperationName)
+	vars, err := validator.VariableValues(c.exec.Schema(), op, reqParams.Variables)
 	if err != nil {
-		c.sendError(message.ID, errors.Errorf("%s", err.Error()))
+		c.sendError(message.ID, err)
 		return true
 	}

-	reqCtx := c.cfg.newRequestContext(doc, reqParams.Query, reqParams.Variables)
+	reqCtx := c.cfg.newRequestContext(doc, reqParams.Query, vars)
 	ctx := graphql.WithRequestContext(c.ctx, reqCtx)

-	if op.Type != query.Subscription {
+	if op.Operation != ast.Subscription {
 		var result *graphql.Response
-		if op.Type == query.Query {
+		if op.Operation == ast.Query {
 			result = c.exec.Query(ctx, op)
 		} else {
 			result = c.exec.Mutation(ctx, op)
@ -178,7 +179,7 @@ func (c *wsConnection) subscribe(message *operationMessage) bool {
 		defer func() {
 			if r := recover(); r != nil {
 				userErr := reqCtx.Recover(ctx, r)
-				c.sendError(message.ID, &errors.QueryError{Message: userErr.Error()})
+				c.sendError(message.ID, &gqlerror.Error{Message: userErr.Error()})
 			}
 		}()
 		next := c.exec.Subscription(ctx, op)
@ -200,14 +201,14 @@ func (c *wsConnection) subscribe(message *operationMessage) bool {
 func (c *wsConnection) sendData(id string, response *graphql.Response) {
 	b, err := json.Marshal(response)
 	if err != nil {
-		c.sendError(id, errors.Errorf("unable to encode json response: %s", err.Error()))
+		c.sendError(id, gqlerror.Errorf("unable to encode json response: %s", err.Error()))
 		return
 	}

 	c.write(&operationMessage{Type: dataMsg, ID: id, Payload: b})
 }

-func (c *wsConnection) sendError(id string, errors ...*errors.QueryError) {
+func (c *wsConnection) sendError(id string, errors ...*gqlerror.Error) {
 	var errs []error
 	for _, err := range errors {
 		errs = append(errs, err)
@ -220,7 +221,7 @@ func (c *wsConnection) sendError(id string, errors ...*errors.QueryError) {
 }

 func (c *wsConnection) sendConnectionError(format string, args ...interface{}) {
-	b, err := json.Marshal(&graphql.Error{Message: fmt.Sprintf(format, args...)})
+	b, err := json.Marshal(&gqlerror.Error{Message: fmt.Sprintf(format, args...)})
 	if err != nil {
 		panic(err)
 	}
@ -229,11 +230,17 @@ func (c *wsConnection) sendConnectionError(format string, args ...interface{}) {
 }

 func (c *wsConnection) readOp() *operationMessage {
-	message := operationMessage{}
-	if err := c.conn.ReadJSON(&message); err != nil {
+	_, r, err := c.conn.NextReader()
+	if err != nil {
 		c.sendConnectionError("invalid json")
 		return nil
 	}
+	message := operationMessage{}
+	if err := jsonDecode(r, &message); err != nil {
+		c.sendConnectionError("invalid json")
+		return nil
+	}

 	return &message
 }
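The HTTP and websocket paths above now share the same gqlparser pipeline: LoadQuery parses and validates against the schema, Operations.ForName selects the operation, and validator.VariableValues coerces the variables. A standalone sketch with a toy schema and query (both made up for illustration):

```go
package main

import (
	"fmt"

	"github.com/vektah/gqlparser"
	"github.com/vektah/gqlparser/ast"
	"github.com/vektah/gqlparser/validator"
)

func main() {
	// Toy schema, made up for illustration.
	schema := gqlparser.MustLoadSchema(&ast.Source{Input: `
		schema { query: Query }
		type Query { user(name: String): String! }
	`})

	// Parse and validate the incoming query against the schema in one call.
	doc, errs := gqlparser.LoadQuery(schema, `query getUser($name: String) { user(name: $name) }`)
	if len(errs) > 0 {
		panic(errs)
	}

	// An empty name selects the only operation in the document.
	op := doc.Operations.ForName("")

	// Coerce and validate the raw variables for that operation.
	vars, err := validator.VariableValues(schema, op, map[string]interface{}{"name": "bob"})
	if err != nil {
		panic(err)
	}
	fmt.Println(op.Operation, vars) // query map[name:bob]
}
```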

View File

@ -0,0 +1,37 @@
package gopath
import (
"fmt"
"go/build"
"path/filepath"
"strings"
)
var NotFound = fmt.Errorf("not on GOPATH")
// Contains returns true if the given directory is in the GOPATH
func Contains(dir string) bool {
_, err := Dir2Import(dir)
return err == nil
}
// Dir2Import takes an *absolute* path and returns a golang import path for the package, and returns an error if it isn't on the gopath
func Dir2Import(dir string) (string, error) {
dir = filepath.ToSlash(dir)
for _, gopath := range filepath.SplitList(build.Default.GOPATH) {
gopath = filepath.ToSlash(filepath.Join(gopath, "src"))
if len(gopath) < len(dir) && strings.EqualFold(gopath, dir[0:len(gopath)]) {
return dir[len(gopath)+1:], nil
}
}
return "", NotFound
}
// MustDir2Import takes an *absolute* path and returns a golang import path for the package, and panics if it isn't on the gopath
func MustDir2Import(dir string) string {
pkg, err := Dir2Import(dir)
if err != nil {
panic(err)
}
return pkg
}
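A sketch of what Dir2Import and Contains do for a typical layout, as it might appear in a test inside gqlgen itself; the GOPATH and project directory below are made up, and this is an internal package, so only code within gqlgen can import it.

```go
package gopath

import "testing"

// Illustrative only: assumes GOPATH=/home/me/go.
func TestDir2ImportExample(t *testing.T) {
	pkg, err := Dir2Import("/home/me/go/src/github.com/example/project")
	if err != nil {
		t.Skip("the hypothetical directory is not on this machine's GOPATH")
	}
	if pkg != "github.com/example/project" {
		t.Fatalf("got %q", pkg)
	}

	if Contains("/definitely/not/on/gopath") {
		t.Fatal("expected Contains to report false for a directory outside GOPATH")
	}
}
```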

vendor/github.com/agnivade/levenshtein/.gitignore generated vendored Normal file
View File

@ -0,0 +1,5 @@
coverage.txt
fuzz/fuzz-fuzz.zip
fuzz/corpus/corpus/*
fuzz/corpus/suppressions/*
fuzz/corpus/crashes/*

vendor/github.com/agnivade/levenshtein/.travis.yml generated vendored Normal file
View File

@ -0,0 +1,7 @@
language: go
go:
- 1.8.x
- 1.9.x
- 1.10.x
- tip

vendor/github.com/agnivade/levenshtein/License.txt generated vendored Normal file
View File

@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2015 Agniva De Sarker
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

vendor/github.com/agnivade/levenshtein/Makefile generated vendored Normal file
View File

@ -0,0 +1,13 @@
all: test install
install:
go install
lint:
gofmt -l -s -w . && go tool vet -all . && golint
test:
go test -race -v -coverprofile=coverage.txt -covermode=atomic
bench:
go test -run=XXX -bench=. -benchmem

vendor/github.com/agnivade/levenshtein/README.md generated vendored Normal file
View File

@ -0,0 +1,57 @@
levenshtein [![Build Status](https://travis-ci.org/agnivade/levenshtein.svg?branch=master)](https://travis-ci.org/agnivade/levenshtein) [![Go Report Card](https://goreportcard.com/badge/github.com/agnivade/levenshtein)](https://goreportcard.com/report/github.com/agnivade/levenshtein) [![GoDoc](https://godoc.org/github.com/agnivade/levenshtein?status.svg)](https://godoc.org/github.com/agnivade/levenshtein)
===========
[Go](http://golang.org) package to calculate the [Levenshtein Distance](http://en.wikipedia.org/wiki/Levenshtein_distance)
The library is fully capable of working with non-ASCII strings, but the strings are not normalized; that is left to the caller. Please normalize the strings before passing them to the library if you have such a requirement.
- https://blog.golang.org/normalization
Install
-------
go get github.com/agnivade/levenshtein
Example
-------
```go
package main
import (
"fmt"
"github.com/agnivade/levenshtein"
)
func main() {
s1 := "kitten"
s2 := "sitting"
distance := levenshtein.ComputeDistance(s1, s2)
fmt.Printf("The distance between %s and %s is %d.\n", s1, s2, distance)
// Output:
// The distance between kitten and sitting is 3.
}
```
Benchmarks
----------
```
name time/op
Simple/ASCII-4 537ns ± 2%
Simple/French-4 956ns ± 0%
Simple/Nordic-4 1.95µs ± 1%
Simple/Tibetan-4 1.53µs ± 2%
name alloc/op
Simple/ASCII-4 96.0B ± 0%
Simple/French-4 128B ± 0%
Simple/Nordic-4 192B ± 0%
Simple/Tibetan-4 144B ± 0%
name allocs/op
Simple/ASCII-4 1.00 ± 0%
Simple/French-4 1.00 ± 0%
Simple/Nordic-4 1.00 ± 0%
Simple/Tibetan-4 1.00 ± 0%
```

vendor/github.com/agnivade/levenshtein/go.mod generated vendored Normal file
View File

@ -0,0 +1 @@
module github.com/agnivade/levenshtein

vendor/github.com/agnivade/levenshtein/levenshtein.go generated vendored Normal file
View File

@ -0,0 +1,71 @@
// Package levenshtein is a Go implementation to calculate Levenshtein Distance.
//
// Implementation taken from
// https://gist.github.com/andrei-m/982927#gistcomment-1931258
package levenshtein
// ComputeDistance computes the levenshtein distance between the two
// strings passed as an argument. The return value is the levenshtein distance.
//
// Works on runes (Unicode code points) but does not normalize
// the input strings. See https://blog.golang.org/normalization
// and the golang.org/x/text/unicode/norm package.
func ComputeDistance(a, b string) int {
if a == b {
return 0
}
// We need to convert to []rune if the strings are non-ascii.
// This could be avoided by using utf8.RuneCountInString
// and then doing some juggling with rune indices.
// The primary challenge is keeping track of the previous rune.
// With a range loop, it's not that easy. And with a for-loop
// we need to keep track of the inter-rune width using utf8.DecodeRuneInString
s1 := []rune(a)
s2 := []rune(b)
// swap to save some memory O(min(a,b)) instead of O(a)
if len(s1) > len(s2) {
s1, s2 = s2, s1
}
lenS1 := len(s1)
lenS2 := len(s2)
// init the row
x := make([]int, lenS1+1)
for i := 0; i <= lenS1; i++ {
x[i] = i
}
// fill in the rest
for i := 1; i <= lenS2; i++ {
prev := i
var current int
for j := 1; j <= lenS1; j++ {
if s2[i-1] == s1[j-1] {
current = x[j-1] // match
} else {
current = min(x[j-1]+1, prev+1, x[j]+1)
}
x[j-1] = prev
prev = current
}
x[lenS1] = prev
}
return x[lenS1]
}
func min(a, b, c int) int {
if a < b {
if a < c {
return a
}
} else {
if b < c {
return b
}
}
return c
}
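Because ComputeDistance deliberately skips normalization, callers comparing user-visible text may want to normalize first. A sketch using golang.org/x/text/unicode/norm (not a dependency of this package, shown only for illustration):

```go
package main

import (
	"fmt"

	"github.com/agnivade/levenshtein"
	"golang.org/x/text/unicode/norm"
)

func main() {
	a := "café"       // 'é' as a single precomposed rune
	b := "cafe\u0301" // 'e' followed by a combining acute accent

	// Compared rune by rune the encodings differ, so the distance is 2
	// (substitute 'é' for 'e', then insert the combining accent).
	fmt.Println(levenshtein.ComputeDistance(a, b)) // 2

	// Normalizing both inputs to NFC first makes them identical.
	fmt.Println(levenshtein.ComputeDistance(norm.NFC.String(a), norm.NFC.String(b))) // 0
}
```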

vendor/github.com/hashicorp/golang-lru/.gitignore generated vendored Normal file
View File

@ -0,0 +1,23 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test

vendor/github.com/hashicorp/golang-lru/2q.go generated vendored Normal file
View File

@ -0,0 +1,223 @@
package lru
import (
"fmt"
"sync"
"github.com/hashicorp/golang-lru/simplelru"
)
const (
// Default2QRecentRatio is the ratio of the 2Q cache dedicated
// to recently added entries that have only been accessed once.
Default2QRecentRatio = 0.25
// Default2QGhostEntries is the default ratio of ghost
// entries kept to track entries recently evicted
Default2QGhostEntries = 0.50
)
// TwoQueueCache is a thread-safe fixed size 2Q cache.
// 2Q is an enhancement over the standard LRU cache
// in that it tracks both frequently and recently used
// entries separately. This avoids a burst in access to new
// entries from evicting frequently used entries. It adds some
// additional tracking overhead to the standard LRU cache, and is
// computationally about 2x the cost, and adds some metadata overhead.
// The ARCCache is similar, but does not require setting any
// parameters.
type TwoQueueCache struct {
size int
recentSize int
recent simplelru.LRUCache
frequent simplelru.LRUCache
recentEvict simplelru.LRUCache
lock sync.RWMutex
}
// New2Q creates a new TwoQueueCache using the default
// values for the parameters.
func New2Q(size int) (*TwoQueueCache, error) {
return New2QParams(size, Default2QRecentRatio, Default2QGhostEntries)
}
// New2QParams creates a new TwoQueueCache using the provided
// parameter values.
func New2QParams(size int, recentRatio float64, ghostRatio float64) (*TwoQueueCache, error) {
if size <= 0 {
return nil, fmt.Errorf("invalid size")
}
if recentRatio < 0.0 || recentRatio > 1.0 {
return nil, fmt.Errorf("invalid recent ratio")
}
if ghostRatio < 0.0 || ghostRatio > 1.0 {
return nil, fmt.Errorf("invalid ghost ratio")
}
// Determine the sub-sizes
recentSize := int(float64(size) * recentRatio)
evictSize := int(float64(size) * ghostRatio)
// Allocate the LRUs
recent, err := simplelru.NewLRU(size, nil)
if err != nil {
return nil, err
}
frequent, err := simplelru.NewLRU(size, nil)
if err != nil {
return nil, err
}
recentEvict, err := simplelru.NewLRU(evictSize, nil)
if err != nil {
return nil, err
}
// Initialize the cache
c := &TwoQueueCache{
size: size,
recentSize: recentSize,
recent: recent,
frequent: frequent,
recentEvict: recentEvict,
}
return c, nil
}
// Get looks up a key's value from the cache.
func (c *TwoQueueCache) Get(key interface{}) (value interface{}, ok bool) {
c.lock.Lock()
defer c.lock.Unlock()
// Check if this is a frequent value
if val, ok := c.frequent.Get(key); ok {
return val, ok
}
// If the value is contained in recent, then we
// promote it to frequent
if val, ok := c.recent.Peek(key); ok {
c.recent.Remove(key)
c.frequent.Add(key, val)
return val, ok
}
// No hit
return nil, false
}
// Add adds a value to the cache.
func (c *TwoQueueCache) Add(key, value interface{}) {
c.lock.Lock()
defer c.lock.Unlock()
// Check if the value is frequently used already,
// and just update the value
if c.frequent.Contains(key) {
c.frequent.Add(key, value)
return
}
// Check if the value is recently used, and promote
// the value into the frequent list
if c.recent.Contains(key) {
c.recent.Remove(key)
c.frequent.Add(key, value)
return
}
// If the value was recently evicted, add it to the
// frequently used list
if c.recentEvict.Contains(key) {
c.ensureSpace(true)
c.recentEvict.Remove(key)
c.frequent.Add(key, value)
return
}
// Add to the recently seen list
c.ensureSpace(false)
c.recent.Add(key, value)
return
}
// ensureSpace is used to ensure we have space in the cache
func (c *TwoQueueCache) ensureSpace(recentEvict bool) {
// If we have space, nothing to do
recentLen := c.recent.Len()
freqLen := c.frequent.Len()
if recentLen+freqLen < c.size {
return
}
// If the recent buffer is larger than
// the target, evict from there
if recentLen > 0 && (recentLen > c.recentSize || (recentLen == c.recentSize && !recentEvict)) {
k, _, _ := c.recent.RemoveOldest()
c.recentEvict.Add(k, nil)
return
}
// Remove from the frequent list otherwise
c.frequent.RemoveOldest()
}
// Len returns the number of items in the cache.
func (c *TwoQueueCache) Len() int {
c.lock.RLock()
defer c.lock.RUnlock()
return c.recent.Len() + c.frequent.Len()
}
// Keys returns a slice of the keys in the cache.
// The frequently used keys are first in the returned slice.
func (c *TwoQueueCache) Keys() []interface{} {
c.lock.RLock()
defer c.lock.RUnlock()
k1 := c.frequent.Keys()
k2 := c.recent.Keys()
return append(k1, k2...)
}
// Remove removes the provided key from the cache.
func (c *TwoQueueCache) Remove(key interface{}) {
c.lock.Lock()
defer c.lock.Unlock()
if c.frequent.Remove(key) {
return
}
if c.recent.Remove(key) {
return
}
if c.recentEvict.Remove(key) {
return
}
}
// Purge is used to completely clear the cache.
func (c *TwoQueueCache) Purge() {
c.lock.Lock()
defer c.lock.Unlock()
c.recent.Purge()
c.frequent.Purge()
c.recentEvict.Purge()
}
// Contains is used to check if the cache contains a key
// without updating recency or frequency.
func (c *TwoQueueCache) Contains(key interface{}) bool {
c.lock.RLock()
defer c.lock.RUnlock()
return c.frequent.Contains(key) || c.recent.Contains(key)
}
// Peek is used to inspect the cache value of a key
// without updating recency or frequency.
func (c *TwoQueueCache) Peek(key interface{}) (value interface{}, ok bool) {
c.lock.RLock()
defer c.lock.RUnlock()
if val, ok := c.frequent.Peek(key); ok {
return val, ok
}
return c.recent.Peek(key)
}
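gqlgen's handler only uses the plain Cache type, but the 2Q variant is used the same way. A minimal usage sketch with an arbitrary size and key:

```go
package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// 128 total entries, split between the recent and frequent queues
	// using the default ratios.
	cache, err := lru.New2Q(128)
	if err != nil {
		panic(err)
	}

	cache.Add("greeting", "hello")

	// The first Get promotes the entry from the recent to the frequent queue.
	if v, ok := cache.Get("greeting"); ok {
		fmt.Println(v) // hello
	}
}
```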

vendor/github.com/hashicorp/golang-lru/LICENSE generated vendored Normal file
View File

@ -0,0 +1,362 @@
Mozilla Public License, version 2.0
1. Definitions
1.1. "Contributor"
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. "Incompatible With Secondary Licenses"
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the terms of
a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in a
separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible, whether
at the time of the initial grant or subsequently, any and all of the
rights conveyed by this License.
1.10. "Modifications"
means any of the following:
a. any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the License,
by the making, using, selling, offering for sale, having made, import,
or transfer of either its Contributions or its Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, "control" means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights to
grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter the
recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty, or
limitations of liability) contained within the Source Code Form of the
Covered Software, except that You may alter any license notices to the
extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute,
judicial order, or regulation then You must: (a) comply with the terms of
this License to the maximum extent possible; and (b) describe the
limitations and the code they affect. Such description must be placed in a
text file included with all distributions of the Covered Software under
this License. Except to the extent prohibited by statute or regulation,
such description must be sufficiently detailed for a recipient of ordinary
skill to be able to understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing
basis, if such Contributor fails to notify You of the non-compliance by
some reasonable means prior to 60 days after You have come back into
compliance. Moreover, Your grants from a particular Contributor are
reinstated on an ongoing basis if such Contributor notifies You of the
non-compliance by some reasonable means, this is the first time You have
received notice of non-compliance with this License from such
Contributor, and You become compliant prior to 30 days after Your receipt
of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an "as is" basis,
without warranty of any kind, either expressed, implied, or statutory,
including, without limitation, warranties that the Covered Software is free
of defects, merchantable, fit for a particular purpose or non-infringing.
The entire risk as to the quality and performance of the Covered Software
is with You. Should any Covered Software prove defective in any respect,
You (not any Contributor) assume the cost of any necessary servicing,
repair, or correction. This disclaimer of warranty constitutes an essential
part of this License. No use of any Covered Software is authorized under
this License except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from
such party's negligence to the extent applicable law prohibits such
limitation. Some jurisdictions do not allow the exclusion or limitation of
incidental or consequential damages, so this exclusion and limitation may
not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts
of a jurisdiction where the defendant maintains its principal place of
business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions. Nothing
in this Section shall prevent a party's ability to bring cross-claims or
counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides that
the language of a contract shall be construed against the drafter shall not
be used to construe this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses If You choose to distribute Source Code Form that is
Incompatible With Secondary Licenses under the terms of this version of
the License, the notice described in Exhibit B of this License must be
attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file,
then You may include the notice in a location (such as a LICENSE file in a
relevant directory) where a recipient would be likely to look for such a
notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
This Source Code Form is "Incompatible
With Secondary Licenses", as defined by
the Mozilla Public License, v. 2.0.

vendor/github.com/hashicorp/golang-lru/README.md generated vendored Normal file
View File

@ -0,0 +1,25 @@
golang-lru
==========
This provides the `lru` package which implements a fixed-size
thread-safe LRU cache. It is based on the cache in Groupcache.
Documentation
=============
Full docs are available on [Godoc](http://godoc.org/github.com/hashicorp/golang-lru)
Example
=======
Using the LRU is very simple:
```go
l, _ := New(128)
for i := 0; i < 256; i++ {
l.Add(i, nil)
}
if l.Len() != 128 {
panic(fmt.Sprintf("bad len: %v", l.Len()))
}
```

vendor/github.com/hashicorp/golang-lru/arc.go generated vendored Normal file
View File

@ -0,0 +1,257 @@
package lru
import (
"sync"
"github.com/hashicorp/golang-lru/simplelru"
)
// ARCCache is a thread-safe fixed size Adaptive Replacement Cache (ARC).
// ARC is an enhancement over the standard LRU cache in that it tracks both
// frequency and recency of use. This avoids a burst in access to new
// entries from evicting the frequently used older entries. It adds some
// additional tracking overhead to a standard LRU cache, computationally
// it is roughly 2x the cost, and the extra memory overhead is linear
// with the size of the cache. ARC has been patented by IBM, but is
// similar to the TwoQueueCache (2Q) which requires setting parameters.
type ARCCache struct {
size int // Size is the total capacity of the cache
p int // P is the dynamic preference towards T1 or T2
t1 simplelru.LRUCache // T1 is the LRU for recently accessed items
b1 simplelru.LRUCache // B1 is the LRU for evictions from t1
t2 simplelru.LRUCache // T2 is the LRU for frequently accessed items
b2 simplelru.LRUCache // B2 is the LRU for evictions from t2
lock sync.RWMutex
}
// NewARC creates an ARC of the given size
func NewARC(size int) (*ARCCache, error) {
// Create the sub LRUs
b1, err := simplelru.NewLRU(size, nil)
if err != nil {
return nil, err
}
b2, err := simplelru.NewLRU(size, nil)
if err != nil {
return nil, err
}
t1, err := simplelru.NewLRU(size, nil)
if err != nil {
return nil, err
}
t2, err := simplelru.NewLRU(size, nil)
if err != nil {
return nil, err
}
// Initialize the ARC
c := &ARCCache{
size: size,
p: 0,
t1: t1,
b1: b1,
t2: t2,
b2: b2,
}
return c, nil
}
// Get looks up a key's value from the cache.
func (c *ARCCache) Get(key interface{}) (value interface{}, ok bool) {
c.lock.Lock()
defer c.lock.Unlock()
// If the value is contained in T1 (recent), then
// promote it to T2 (frequent)
if val, ok := c.t1.Peek(key); ok {
c.t1.Remove(key)
c.t2.Add(key, val)
return val, ok
}
// Check if the value is contained in T2 (frequent)
if val, ok := c.t2.Get(key); ok {
return val, ok
}
// No hit
return nil, false
}
// Add adds a value to the cache.
func (c *ARCCache) Add(key, value interface{}) {
c.lock.Lock()
defer c.lock.Unlock()
// Check if the value is contained in T1 (recent), and potentially
// promote it to frequent T2
if c.t1.Contains(key) {
c.t1.Remove(key)
c.t2.Add(key, value)
return
}
// Check if the value is already in T2 (frequent) and update it
if c.t2.Contains(key) {
c.t2.Add(key, value)
return
}
// Check if this value was recently evicted as part of the
// recently used list
if c.b1.Contains(key) {
// T1 set is too small, increase P appropriately
delta := 1
b1Len := c.b1.Len()
b2Len := c.b2.Len()
if b2Len > b1Len {
delta = b2Len / b1Len
}
if c.p+delta >= c.size {
c.p = c.size
} else {
c.p += delta
}
// Potentially need to make room in the cache
if c.t1.Len()+c.t2.Len() >= c.size {
c.replace(false)
}
// Remove from B1
c.b1.Remove(key)
// Add the key to the frequently used list
c.t2.Add(key, value)
return
}
// Check if this value was recently evicted as part of the
// frequently used list
if c.b2.Contains(key) {
// T2 set is too small, decrease P appropriately
delta := 1
b1Len := c.b1.Len()
b2Len := c.b2.Len()
if b1Len > b2Len {
delta = b1Len / b2Len
}
if delta >= c.p {
c.p = 0
} else {
c.p -= delta
}
// Potentially need to make room in the cache
if c.t1.Len()+c.t2.Len() >= c.size {
c.replace(true)
}
// Remove from B2
c.b2.Remove(key)
// Add the key to the frequently used list
c.t2.Add(key, value)
return
}
// Potentially need to make room in the cache
if c.t1.Len()+c.t2.Len() >= c.size {
c.replace(false)
}
// Keep the size of the ghost buffers trim
if c.b1.Len() > c.size-c.p {
c.b1.RemoveOldest()
}
if c.b2.Len() > c.p {
c.b2.RemoveOldest()
}
// Add to the recently seen list
c.t1.Add(key, value)
return
}
// replace is used to adaptively evict from either T1 or T2
// based on the current learned value of P
func (c *ARCCache) replace(b2ContainsKey bool) {
t1Len := c.t1.Len()
if t1Len > 0 && (t1Len > c.p || (t1Len == c.p && b2ContainsKey)) {
k, _, ok := c.t1.RemoveOldest()
if ok {
c.b1.Add(k, nil)
}
} else {
k, _, ok := c.t2.RemoveOldest()
if ok {
c.b2.Add(k, nil)
}
}
}
// Len returns the number of cached entries
func (c *ARCCache) Len() int {
c.lock.RLock()
defer c.lock.RUnlock()
return c.t1.Len() + c.t2.Len()
}
// Keys returns all the cached keys
func (c *ARCCache) Keys() []interface{} {
c.lock.RLock()
defer c.lock.RUnlock()
k1 := c.t1.Keys()
k2 := c.t2.Keys()
return append(k1, k2...)
}
// Remove is used to purge a key from the cache
func (c *ARCCache) Remove(key interface{}) {
c.lock.Lock()
defer c.lock.Unlock()
if c.t1.Remove(key) {
return
}
if c.t2.Remove(key) {
return
}
if c.b1.Remove(key) {
return
}
if c.b2.Remove(key) {
return
}
}
// Purge is used to clear the cache
func (c *ARCCache) Purge() {
c.lock.Lock()
defer c.lock.Unlock()
c.t1.Purge()
c.t2.Purge()
c.b1.Purge()
c.b2.Purge()
}
// Contains is used to check if the cache contains a key
// without updating recency or frequency.
func (c *ARCCache) Contains(key interface{}) bool {
c.lock.RLock()
defer c.lock.RUnlock()
return c.t1.Contains(key) || c.t2.Contains(key)
}
// Peek is used to inspect the cache value of a key
// without updating recency or frequency.
func (c *ARCCache) Peek(key interface{}) (value interface{}, ok bool) {
c.lock.RLock()
defer c.lock.RUnlock()
if val, ok := c.t1.Peek(key); ok {
return val, ok
}
return c.t2.Peek(key)
}
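Usage mirrors TwoQueueCache, except that NewARC takes only a total size and adapts the recent/frequent split on its own. A short sketch:

```go
package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	cache, err := lru.NewARC(128) // only the total size is configured
	if err != nil {
		panic(err)
	}

	cache.Add(42, "answer")

	// Contains and Peek inspect the cache without touching the ARC bookkeeping.
	fmt.Println(cache.Contains(42)) // true
	if v, ok := cache.Peek(42); ok {
		fmt.Println(v) // answer
	}
}
```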

vendor/github.com/hashicorp/golang-lru/doc.go generated vendored Normal file
View File

@ -0,0 +1,21 @@
// Package lru provides three different LRU caches of varying sophistication.
//
// Cache is a simple LRU cache. It is based on the
// LRU implementation in groupcache:
// https://github.com/golang/groupcache/tree/master/lru
//
// TwoQueueCache tracks frequently used and recently used entries separately.
// This avoids a burst of accesses from taking out frequently used entries,
// at the cost of about 2x computational overhead and some extra bookkeeping.
//
// ARCCache is an adaptive replacement cache. It tracks recent evictions as
// well as recent usage in both the frequent and recent caches. Its
// computational overhead is comparable to TwoQueueCache, but the memory
// overhead is linear with the size of the cache.
//
// ARC has been patented by IBM, so do not use it if that is problematic for
// your program.
//
// All caches in this package take locks while operating, and are therefore
// thread-safe for consumers.
package lru

vendor/github.com/hashicorp/golang-lru/go.mod generated vendored Normal file
View File

@ -0,0 +1 @@
module github.com/hashicorp/golang-lru

vendor/github.com/hashicorp/golang-lru/lru.go generated vendored Normal file
View File

@ -0,0 +1,110 @@
package lru
import (
"sync"
"github.com/hashicorp/golang-lru/simplelru"
)
// Cache is a thread-safe fixed size LRU cache.
type Cache struct {
lru simplelru.LRUCache
lock sync.RWMutex
}
// New creates an LRU of the given size.
func New(size int) (*Cache, error) {
return NewWithEvict(size, nil)
}
// NewWithEvict constructs a fixed size cache with the given eviction
// callback.
func NewWithEvict(size int, onEvicted func(key interface{}, value interface{})) (*Cache, error) {
lru, err := simplelru.NewLRU(size, simplelru.EvictCallback(onEvicted))
if err != nil {
return nil, err
}
c := &Cache{
lru: lru,
}
return c, nil
}
// Purge is used to completely clear the cache.
func (c *Cache) Purge() {
c.lock.Lock()
c.lru.Purge()
c.lock.Unlock()
}
// Add adds a value to the cache. Returns true if an eviction occurred.
func (c *Cache) Add(key, value interface{}) (evicted bool) {
c.lock.Lock()
defer c.lock.Unlock()
return c.lru.Add(key, value)
}
// Get looks up a key's value from the cache.
func (c *Cache) Get(key interface{}) (value interface{}, ok bool) {
c.lock.Lock()
defer c.lock.Unlock()
return c.lru.Get(key)
}
// Contains checks if a key is in the cache, without updating the
// recent-ness or deleting it for being stale.
func (c *Cache) Contains(key interface{}) bool {
c.lock.RLock()
defer c.lock.RUnlock()
return c.lru.Contains(key)
}
// Peek returns the key value (or undefined if not found) without updating
// the "recently used"-ness of the key.
func (c *Cache) Peek(key interface{}) (value interface{}, ok bool) {
c.lock.RLock()
defer c.lock.RUnlock()
return c.lru.Peek(key)
}
// ContainsOrAdd checks if a key is in the cache without updating the
// recent-ness or deleting it for being stale, and if not, adds the value.
// Returns whether found and whether an eviction occurred.
func (c *Cache) ContainsOrAdd(key, value interface{}) (ok, evicted bool) {
c.lock.Lock()
defer c.lock.Unlock()
if c.lru.Contains(key) {
return true, false
}
evicted = c.lru.Add(key, value)
return false, evicted
}
// Remove removes the provided key from the cache.
func (c *Cache) Remove(key interface{}) {
c.lock.Lock()
c.lru.Remove(key)
c.lock.Unlock()
}
// RemoveOldest removes the oldest item from the cache.
func (c *Cache) RemoveOldest() {
c.lock.Lock()
c.lru.RemoveOldest()
c.lock.Unlock()
}
// Keys returns a slice of the keys in the cache, from oldest to newest.
func (c *Cache) Keys() []interface{} {
c.lock.RLock()
defer c.lock.RUnlock()
return c.lru.Keys()
}
// Len returns the number of items in the cache.
func (c *Cache) Len() int {
c.lock.RLock()
defer c.lock.RUnlock()
return c.lru.Len()
}
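A short sketch of the eviction callback wired up through NewWithEvict; the tiny size is chosen so the eviction is easy to observe:

```go
package main

import (
	"fmt"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	// A tiny cache so evictions are easy to observe.
	cache, err := lru.NewWithEvict(2, func(key interface{}, value interface{}) {
		fmt.Printf("evicted %v=%v\n", key, value)
	})
	if err != nil {
		panic(err)
	}

	cache.Add("a", 1)
	cache.Add("b", 2)
	cache.Add("c", 3) // evicts "a", the least recently used entry

	fmt.Println(cache.Keys()) // [b c], oldest to newest
}
```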

vendor/github.com/hashicorp/golang-lru/simplelru/lru.go generated vendored Normal file
View File

@ -0,0 +1,161 @@
package simplelru
import (
"container/list"
"errors"
)
// EvictCallback is used to get a callback when a cache entry is evicted
type EvictCallback func(key interface{}, value interface{})
// LRU implements a non-thread safe fixed size LRU cache
type LRU struct {
size int
evictList *list.List
items map[interface{}]*list.Element
onEvict EvictCallback
}
// entry is used to hold a value in the evictList
type entry struct {
key interface{}
value interface{}
}
// NewLRU constructs an LRU of the given size
func NewLRU(size int, onEvict EvictCallback) (*LRU, error) {
if size <= 0 {
return nil, errors.New("Must provide a positive size")
}
c := &LRU{
size: size,
evictList: list.New(),
items: make(map[interface{}]*list.Element),
onEvict: onEvict,
}
return c, nil
}
// Purge is used to completely clear the cache.
func (c *LRU) Purge() {
for k, v := range c.items {
if c.onEvict != nil {
c.onEvict(k, v.Value.(*entry).value)
}
delete(c.items, k)
}
c.evictList.Init()
}
// Add adds a value to the cache. Returns true if an eviction occurred.
func (c *LRU) Add(key, value interface{}) (evicted bool) {
// Check for existing item
if ent, ok := c.items[key]; ok {
c.evictList.MoveToFront(ent)
ent.Value.(*entry).value = value
return false
}
// Add new item
ent := &entry{key, value}
entry := c.evictList.PushFront(ent)
c.items[key] = entry
evict := c.evictList.Len() > c.size
// Verify size not exceeded
if evict {
c.removeOldest()
}
return evict
}
// Get looks up a key's value from the cache.
func (c *LRU) Get(key interface{}) (value interface{}, ok bool) {
if ent, ok := c.items[key]; ok {
c.evictList.MoveToFront(ent)
return ent.Value.(*entry).value, true
}
return
}
// Contains checks if a key is in the cache, without updating the recent-ness
// or deleting it for being stale.
func (c *LRU) Contains(key interface{}) (ok bool) {
_, ok = c.items[key]
return ok
}
// Peek returns the key value (or undefined if not found) without updating
// the "recently used"-ness of the key.
func (c *LRU) Peek(key interface{}) (value interface{}, ok bool) {
var ent *list.Element
if ent, ok = c.items[key]; ok {
return ent.Value.(*entry).value, true
}
return nil, ok
}
// Remove removes the provided key from the cache, returning if the
// key was contained.
func (c *LRU) Remove(key interface{}) (present bool) {
if ent, ok := c.items[key]; ok {
c.removeElement(ent)
return true
}
return false
}
// RemoveOldest removes the oldest item from the cache.
func (c *LRU) RemoveOldest() (key interface{}, value interface{}, ok bool) {
ent := c.evictList.Back()
if ent != nil {
c.removeElement(ent)
kv := ent.Value.(*entry)
return kv.key, kv.value, true
}
return nil, nil, false
}
// GetOldest returns the oldest entry
func (c *LRU) GetOldest() (key interface{}, value interface{}, ok bool) {
ent := c.evictList.Back()
if ent != nil {
kv := ent.Value.(*entry)
return kv.key, kv.value, true
}
return nil, nil, false
}
// Keys returns a slice of the keys in the cache, from oldest to newest.
func (c *LRU) Keys() []interface{} {
keys := make([]interface{}, len(c.items))
i := 0
for ent := c.evictList.Back(); ent != nil; ent = ent.Prev() {
keys[i] = ent.Value.(*entry).key
i++
}
return keys
}
// Len returns the number of items in the cache.
func (c *LRU) Len() int {
return c.evictList.Len()
}
// removeOldest removes the oldest item from the cache.
func (c *LRU) removeOldest() {
ent := c.evictList.Back()
if ent != nil {
c.removeElement(ent)
}
}
// removeElement is used to remove a given list element from the cache
func (c *LRU) removeElement(e *list.Element) {
c.evictList.Remove(e)
kv := e.Value.(*entry)
delete(c.items, kv.key)
if c.onEvict != nil {
c.onEvict(kv.key, kv.value)
}
}

View File

@ -0,0 +1,36 @@
package simplelru
// LRUCache is the interface for simple LRU cache.
type LRUCache interface {
// Adds a value to the cache, returns true if an eviction occurred and
// updates the "recently used"-ness of the key.
Add(key, value interface{}) bool
// Returns key's value from the cache and
// updates the "recently used"-ness of the key. #value, isFound
Get(key interface{}) (value interface{}, ok bool)
// Checks if a key exists in the cache without updating the recent-ness.
Contains(key interface{}) (ok bool)
// Returns key's value without updating the "recently used"-ness of the key.
Peek(key interface{}) (value interface{}, ok bool)
// Removes a key from the cache.
Remove(key interface{}) bool
// Removes the oldest entry from cache.
RemoveOldest() (interface{}, interface{}, bool)
// Returns the oldest entry from the cache. #key, value, isFound
GetOldest() (interface{}, interface{}, bool)
// Returns a slice of the keys in the cache, from oldest to newest.
Keys() []interface{}
// Returns the number of items in the cache.
Len() int
// Clear all cache entries
Purge()
}
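The package's own LRU type satisfies this interface, which is what allows the thread-safe wrappers above to hold any conforming implementation. A compile-time assertion, shown here as a sketch:

```go
package simplelru

// Compile-time check that the concrete LRU type implements the LRUCache
// interface consumed by the wrappers in the parent lru package.
var _ LRUCache = (*LRU)(nil)
```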

View File

@ -1,165 +0,0 @@
package codegen
import (
"fmt"
"go/build"
"go/types"
"os"
"github.com/pkg/errors"
"golang.org/x/tools/go/loader"
)
type Build struct {
PackageName string
Objects Objects
Inputs Objects
Interfaces []*Interface
Imports []*Import
QueryRoot *Object
MutationRoot *Object
SubscriptionRoot *Object
SchemaRaw string
}
type ModelBuild struct {
PackageName string
Imports []*Import
Models []Model
Enums []Enum
}
// Create a list of models that need to be generated
func (cfg *Config) models() (*ModelBuild, error) {
namedTypes := cfg.buildNamedTypes()
prog, err := cfg.loadProgram(namedTypes, true)
if err != nil {
return nil, errors.Wrap(err, "loading failed")
}
imports := buildImports(namedTypes, cfg.Model.Dir())
cfg.bindTypes(imports, namedTypes, cfg.Model.Dir(), prog)
models, err := cfg.buildModels(namedTypes, prog)
if err != nil {
return nil, err
}
return &ModelBuild{
PackageName: cfg.Model.Package,
Models: models,
Enums: cfg.buildEnums(namedTypes),
Imports: imports.finalize(),
}, nil
}
// bind a schema together with some code to generate a Build
func (cfg *Config) bind() (*Build, error) {
namedTypes := cfg.buildNamedTypes()
prog, err := cfg.loadProgram(namedTypes, true)
if err != nil {
return nil, errors.Wrap(err, "loading failed")
}
imports := buildImports(namedTypes, cfg.Exec.Dir())
cfg.bindTypes(imports, namedTypes, cfg.Exec.Dir(), prog)
objects, err := cfg.buildObjects(namedTypes, prog, imports)
if err != nil {
return nil, err
}
inputs, err := cfg.buildInputs(namedTypes, prog, imports)
if err != nil {
return nil, err
}
b := &Build{
PackageName: cfg.Exec.Package,
Objects: objects,
Interfaces: cfg.buildInterfaces(namedTypes, prog),
Inputs: inputs,
Imports: imports.finalize(),
SchemaRaw: cfg.SchemaStr,
}
if qr, ok := cfg.schema.EntryPoints["query"]; ok {
b.QueryRoot = b.Objects.ByName(qr.TypeName())
}
if mr, ok := cfg.schema.EntryPoints["mutation"]; ok {
b.MutationRoot = b.Objects.ByName(mr.TypeName())
}
if sr, ok := cfg.schema.EntryPoints["subscription"]; ok {
b.SubscriptionRoot = b.Objects.ByName(sr.TypeName())
}
if b.QueryRoot == nil {
return b, fmt.Errorf("query entry point missing")
}
// Poke a few magic methods into query
q := b.Objects.ByName(b.QueryRoot.GQLType)
q.Fields = append(q.Fields, Field{
Type: &Type{namedTypes["__Schema"], []string{modPtr}, nil},
GQLName: "__schema",
NoErr: true,
GoMethodName: "ec.introspectSchema",
Object: q,
})
q.Fields = append(q.Fields, Field{
Type: &Type{namedTypes["__Type"], []string{modPtr}, nil},
GQLName: "__type",
NoErr: true,
GoMethodName: "ec.introspectType",
Args: []FieldArgument{
{GQLName: "name", Type: &Type{namedTypes["String"], []string{}, nil}, Object: &Object{}},
},
Object: q,
})
return b, nil
}
func (cfg *Config) validate() error {
namedTypes := cfg.buildNamedTypes()
_, err := cfg.loadProgram(namedTypes, false)
return err
}
func (cfg *Config) loadProgram(namedTypes NamedTypes, allowErrors bool) (*loader.Program, error) {
conf := loader.Config{}
if allowErrors {
conf = loader.Config{
AllowErrors: true,
TypeChecker: types.Config{
Error: func(e error) {},
},
}
}
for _, imp := range ambientImports {
conf.Import(imp)
}
for _, imp := range namedTypes {
if imp.Package != "" {
conf.Import(imp.Package)
}
}
return conf.Load()
}
func resolvePkg(pkgName string) (string, error) {
cwd, _ := os.Getwd()
pkg, err := build.Default.Import(pkgName, cwd, build.FindOnly)
if err != nil {
return "", err
}
return pkg.ImportPath, nil
}

View File

@ -1,153 +0,0 @@
package codegen
import (
"bytes"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"syscall"
"github.com/pkg/errors"
"github.com/vektah/gqlgen/codegen/templates"
"github.com/vektah/gqlgen/neelance/schema"
"golang.org/x/tools/imports"
)
func Generate(cfg Config) error {
if err := cfg.normalize(); err != nil {
return err
}
_ = syscall.Unlink(cfg.Exec.Filename)
_ = syscall.Unlink(cfg.Model.Filename)
modelsBuild, err := cfg.models()
if err != nil {
return errors.Wrap(err, "model plan failed")
}
if len(modelsBuild.Models) > 0 || len(modelsBuild.Enums) > 0 {
var buf *bytes.Buffer
buf, err = templates.Run("models.gotpl", modelsBuild)
if err != nil {
return errors.Wrap(err, "model generation failed")
}
if err = write(cfg.Model.Filename, buf.Bytes()); err != nil {
return err
}
for _, model := range modelsBuild.Models {
modelCfg := cfg.Models[model.GQLType]
modelCfg.Model = cfg.Model.ImportPath() + "." + model.GoType
cfg.Models[model.GQLType] = modelCfg
}
for _, enum := range modelsBuild.Enums {
modelCfg := cfg.Models[enum.GQLType]
modelCfg.Model = cfg.Model.ImportPath() + "." + enum.GoType
cfg.Models[enum.GQLType] = modelCfg
}
}
build, err := cfg.bind()
if err != nil {
return errors.Wrap(err, "exec plan failed")
}
var buf *bytes.Buffer
buf, err = templates.Run("generated.gotpl", build)
if err != nil {
return errors.Wrap(err, "exec codegen failed")
}
if err = write(cfg.Exec.Filename, buf.Bytes()); err != nil {
return err
}
if err = cfg.validate(); err != nil {
return errors.Wrap(err, "validation failed")
}
return nil
}
func (cfg *Config) normalize() error {
if err := cfg.Model.normalize(); err != nil {
return errors.Wrap(err, "model")
}
if err := cfg.Exec.normalize(); err != nil {
return errors.Wrap(err, "exec")
}
builtins := TypeMap{
"__Directive": {Model: "github.com/vektah/gqlgen/neelance/introspection.Directive"},
"__Type": {Model: "github.com/vektah/gqlgen/neelance/introspection.Type"},
"__Field": {Model: "github.com/vektah/gqlgen/neelance/introspection.Field"},
"__EnumValue": {Model: "github.com/vektah/gqlgen/neelance/introspection.EnumValue"},
"__InputValue": {Model: "github.com/vektah/gqlgen/neelance/introspection.InputValue"},
"__Schema": {Model: "github.com/vektah/gqlgen/neelance/introspection.Schema"},
"Int": {Model: "github.com/vektah/gqlgen/graphql.Int"},
"Float": {Model: "github.com/vektah/gqlgen/graphql.Float"},
"String": {Model: "github.com/vektah/gqlgen/graphql.String"},
"Boolean": {Model: "github.com/vektah/gqlgen/graphql.Boolean"},
"ID": {Model: "github.com/vektah/gqlgen/graphql.ID"},
"Time": {Model: "github.com/vektah/gqlgen/graphql.Time"},
"Map": {Model: "github.com/vektah/gqlgen/graphql.Map"},
}
if cfg.Models == nil {
cfg.Models = TypeMap{}
}
for typeName, entry := range builtins {
if !cfg.Models.Exists(typeName) {
cfg.Models[typeName] = entry
}
}
cfg.schema = schema.New()
return cfg.schema.Parse(cfg.SchemaStr)
}
var invalidPackageNameChar = regexp.MustCompile(`[^\w]`)
func sanitizePackageName(pkg string) string {
return invalidPackageNameChar.ReplaceAllLiteralString(filepath.Base(pkg), "_")
}
func abs(path string) string {
absPath, err := filepath.Abs(path)
if err != nil {
panic(err)
}
return filepath.ToSlash(absPath)
}
func gofmt(filename string, b []byte) ([]byte, error) {
out, err := imports.Process(filename, b, nil)
if err != nil {
return b, errors.Wrap(err, "unable to gofmt")
}
return out, nil
}
func write(filename string, b []byte) error {
err := os.MkdirAll(filepath.Dir(filename), 0755)
if err != nil {
return errors.Wrap(err, "failed to create directory")
}
formatted, err := gofmt(filename, b)
if err != nil {
fmt.Fprintf(os.Stderr, "gofmt failed: %s\n", err.Error())
formatted = b
}
err = ioutil.WriteFile(filename, formatted, 0644)
if err != nil {
return errors.Wrapf(err, "failed to write %s", filename)
}
return nil
}
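
Generate above is the single entry point of this removed 0.3.0 codegen package. A rough driver sketch, using only Config fields that appear in this file (SchemaStr, Exec.Filename/Package, Model.Filename/Package); the paths and the inline schema are invented for illustration:

package main

import (
	"log"

	"github.com/vektah/gqlgen/codegen"
)

func main() {
	// Normally the schema would be read from a .graphql file; inlined here for brevity.
	schemaSrc := "schema { query: Query } type Query { ping: String! }"

	cfg := codegen.Config{SchemaStr: schemaSrc}
	cfg.Exec.Filename = "graph/generated.go"
	cfg.Exec.Package = "graph"
	cfg.Model.Filename = "graph/models_gen.go"
	cfg.Model.Package = "graph"

	// Generate unlinks any previous output, renders models.gotpl and
	// generated.gotpl, then type-checks the result via cfg.validate().
	if err := codegen.Generate(cfg); err != nil {
		log.Fatal(err)
	}
}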

View File

@ -1,39 +0,0 @@
package codegen
import (
"sort"
"strings"
"github.com/vektah/gqlgen/codegen/templates"
"github.com/vektah/gqlgen/neelance/schema"
)
func (cfg *Config) buildEnums(types NamedTypes) []Enum {
var enums []Enum
for _, typ := range cfg.schema.Types {
namedType := types[typ.TypeName()]
e, isEnum := typ.(*schema.Enum)
if !isEnum || strings.HasPrefix(typ.TypeName(), "__") || namedType.IsUserDefined {
continue
}
var values []EnumValue
for _, v := range e.Values {
values = append(values, EnumValue{v.Name, v.Desc})
}
enum := Enum{
NamedType: namedType,
Values: values,
}
enum.GoType = templates.ToCamel(enum.GQLType)
enums = append(enums, enum)
}
sort.Slice(enums, func(i, j int) bool {
return strings.Compare(enums[i].GQLType, enums[j].GQLType) == -1
})
return enums
}
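
The enums collected here are rendered by the models.gotpl template embedded further down in this diff; for a hypothetical `enum Status { OPEN CLOSED }` the generated Go ends up roughly like this (identifier casing actually comes from templates.ToCamel, so treat the names as illustrative; UnmarshalGQL/MarshalGQL are omitted for brevity):

package example

// Illustrative output only: what models.gotpl produces for a hypothetical
// `enum Status { OPEN CLOSED }`; constant names depend on templates.ToCamel.
type Status string

const (
	StatusOpen   Status = "OPEN"
	StatusClosed Status = "CLOSED"
)

func (e Status) IsValid() bool {
	switch e {
	case StatusOpen, StatusClosed:
		return true
	}
	return false
}

func (e Status) String() string {
	return string(e)
}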

View File

@ -1,15 +0,0 @@
package codegen
type Model struct {
*NamedType
Fields []ModelField
}
type ModelField struct {
*Type
GQLName string
GoVarName string
GoFKName string
GoFKType string
}

View File

@ -1,206 +0,0 @@
package codegen
import (
"bytes"
"fmt"
"strconv"
"strings"
"text/template"
"unicode"
)
type Object struct {
*NamedType
Fields []Field
Satisfies []string
Root bool
DisableConcurrency bool
Stream bool
}
type Field struct {
*Type
GQLName string // The name of the field in graphql
GoMethodName string // The name of the method in go, if any
GoVarName string // The name of the var in go, if any
Args []FieldArgument // A list of arguments to be passed to this field
ForceResolver bool // Should we emit a Resolver method for this field
NoErr bool // If this is bound to a go method, true when that method does not return an error as its second result
Object *Object // A link back to the parent object
Default interface{} // The default value
}
type FieldArgument struct {
*Type
GQLName string // The name of the argument in graphql
GoVarName string // The name of the var in go
Object *Object // A link back to the parent object
Default interface{} // The default value
}
type Objects []*Object
func (o *Object) Implementors() string {
satisfiedBy := strconv.Quote(o.GQLType)
for _, s := range o.Satisfies {
satisfiedBy += ", " + strconv.Quote(s)
}
return "[]string{" + satisfiedBy + "}"
}
func (o *Object) HasResolvers() bool {
for _, f := range o.Fields {
if f.IsResolver() {
return true
}
}
return false
}
func (f *Field) IsResolver() bool {
return f.ForceResolver || f.GoMethodName == "" && f.GoVarName == ""
}
func (f *Field) IsConcurrent() bool {
return f.IsResolver() && !f.Object.DisableConcurrency
}
func (f *Field) ShortInvocation() string {
if !f.IsResolver() {
return ""
}
shortName := strings.ToUpper(f.GQLName[:1]) + f.GQLName[1:]
res := fmt.Sprintf("%s().%s(ctx", f.Object.GQLType, shortName)
if !f.Object.Root {
res += fmt.Sprintf(", obj")
}
for _, arg := range f.Args {
res += fmt.Sprintf(", %s", arg.GoVarName)
}
res += ")"
return res
}
func (f *Field) ShortResolverDeclaration() string {
if !f.IsResolver() {
return ""
}
decl := strings.TrimPrefix(f.ResolverDeclaration(), f.Object.GQLType+"_")
return strings.ToUpper(decl[:1]) + decl[1:]
}
func (f *Field) ResolverDeclaration() string {
if !f.IsResolver() {
return ""
}
res := fmt.Sprintf("%s_%s(ctx context.Context", f.Object.GQLType, f.GQLName)
if !f.Object.Root {
res += fmt.Sprintf(", obj *%s", f.Object.FullName())
}
for _, arg := range f.Args {
res += fmt.Sprintf(", %s %s", arg.GoVarName, arg.Signature())
}
result := f.Signature()
if f.Object.Stream {
result = "<-chan " + result
}
res += fmt.Sprintf(") (%s, error)", result)
return res
}
func (f *Field) CallArgs() string {
var args []string
if f.GoMethodName == "" {
args = append(args, "ctx")
if !f.Object.Root {
args = append(args, "obj")
}
}
for _, arg := range f.Args {
args = append(args, "args["+strconv.Quote(arg.GQLName)+"].("+arg.Signature()+")")
}
return strings.Join(args, ", ")
}
// should be in the template, but it's recursive and has a bunch of args
func (f *Field) WriteJson() string {
return f.doWriteJson("res", f.Type.Modifiers, false, 1)
}
func (f *Field) doWriteJson(val string, remainingMods []string, isPtr bool, depth int) string {
switch {
case len(remainingMods) > 0 && remainingMods[0] == modPtr:
return fmt.Sprintf("if %s == nil { return graphql.Null }\n%s", val, f.doWriteJson(val, remainingMods[1:], true, depth+1))
case len(remainingMods) > 0 && remainingMods[0] == modList:
if isPtr {
val = "*" + val
}
var arr = "arr" + strconv.Itoa(depth)
var index = "idx" + strconv.Itoa(depth)
return tpl(`{{.arr}} := graphql.Array{}
for {{.index}} := range {{.val}} {
{{.arr}} = append({{.arr}}, func() graphql.Marshaler {
rctx := graphql.GetResolverContext(ctx)
rctx.PushIndex({{.index}})
defer rctx.Pop()
{{ .next }}
}())
}
return {{.arr}}`, map[string]interface{}{
"val": val,
"arr": arr,
"index": index,
"next": f.doWriteJson(val+"["+index+"]", remainingMods[1:], false, depth+1),
})
case f.IsScalar:
if isPtr {
val = "*" + val
}
return f.Marshal(val)
default:
if !isPtr {
val = "&" + val
}
return fmt.Sprintf("return ec._%s(ctx, field.Selections, %s)", f.GQLType, val)
}
}
func (os Objects) ByName(name string) *Object {
for i, o := range os {
if strings.EqualFold(o.GQLType, name) {
return os[i]
}
}
return nil
}
func tpl(tpl string, vars map[string]interface{}) string {
b := &bytes.Buffer{}
err := template.Must(template.New("inline").Parse(tpl)).Execute(b, vars)
if err != nil {
panic(err)
}
return b.String()
}
func ucFirst(s string) string {
if s == "" {
return ""
}
r := []rune(s)
r[0] = unicode.ToUpper(r[0])
return string(r)
}
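
To make the declaration helpers above concrete: for a hypothetical `todos(limit: Int)` field on the Query root, ResolverDeclaration and ShortResolverDeclaration would produce method signatures along these lines (Todo and the pointer type for the nullable argument are assumptions for illustration):

package example

import "context"

// Todo is an invented model type used only for this sketch.
type Todo struct{ Text string }

// Long form, as used in the top-level Resolvers interface (ResolverDeclaration).
type Resolvers interface {
	Query_todos(ctx context.Context, limit *int) ([]Todo, error)
}

// Short form, as used in the per-object resolver interface (ShortResolverDeclaration);
// ShortInvocation would bridge the two as: s.r.Query().Todos(ctx, limit)
type QueryResolver interface {
	Todos(ctx context.Context, limit *int) ([]Todo, error)
}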

View File

@ -1,144 +0,0 @@
package codegen
import (
"log"
"sort"
"strings"
"github.com/pkg/errors"
"github.com/vektah/gqlgen/neelance/schema"
"golang.org/x/tools/go/loader"
)
func (cfg *Config) buildObjects(types NamedTypes, prog *loader.Program, imports *Imports) (Objects, error) {
var objects Objects
for _, typ := range cfg.schema.Types {
switch typ := typ.(type) {
case *schema.Object:
obj, err := cfg.buildObject(types, typ)
if err != nil {
return nil, err
}
def, err := findGoType(prog, obj.Package, obj.GoType)
if err != nil {
return nil, err
}
if def != nil {
for _, bindErr := range bindObject(def.Type(), obj, imports) {
log.Println(bindErr.Error())
log.Println(" Adding resolver method")
}
}
objects = append(objects, obj)
}
}
sort.Slice(objects, func(i, j int) bool {
return strings.Compare(objects[i].GQLType, objects[j].GQLType) == -1
})
return objects, nil
}
var keywords = []string{
"break",
"default",
"func",
"interface",
"select",
"case",
"defer",
"go",
"map",
"struct",
"chan",
"else",
"goto",
"package",
"switch",
"const",
"fallthrough",
"if",
"range",
"type",
"continue",
"for",
"import",
"return",
"var",
}
func sanitizeGoName(name string) string {
for _, k := range keywords {
if name == k {
return name + "_"
}
}
return name
}
func (cfg *Config) buildObject(types NamedTypes, typ *schema.Object) (*Object, error) {
obj := &Object{NamedType: types[typ.TypeName()]}
typeEntry, entryExists := cfg.Models[typ.TypeName()]
for _, i := range typ.Interfaces {
obj.Satisfies = append(obj.Satisfies, i.Name)
}
for _, field := range typ.Fields {
var forceResolver bool
if entryExists {
if typeField, ok := typeEntry.Fields[field.Name]; ok {
forceResolver = typeField.Resolver
}
}
var args []FieldArgument
for _, arg := range field.Args {
newArg := FieldArgument{
GQLName: arg.Name.Name,
Type: types.getType(arg.Type),
Object: obj,
GoVarName: sanitizeGoName(arg.Name.Name),
}
if !newArg.Type.IsInput && !newArg.Type.IsScalar {
return nil, errors.Errorf("%s cannot be used as argument of %s.%s. only input and scalar types are allowed", arg.Type, obj.GQLType, field.Name)
}
if arg.Default != nil {
newArg.Default = arg.Default.Value(nil)
newArg.StripPtr()
}
args = append(args, newArg)
}
obj.Fields = append(obj.Fields, Field{
GQLName: field.Name,
Type: types.getType(field.Type),
Args: args,
Object: obj,
ForceResolver: forceResolver,
})
}
for name, typ := range cfg.schema.EntryPoints {
schemaObj := typ.(*schema.Object)
if schemaObj.TypeName() != obj.GQLType {
continue
}
obj.Root = true
if name == "mutation" {
obj.DisableConcurrency = true
}
if name == "subscription" {
obj.Stream = true
}
}
return obj, nil
}

View File

@ -1,30 +0,0 @@
{{- if . }}args := map[string]interface{}{} {{end}}
{{- range $i, $arg := . }}
var arg{{$i}} {{$arg.Signature }}
if tmp, ok := field.Args[{{$arg.GQLName|quote}}]; ok {
var err error
{{$arg.Unmarshal (print "arg" $i) "tmp" }}
if err != nil {
ec.Error(ctx, err)
{{- if $arg.Object.Stream }}
return nil
{{- else }}
return graphql.Null
{{- end }}
}
} {{ if $arg.Default }} else {
var tmp interface{} = {{ $arg.Default | dump }}
var err error
{{$arg.Unmarshal (print "arg" $i) "tmp" }}
if err != nil {
ec.Error(ctx, err)
{{- if $arg.Object.Stream }}
return nil
{{- else }}
return graphql.Null
{{- end }}
}
}
{{end }}
args[{{$arg.GQLName|quote}}] = arg{{$i}}
{{- end -}}

View File

@ -1,11 +0,0 @@
package templates
var data = map[string]string{
"args.gotpl": "\t{{- if . }}args := map[string]interface{}{} {{end}}\n\t{{- range $i, $arg := . }}\n\t\tvar arg{{$i}} {{$arg.Signature }}\n\t\tif tmp, ok := field.Args[{{$arg.GQLName|quote}}]; ok {\n\t\t\tvar err error\n\t\t\t{{$arg.Unmarshal (print \"arg\" $i) \"tmp\" }}\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\t{{- if $arg.Object.Stream }}\n\t\t\t\t\treturn nil\n\t\t\t\t{{- else }}\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t{{- end }}\n\t\t\t}\n\t\t} {{ if $arg.Default }} else {\n\t\t\tvar tmp interface{} = {{ $arg.Default | dump }}\n\t\t\tvar err error\n\t\t\t{{$arg.Unmarshal (print \"arg\" $i) \"tmp\" }}\n\t\t\tif err != nil {\n\t\t\t\tec.Error(ctx, err)\n\t\t\t\t{{- if $arg.Object.Stream }}\n\t\t\t\t\treturn nil\n\t\t\t\t{{- else }}\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t{{- end }}\n\t\t\t}\n\t\t}\n\t\t{{end }}\n\t\targs[{{$arg.GQLName|quote}}] = arg{{$i}}\n\t{{- end -}}\n",
"field.gotpl": "{{ $field := . }}\n{{ $object := $field.Object }}\n\n{{- if $object.Stream }}\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {\n\t\t{{- template \"args.gotpl\" $field.Args }}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{Field: field})\n\t\tresults, err := ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})\n\t\tif err != nil {\n\t\t\tec.Error(ctx, err)\n\t\t\treturn nil\n\t\t}\n\t\treturn func() graphql.Marshaler {\n\t\t\tres, ok := <-results\n\t\t\tif !ok {\n\t\t\t\treturn nil\n\t\t\t}\n\t\t\tvar out graphql.OrderedMap\n\t\t\tout.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())\n\t\t\treturn &out\n\t\t}\n\t}\n{{ else }}\n\tfunc (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {\n\t\t{{- template \"args.gotpl\" $field.Args }}\n\n\t\t{{- if $field.IsConcurrent }}\n\t\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\t\tObject: {{$object.GQLType|quote}},\n\t\t\t\tArgs: {{if $field.Args }}args{{else}}nil{{end}},\n\t\t\t\tField: field,\n\t\t\t})\n\t\t\treturn graphql.Defer(func() (ret graphql.Marshaler) {\n\t\t\t\tdefer func() {\n\t\t\t\t\tif r := recover(); r != nil {\n\t\t\t\t\t\tuserErr := ec.Recover(ctx, r)\n\t\t\t\t\t\tec.Error(ctx, userErr)\n\t\t\t\t\t\tret = graphql.Null\n\t\t\t\t\t}\n\t\t\t\t}()\n\t\t{{ else }}\n\t\t\trctx := graphql.GetResolverContext(ctx)\n\t\t\trctx.Object = {{$object.GQLType|quote}}\n\t\t\trctx.Args = {{if $field.Args }}args{{else}}nil{{end}}\n\t\t\trctx.Field = field\n\t\t\trctx.PushField(field.Alias)\n\t\t\tdefer rctx.Pop()\n\t\t{{- end }}\n\n\t\t\t{{- if $field.IsResolver }}\n\t\t\t\tresTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {\n\t\t\t\t\treturn ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})\n\t\t\t\t})\n\t\t\t\tif err != nil {\n\t\t\t\t\tec.Error(ctx, err)\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t}\n\t\t\t\tif resTmp == nil {\n\t\t\t\t\treturn graphql.Null\n\t\t\t\t}\n\t\t\t\tres := resTmp.({{$field.Signature}})\n\t\t\t{{- else if $field.GoVarName }}\n\t\t\t\tres := obj.{{$field.GoVarName}}\n\t\t\t{{- else if $field.GoMethodName }}\n\t\t\t\t{{- if $field.NoErr }}\n\t\t\t\t\tres := {{$field.GoMethodName}}({{ $field.CallArgs }})\n\t\t\t\t{{- else }}\n\t\t\t\t\tres, err := {{$field.GoMethodName}}({{ $field.CallArgs }})\n\t\t\t\t\tif err != nil {\n\t\t\t\t\t\tec.Error(ctx, err)\n\t\t\t\t\t\treturn graphql.Null\n\t\t\t\t\t}\n\t\t\t\t{{- end }}\n\t\t\t{{- end }}\n\t\t\t{{ $field.WriteJson }}\n\t\t{{- if $field.IsConcurrent }}\n\t\t\t})\n\t\t{{- end }}\n\t}\n{{ end }}\n",
"generated.gotpl": "// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\n// MakeExecutableSchema creates an ExecutableSchema from the Resolvers interface.\nfunc MakeExecutableSchema(resolvers Resolvers) graphql.ExecutableSchema {\n\treturn &executableSchema{resolvers: resolvers}\n}\n\n// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.\nfunc NewExecutableSchema(resolvers ResolverRoot) graphql.ExecutableSchema {\n\treturn MakeExecutableSchema(shortMapper{r: resolvers})\n}\n\ntype Resolvers interface {\n{{- range $object := .Objects -}}\n\t{{ range $field := $object.Fields -}}\n\t\t{{ $field.ResolverDeclaration }}\n\t{{ end }}\n{{- end }}\n}\n\ntype ResolverRoot interface {\n{{- range $object := .Objects -}}\n\t{{ if $object.HasResolvers -}}\n\t\t{{$object.GQLType}}() {{$object.GQLType}}Resolver\n\t{{ end }}\n{{- end }}\n}\n\n{{- range $object := .Objects -}}\n\t{{ if $object.HasResolvers }}\n\t\ttype {{$object.GQLType}}Resolver interface {\n\t\t{{ range $field := $object.Fields -}}\n\t\t\t{{ $field.ShortResolverDeclaration }}\n\t\t{{ end }}\n\t\t}\n\t{{- end }}\n{{- end }}\n\ntype shortMapper struct {\n\tr ResolverRoot\n}\n\n{{- range $object := .Objects -}}\n\t{{ range $field := $object.Fields -}}\n\t\t{{- if $field.IsResolver }}\n\t\t\tfunc (s shortMapper) {{ $field.ResolverDeclaration }} {\n\t\t\t\treturn s.r.{{$field.ShortInvocation}}\n\t\t\t}\n\t\t{{- end }}\n\t{{ end }}\n{{- end }}\n\ntype executableSchema struct {\n\tresolvers Resolvers\n}\n\nfunc (e *executableSchema) Schema() *schema.Schema {\n\treturn parsedSchema\n}\n\nfunc (e *executableSchema) Query(ctx context.Context, op *query.Operation) *graphql.Response {\n\t{{- if .QueryRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.QueryRoot.GQLType}}(ctx, op.Selections)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"queries are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {\n\t{{- if .MutationRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\tdata := ec._{{.MutationRoot.GQLType}}(ctx, op.Selections)\n\t\t\tvar buf bytes.Buffer\n\t\t\tdata.MarshalGQL(&buf)\n\t\t\treturn buf.Bytes()\n\t\t})\n\n\t\treturn &graphql.Response{\n\t\t\tData: buf,\n\t\t\tErrors: ec.Errors,\n\t\t}\n\t{{- else }}\n\t\treturn graphql.ErrorResponse(ctx, \"mutations are not supported\")\n\t{{- end }}\n}\n\nfunc (e *executableSchema) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {\n\t{{- if .SubscriptionRoot }}\n\t\tec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}\n\n\t\tnext := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.Selections)\n\t\tif ec.Errors != nil {\n\t\t\treturn graphql.OneShot(&graphql.Response{Data: []byte(\"null\"), Errors: ec.Errors})\n\t\t}\n\n\t\tvar buf bytes.Buffer\n\t\treturn func() *graphql.Response {\n\t\t\tbuf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {\n\t\t\t\tbuf.Reset()\n\t\t\t\tdata := next()\n\n\t\t\t\tif data 
== nil {\n\t\t\t\t\treturn nil\n\t\t\t\t}\n\t\t\t\tdata.MarshalGQL(&buf)\n\t\t\t\treturn buf.Bytes()\n\t\t\t})\n\n\t\t\treturn &graphql.Response{\n\t\t\t\tData: buf,\n\t\t\t\tErrors: ec.Errors,\n\t\t\t}\n\t\t}\n\t{{- else }}\n\t\treturn graphql.OneShot(graphql.ErrorResponse(ctx, \"subscriptions are not supported\"))\n\t{{- end }}\n}\n\ntype executionContext struct {\n\t*graphql.RequestContext\n\n\tresolvers Resolvers\n}\n\n{{- range $object := .Objects }}\n\t{{ template \"object.gotpl\" $object }}\n\n\t{{- range $field := $object.Fields }}\n\t\t{{ template \"field.gotpl\" $field }}\n\t{{ end }}\n{{- end}}\n\n{{- range $interface := .Interfaces }}\n\t{{ template \"interface.gotpl\" $interface }}\n{{- end }}\n\n{{- range $input := .Inputs }}\n\t{{ template \"input.gotpl\" $input }}\n{{- end }}\n\nfunc (ec *executionContext) introspectSchema() *introspection.Schema {\n\treturn introspection.WrapSchema(parsedSchema)\n}\n\nfunc (ec *executionContext) introspectType(name string) *introspection.Type {\n\tt := parsedSchema.Resolve(name)\n\tif t == nil {\n\t\treturn nil\n\t}\n\treturn introspection.WrapType(t)\n}\n\nvar parsedSchema = schema.MustParse({{.SchemaRaw|rawQuote}})\n",
"input.gotpl": "\t{{- if .IsMarshaled }}\n\tfunc Unmarshal{{ .GQLType }}(v interface{}) ({{.FullName}}, error) {\n\t\tvar it {{.FullName}}\n\t\tvar asMap = v.(map[string]interface{})\n\t\t{{ range $field := .Fields}}\n\t\t\t{{- if $field.Default}}\n\t\t\t\tif _, present := asMap[{{$field.GQLName|quote}}] ; !present {\n\t\t\t\t\tasMap[{{$field.GQLName|quote}}] = {{ $field.Default | dump }}\n\t\t\t\t}\n\t\t\t{{- end}}\n\t\t{{- end }}\n\n\t\tfor k, v := range asMap {\n\t\t\tswitch k {\n\t\t\t{{- range $field := .Fields }}\n\t\t\tcase {{$field.GQLName|quote}}:\n\t\t\t\tvar err error\n\t\t\t\t{{ $field.Unmarshal (print \"it.\" $field.GoVarName) \"v\" }}\n\t\t\t\tif err != nil {\n\t\t\t\t\treturn it, err\n\t\t\t\t}\n\t\t\t{{- end }}\n\t\t\t}\n\t\t}\n\n\t\treturn it, nil\n\t}\n\t{{- end }}\n",
"interface.gotpl": "{{- $interface := . }}\n\nfunc (ec *executionContext) _{{$interface.GQLType}}(ctx context.Context, sel []query.Selection, obj *{{$interface.FullName}}) graphql.Marshaler {\n\tswitch obj := (*obj).(type) {\n\tcase nil:\n\t\treturn graphql.Null\n\t{{- range $implementor := $interface.Implementors }}\n\t\t{{- if $implementor.ValueReceiver }}\n\t\t\tcase {{$implementor.FullName}}:\n\t\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, &obj)\n\t\t{{- end}}\n\t\tcase *{{$implementor.FullName}}:\n\t\t\treturn ec._{{$implementor.GQLType}}(ctx, sel, obj)\n\t{{- end }}\n\tdefault:\n\t\tpanic(fmt.Errorf(\"unexpected type %T\", obj))\n\t}\n}\n",
"models.gotpl": "// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.\n\npackage {{ .PackageName }}\n\nimport (\n{{- range $import := .Imports }}\n\t{{- $import.Write }}\n{{ end }}\n)\n\n{{ range $model := .Models }}\n\t{{- if .IsInterface }}\n\t\ttype {{.GoType}} interface {}\n\t{{- else }}\n\t\ttype {{.GoType}} struct {\n\t\t\t{{- range $field := .Fields }}\n\t\t\t\t{{- if $field.GoVarName }}\n\t\t\t\t\t{{ $field.GoVarName }} {{$field.Signature}} `json:\"{{$field.GQLName}}\"`\n\t\t\t\t{{- else }}\n\t\t\t\t\t{{ $field.GoFKName }} {{$field.GoFKType}}\n\t\t\t\t{{- end }}\n\t\t\t{{- end }}\n\t\t}\n\t{{- end }}\n{{- end}}\n\n{{ range $enum := .Enums }}\n\ttype {{.GoType}} string\n\tconst (\n\t{{ range $value := .Values -}}\n\t\t{{with .Description}} {{.|prefixLines \"// \"}} {{end}}\n\t\t{{$enum.GoType}}{{ .Name|toCamel }} {{$enum.GoType}} = {{.Name|quote}}\n\t{{- end }}\n\t)\n\n\tfunc (e {{.GoType}}) IsValid() bool {\n\t\tswitch e {\n\t\tcase {{ range $index, $element := .Values}}{{if $index}},{{end}}{{ $enum.GoType }}{{ $element.Name|toCamel }}{{end}}:\n\t\t\treturn true\n\t\t}\n\t\treturn false\n\t}\n\n\tfunc (e {{.GoType}}) String() string {\n\t\treturn string(e)\n\t}\n\n\tfunc (e *{{.GoType}}) UnmarshalGQL(v interface{}) error {\n\t\tstr, ok := v.(string)\n\t\tif !ok {\n\t\t\treturn fmt.Errorf(\"enums must be strings\")\n\t\t}\n\n\t\t*e = {{.GoType}}(str)\n\t\tif !e.IsValid() {\n\t\t\treturn fmt.Errorf(\"%s is not a valid {{.GQLType}}\", str)\n\t\t}\n\t\treturn nil\n\t}\n\n\tfunc (e {{.GoType}}) MarshalGQL(w io.Writer) {\n\t\tfmt.Fprint(w, strconv.Quote(e.String()))\n\t}\n\n{{- end }}\n",
"object.gotpl": "{{ $object := . }}\n\nvar {{ $object.GQLType|lcFirst}}Implementors = {{$object.Implementors}}\n\n// nolint: gocyclo, errcheck, gas, goconst\n{{- if .Stream }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection) func() graphql.Marshaler {\n\tfields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)\n\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\tObject: {{$object.GQLType|quote}},\n\t})\n\tif len(fields) != 1 {\n\t\tec.Errorf(ctx, \"must subscribe to exactly one stream\")\n\t\treturn nil\n\t}\n\n\tswitch fields[0].Name {\n\t{{- range $field := $object.Fields }}\n\tcase \"{{$field.GQLName}}\":\n\t\treturn ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, fields[0])\n\t{{- end }}\n\tdefault:\n\t\tpanic(\"unknown field \" + strconv.Quote(fields[0].Name))\n\t}\n}\n{{- else }}\nfunc (ec *executionContext) _{{$object.GQLType}}(ctx context.Context, sel []query.Selection{{if not $object.Root}}, obj *{{$object.FullName}} {{end}}) graphql.Marshaler {\n\tfields := graphql.CollectFields(ec.Doc, sel, {{$object.GQLType|lcFirst}}Implementors, ec.Variables)\n\t{{if $object.Root}}\n\t\tctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{\n\t\t\tObject: {{$object.GQLType|quote}},\n\t\t})\n\t{{end}}\n\tout := graphql.NewOrderedMap(len(fields))\n\tfor i, field := range fields {\n\t\tout.Keys[i] = field.Alias\n\n\t\tswitch field.Name {\n\t\tcase \"__typename\":\n\t\t\tout.Values[i] = graphql.MarshalString({{$object.GQLType|quote}})\n\t\t{{- range $field := $object.Fields }}\n\t\tcase \"{{$field.GQLName}}\":\n\t\t\tout.Values[i] = ec._{{$object.GQLType}}_{{$field.GQLName}}(ctx, field{{if not $object.Root}}, obj{{end}})\n\t\t{{- end }}\n\t\tdefault:\n\t\t\tpanic(\"unknown field \" + strconv.Quote(field.Name))\n\t\t}\n\t}\n\n\treturn out\n}\n{{- end }}\n",
}

View File

@ -1,80 +0,0 @@
{{ $field := . }}
{{ $object := $field.Object }}
{{- if $object.Stream }}
func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField) func() graphql.Marshaler {
{{- template "args.gotpl" $field.Args }}
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{Field: field})
results, err := ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})
if err != nil {
ec.Error(ctx, err)
return nil
}
return func() graphql.Marshaler {
res, ok := <-results
if !ok {
return nil
}
var out graphql.OrderedMap
out.Add(field.Alias, func() graphql.Marshaler { {{ $field.WriteJson }} }())
return &out
}
}
{{ else }}
func (ec *executionContext) _{{$object.GQLType}}_{{$field.GQLName}}(ctx context.Context, field graphql.CollectedField, {{if not $object.Root}}obj *{{$object.FullName}}{{end}}) graphql.Marshaler {
{{- template "args.gotpl" $field.Args }}
{{- if $field.IsConcurrent }}
ctx = graphql.WithResolverContext(ctx, &graphql.ResolverContext{
Object: {{$object.GQLType|quote}},
Args: {{if $field.Args }}args{{else}}nil{{end}},
Field: field,
})
return graphql.Defer(func() (ret graphql.Marshaler) {
defer func() {
if r := recover(); r != nil {
userErr := ec.Recover(ctx, r)
ec.Error(ctx, userErr)
ret = graphql.Null
}
}()
{{ else }}
rctx := graphql.GetResolverContext(ctx)
rctx.Object = {{$object.GQLType|quote}}
rctx.Args = {{if $field.Args }}args{{else}}nil{{end}}
rctx.Field = field
rctx.PushField(field.Alias)
defer rctx.Pop()
{{- end }}
{{- if $field.IsResolver }}
resTmp, err := ec.ResolverMiddleware(ctx, func(ctx context.Context) (interface{}, error) {
return ec.resolvers.{{ $object.GQLType }}_{{ $field.GQLName }}({{ $field.CallArgs }})
})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
if resTmp == nil {
return graphql.Null
}
res := resTmp.({{$field.Signature}})
{{- else if $field.GoVarName }}
res := obj.{{$field.GoVarName}}
{{- else if $field.GoMethodName }}
{{- if $field.NoErr }}
res := {{$field.GoMethodName}}({{ $field.CallArgs }})
{{- else }}
res, err := {{$field.GoMethodName}}({{ $field.CallArgs }})
if err != nil {
ec.Error(ctx, err)
return graphql.Null
}
{{- end }}
{{- end }}
{{ $field.WriteJson }}
{{- if $field.IsConcurrent }}
})
{{- end }}
}
{{ end }}

View File

@ -1,175 +0,0 @@
// Code generated by github.com/vektah/gqlgen, DO NOT EDIT.
package {{ .PackageName }}
import (
{{- range $import := .Imports }}
{{- $import.Write }}
{{ end }}
)
// MakeExecutableSchema creates an ExecutableSchema from the Resolvers interface.
func MakeExecutableSchema(resolvers Resolvers) graphql.ExecutableSchema {
return &executableSchema{resolvers: resolvers}
}
// NewExecutableSchema creates an ExecutableSchema from the ResolverRoot interface.
func NewExecutableSchema(resolvers ResolverRoot) graphql.ExecutableSchema {
return MakeExecutableSchema(shortMapper{r: resolvers})
}
type Resolvers interface {
{{- range $object := .Objects -}}
{{ range $field := $object.Fields -}}
{{ $field.ResolverDeclaration }}
{{ end }}
{{- end }}
}
type ResolverRoot interface {
{{- range $object := .Objects -}}
{{ if $object.HasResolvers -}}
{{$object.GQLType}}() {{$object.GQLType}}Resolver
{{ end }}
{{- end }}
}
{{- range $object := .Objects -}}
{{ if $object.HasResolvers }}
type {{$object.GQLType}}Resolver interface {
{{ range $field := $object.Fields -}}
{{ $field.ShortResolverDeclaration }}
{{ end }}
}
{{- end }}
{{- end }}
type shortMapper struct {
r ResolverRoot
}
{{- range $object := .Objects -}}
{{ range $field := $object.Fields -}}
{{- if $field.IsResolver }}
func (s shortMapper) {{ $field.ResolverDeclaration }} {
return s.r.{{$field.ShortInvocation}}
}
{{- end }}
{{ end }}
{{- end }}
type executableSchema struct {
resolvers Resolvers
}
func (e *executableSchema) Schema() *schema.Schema {
return parsedSchema
}
func (e *executableSchema) Query(ctx context.Context, op *query.Operation) *graphql.Response {
{{- if .QueryRoot }}
ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
data := ec._{{.QueryRoot.GQLType}}(ctx, op.Selections)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return buf.Bytes()
})
return &graphql.Response{
Data: buf,
Errors: ec.Errors,
}
{{- else }}
return graphql.ErrorResponse(ctx, "queries are not supported")
{{- end }}
}
func (e *executableSchema) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {
{{- if .MutationRoot }}
ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
data := ec._{{.MutationRoot.GQLType}}(ctx, op.Selections)
var buf bytes.Buffer
data.MarshalGQL(&buf)
return buf.Bytes()
})
return &graphql.Response{
Data: buf,
Errors: ec.Errors,
}
{{- else }}
return graphql.ErrorResponse(ctx, "mutations are not supported")
{{- end }}
}
func (e *executableSchema) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {
{{- if .SubscriptionRoot }}
ec := executionContext{graphql.GetRequestContext(ctx), e.resolvers}
next := ec._{{.SubscriptionRoot.GQLType}}(ctx, op.Selections)
if ec.Errors != nil {
return graphql.OneShot(&graphql.Response{Data: []byte("null"), Errors: ec.Errors})
}
var buf bytes.Buffer
return func() *graphql.Response {
buf := ec.RequestMiddleware(ctx, func(ctx context.Context) []byte {
buf.Reset()
data := next()
if data == nil {
return nil
}
data.MarshalGQL(&buf)
return buf.Bytes()
})
return &graphql.Response{
Data: buf,
Errors: ec.Errors,
}
}
{{- else }}
return graphql.OneShot(graphql.ErrorResponse(ctx, "subscriptions are not supported"))
{{- end }}
}
type executionContext struct {
*graphql.RequestContext
resolvers Resolvers
}
{{- range $object := .Objects }}
{{ template "object.gotpl" $object }}
{{- range $field := $object.Fields }}
{{ template "field.gotpl" $field }}
{{ end }}
{{- end}}
{{- range $interface := .Interfaces }}
{{ template "interface.gotpl" $interface }}
{{- end }}
{{- range $input := .Inputs }}
{{ template "input.gotpl" $input }}
{{- end }}
func (ec *executionContext) introspectSchema() *introspection.Schema {
return introspection.WrapSchema(parsedSchema)
}
func (ec *executionContext) introspectType(name string) *introspection.Type {
t := parsedSchema.Resolve(name)
if t == nil {
return nil
}
return introspection.WrapType(t)
}
var parsedSchema = schema.MustParse({{.SchemaRaw|rawQuote}})

View File

@ -1,30 +0,0 @@
package graphql
import (
"io"
"sync"
)
// Defer will begin executing the given function and immediately return a result that will block until the function completes
func Defer(f func() Marshaler) Marshaler {
var deferred deferred
deferred.mu.Lock()
go func() {
deferred.result = f()
deferred.mu.Unlock()
}()
return &deferred
}
type deferred struct {
result Marshaler
mu sync.Mutex
}
func (d *deferred) MarshalGQL(w io.Writer) {
d.mu.Lock()
d.result.MarshalGQL(w)
d.mu.Unlock()
}
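
The pre-locked mutex acts as a one-shot latch: MarshalGQL blocks until the goroutine stores its result and unlocks. A small usage sketch (MarshalString comes from the same graphql package; the sleep only makes the blocking visible):

package main

import (
	"os"
	"time"

	"github.com/vektah/gqlgen/graphql"
)

func main() {
	m := graphql.Defer(func() graphql.Marshaler {
		time.Sleep(100 * time.Millisecond) // stand-in for a slow concurrent resolver
		return graphql.MarshalString("done")
	})
	// Blocks on the deferred mutex until the goroutine above has finished.
	m.MarshalGQL(os.Stdout) // writes "done" (JSON-quoted) after ~100ms
}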

View File

@ -1,46 +0,0 @@
package graphql
import (
"context"
)
// Error is the standard graphql error type described in https://facebook.github.io/graphql/draft/#sec-Errors
type Error struct {
Message string `json:"message"`
Path []interface{} `json:"path,omitempty"`
Locations []ErrorLocation `json:"locations,omitempty"`
Extensions map[string]interface{} `json:"extensions,omitempty"`
}
func (e *Error) Error() string {
return e.Message
}
type ErrorLocation struct {
Line int `json:"line,omitempty"`
Column int `json:"column,omitempty"`
}
type ErrorPresenterFunc func(context.Context, error) *Error
type ExtendedError interface {
Extensions() map[string]interface{}
}
func DefaultErrorPresenter(ctx context.Context, err error) *Error {
if gqlerr, ok := err.(*Error); ok {
gqlerr.Path = GetResolverContext(ctx).Path
return gqlerr
}
var extensions map[string]interface{}
if ee, ok := err.(ExtendedError); ok {
extensions = ee.Extensions()
}
return &Error{
Message: err.Error(),
Path: GetResolverContext(ctx).Path,
Extensions: extensions,
}
}
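
Because DefaultErrorPresenter only type-asserts the error it is handed, any error implementing ExtendedError gets its extra data surfaced under the response's extensions; a small sketch of such an error type (the code field is invented):

package example

// codedError is an invented error type; since it implements ExtendedError,
// DefaultErrorPresenter copies Extensions() into Error.Extensions.
type codedError struct {
	msg  string
	code string
}

func (e codedError) Error() string { return e.msg }

func (e codedError) Extensions() map[string]interface{} {
	return map[string]interface{}{"code": e.code}
}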

View File

@ -1,118 +0,0 @@
package graphql
import (
"context"
"fmt"
"github.com/vektah/gqlgen/neelance/common"
"github.com/vektah/gqlgen/neelance/query"
"github.com/vektah/gqlgen/neelance/schema"
)
type ExecutableSchema interface {
Schema() *schema.Schema
Query(ctx context.Context, op *query.Operation) *Response
Mutation(ctx context.Context, op *query.Operation) *Response
Subscription(ctx context.Context, op *query.Operation) func() *Response
}
func CollectFields(doc *query.Document, selSet []query.Selection, satisfies []string, variables map[string]interface{}) []CollectedField {
return collectFields(doc, selSet, satisfies, variables, map[string]bool{})
}
func collectFields(doc *query.Document, selSet []query.Selection, satisfies []string, variables map[string]interface{}, visited map[string]bool) []CollectedField {
var groupedFields []CollectedField
for _, sel := range selSet {
switch sel := sel.(type) {
case *query.Field:
f := getOrCreateField(&groupedFields, sel.Alias.Name, func() CollectedField {
f := CollectedField{
Alias: sel.Alias.Name,
Name: sel.Name.Name,
}
if len(sel.Arguments) > 0 {
f.Args = map[string]interface{}{}
for _, arg := range sel.Arguments {
if variable, ok := arg.Value.(*common.Variable); ok {
if val, ok := variables[variable.Name]; ok {
f.Args[arg.Name.Name] = val
}
} else {
f.Args[arg.Name.Name] = arg.Value.Value(variables)
}
}
}
return f
})
f.Selections = append(f.Selections, sel.Selections...)
case *query.InlineFragment:
if !instanceOf(sel.On.Ident.Name, satisfies) {
continue
}
for _, childField := range collectFields(doc, sel.Selections, satisfies, variables, visited) {
f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
f.Selections = append(f.Selections, childField.Selections...)
}
case *query.FragmentSpread:
fragmentName := sel.Name.Name
if _, seen := visited[fragmentName]; seen {
continue
}
visited[fragmentName] = true
fragment := doc.Fragments.Get(fragmentName)
if fragment == nil {
// should never happen, validator has already run
panic(fmt.Errorf("missing fragment %s", fragmentName))
}
if !instanceOf(fragment.On.Ident.Name, satisfies) {
continue
}
for _, childField := range collectFields(doc, fragment.Selections, satisfies, variables, visited) {
f := getOrCreateField(&groupedFields, childField.Name, func() CollectedField { return childField })
f.Selections = append(f.Selections, childField.Selections...)
}
default:
panic(fmt.Errorf("unsupported %T", sel))
}
}
return groupedFields
}
type CollectedField struct {
Alias string
Name string
Args map[string]interface{}
Selections []query.Selection
}
func instanceOf(val string, satisfies []string) bool {
for _, s := range satisfies {
if val == s {
return true
}
}
return false
}
func getOrCreateField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField {
for i, cf := range *c {
if cf.Alias == name {
return &(*c)[i]
}
}
f := creator()
*c = append(*c, f)
return &(*c)[len(*c)-1]
}

View File

@ -1,45 +0,0 @@
package handler
import (
"context"
"time"
"github.com/vektah/gqlgen/graphql"
"github.com/vektah/gqlgen/neelance/query"
"github.com/vektah/gqlgen/neelance/schema"
)
type executableSchemaStub struct {
}
var _ graphql.ExecutableSchema = &executableSchemaStub{}
func (e *executableSchemaStub) Schema() *schema.Schema {
return schema.MustParse(`
schema { query: Query }
type Query { me: User! }
type User { name: String! }
`)
}
func (e *executableSchemaStub) Query(ctx context.Context, op *query.Operation) *graphql.Response {
return &graphql.Response{Data: []byte(`{"name":"test"}`)}
}
func (e *executableSchemaStub) Mutation(ctx context.Context, op *query.Operation) *graphql.Response {
return graphql.ErrorResponse(ctx, "mutations are not supported")
}
func (e *executableSchemaStub) Subscription(ctx context.Context, op *query.Operation) func() *graphql.Response {
return func() *graphql.Response {
time.Sleep(50 * time.Millisecond)
select {
case <-ctx.Done():
return nil
default:
return &graphql.Response{
Data: []byte(`{"name":"test"}`),
}
}
}
}

View File

@ -1,24 +0,0 @@
Copyright (c) 2016 Richard Musiol. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following disclaimer
in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

View File

@ -1,32 +0,0 @@
package common
type Directive struct {
Name Ident
Args ArgumentList
}
func ParseDirectives(l *Lexer) DirectiveList {
var directives DirectiveList
for l.Peek() == '@' {
l.ConsumeToken('@')
d := &Directive{}
d.Name = l.ConsumeIdentWithLoc()
d.Name.Loc.Column--
if l.Peek() == '(' {
d.Args = ParseArguments(l)
}
directives = append(directives, d)
}
return directives
}
type DirectiveList []*Directive
func (l DirectiveList) Get(name string) *Directive {
for _, d := range l {
if d.Name.Name == name {
return d
}
}
return nil
}

View File

@ -1,122 +0,0 @@
package common
import (
"fmt"
"text/scanner"
"github.com/vektah/gqlgen/neelance/errors"
)
type syntaxError string
type Lexer struct {
sc *scanner.Scanner
next rune
descComment string
}
type Ident struct {
Name string
Loc errors.Location
}
func New(sc *scanner.Scanner) *Lexer {
l := &Lexer{sc: sc}
l.Consume()
return l
}
func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) {
defer func() {
if err := recover(); err != nil {
if err, ok := err.(syntaxError); ok {
errRes = errors.Errorf("syntax error: %s", err)
errRes.Locations = []errors.Location{l.Location()}
return
}
panic(err)
}
}()
f()
return
}
func (l *Lexer) Peek() rune {
return l.next
}
func (l *Lexer) Consume() {
l.descComment = ""
for {
l.next = l.sc.Scan()
if l.next == ',' {
continue
}
if l.next == '#' {
if l.sc.Peek() == ' ' {
l.sc.Next()
}
if l.descComment != "" {
l.descComment += "\n"
}
for {
next := l.sc.Next()
if next == '\n' || next == scanner.EOF {
break
}
l.descComment += string(next)
}
continue
}
break
}
}
func (l *Lexer) ConsumeIdent() string {
name := l.sc.TokenText()
l.ConsumeToken(scanner.Ident)
return name
}
func (l *Lexer) ConsumeIdentWithLoc() Ident {
loc := l.Location()
name := l.sc.TokenText()
l.ConsumeToken(scanner.Ident)
return Ident{name, loc}
}
func (l *Lexer) ConsumeKeyword(keyword string) {
if l.next != scanner.Ident || l.sc.TokenText() != keyword {
l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword))
}
l.Consume()
}
func (l *Lexer) ConsumeLiteral() *BasicLit {
lit := &BasicLit{Type: l.next, Text: l.sc.TokenText()}
l.Consume()
return lit
}
func (l *Lexer) ConsumeToken(expected rune) {
if l.next != expected {
l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected)))
}
l.Consume()
}
func (l *Lexer) DescComment() string {
return l.descComment
}
func (l *Lexer) SyntaxError(message string) {
panic(syntaxError(message))
}
func (l *Lexer) Location() errors.Location {
return errors.Location{
Line: l.sc.Line,
Column: l.sc.Column,
}
}

View File

@ -1,206 +0,0 @@
package common
import (
"strconv"
"strings"
"text/scanner"
"github.com/vektah/gqlgen/neelance/errors"
)
type Literal interface {
Value(vars map[string]interface{}) interface{}
String() string
Location() errors.Location
}
type BasicLit struct {
Type rune
Text string
Loc errors.Location
}
func (lit *BasicLit) Value(vars map[string]interface{}) interface{} {
switch lit.Type {
case scanner.Int:
value, err := strconv.ParseInt(lit.Text, 10, 64)
if err != nil {
panic(err)
}
return int(value)
case scanner.Float:
value, err := strconv.ParseFloat(lit.Text, 64)
if err != nil {
panic(err)
}
return value
case scanner.String:
value, err := strconv.Unquote(lit.Text)
if err != nil {
panic(err)
}
return value
case scanner.Ident:
switch lit.Text {
case "true":
return true
case "false":
return false
default:
return lit.Text
}
default:
panic("invalid literal")
}
}
func (lit *BasicLit) String() string {
return lit.Text
}
func (lit *BasicLit) Location() errors.Location {
return lit.Loc
}
type ListLit struct {
Entries []Literal
Loc errors.Location
}
func (lit *ListLit) Value(vars map[string]interface{}) interface{} {
entries := make([]interface{}, len(lit.Entries))
for i, entry := range lit.Entries {
entries[i] = entry.Value(vars)
}
return entries
}
func (lit *ListLit) String() string {
entries := make([]string, len(lit.Entries))
for i, entry := range lit.Entries {
entries[i] = entry.String()
}
return "[" + strings.Join(entries, ", ") + "]"
}
func (lit *ListLit) Location() errors.Location {
return lit.Loc
}
type ObjectLit struct {
Fields []*ObjectLitField
Loc errors.Location
}
type ObjectLitField struct {
Name Ident
Value Literal
}
func (lit *ObjectLit) Value(vars map[string]interface{}) interface{} {
fields := make(map[string]interface{}, len(lit.Fields))
for _, f := range lit.Fields {
fields[f.Name.Name] = f.Value.Value(vars)
}
return fields
}
func (lit *ObjectLit) String() string {
entries := make([]string, 0, len(lit.Fields))
for _, f := range lit.Fields {
entries = append(entries, f.Name.Name+": "+f.Value.String())
}
return "{" + strings.Join(entries, ", ") + "}"
}
func (lit *ObjectLit) Location() errors.Location {
return lit.Loc
}
type NullLit struct {
Loc errors.Location
}
func (lit *NullLit) Value(vars map[string]interface{}) interface{} {
return nil
}
func (lit *NullLit) String() string {
return "null"
}
func (lit *NullLit) Location() errors.Location {
return lit.Loc
}
type Variable struct {
Name string
Loc errors.Location
}
func (v Variable) Value(vars map[string]interface{}) interface{} {
return vars[v.Name]
}
func (v Variable) String() string {
return "$" + v.Name
}
func (v *Variable) Location() errors.Location {
return v.Loc
}
func ParseLiteral(l *Lexer, constOnly bool) Literal {
loc := l.Location()
switch l.Peek() {
case '$':
if constOnly {
l.SyntaxError("variable not allowed")
panic("unreachable")
}
l.ConsumeToken('$')
return &Variable{l.ConsumeIdent(), loc}
case scanner.Int, scanner.Float, scanner.String, scanner.Ident:
lit := l.ConsumeLiteral()
if lit.Type == scanner.Ident && lit.Text == "null" {
return &NullLit{loc}
}
lit.Loc = loc
return lit
case '-':
l.ConsumeToken('-')
lit := l.ConsumeLiteral()
lit.Text = "-" + lit.Text
lit.Loc = loc
return lit
case '[':
l.ConsumeToken('[')
var list []Literal
for l.Peek() != ']' {
list = append(list, ParseLiteral(l, constOnly))
}
l.ConsumeToken(']')
return &ListLit{list, loc}
case '{':
l.ConsumeToken('{')
var fields []*ObjectLitField
for l.Peek() != '}' {
name := l.ConsumeIdentWithLoc()
l.ConsumeToken(':')
value := ParseLiteral(l, constOnly)
fields = append(fields, &ObjectLitField{name, value})
}
l.ConsumeToken('}')
return &ObjectLit{fields, loc}
default:
l.SyntaxError("invalid value")
panic("unreachable")
}
}

View File

@ -1,80 +0,0 @@
package common
import (
"github.com/vektah/gqlgen/neelance/errors"
)
type Type interface {
Kind() string
String() string
}
type List struct {
OfType Type
}
type NonNull struct {
OfType Type
}
type TypeName struct {
Ident
}
func (*List) Kind() string { return "LIST" }
func (*NonNull) Kind() string { return "NON_NULL" }
func (*TypeName) Kind() string { panic("TypeName needs to be resolved to actual type") }
func (t *List) String() string { return "[" + t.OfType.String() + "]" }
func (t *NonNull) String() string { return t.OfType.String() + "!" }
func (*TypeName) String() string { panic("TypeName needs to be resolved to actual type") }
func ParseType(l *Lexer) Type {
t := parseNullType(l)
if l.Peek() == '!' {
l.ConsumeToken('!')
return &NonNull{OfType: t}
}
return t
}
func parseNullType(l *Lexer) Type {
if l.Peek() == '[' {
l.ConsumeToken('[')
ofType := ParseType(l)
l.ConsumeToken(']')
return &List{OfType: ofType}
}
return &TypeName{Ident: l.ConsumeIdentWithLoc()}
}
type Resolver func(name string) Type
func ResolveType(t Type, resolver Resolver) (Type, *errors.QueryError) {
switch t := t.(type) {
case *List:
ofType, err := ResolveType(t.OfType, resolver)
if err != nil {
return nil, err
}
return &List{OfType: ofType}, nil
case *NonNull:
ofType, err := ResolveType(t.OfType, resolver)
if err != nil {
return nil, err
}
return &NonNull{OfType: ofType}, nil
case *TypeName:
refT := resolver(t.Name)
if refT == nil {
err := errors.Errorf("Unknown type %q.", t.Name)
err.Rule = "KnownTypeNames"
err.Locations = []errors.Location{t.Loc}
return nil, err
}
return refT, nil
default:
return t, nil
}
}

Some files were not shown because too many files have changed in this diff