gbgen

package module
v3.3.24
Published: Apr 10, 2024 License: MIT Imports: 22 Imported by: 0

README

gqlgen-sqlboiler

We want developers to be able to build software faster using modern tools like GraphQL, Golang, and React Native, without depending on commercial providers like Firebase or AWS Amplify.

Our plugins generate type-safe code between gqlgen and sqlboiler models, with support for unique IDs across your whole database. We can automatically generate the implementation of queries and mutations like create, update, and delete based on your GraphQL schema and your sqlboiler models.

Tight coupling between your database and GraphQL schema is required; otherwise generation will be skipped. This program offers the most value when your database is already designed. You can write extra GraphQL resolvers and override the generated functions, so you can iterate fast.

Why gqlgen and sqlboiler

They take a schema-first approach, which we like. The code these tools generate is among the most efficient and fastest in the Go ecosystem (and probably outside of it too).

It's really amazing how fast an API generated with these techniques is!

Usage

Step 1

Create the file convert/convert.go with the following content: see the example convert.go below.

Step 2

Run go mod tidy in convert/

Step 3

Make sure you have followed the prerequisites

Step 4

(cd convert && go run convert.go)

Features

  • schema.graphql based on sqlboiler structs
  • converts between sqlboiler and gqlgen
  • connections / edges / filtering / ordering / sorting
  • three-way-merge schema re-generation
  • converts between input models and sqlboiler
  • understands the difference between empty and null in update input
  • sqlboiler preloads from GraphQL context
  • foreign keys and relations
  • resolvers based on queries/mutations in schema
  • one-to-one relationships inside input types
  • batch update/delete generation in resolvers
  • enum support (only in the GraphQL schema right now)
  • public errors in resolvers + logging via zerolog
  • overriding convert functions
  • custom scope resolvers, e.g. userId, organizationId
  • support for multiple gqlgen .graphql files
  • batch create helpers for sqlboiler and integration with batch create inputs
Relay
Roadmap
  • Support automatic converts for custom schema objects
  • Support overriding resolvers
  • Support multiple resolvers (per schema)
  • Adding automatic database migrations and integration with web-ridge/dbifier
  • CRUD for adding/removing one-to-many and many-to-many relationships on edges
  • Support more relationships inside input types
  • Generate tests
  • Run automatic tests in GitHub CI/CD in https://github.com/web-ridge/gqlgen-sqlboiler-examples

Examples

Check out our examples to see the generated schema.graphql, converts and resolvers.
web-ridge/gqlgen-sqlboiler-examples

Output example
func PostToGraphQL(m *models.Post) *graphql_models.Post {
	if m == nil {
		return nil
	}
	r := &graphql_models.Post{
		ID:      PostIDToGraphQL(m.ID),
		Content: m.Content,
	}
	if boilergql.UintIsFilled(m.UserID) {
		if m.R != nil && m.R.User != nil {
			r.User = UserToGraphQL(m.R.User)
		} else {
			r.User = UserWithUintID(m.UserID)
		}
	}
	if m.R != nil && m.R.Comments != nil {
		r.Comments = CommentsToGraphQL(m.R.Comments)
	}
	if m.R != nil && m.R.Images != nil {
		r.Images = ImagesToGraphQL(m.R.Images)
	}
	if m.R != nil && m.R.Likes != nil {
		r.Likes = LikesToGraphQL(m.R.Likes)
	}
	return r
}
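
For illustration, here is how a resolver (or any other caller) might feed a sqlboiler model into this generated convert. This is a hand-written sketch, assuming sqlboiler v4's generated query helpers and placeholder module paths, not output of the plugin:

package helpers

import (
	"context"
	"database/sql"

	"github.com/volatiletech/sqlboiler/v4/queries/qm"

	"github.com/my-repo/app/backend/graphql_models" // placeholder module path
	"github.com/my-repo/app/backend/models"         // placeholder module path
)

// FindPostForGraphQL loads a post with its user and comments preloaded,
// then converts it with the generated PostToGraphQL helper above.
func FindPostForGraphQL(ctx context.Context, db *sql.DB, id uint) (*graphql_models.Post, error) {
	post, err := models.Posts(
		models.PostWhere.ID.EQ(id),
		qm.Load(models.PostRels.User),
		qm.Load(models.PostRels.Comments),
	).One(ctx, db)
	if err != nil {
		return nil, err
	}
	return PostToGraphQL(post), nil
}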

Prerequisites

sqlboiler.yml
mysql:
  dbname: dbname
  host: localhost
  port: 8889
  user: root
  pass: root
  sslmode: "false"
  blacklist:
    - notifications
    - jobs
    - password_resets
    - migrations
mysqldump:
  column-statistics: 0
gqlgen.yml
schema:
  - "*.graphql"
exec:
  filename: models/fm/generated.go
  package: fm
model:
  filename: models/fm/generated_models.go
  package: fm
models:
  ConnectionBackwardPagination:
    model: github.com/web-ridge/utils-go/boilergql/v3.ConnectionBackwardPagination
  ConnectionForwardPagination:
    model: github.com/web-ridge/utils-go/boilergql/v3.ConnectionForwardPagination
  ConnectionPagination:
    model: github.com/web-ridge/utils-go/boilergql/v3.ConnectionPagination
  SortDirection:
    model: github.com/web-ridge/utils-go/boilergql/v3.SortDirection
resolver/resolver.go

package resolvers

import (
	"database/sql"
)

type Resolver struct {
	db *sql.DB
	// you can add more here
}

func NewResolver(db *sql.DB) *Resolver {
	return &Resolver{
		db: db,
		// you can add more here
	}
}
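
For context (the generator itself does not need this), the resolver is typically wired into a gqlgen server roughly as below. This is a minimal sketch assuming placeholder module paths, the go-sql-driver/mysql driver, and the generated fm package from gqlgen.yml above:

package main

import (
	"database/sql"
	"log"
	"net/http"

	"github.com/99designs/gqlgen/graphql/handler"
	_ "github.com/go-sql-driver/mysql"

	"github.com/my-repo/app/backend/models/fm" // generated by gqlgen
	"github.com/my-repo/app/backend/resolvers" // package from resolver/resolver.go
)

func main() {
	// DSN matches the sqlboiler.yml example above
	db, err := sql.Open("mysql", "root:root@tcp(localhost:8889)/dbname?parseTime=true")
	if err != nil {
		log.Fatal(err)
	}

	// the generated exec package exposes NewExecutableSchema; the generated
	// resolvers on *Resolver satisfy its ResolverRoot interface
	srv := handler.NewDefaultServer(fm.NewExecutableSchema(fm.Config{
		Resolvers: resolvers.NewResolver(db),
	}))
	http.Handle("/graphql", srv)
	log.Fatal(http.ListenAndServe(":8080", nil))
}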

convert.go

Put something like the code below in the file convert/convert.go

package main

import (
	"os"
	"os/exec"
	"strings"

	"github.com/99designs/gqlgen/codegen/config"
	"github.com/rs/zerolog/log"
	gbgen "github.com/web-ridge/gqlgen-sqlboiler/v3"
	"github.com/web-ridge/gqlgen-sqlboiler/v3/cache"
	"github.com/web-ridge/gqlgen-sqlboiler/v3/structs"
)

func main() {
	// change working directory to parent directory where all configs are located
	newDir, _ := os.Getwd()
	os.Chdir(strings.TrimSuffix(newDir, "/convert"))

	enableSoftDeletes := true
	boilerArgs := []string{"mysql", "--no-back-referencing", "--wipe", "-d"}
	if enableSoftDeletes {
		boilerArgs = append(boilerArgs, "--add-soft-deletes")
	}
	cmd := exec.Command("sqlboiler", boilerArgs...)

	err := cmd.Run()
	if err != nil {
		log.Fatal().Err(err).Str("command", cmd.String()).Msg("error generating dm models while running sqlboiler")
	}

	output := structs.Config{
		Directory:   "helpers", // supports root or sub directories
		PackageName: "helpers",
	}
	backend := structs.Config{
		Directory:   "models/dm",
		PackageName: "dm",
	}
	frontend := structs.Config{
		Directory:   "models/fm",
		PackageName: "fm",
	}

	boilerCache := cache.InitializeBoilerCache(backend)

	generateSchema := true
	generatedSchema := !generateSchema
	if generateSchema {
		if err := gbgen.SchemaWrite(
			gbgen.SchemaConfig{
				BoilerCache:         boilerCache,
				Directives:          []string{"isAuthenticated"},
				SkipInputFields:     []string{"createdAt", "updatedAt", "deletedAt"},
				GenerateMutations:   true,
				GenerateBatchCreate: false,
				GenerateBatchDelete: false,
				GenerateBatchUpdate: false,
				HookShouldAddModel: func(model gbgen.SchemaModel) bool {
					if model.Name == "Config" {
						return false
					}
					return true
				},
				HookChangeFields: func(model *gbgen.SchemaModel, fields []*gbgen.SchemaField, parenType gbgen.ParentType) []*gbgen.SchemaField {
					//profile: UserPayload! @isAuthenticated

					return fields
				},
				HookChangeField: func(model *gbgen.SchemaModel, field *gbgen.SchemaField) {
					//"userId", "userOrganizationId",
					if field.Name == "userId" && model.Name != "UserUserOrganization" {
						field.SkipInput = true
					}
					if field.Name == "userOrganizationId" && model.Name != "UserUserOrganization" {
						field.SkipInput = true
					}
				},
			},
			"../frontend/schema.graphql",
			gbgen.SchemaGenerateConfig{
				MergeSchema: false,
			},
		); err != nil {
			log.Fatal().Err(err).Msg("error generating schema")
		}
		generatedSchema = true
	}
	if generatedSchema {

		cfg, err := config.LoadConfigFromDefaultLocations()
		if err != nil {
			log.Fatal().Err(err).Msg("error loading config")
		}

		data, err := gbgen.NewModelPlugin().GenerateCode(cfg)
		if err != nil {
			log.Fatal().Err(err).Msg("error generating graphql models using gqlgen")
		}

		modelCache := cache.InitializeModelCache(cfg, boilerCache, output, backend, frontend)

		if err := gbgen.NewConvertPlugin(
			modelCache,
			gbgen.ConvertPluginConfig{
				DatabaseDriver: gbgen.MySQL,
				//Searchable: {
				//	Company: {
				//		Column: dm.CompanyColumns.Name
				//	},
				//},
			},
		).GenerateCode(); err != nil {
			log.Fatal().Err(err).Msg("error while generating convert/filters")
		}

		if err := gbgen.NewResolverPlugin(
			config.ResolverConfig{
				Filename: "resolvers/all_generated_resolvers.go",
				Package:  "resolvers",
				Type:     "Resolver",
			},
			output,
			boilerCache,
			modelCache,
			gbgen.ResolverPluginConfig{

				EnableSoftDeletes: enableSoftDeletes,
				// Authorization scopes can be used to override e.g. userId, organizationId, tenantId.
				// The value is resolved via the provided ScopeResolverName when AddHook returns true.
				// You need this if you don't want to require these fields in your schema but still
				// want to add them to the db model.
				// If you do have these fields in your schema but want them authorized, you could use
				// a gqlgen directive instead.
				AuthorizationScopes: []*gbgen.AuthorizationScope{},
				// 	{
				// 		ImportPath:        "github.com/my-repo/app/backend/auth",
				// 		ImportAlias:       "auth",
				// 		ScopeResolverName: "UserIDFromContext", // function which is called with the context of the resolver
				// 		BoilerColumnName:  "UserID",
				// 		AddHook: func(model *gbgen.BoilerModel, resolver *gbgen.Resolver, templateKey string) bool {
				// 			// fmt.Println(model.Name)
				// 			// fmt.Println(templateKey)
				// 			// templateKey contains a unique key for the place where the resolver tries to add something
				// 			// e.g.
				// 			// most of the time you can ignore this

				// 			// we want the delete call to work for every object, so we don't take the user-id into account here
				// 			if resolver.IsDelete {
				// 				return false
				// 			}

				// 			var addResolver bool
				// 			for _, field := range model.Fields {
				// 				if field.Name == "UserID" {
				// 					addResolver = true
				// 				}
				// 			}
				// 			return addResolver
				// 		},
				// 	},
				// 	{
				// 		ImportPath:        "github.com/my-repo/app/backend/auth",
				// 		ImportAlias:       "auth",
				// 		ScopeResolverName: "UserOrganizationIDFromContext", // function which is called with the context of the resolver
				// 		BoilerColumnName:  "UserOrganizationID",

				// 		AddHook: func(model *gbgen.BoilerModel, resolver *gbgen.Resolver, templateKey string) bool {
				// 			// fmt.Println(model.Name)
				// 			// fmt.Println(templateKey)
				// 			// templateKey contains a unique key for the place where the resolver tries to add something
				// 			// e.g.
				// 			// most of the time you can ignore this
				// 			var addResolver bool
				// 			for _, field := range model.Fields {
				// 				if field.Name == "UserOrganizationID" {
				// 					addResolver = true
				// 				}
				// 			}
				// 			return addResolver
				// 		},
				// 	},
				// },
			},
		).GenerateCode(data); err != nil {
			log.Fatal().Err(err).Msg("error while generating resolvers")
		}

	}
}

Overriding converts

Put a file in your helpers/ directory e.g. convert_override_user.go

package helpers

import (
	"github.com/../app/backend/graphql_models"
	"github.com/../app/backend/models"
)

// use same name as in one of the generated files to override
func UserCreateInputToBoiler(
	m *graphql_models.UserCreateInput,
) *models.User {
	if m == nil {
		return nil
	}

	originalConvert := originalUserCreateInputToBoiler(m)
	// e.g. bcrypt password
	return originalConvert
}

If you re-generate, the original convert will be renamed to originalUserCreateInputToBoiler, which you can still use in your overridden convert.
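
As an example, the "e.g. bcrypt password" step above could look roughly like this. The Password fields on the input and the model are assumptions about your own schema, not something the plugin guarantees:

package helpers

import (
	"golang.org/x/crypto/bcrypt"

	"github.com/my-repo/app/backend/graphql_models" // placeholder module path
	"github.com/my-repo/app/backend/models"         // placeholder module path
)

// UserCreateInputToBoiler overrides the generated convert and hashes the
// (assumed) plain-text password before it reaches the database model.
func UserCreateInputToBoiler(m *graphql_models.UserCreateInput) *models.User {
	if m == nil {
		return nil
	}
	u := originalUserCreateInputToBoiler(m)
	// error handling simplified for brevity; real code should report the error
	if hash, err := bcrypt.GenerateFromPassword([]byte(m.Password), bcrypt.DefaultCost); err == nil {
		u.Password = string(hash)
	}
	return u
}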

Help us

We would be most happy with your time investment and/or pull requests to improve this plugin. Feedback is also highly appreciated.

Documentation


Constants

This section is empty.

Variables

var InputTypes = []string{"Create", "Update", "Delete"} //nolint:gochecknoglobals

Functions

func SchemaGet

func SchemaGet(
	config SchemaConfig,
) string
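
SchemaGet returns the schema as a string rather than writing it to disk, which is handy for inspecting or diffing the output. A minimal sketch, assuming the same models/dm layout as in convert.go above:

package main

import (
	"fmt"

	gbgen "github.com/web-ridge/gqlgen-sqlboiler/v3"
	"github.com/web-ridge/gqlgen-sqlboiler/v3/cache"
	"github.com/web-ridge/gqlgen-sqlboiler/v3/structs"
)

func main() {
	// point the boiler cache at the generated sqlboiler models
	boilerCache := cache.InitializeBoilerCache(structs.Config{
		Directory:   "models/dm",
		PackageName: "dm",
	})
	schema := gbgen.SchemaGet(gbgen.SchemaConfig{
		BoilerCache:       boilerCache,
		GenerateMutations: true,
	})
	fmt.Println(schema)
}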

func SchemaWrite

func SchemaWrite(config SchemaConfig, outputFile string, generateOptions SchemaGenerateConfig) error

Types

type AuthorizationScope added in v3.1.0

type AuthorizationScope struct {
	ImportPath        string
	ImportAlias       string
	ScopeResolverName string
	BoilerColumnName  string
	AddHook           func(model *structs.BoilerModel, resolver *Resolver, templateKey string) bool
}

type ConvertPlugin

type ConvertPlugin struct {
	BoilerCache  *cache.BoilerCache
	ModelCache   *cache.ModelCache
	PluginConfig ConvertPluginConfig
	// contains filtered or unexported fields
}

func NewConvertPlugin

func NewConvertPlugin(modelCache *cache.ModelCache, pluginConfig ConvertPluginConfig) *ConvertPlugin

func (*ConvertPlugin) GenerateCode added in v3.3.10

func (m *ConvertPlugin) GenerateCode() error

type ConvertPluginConfig

type ConvertPluginConfig struct {
	DatabaseDriver DatabaseDriver
}

type ConvertTemplateData added in v3.3.10

type ConvertTemplateData struct {
	Backend      structs.Config
	Frontend     structs.Config
	PluginConfig ConvertPluginConfig
	PackageName  string
	Interfaces   []*structs.Interface
	Models       []*structs.Model
	Enums        []*structs.Enum
	Scalars      []string
}

func (ConvertTemplateData) Imports added in v3.3.10

func (t ConvertTemplateData) Imports() []Import

type DatabaseDriver added in v3.2.4

type DatabaseDriver string

DatabaseDriver defines which database syntax to use for some of the converts

const (
	// MySQL is the default
	MySQL DatabaseDriver = "mysql"
	// PostgreSQL is also supported
	PostgreSQL DatabaseDriver = "postgres"
)

type File

type File struct {
	// These are separated because the type definition of the resolver object may live in a different file from the
	// resolver method implementations, for example when extending a type in a different graphql schema file
	Objects         []*codegen.Object
	Resolvers       []*Resolver
	Imports         []Import
	RemainingSource string
}

type Import

type Import struct {
	Alias      string
	ImportPath string
}

type ModelPlugin added in v3.3.10

type ModelPlugin struct {
}

func NewModelPlugin added in v3.3.10

func NewModelPlugin() *ModelPlugin

func (*ModelPlugin) GenerateCode added in v3.3.10

func (m *ModelPlugin) GenerateCode(cfg *config.Config) (*codegen.Data, error)

type ParentType added in v3.1.5

type ParentType string
const (
	ParentTypeNormal      ParentType = "Normal"
	ParentTypeWhere       ParentType = "Where"
	ParentTypeCreate      ParentType = "Create"
	ParentTypeUpdate      ParentType = "Update"
	ParentTypeBatchUpdate ParentType = "BatchUpdate"
	ParentTypeBatchCreate ParentType = "BatchCreate"
)
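
These values are passed to the SchemaConfig.HookChangeFields hook, so a hook can treat each kind of input differently. A sketch (the column name is hypothetical):

HookChangeFields: func(model *gbgen.SchemaModel, fields []*gbgen.SchemaField, parentType gbgen.ParentType) []*gbgen.SchemaField {
	// keep a (hypothetical) secretToken column out of the generated Where inputs only
	if parentType != gbgen.ParentTypeWhere {
		return fields
	}
	filtered := make([]*gbgen.SchemaField, 0, len(fields))
	for _, f := range fields {
		if f.Name != "secretToken" {
			filtered = append(filtered, f)
		}
	}
	return filtered
},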

type Resolver

type Resolver struct {
	Object *codegen.Object
	Field  *codegen.Field

	Implementation            string
	IsSingle                  bool
	IsList                    bool
	IsListForward             bool
	IsListBackward            bool
	IsCreate                  bool
	IsUpdate                  bool
	IsDelete                  bool
	IsBatchCreate             bool
	IsBatchUpdate             bool
	IsBatchDelete             bool
	ResolveOrganizationID     bool // TODO: something more pluggable
	ResolveUserOrganizationID bool // TODO: something more pluggable
	ResolveUserID             bool // TODO: something more pluggable
	Model                     structs.Model
	InputModel                structs.Model
	BoilerWhiteList           string
	PublicErrorKey            string
	PublicErrorMessage        string
	SoftDeleteSuffix          string
}

type ResolverBuild

type ResolverBuild struct {
	*File
	HasRoot             bool
	PackageName         string
	ResolverType        string
	Models              []*structs.Model
	AuthorizationScopes []*AuthorizationScope
	TryHook             func(string) bool
}

func (*ResolverBuild) ShortResolverDeclaration added in v3.1.0

func (rb *ResolverBuild) ShortResolverDeclaration(r *Resolver) string

type ResolverPlugin

type ResolverPlugin struct {
	BoilerCache *cache.BoilerCache
	ModelCache  *cache.ModelCache
	// contains filtered or unexported fields
}

func NewResolverPlugin

func NewResolverPlugin(resolverConfig config.ResolverConfig, output structs.Config, boilerCache *cache.BoilerCache, modelCache *cache.ModelCache, resolverPluginConfig ResolverPluginConfig) *ResolverPlugin

func (*ResolverPlugin) GenerateCode

func (m *ResolverPlugin) GenerateCode(data *codegen.Data) error

type ResolverPluginConfig added in v3.1.0

type ResolverPluginConfig struct {
	EnableSoftDeletes   bool
	AuthorizationScopes []*AuthorizationScope
}

type SchemaConfig

type SchemaConfig struct {
	BoilerCache         *cache.BoilerCache
	Directives          []string
	SkipInputFields     []string
	GenerateBatchCreate bool
	GenerateMutations   bool
	GenerateBatchDelete bool
	GenerateBatchUpdate bool
	HookShouldAddModel  func(model SchemaModel) bool
	HookShouldAddField  func(model SchemaModel, field SchemaField) bool
	HookChangeField     func(model *SchemaModel, field *SchemaField)
	HookChangeFields    func(model *SchemaModel, fields []*SchemaField, parenType ParentType) []*SchemaField
	HookChangeModel     func(model *SchemaModel)
}
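
All hooks are optional. As a sketch (the column name is hypothetical), HookShouldAddField can keep a sensitive column out of the generated schema entirely:

schemaConfig := gbgen.SchemaConfig{
	BoilerCache: boilerCache, // from cache.InitializeBoilerCache, as in convert.go above
	HookShouldAddField: func(model gbgen.SchemaModel, field gbgen.SchemaField) bool {
		// never expose the (hypothetical) passwordHash column in the GraphQL schema
		return field.Name != "passwordHash"
	},
}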

type SchemaField

type SchemaField struct {
	Name                 string
	Type                 string // String, ID, Integer
	InputWhereType       string
	InputCreateType      string
	InputUpdateType      string
	InputBatchUpdateType string
	InputBatchCreateType string
	BoilerField          *structs.BoilerField
	SkipInput            bool
	SkipWhere            bool
	SkipCreate           bool
	SkipUpdate           bool
	SkipBatchUpdate      bool
	SkipBatchCreate      bool
	InputDirectives      []string
	Directives           []string
}

func NewSchemaField added in v3.1.5

func NewSchemaField(name string, typ string, boilerField *structs.BoilerField) *SchemaField

func (*SchemaField) SetInputTypeForAllInputs added in v3.1.5

func (s *SchemaField) SetInputTypeForAllInputs(v string)

func (*SchemaField) SetSkipForAllInputs added in v3.1.5

func (s *SchemaField) SetSkipForAllInputs(v bool)
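
These helpers are convenient inside the HookChangeField hook; a sketch to drop into the gbgen.SchemaConfig literal from convert.go above (the column names are hypothetical):

HookChangeField: func(model *gbgen.SchemaModel, field *gbgen.SchemaField) {
	if field.Name == "externalId" {
		// hide this column from every generated input type
		field.SetSkipForAllInputs(true)
	}
	if field.Name == "metadata" {
		// expose this column as a plain String in every generated input type
		field.SetInputTypeForAllInputs("String")
	}
},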

type SchemaGenerateConfig

type SchemaGenerateConfig struct {
	MergeSchema bool
}

type SchemaModel

type SchemaModel struct {
	Name   string
	IsView bool
	Fields []*SchemaField
}

type SimpleWriter

type SimpleWriter struct {
	// contains filtered or unexported fields
}
