diff --git a/parser/parser.go b/parser/parser.go index ef03c414..bfcf7ea4 100644 --- a/parser/parser.go +++ b/parser/parser.go @@ -1,6 +1,7 @@ package parser import ( + "fmt" "strconv" "github.com/vektah/gqlparser/v2/ast" @@ -20,6 +21,13 @@ type parser struct { comment *ast.CommentGroup commentConsuming bool + + tokenCount int + maxTokenLimit int +} + +func (p *parser) SetMaxTokenLimit(maxToken int) { + p.maxTokenLimit = maxToken } func (p *parser) consumeComment() (*ast.Comment, bool) { @@ -95,6 +103,12 @@ func (p *parser) next() lexer.Token { if p.err != nil { return p.prev } + // Increment the token count before reading the next token + p.tokenCount++ + if p.maxTokenLimit != 0 && p.tokenCount > p.maxTokenLimit { + p.err = fmt.Errorf("exceeded token limit of %d", p.maxTokenLimit) + return p.prev + } if p.peeked { p.peeked = false p.comment = nil diff --git a/parser/parser_test.go b/parser/parser_test.go index 16eb0308..2235901e 100644 --- a/parser/parser_test.go +++ b/parser/parser_test.go @@ -166,5 +166,8 @@ func TestParserUtils(t *testing.T) { } func newParser(input string) parser { - return parser{lexer: lexer.New(&ast.Source{Input: input, Name: "input.graphql"})} + return parser{ + lexer: lexer.New(&ast.Source{Input: input, Name: "input.graphql"}), + maxTokenLimit: 15000, // 15000 is the default value + } } diff --git a/parser/query.go b/parser/query.go index a7840b27..eb2a8e81 100644 --- a/parser/query.go +++ b/parser/query.go @@ -2,14 +2,14 @@ package parser import ( "github.com/vektah/gqlparser/v2/lexer" - //nolint:revive . 
"github.com/vektah/gqlparser/v2/ast" ) func ParseQuery(source *Source) (*QueryDocument, error) { p := parser{ - lexer: lexer.New(source), + lexer: lexer.New(source), + maxTokenLimit: 0, // 0 disables the token limit (no cap is enforced) } return p.parseQueryDocument(), p.err } diff --git a/parser/schema.go b/parser/schema.go index 9b13d0ca..f0121734 100644 --- a/parser/schema.go +++ b/parser/schema.go @@ -20,7 +20,8 @@ func ParseSchemas(inputs ...*Source) (*SchemaDocument, error) { func ParseSchema(source *Source) (*SchemaDocument, error) { p := parser{ - lexer: lexer.New(source), + lexer: lexer.New(source), + maxTokenLimit: 15000, // default value } sd, err := p.parseSchemaDocument(), p.err if err != nil {