Passed
Push — master ( 496048...563bb3 )
by Tolga
01:01 queued 13s

parser.*Parser.parsePartialStatement (rating A)

Complexity
  Conditions: 5

Size
  Total Lines: 10
  Code Lines: 10

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0

Metric values
  cc: 5
  eloc: 10
  nop: 1
  dl: 0
  loc: 10
  rs: 9.3333
  c: 0
  b: 0
  f: 0

package parser

import (
	"errors"
	"fmt"
	"strings"

	"github.com/Permify/permify/pkg/dsl/ast"
	"github.com/Permify/permify/pkg/dsl/lexer"
	"github.com/Permify/permify/pkg/dsl/token"
	"github.com/Permify/permify/pkg/dsl/utils"
)

const (
	// iota starts at 0 and increments by 1 for each subsequent constant declaration; assigning the first value to the blank identifier _ effectively discards it.
	_ int = iota

	// LOWEST is the lowest precedence level
	LOWEST
	// AND_OR_NOT is the precedence level for the logical operators (AND, OR, NOT)
	AND_OR_NOT
)

var precedences = map[token.Type]int{ // a map that assigns precedence levels to different token types
	token.AND: AND_OR_NOT,
	token.OR:  AND_OR_NOT,
	token.NOT: AND_OR_NOT,
}

// Parser is a struct that contains information and functions related to parsing
type Parser struct {
	// a pointer to a Lexer object that will provide tokens for parsing
	l *lexer.Lexer
	// the current token being processed
	currentToken token.Token
	// the next token after currentToken
	peekToken token.Token
	// a slice of error messages that are generated during parsing
	errors []string
	// a map that associates prefix parsing functions with token types
	prefixParseFns map[token.Type]prefixParseFn
	// a map that associates infix parsing functions with token types
	infixParseFunc map[token.Type]infixParseFn
	// references to entities, rules, relations, attributes, and permissions
	references *ast.References
}

type (
	// a function that parses prefix expressions and returns an ast.Expression and error
	prefixParseFn func() (ast.Expression, error)

	// a function that parses infix expressions and returns an ast.Expression and error
	infixParseFn func(ast.Expression) (ast.Expression, error)
)

// NewParser creates a new Parser object with the given input string
func NewParser(str string) (p *Parser) {
	// initialize a new Parser object with the given input string and default values for other fields
	p = &Parser{
		l:          lexer.NewLexer(str), // create a new Lexer object with the input string
		errors:     []string{},          // initialize an empty slice of error messages
		references: ast.NewReferences(), // initialize an empty References object
	}

	// register the prefix parsing function for the IDENT token type
	p.prefixParseFns = make(map[token.Type]prefixParseFn)  // initialize an empty map for prefix parsing functions
	p.registerPrefix(token.IDENT, p.parseIdentifierOrCall) // associate the parseIdentifierOrCall function with the IDENT token type

	// register infix parsing functions for token types AND, OR, NOT
	p.infixParseFunc = make(map[token.Type]infixParseFn) // initialize an empty map for infix parsing functions
	p.registerInfix(token.AND, p.parseInfixExpression)   // associate the parseInfixExpression function with the AND token type
	p.registerInfix(token.OR, p.parseInfixExpression)    // associate the parseInfixExpression function with the OR token type
	p.registerInfix(token.NOT, p.parseInfixExpression)   // associate the parseInfixExpression function with the NOT token type

	return p // return the newly created Parser object
}
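
// Illustrative usage sketch (not taken from this file): a minimal end-to-end run of the
// parser. The schema string assumes the Permify DSL grammar implied by the parse
// functions below (entity / relation / attribute / permission keywords); adjust it to
// your own model.
func exampleParseSchema() {
	p := NewParser(`
entity user {}

entity document {
    relation owner @user
    attribute public boolean
    permission view = owner or public
}
`)

	schema, err := p.Parse()
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Println("parsed statements:", len(schema.Statements))
}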

// next retrieves the next non-ignored token from the Parser's lexer and updates the Parser's currentToken and peekToken fields
func (p *Parser) next() {
	for {
		// retrieve the next token from the lexer
		peek := p.l.NextToken()
		// if the token is not an ignored token (e.g. whitespace or comments), update the currentToken and peekToken fields and exit the loop
		if !token.IsIgnores(peek.Type) {
			// set the currentToken field to the previous peekToken value
			p.currentToken = p.peekToken
			// set the peekToken field to the new peek value
			p.peekToken = peek
			// exit the loop
			break
		}
	}
}

// nextWithIgnores advances the parser's token stream by one position.
// It updates the currentToken and peekToken of the Parser.
func (p *Parser) nextWithIgnores() {
	// Get the next token in the lexer's token stream and store it in the variable peek.
	peek := p.l.NextToken()

	// Update the currentToken with the value of peekToken.
	p.currentToken = p.peekToken

	// Update the peekToken with the value of peek (the new next token in the lexer's stream).
	p.peekToken = peek
}

// currentTokenIs checks if the Parser's currentToken is any of the given token types
func (p *Parser) currentTokenIs(tokens ...token.Type) bool {
	// iterate through the given token types and check if any of them match the currentToken's type
	for _, t := range tokens {
		if p.currentToken.Type == t {
			// if a match is found, return true
			return true
		}
	}
	// if no match is found, return false
	return false
}

// peekTokenIs checks if the Parser's peekToken is any of the given token types
func (p *Parser) peekTokenIs(tokens ...token.Type) bool {
	// iterate through the given token types and check if any of them match the peekToken's type
	for _, t := range tokens {
		if p.peekToken.Type == t {
			// if a match is found, return true
			return true
		}
	}
	// if no match is found, return false
	return false
}

// Error returns an error if there are any errors in the Parser's errors slice
func (p *Parser) Error() error {
	// if there are no errors, return nil
	if len(p.errors) == 0 {
		return nil
	}
	// if there are errors, return the first error message in the errors slice as an error type
	return errors.New(p.errors[0])
}

// Parse reads and parses the input string and returns an AST representation of the schema, along with any errors encountered during parsing
func (p *Parser) Parse() (*ast.Schema, error) {
	// create a new Schema object to store the parsed statements
	schema := ast.NewSchema()
	schema.Statements = []ast.Statement{}

	// loop through the input string until the end is reached
	for !p.currentTokenIs(token.EOF) {
		// parse the next statement in the input string
		stmt, err := p.parseStatement()
		if err != nil {
			// if there was an error parsing the statement, return the error message
			return nil, p.Error()
		}
		if stmt != nil {
			// add the parsed statement to the schema's Statements field if it is not nil
			schema.Statements = append(schema.Statements, stmt)
		}

		// move to the next token in the input string
		p.next()
	}

	schema.SetReferences(p.references)

	// return the parsed schema object and nil to indicate that there were no errors
	return schema, nil
}

// ParsePartial parses a single attribute, relation, or permission statement for the
// given entity name and returns it, or an error if no valid statement is found.
func (p *Parser) ParsePartial(entityName string) (ast.Statement, error) {
	for !p.currentTokenIs(token.EOF) {
		// parse the next statement in the input string
		stmt, err := p.parsePartialStatement(entityName)
		if err != nil {
			return nil, p.Error()
		}
		if stmt != nil {
			return stmt, nil
		}
		p.next()
	}
	return nil, errors.New("no valid statement found")
}

// parsePartialStatement dispatches on the current token type and parses a single
// attribute, relation, or permission statement; other tokens are skipped by returning nil.
func (p *Parser) parsePartialStatement(entityName string) (ast.Statement, error) {
	switch p.currentToken.Type {
	case token.ATTRIBUTE:
		return p.parseAttributeStatement(entityName)
	case token.RELATION:
		return p.parseRelationStatement(entityName)
	case token.PERMISSION:
		return p.parsePermissionStatement(entityName)
	default:
		return nil, nil
	}
}
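
// Illustrative sketch (not taken from this file): ParsePartial parses a statement fragment
// that belongs to an already-known entity, so the input is just the statement itself. The
// DSL line below is an assumption based on the token types handled above.
func exampleParsePartial() {
	p := NewParser(`permission view = owner or editor`)

	stmt, err := p.ParsePartial("document")
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	fmt.Printf("parsed statement: %T\n", stmt)
}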

// parseStatement method parses the current statement based on its defined token types
func (p *Parser) parseStatement() (ast.Statement, error) {
	// switch on the currentToken's type to determine which type of statement to parse
	switch p.currentToken.Type {
	case token.ENTITY:
		// if the currentToken is ENTITY, parse an EntityStatement
		return p.parseEntityStatement()
	case token.RULE:
		// if the currentToken is RULE, parse a RuleStatement
		return p.parseRuleStatement()
	default:
		return nil, nil
	}
}

// parseEntityStatement method parses an ENTITY statement and returns an EntityStatement AST node
func (p *Parser) parseEntityStatement() (*ast.EntityStatement, error) {
	// create a new EntityStatement object and set its Entity field to the currentToken
	stmt := &ast.EntityStatement{Entity: p.currentToken}
	// expect the next token to be an identifier token, and set the EntityStatement's Name field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken

	// add the entity reference to the Parser's references
	err := p.references.AddEntityReference(stmt.Name.Literal)
	if err != nil {
		p.duplicationError(stmt.Name.Literal) // Generate an error message indicating a duplication error
		return nil, p.Error()
	}

	// expect the next token to be a left brace token, indicating the start of the entity's body
	if !p.expectAndNext(token.LCB) {
		return nil, p.Error()
	}

	// loop through the entity's body until a right brace token is encountered
	for !p.currentTokenIs(token.RCB) {
		// if the currentToken is EOF, record an error and return it
		if p.currentTokenIs(token.EOF) {
			p.currentError(token.RCB)
			return nil, p.Error()
		}
		// based on the currentToken's type, parse a RelationStatement, AttributeStatement, or PermissionStatement and add it to the EntityStatement's corresponding field
		switch p.currentToken.Type {
		case token.RELATION:
			relation, err := p.parseRelationStatement(stmt.Name.Literal)
			if err != nil {
				return nil, p.Error()
			}
			stmt.RelationStatements = append(stmt.RelationStatements, relation)
		case token.ATTRIBUTE:
			attribute, err := p.parseAttributeStatement(stmt.Name.Literal)
			if err != nil {
				return nil, p.Error()
			}
			stmt.AttributeStatements = append(stmt.AttributeStatements, attribute)
		case token.PERMISSION:
			action, err := p.parsePermissionStatement(stmt.Name.Literal)
			if err != nil {
				return nil, p.Error()
			}
			stmt.PermissionStatements = append(stmt.PermissionStatements, action)
		default:
			// newline, left brace, and right brace tokens are skipped; anything else is an error
			if !p.currentTokenIs(token.NEWLINE) && !p.currentTokenIs(token.LCB) && !p.currentTokenIs(token.RCB) {
				// if the currentToken is not one of the expected statement keywords, record an error and return it
				p.currentError(token.RELATION, token.PERMISSION, token.ATTRIBUTE)
				return nil, p.Error()
			}
		}
		// move to the next token in the input string
		p.next()
	}

	// return the parsed EntityStatement and nil for the error value
	return stmt, nil
}
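
// Illustrative sketch (not taken from this file): the shape of input parseEntityStatement
// consumes, assuming standard Permify DSL syntax, and how the parsed statement groups its
// body into RelationStatements, AttributeStatements, and PermissionStatements.
func exampleParseEntity() {
	p := NewParser(`
entity organization {
    relation admin @user
    relation member @user
    attribute ip_range string[]
    permission view = admin or member
}
`)

	schema, err := p.Parse()
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	if entity, ok := schema.Statements[0].(*ast.EntityStatement); ok {
		fmt.Println(len(entity.RelationStatements), "relations,",
			len(entity.AttributeStatements), "attributes,",
			len(entity.PermissionStatements), "permissions")
	}
}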

// parseRuleStatement is responsible for parsing a rule statement in the form:
//
//	rule name(typ1 string, typ2 boolean) {
//	    EXPRESSION
//	}
//
// This method assumes the current token points to the 'rule' token when it is called.
func (p *Parser) parseRuleStatement() (*ast.RuleStatement, error) {
	// Create a new RuleStatement
	stmt := &ast.RuleStatement{Rule: p.currentToken}

	// Expect the next token to be an identifier (the name of the rule).
	// If it's not an identifier, return an error.
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken

	// Expect the next token to be a left parenthesis '(' starting the argument list.
	if !p.expectAndNext(token.LP) {
		return nil, p.Error()
	}

	arguments := map[token.Token]ast.AttributeTypeStatement{}
	args := map[string]string{}

	// Loop over the tokens until a right parenthesis ')' is encountered.
	// In each iteration, two tokens are processed: an identifier (arg name) and its type.
	for !p.peekTokenIs(token.RP) {
		// Expect the first token to be the parameter's identifier.
		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}
		argument := p.currentToken
		arg := p.currentToken.Literal

		// Expect the second token to be the parameter's type.
		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}

		if p.peekTokenIs(token.LSB) { // Check if the next token is '['
			arguments[argument] = ast.AttributeTypeStatement{
				Type:    p.currentToken,
				IsArray: true, // Marking the type as an array
			}
			args[arg] = p.currentToken.Literal + "[]" // Store the argument type as string with "[]" suffix
			p.next()                                  // Move to the '[' token
			if !p.expectAndNext(token.RSB) {          // Expect and move to the ']' token
				return nil, p.Error()
			}
		} else {
			arguments[argument] = ast.AttributeTypeStatement{
				Type:    p.currentToken,
				IsArray: false, // Marking the type as not an array
			}
			args[arg] = p.currentToken.Literal // Store the regular argument type
		}

		// If the next token is a comma, there are more parameters to parse.
		// Continue to the next iteration.
		if p.peekTokenIs(token.COMMA) {
			p.next()
			continue
		} else if !p.peekTokenIs(token.RP) {
			// If the next token is not a comma, it must be a closing parenthesis.
			// If it's not, return an error.
			return nil, p.Error()
		}
	}

	// Save parsed arguments to the statement
	stmt.Arguments = arguments

	// Consume the right parenthesis.
	p.next()

	// Expect the next token to be a left curly bracket '{' starting the body.
	if !p.expectAndNext(token.LCB) {
		return nil, p.Error()
	}

	p.next()

	// Collect tokens for the body until a closing curly bracket '}' is encountered.
	var bodyTokens []token.Token
	for !p.peekTokenIs(token.RCB) {
		// If there's no closing bracket, return an error.
		if p.peekTokenIs(token.EOF) {
			return nil, p.Error()
		}

		bodyTokens = append(bodyTokens, p.currentToken)
		p.nextWithIgnores()
	}

	// Combine all the body tokens into a single string
	var bodyStr strings.Builder
	for _, t := range bodyTokens {
		bodyStr.WriteString(t.Literal)
	}
	stmt.Expression = bodyStr.String()

	// Expect and consume the closing curly bracket '}'.
	if !p.expectAndNext(token.RCB) {
		return nil, p.Error()
	}

	// Register the parsed rule in the parser's references.
	err := p.references.AddRuleReference(stmt.Name.Literal, args)
	if err != nil {
		// If there's an error (e.g., a duplicate rule), return an error.
		p.duplicationError(stmt.Name.Literal)
		return nil, p.Error()
	}

	// Return the successfully parsed RuleStatement.
	return stmt, nil
}
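
// Illustrative sketch (not taken from this file): the kind of rule definition
// parseRuleStatement handles, assuming standard Permify DSL syntax. The parameter
// list becomes stmt.Arguments and the raw body text becomes stmt.Expression.
func exampleParseRule() {
	p := NewParser(`
rule check_ip_range(ip string, ip_range string[]) {
    ip in ip_range
}
`)

	schema, err := p.Parse()
	if err != nil {
		fmt.Println("parse error:", err)
		return
	}
	if rule, ok := schema.Statements[0].(*ast.RuleStatement); ok {
		fmt.Println("rule:", rule.Name.Literal, "expression:", rule.Expression)
	}
}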

// parseAttributeStatement method parses an ATTRIBUTE statement and returns an AttributeStatement AST node
func (p *Parser) parseAttributeStatement(entityName string) (*ast.AttributeStatement, error) {
	// create a new AttributeStatement object and set its Attribute field to the currentToken
	stmt := &ast.AttributeStatement{Attribute: p.currentToken}

	// expect the next token to be an identifier token, and set the AttributeStatement's Name field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken

	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}

	atstmt := ast.AttributeTypeStatement{Type: p.currentToken}
	atstmt.IsArray = false

	if p.peekTokenIs(token.LSB) {
		p.next()
		if !p.expectAndNext(token.RSB) {
			return nil, p.Error()
		}
		atstmt.IsArray = true
	}

	stmt.AttributeType = atstmt

	key := utils.Key(entityName, stmt.Name.Literal)
	// add the attribute reference to the Parser's references
	err := p.references.AddAttributeReferences(key, atstmt)
	if err != nil {
		p.duplicationError(key) // Generate an error message indicating a duplication error
		return nil, p.Error()
	}

	// return the parsed AttributeStatement and nil for the error value
	return stmt, nil
}
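
// Illustrative sketch (not taken from this file): attribute statements as
// parseAttributeStatement expects them, assuming standard Permify DSL syntax.
// A trailing "[]" after the type marks the attribute as an array (IsArray = true).
//
//	attribute public boolean
//	attribute ip_range string[]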

// parseRelationStatement method parses a RELATION statement and returns a RelationStatement AST node
func (p *Parser) parseRelationStatement(entityName string) (*ast.RelationStatement, error) {
	// create a new RelationStatement object and set its Relation field to the currentToken
	stmt := &ast.RelationStatement{Relation: p.currentToken}

	// expect the next token to be an identifier token, and set the RelationStatement's Name field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken
	relationName := stmt.Name.Literal

	// expect the next token to be a SIGN token, indicating the start of the relation type(s)
	if !p.expect(token.SIGN) {
		return nil, p.Error()
	}

	// loop through the relation types until no more SIGN tokens are encountered
	for p.peekTokenIs(token.SIGN) {
		// parse a RelationTypeStatement and append it to the RelationStatement's RelationTypes field
		relationStatement, err := p.parseRelationTypeStatement()
		if err != nil {
			return nil, p.Error()
		}
		stmt.RelationTypes = append(stmt.RelationTypes, *relationStatement)
	}

	key := utils.Key(entityName, relationName)

	// add the relation reference to the Parser's references
	err := p.references.AddRelationReferences(key, stmt.RelationTypes)
	if err != nil {
		p.duplicationError(key) // Generate an error message indicating a duplication error
		return nil, p.Error()
	}

	// return the parsed RelationStatement and nil for the error value
	return stmt, nil
}
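
// Illustrative sketch (not taken from this file): relation statements as
// parseRelationStatement expects them, assuming standard Permify DSL syntax.
// Each "@type" or "@type#relation" after the name becomes one RelationTypeStatement.
//
//	relation owner @user
//	relation viewer @user @organization#member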

// parseRelationTypeStatement method parses a single relation type within a RELATION statement and returns a RelationTypeStatement AST node
func (p *Parser) parseRelationTypeStatement() (*ast.RelationTypeStatement, error) {
	// expect the next token to be a SIGN token, indicating the start of the relation type
	if !p.expectAndNext(token.SIGN) {
		return nil, p.Error()
	}
	// create a new RelationTypeStatement object and set its Sign field to the SIGN token
	stmt := &ast.RelationTypeStatement{Sign: p.currentToken}

	// expect the next token to be an identifier token, and set the RelationTypeStatement's Type field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Type = p.currentToken

	// if the next token is a HASH token, indicating that a specific relation within the relation type is being referenced, parse it and set the RelationTypeStatement's Relation field to the identifier's value
	if p.peekTokenIs(token.HASH) {
		p.next()
		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}
		stmt.Relation = p.currentToken
	}

	// return the parsed RelationTypeStatement and nil for the error value
	return stmt, nil
}

// parsePermissionStatement method parses a PERMISSION statement and returns a PermissionStatement AST node
func (p *Parser) parsePermissionStatement(entityName string) (ast.Statement, error) {
	// create a new PermissionStatement object and set its Permission field to the currentToken
	stmt := &ast.PermissionStatement{Permission: p.currentToken}

	// expect the next token to be an identifier token, and set the PermissionStatement's Name field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken

	key := utils.Key(entityName, stmt.Name.Literal)
	// add the permission reference to the Parser's references
	err := p.references.AddPermissionReference(key)
	if err != nil {
		p.duplicationError(key) // Generate an error message indicating a duplication error
		return nil, p.Error()
	}

	// expect the next token to be an ASSIGN token, indicating the start of the expression to be assigned to the permission
	if !p.expectAndNext(token.ASSIGN) {
		return nil, p.Error()
	}

	p.next()

	// parse the expression statement and set it as the PermissionStatement's ExpressionStatement field
	ex, err := p.parseExpressionStatement()
	if err != nil {
		return nil, p.Error()
	}
	stmt.ExpressionStatement = ex

	// return the parsed PermissionStatement and nil for the error value
	return stmt, nil
}
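
// Illustrative sketch (not taken from this file): permission statements as
// parsePermissionStatement expects them, assuming standard Permify DSL syntax. Everything
// after '=' is handed to parseExpressionStatement; note that 'not' is registered as an
// infix operator in this parser, at the same precedence level as 'and' and 'or'.
//
//	permission view = owner or member
//	permission edit = owner or (admin and member)
//	permission delete = owner not banned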

// parseExpressionStatement method parses an expression statement and returns an ExpressionStatement AST node
func (p *Parser) parseExpressionStatement() (*ast.ExpressionStatement, error) {
	// create a new ExpressionStatement object
	stmt := &ast.ExpressionStatement{}
	var err error
	// parse the expression using the lowest precedence value as the initial precedence level
	stmt.Expression, err = p.parseExpression(LOWEST)
	if err != nil {
		return nil, p.Error()
	}

	// return the parsed ExpressionStatement and nil for the error value
	return stmt, nil
}

// expectAndNext method checks if the next token is of the expected type and advances the lexer to the next token if it is. It returns true if the next token is of the expected type, and false otherwise.
func (p *Parser) expectAndNext(t token.Type) bool {
	// if the next token is of the expected type, advance the lexer to the next token and return true
	if p.peekTokenIs(t) {
		p.next()
		return true
	}
	// otherwise, generate an error message indicating that the expected token type was not found and return false
	p.peekError(t)
	return false
}

// expect method checks if the next token is of the expected type, without advancing the lexer. It returns true if the next token is of the expected type, and false otherwise.
func (p *Parser) expect(t token.Type) bool {
	// if the next token is of the expected type, return true
	if p.peekTokenIs(t) {
		return true
	}
	// otherwise, generate an error message indicating that the expected token type was not found and return false
	p.peekError(t)
	return false
}

// parseExpression method parses an expression with a given precedence level and returns the parsed expression as an AST node. It takes an integer value indicating the precedence level.
func (p *Parser) parseExpression(precedence int) (ast.Expression, error) {
	var exp ast.Expression
	var err error

	if p.currentTokenIs(token.LP) {
		p.next() // Consume the left parenthesis.
		exp, err = p.parseExpression(LOWEST)
		if err != nil {
			return nil, err
		}

		if !p.expect(token.RP) {
			return nil, p.Error()
		}
		p.next() // Consume the right parenthesis.
	} else {
		// get the prefix parsing function for the current token type
		prefix := p.prefixParseFns[p.currentToken.Type]
		if prefix == nil {
			p.noPrefixParseFnError(p.currentToken.Type)
			return nil, p.Error()
		}

		// parse the prefix expression
		exp, err = prefix()
		if err != nil {
			return nil, p.Error()
		}
	}

	// continue parsing the expression while the next token has a higher precedence level than the current precedence level
	for !p.peekTokenIs(token.NEWLINE) && precedence < p.peekPrecedence() {
		// get the infix parsing function for the next token type
		infix := p.infixParseFunc[p.peekToken.Type]
		if infix == nil {
			return exp, nil
		}
		p.next()
		// parse the infix expression with the current expression as its left-hand side
		exp, err = infix(exp)
		if err != nil {
			return nil, p.Error()
		}
	}

	// return the parsed expression and nil for the error value
	return exp, nil
}
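
// Illustrative note (not taken from this file): because 'and', 'or', and 'not' all share
// the AND_OR_NOT precedence level and parseInfixExpression parses the right-hand side at
// that same precedence, operators of equal precedence associate to the left. For example,
// assuming the DSL expression below, the resulting tree is ((owner or admin) and member),
// while parentheses can be used to force (owner or (admin and member)).
//
//	permission view = owner or admin and member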

// parseInfixExpression parses an infix expression that has a left operand and an operator followed by
// a right operand, such as "a or b" or "x and y".
// It takes the left operand as an argument, constructs an InfixExpression with the current operator
// and left operand, and parses the right operand with a higher precedence to construct the final
// expression tree.
// It returns the resulting InfixExpression and any error encountered.
func (p *Parser) parseInfixExpression(left ast.Expression) (ast.Expression, error) {
	// Ensure the current token is a valid infix operator before proceeding.
	if !p.isInfixOperator(p.currentToken.Type) {
		p.currentError(token.AND, token.OR, token.NOT) // report the valid infix operator token types
		return nil, p.Error()
	}

	// Create a new InfixExpression with the left operand and the current operator.
	expression := &ast.InfixExpression{
		Op:       p.currentToken,
		Left:     left,
		Operator: ast.Operator(p.currentToken.Literal),
	}

	// Get the precedence of the current operator and consume the operator token.
	precedence := p.currentPrecedence()
	p.next()

	// Parse the right operand with a higher precedence to construct the final expression tree.
	right, err := p.parseExpression(precedence)
	if err != nil {
		return nil, err
	}

	// Ensure the right operand is not nil.
	if right == nil {
		p.currentError(token.IDENT, token.LP) // report the token types that can start a right operand
		return nil, p.Error()
	}

	// Set the right operand of the InfixExpression and return it.
	expression.Right = right
	return expression, nil
}

// isInfixOperator reports whether the given token type is one of the infix operators (AND, OR, NOT).
func (p *Parser) isInfixOperator(tokenType token.Type) bool {
	return tokenType == token.AND || tokenType == token.OR || tokenType == token.NOT
}

// peekPrecedence returns the precedence of the next token in the input, if it is a known
// operator, or the lowest precedence otherwise.
func (p *Parser) peekPrecedence() int {
	if pr, ok := precedences[p.peekToken.Type]; ok {
		return pr
	}
	return LOWEST
}

// currentPrecedence returns the precedence of the current token in the input, if it is a known
// operator, or the lowest precedence otherwise.
func (p *Parser) currentPrecedence() int {
	if pr, ok := precedences[p.currentToken.Type]; ok {
		return pr
	}
	return LOWEST
}

// parseIdentifierOrCall parses the expression starting at the current IDENT token: if the
// identifier is followed by '(', it is parsed as a call expression, otherwise as a
// (possibly dot-separated) identifier expression.
func (p *Parser) parseIdentifierOrCall() (ast.Expression, error) {
	// Ensure the current token is a valid identifier before proceeding.
	if !p.currentTokenIs(token.IDENT) {
		return nil, fmt.Errorf("unexpected token type for identifier expression: %s", p.currentToken.Type)
	}

	if p.peekTokenIs(token.LP) {
		return p.parseCallExpression()
	}

	return p.parseIdentifierExpression()
}

// parseIdentifierExpression parses an identifier expression that may consist of one or more dot-separated
// identifiers, such as "x", "foo.bar", or "a.b.c.d".
// It constructs a new Identifier expression with the first token as the prefix and subsequent
// tokens as identifiers, and returns the resulting expression and any error encountered.
func (p *Parser) parseIdentifierExpression() (ast.Expression, error) {
	// Ensure the current token is a valid identifier before proceeding.
	if !p.currentTokenIs(token.IDENT) {
		return nil, fmt.Errorf("unexpected token type for identifier expression: %s", p.currentToken.Type)
	}

	// Create a new Identifier expression with the first token as the prefix.
	ident := &ast.Identifier{Idents: []token.Token{p.currentToken}}

	// If the next token is a dot, consume it and continue parsing the next identifier.
	for p.peekTokenIs(token.DOT) {
		p.next() // Consume the dot token

		// Check if the next token after the dot is a valid identifier
		if !p.peekTokenIs(token.IDENT) {
			return nil, fmt.Errorf("expected identifier after dot, got %s", p.peekToken.Type)
		}

		p.next() // Consume the identifier token
		ident.Idents = append(ident.Idents, p.currentToken)
	}

	// Return the resulting Identifier expression.
	return ident, nil
}
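
// Illustrative sketch (not taken from this file): dot-separated identifiers as
// parseIdentifierExpression accepts them, assuming standard Permify DSL syntax. Each
// segment becomes one entry in the Identifier's Idents slice.
//
//	owner
//	parent.admin
//	request.context.ip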

// parseCallExpression parses a call expression of the form call_func(variable1, variable2),
// where the current token is the call's name and the arguments are identifier expressions.
func (p *Parser) parseCallExpression() (ast.Expression, error) {
	// Ensure the current token is a valid identifier before proceeding.
	if !p.currentTokenIs(token.IDENT) {
		return nil, fmt.Errorf("unexpected token type for identifier expression: %s", p.currentToken.Type)
	}

	// Create a new Call expression with the current token as the call's name.
	call := &ast.Call{Name: p.currentToken}

	if !p.expectAndNext(token.LP) {
		return nil, p.Error()
	}

	// Check if there are no arguments
	if p.peekTokenIs(token.RP) {
		p.next() // consume the RP token
		return call, nil
	}

	p.next()

	// Parse the first argument
	ident, err := p.parseIdentifierExpression()
	if err != nil {
		return nil, err
	}

	i, ok := ident.(*ast.Identifier)
	if !ok {
		return nil, fmt.Errorf("expected identifier, got %T", ident)
	}
	call.Arguments = append(call.Arguments, *i)

	// Parse remaining arguments
	for p.peekTokenIs(token.COMMA) {
		p.next()

		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}

		ident, err = p.parseIdentifierExpression()
		if err != nil {
			return nil, err
		}

		i, ok = ident.(*ast.Identifier)
		if !ok {
			return nil, fmt.Errorf("expected identifier, got %T", ident)
		}
		call.Arguments = append(call.Arguments, *i)
	}

	if !p.expectAndNext(token.RP) {
		return nil, p.Error()
	}

	// Return the resulting Call expression.
	return call, nil
}
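
// Illustrative sketch (not taken from this file): a permission whose expression is a rule
// call, as parseCallExpression handles it. The snippet assumes standard Permify DSL syntax;
// check_ip_range and its arguments are hypothetical names used only for illustration.
//
//	permission connect = check_ip_range(request.ip, ip_range)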

// registerPrefix safely registers a parsing function for a prefix token type in the parser's prefixParseFns map.
// It takes a token type and a prefix parsing function as arguments, and stores the function in the map
// under the given token type key.
func (p *Parser) registerPrefix(tokenType token.Type, fn prefixParseFn) {
	if fn == nil {
		p.duplicationError(fmt.Sprintf("registerPrefix: nil function for token type %s", tokenType))
		return
	}

	if _, exists := p.prefixParseFns[tokenType]; exists {
		p.duplicationError(fmt.Sprintf("registerPrefix: token type %s already registered", tokenType))
		return
	}

	p.prefixParseFns[tokenType] = fn
}

// registerInfix safely registers a parsing function for an infix token type in the parser's infixParseFunc map.
// It takes a token type and an infix parsing function as arguments, and stores the function in the map
// under the given token type key.
func (p *Parser) registerInfix(tokenType token.Type, fn infixParseFn) {
	if fn == nil {
		p.duplicationError(fmt.Sprintf("registerInfix: nil function for token type %s", tokenType))
		return
	}

	if _, exists := p.infixParseFunc[tokenType]; exists {
		p.duplicationError(fmt.Sprintf("registerInfix: token type %s already registered", tokenType))
		return
	}

	p.infixParseFunc[tokenType] = fn
}

// duplicationError adds an error message to the parser's error list indicating that a duplication was found.
// It takes a key string as an argument that is used to identify the source of the duplication in the input.
func (p *Parser) duplicationError(key string) {
	msg := fmt.Sprintf("%v:%v:duplication found for %s", p.l.GetLinePosition(), p.l.GetColumnPosition(), key)
	p.errors = append(p.errors, msg)
}

// noPrefixParseFnError adds an error message to the parser's error list indicating that no prefix parsing
// function was found for a given token type.
// It takes a token type as an argument that indicates the type of the token for which a parsing function is missing.
func (p *Parser) noPrefixParseFnError(t token.Type) {
	msg := fmt.Sprintf("%v:%v:no prefix parse function for %s found", p.l.GetLinePosition(), p.l.GetColumnPosition(), t)
	p.errors = append(p.errors, msg)
}

// peekError adds an error message to the parser's error list indicating that the next token in the input
// did not match the expected type(s).
// It takes one or more token types as arguments that indicate the expected types.
func (p *Parser) peekError(t ...token.Type) {
	expected := strings.Join(tokenTypesToStrings(t), ", ")
	msg := fmt.Sprintf("%v:%v:expected next token to be %s, got %s instead", p.l.GetLinePosition(), p.l.GetColumnPosition(), expected, p.peekToken.Type)
	p.errors = append(p.errors, msg)
}

// currentError adds an error message to the parser's error list indicating that the current token in the input
// did not match the expected type(s).
// It takes one or more token types as arguments that indicate the expected types.
func (p *Parser) currentError(t ...token.Type) {
	expected := strings.Join(tokenTypesToStrings(t), ", ")
	msg := fmt.Sprintf("%v:%v:expected token to be %s, got %s instead", p.l.GetLinePosition(),
		p.l.GetColumnPosition(), expected, p.currentToken.Type)
	p.errors = append(p.errors, msg)
}

// tokenTypesToStrings converts a slice of token types to a slice of their string representations.
func tokenTypesToStrings(types []token.Type) []string {
	strs := make([]string, len(types))
	for i, t := range types {
		strs[i] = t.String()
	}
	return strs
}
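
// Illustrative sketch (not taken from this file): how parse errors surface through the
// helpers above. The unterminated entity below should make parseEntityStatement record an
// error via currentError, which Parse then returns through Error(); the exact message text
// and position format depend on the lexer's GetLinePosition/GetColumnPosition values.
func exampleParseError() {
	p := NewParser(`entity user {`)

	if _, err := p.Parse(); err != nil {
		// expected to look like "<line>:<column>:expected token to be RCB, got EOF instead"
		fmt.Println(err)
	}
}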