parser.tokenTypesToStrings   A
last analyzed

Complexity
  Conditions: 2

Size
  Total Lines: 6
  Code Lines: 5

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0

Metric  Value
cc      2
eloc    5
nop     1
dl      0
loc     6
rs      10
c       0
b       0
f       0
package parser

import (
	"errors"
	"fmt"
	"strings"

	"github.com/Permify/permify/pkg/dsl/ast"
	"github.com/Permify/permify/pkg/dsl/lexer"
	"github.com/Permify/permify/pkg/dsl/token"
	"github.com/Permify/permify/pkg/dsl/utils"
)

const (
	// iota starts at 0 and increments by one for each subsequent constant declaration; assigning it to the blank identifier _ discards the zero value so the named precedence levels start at 1.
	_ int = iota

	// LOWEST is the lowest precedence level
	LOWEST
	// AND_OR_NOT is the precedence level for the logical operators AND, OR, and NOT
	AND_OR_NOT
)

var precedences = map[token.Type]int{ // a map that assigns precedence levels to different token types
	token.AND: AND_OR_NOT,
	token.OR:  AND_OR_NOT,
	token.NOT: AND_OR_NOT,
}
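
// Note (illustrative): because AND, OR, and NOT all share the single AND_OR_NOT
// precedence level, chained infix expressions associate left to right; for example,
// "a or b and c" is parsed as "((a or b) and c)".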

// Parser is a struct that contains information and functions related to parsing
type Parser struct {
	// a pointer to a Lexer object that will provide tokens for parsing
	l *lexer.Lexer
	// the current token being processed
	currentToken token.Token
	// the token processed immediately before currentToken, used to decide whether a newline may be skipped in multi-line expressions
	previousToken token.Token
	// the next token after currentToken
	peekToken token.Token
	// a slice of error messages that are generated during parsing
	errors []string
	// a map that associates prefix parsing functions with token types
	prefixParseFns map[token.Type]prefixParseFn
	// a map that associates infix parsing functions with token types
	infixParseFunc map[token.Type]infixParseFn
	// references to entities, rules, relations, attributes, and permissions
	references *ast.References
}

type (
	// a function that parses prefix expressions and returns an ast.Expression and error
	prefixParseFn func() (ast.Expression, error)

	// a function that parses infix expressions and returns an ast.Expression and error
	infixParseFn func(ast.Expression) (ast.Expression, error)
)

// NewParser creates a new Parser object with the given input string
func NewParser(str string) (p *Parser) {
	// initialize a new Parser object with the given input string and default values for other fields
	p = &Parser{
		l:          lexer.NewLexer(str), // create a new Lexer object with the input string
		errors:     []string{},          // initialize an empty slice of error messages
		references: ast.NewReferences(), // initialize an empty references container
	}

	// register the prefix parsing function for the IDENT token type
	p.prefixParseFns = make(map[token.Type]prefixParseFn)  // initialize an empty map for prefix parsing functions
	p.registerPrefix(token.IDENT, p.parseIdentifierOrCall) // associate the parseIdentifierOrCall function with the IDENT token type

	// register infix parsing functions for token types AND, OR, NOT
	p.infixParseFunc = make(map[token.Type]infixParseFn) // initialize an empty map for infix parsing functions
	p.registerInfix(token.AND, p.parseInfixExpression)   // associate the parseInfixExpression function with the AND token type
	p.registerInfix(token.OR, p.parseInfixExpression)    // associate the parseInfixExpression function with the OR token type
	p.registerInfix(token.NOT, p.parseInfixExpression)   // associate the parseInfixExpression function with the NOT token type

	return p // return the newly created Parser object
}

// next retrieves the next non-ignored token from the Parser's lexer and updates the Parser's previousToken, currentToken and peekToken fields
func (p *Parser) next() {
	for {
		// retrieve the next token from the lexer
		peek := p.l.NextToken()
		// if the token is not an ignored token (e.g. whitespace or comments), update the currentToken and peekToken fields and exit the loop
		if !token.IsIgnores(peek.Type) {
			// store the current token as previous before advancing
			p.previousToken = p.currentToken
			// set the currentToken field to the previous peekToken value
			p.currentToken = p.peekToken
			// set the peekToken field to the new peek value
			p.peekToken = peek
			// exit the loop
			break
		}
	}
}

// nextWithIgnores advances the parser's token stream by one position.
// It updates the currentToken and peekToken of the Parser.
func (p *Parser) nextWithIgnores() {
	// Get the next token in the lexer's token stream and store it in the variable peek.
	peek := p.l.NextToken()

	// Update the currentToken with the value of peekToken.
	p.currentToken = p.peekToken

	// Update the peekToken with the value of peek (the new next token in the lexer's stream).
	p.peekToken = peek
}

// currentTokenIs checks if the Parser's currentToken is any of the given token types
func (p *Parser) currentTokenIs(tokens ...token.Type) bool {
	// iterate through the given token types and check if any of them match the currentToken's type
	for _, t := range tokens {
		if p.currentToken.Type == t {
			// if a match is found, return true
			return true
		}
	}
	// if no match is found, return false
	return false
}

// previousTokenIs checks if the Parser's previousToken is any of the given token types
func (p *Parser) previousTokenIs(tokens ...token.Type) bool {
	// iterate through the given token types and check if any of them match the previousToken's type
	for _, tokenType := range tokens {
		if p.previousToken.Type == tokenType {
			// if a match is found, return true
			return true
		}
	}
	// if no match is found, return false
	return false
}

// peekTokenIs checks if the Parser's peekToken is any of the given token types
func (p *Parser) peekTokenIs(tokens ...token.Type) bool {
	// iterate through the given token types and check if any of them match the peekToken's type
	for _, t := range tokens {
		if p.peekToken.Type == t {
			// if a match is found, return true
			return true
		}
	}
	// if no match is found, return false
	return false
}

// Error returns an error if there are any errors in the Parser's errors slice
func (p *Parser) Error() error {
	// if there are no errors, return nil
	if len(p.errors) == 0 {
		return nil
	}
	// if there are errors, return the first error message in the errors slice as an error type
	return errors.New(p.errors[0])
}

// Parse reads and parses the input string and returns an AST representation of the schema, along with any errors encountered during parsing
func (p *Parser) Parse() (*ast.Schema, error) {
	// create a new Schema object to store the parsed statements
	schema := ast.NewSchema()
	schema.Statements = []ast.Statement{}

	// loop through the input string until the end is reached
	for !p.currentTokenIs(token.EOF) {
		// parse the next statement in the input string
		stmt, err := p.parseStatement()
		if err != nil {
			// if there was an error parsing the statement, return the error message
			return nil, p.Error()
		}
		if stmt != nil {
			// add the parsed statement to the schema's Statements field if it is not nil
			schema.Statements = append(schema.Statements, stmt)
		}

		// move to the next token in the input string
		p.next()
	}

	schema.SetReferences(p.references)

	// return the parsed schema object and nil to indicate that there were no errors
	return schema, nil
}
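
// Illustrative example: given input such as
//
//	entity document {
//	    relation owner @user
//	    permission edit = owner
//	}
//
// Parse returns an *ast.Schema whose Statements slice holds one EntityStatement
// containing a relation and a permission; the "@" relation sign and the "="
// permission assignment follow the Permify schema language this parser targets.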

func (p *Parser) ParsePartial(entityName string) (ast.Statement, error) {
	for !p.currentTokenIs(token.EOF) {
		// parse the next statement in the input string
		stmt, err := p.parsePartialStatement(entityName)
		if err != nil {
			return nil, p.Error()
		}
		if stmt != nil {
			return stmt, nil
		}
		p.next()
	}
	return nil, errors.New("no valid statement found")
}

func (p *Parser) parsePartialStatement(entityName string) (ast.Statement, error) {
	switch p.currentToken.Type {
	case token.ATTRIBUTE:
		return p.parseAttributeStatement(entityName)
	case token.RELATION:
		return p.parseRelationStatement(entityName)
	case token.PERMISSION:
		return p.parsePermissionStatement(entityName)
	default:
		return nil, nil
	}
}

// parseStatement method parses the current statement based on its defined token types
func (p *Parser) parseStatement() (ast.Statement, error) {
	// switch on the currentToken's type to determine which type of statement to parse
	switch p.currentToken.Type {
	case token.ENTITY:
		// if the currentToken is ENTITY, parse an EntityStatement
		return p.parseEntityStatement()
	case token.RULE:
		// if the currentToken is RULE, parse a RuleStatement
		return p.parseRuleStatement()
	default:
		return nil, nil
	}
}

// parseEntityStatement method parses an ENTITY statement and returns an EntityStatement AST node
func (p *Parser) parseEntityStatement() (*ast.EntityStatement, error) {
	// create a new EntityStatement object and set its Entity field to the currentToken
	stmt := &ast.EntityStatement{Entity: p.currentToken}
	// expect the next token to be an identifier token, and set the EntityStatement's Name field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken

	// add the entity reference to the parser's references
	err := p.references.AddEntityReference(stmt.Name.Literal)
	if err != nil {
		p.duplicationError(stmt.Name.Literal) // Generate an error message indicating a duplication error
		return nil, p.Error()
	}

	// expect the next token to be a left brace token, indicating the start of the entity's body
	if !p.expectAndNext(token.LCB) {
		return nil, p.Error()
	}

	// loop through the entity's body until a right brace token is encountered
	for !p.currentTokenIs(token.RCB) {
		// if EOF is reached before the closing brace, record an error and return
		if p.currentTokenIs(token.EOF) {
			p.currentError(token.RCB)
			return nil, p.Error()
		}
		// based on the currentToken's type, parse a RelationStatement, AttributeStatement, or PermissionStatement and add it to the EntityStatement's corresponding field
		switch p.currentToken.Type {
		case token.RELATION:
			relation, err := p.parseRelationStatement(stmt.Name.Literal)
			if err != nil {
				return nil, p.Error()
			}
			stmt.RelationStatements = append(stmt.RelationStatements, relation)
		case token.ATTRIBUTE:
			attribute, err := p.parseAttributeStatement(stmt.Name.Literal)
			if err != nil {
				return nil, p.Error()
			}
			stmt.AttributeStatements = append(stmt.AttributeStatements, attribute)
		case token.PERMISSION:
			action, err := p.parsePermissionStatement(stmt.Name.Literal)
			if err != nil {
				return nil, p.Error()
			}
			stmt.PermissionStatements = append(stmt.PermissionStatements, action)
		default:
			// if the currentToken is not recognized, check if it is a newline, left brace, or right brace token, and skip it if it is
			if !p.currentTokenIs(token.NEWLINE) && !p.currentTokenIs(token.LCB) && !p.currentTokenIs(token.RCB) {
				// otherwise the token is unexpected, so record an error and return
				p.currentError(token.RELATION, token.PERMISSION, token.ATTRIBUTE)
				return nil, p.Error()
			}
		}
		// move to the next token in the input string
		p.next()
	}

	// return the parsed EntityStatement and nil for the error value
	return stmt, nil
}

// parseRuleStatement is responsible for parsing a rule statement in the form:
//
//	rule name(typ1 string, typ2 boolean) {
//	    EXPRESSION
//	}
//
// This method assumes the current token points to the 'rule' token when it is called.
func (p *Parser) parseRuleStatement() (*ast.RuleStatement, error) {
	// Create a new RuleStatement
	stmt := &ast.RuleStatement{Rule: p.currentToken}

	// Expect the next token to be an identifier (the name of the rule).
	// If it's not an identifier, return an error.
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken

	// Expect the next token to be a left parenthesis '(' starting the argument list.
	if !p.expectAndNext(token.LP) {
		return nil, p.Error()
	}

	arguments := map[token.Token]ast.AttributeTypeStatement{}
	args := map[string]string{}

	// Loop over the tokens until a right parenthesis ')' is encountered.
	// In each iteration, two tokens are processed: an identifier (arg name) and its type.
	for !p.peekTokenIs(token.RP) {
		// Expect the first token to be the parameter's identifier.
		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}
		argument := p.currentToken
		arg := p.currentToken.Literal

		// Expect the second token to be the parameter's type.
		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}

		if p.peekTokenIs(token.LSB) { // Check if the next token is '['
			arguments[argument] = ast.AttributeTypeStatement{
				Type:    p.currentToken,
				IsArray: true, // Marking the type as an array
			}
			args[arg] = p.currentToken.Literal + "[]" // Store the argument type as string with "[]" suffix
			p.next()                                  // Move to the '[' token
			if !p.expectAndNext(token.RSB) {          // Expect and move to the ']' token
				return nil, p.Error()
			}
		} else {
			arguments[argument] = ast.AttributeTypeStatement{
				Type:    p.currentToken,
				IsArray: false, // Marking the type as not an array
			}
			args[arg] = p.currentToken.Literal // Store the regular argument type
		}

		// If the next token is a comma, there are more parameters to parse.
		// Continue to the next iteration.
		if p.peekTokenIs(token.COMMA) {
			p.next()
			continue
		} else if !p.peekTokenIs(token.RP) {
			// If the next token is not a comma, it must be a closing parenthesis.
			// If it's not, return an error.
			p.peekError(token.RP)
			return nil, p.Error()
		}
	}

	// Save parsed arguments to the statement
	stmt.Arguments = arguments

	// Consume the right parenthesis.
	p.next()

	// Expect the next token to be a left curly bracket '{' starting the body.
	if !p.expectAndNext(token.LCB) {
		return nil, p.Error()
	}

	p.next()

	// Collect tokens for the body until a closing curly bracket '}' is encountered.
	var bodyTokens []token.Token
	for !p.peekTokenIs(token.RCB) {
		// If there's no closing bracket, return an error.
		if p.peekTokenIs(token.EOF) {
			p.peekError(token.RCB)
			return nil, p.Error()
		}

		bodyTokens = append(bodyTokens, p.currentToken)
		p.nextWithIgnores()
	}

	// Combine all the body tokens into a single string
	var bodyStr strings.Builder
	for _, t := range bodyTokens {
		bodyStr.WriteString(t.Literal)
	}
	stmt.Expression = bodyStr.String()

	// Expect and consume the closing curly bracket '}'.
	if !p.expectAndNext(token.RCB) {
		return nil, p.Error()
	}

	// Register the parsed rule in the parser's references.
	err := p.references.AddRuleReference(stmt.Name.Literal, args)
	if err != nil {
		// If there's an error (e.g., a duplicate rule), return an error.
		p.duplicationError(stmt.Name.Literal)
		return nil, p.Error()
	}

	// Return the successfully parsed RuleStatement.
	return stmt, nil
}

// parseAttributeStatement method parses an ATTRIBUTE statement and returns an AttributeStatement AST node
func (p *Parser) parseAttributeStatement(entityName string) (*ast.AttributeStatement, error) {
	// create a new AttributeStatement object and set its Attribute field to the currentToken
	stmt := &ast.AttributeStatement{Attribute: p.currentToken}

	// expect the next token to be an identifier token, and set the AttributeStatement's Name field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken

	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}

	atstmt := ast.AttributeTypeStatement{Type: p.currentToken}
	atstmt.IsArray = false

	if p.peekTokenIs(token.LSB) {
		p.next()
		if !p.expectAndNext(token.RSB) {
			return nil, p.Error()
		}
		atstmt.IsArray = true
	}

	stmt.AttributeType = atstmt

	key := utils.Key(entityName, stmt.Name.Literal)
	// add the attribute reference to the parser's references
	err := p.references.AddAttributeReferences(key, atstmt)
	if err != nil {
		p.duplicationError(key) // Generate an error message indicating a duplication error
		return nil, p.Error()
	}

	// return the parsed AttributeStatement and nil for the error value
	return stmt, nil
}

// parseRelationStatement method parses a RELATION statement and returns a RelationStatement AST node
func (p *Parser) parseRelationStatement(entityName string) (*ast.RelationStatement, error) {
	// create a new RelationStatement object and set its Relation field to the currentToken
	stmt := &ast.RelationStatement{Relation: p.currentToken}

	// expect the next token to be an identifier token, and set the RelationStatement's Name field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken
	relationName := stmt.Name.Literal

	// expect the next token to be a SIGN token, indicating the start of the relation type(s)
	if !p.expect(token.SIGN) {
		return nil, p.Error()
	}

	// loop through the relation types until no more SIGN tokens are encountered
	for p.peekTokenIs(token.SIGN) {
		// parse a RelationTypeStatement and append it to the RelationStatement's RelationTypes field
		relationStatement, err := p.parseRelationTypeStatement()
		if err != nil {
			return nil, p.Error()
		}
		stmt.RelationTypes = append(stmt.RelationTypes, *relationStatement)
	}

	key := utils.Key(entityName, relationName)

	// add the relation reference to the parser's references
	err := p.references.AddRelationReferences(key, stmt.RelationTypes)
	if err != nil {
		p.duplicationError(key) // Generate an error message indicating a duplication error
		return nil, p.Error()
	}

	// return the parsed RelationStatement and nil for the error value
	return stmt, nil
}

// parseRelationTypeStatement method parses a single relation type within a RELATION statement and returns a RelationTypeStatement AST node
func (p *Parser) parseRelationTypeStatement() (*ast.RelationTypeStatement, error) {
	// expect the next token to be a SIGN token, indicating the start of the relation type, and advance onto it
	if !p.expectAndNext(token.SIGN) {
		return nil, p.Error()
	}
	// create a new RelationTypeStatement object and set its Sign field to the SIGN token
	stmt := &ast.RelationTypeStatement{Sign: p.currentToken}

	// expect the next token to be an identifier token, and set the RelationTypeStatement's Type field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Type = p.currentToken

	// if the next token is a HASH token, indicating that a specific relation within the relation type is being referenced, parse it and set the RelationTypeStatement's Relation field to the identifier's value
	if p.peekTokenIs(token.HASH) {
		p.next()
		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}
		stmt.Relation = p.currentToken
	}

	// return the parsed RelationTypeStatement and nil for the error value
	return stmt, nil
}

// parsePermissionStatement method parses a PERMISSION statement and returns a PermissionStatement AST node
func (p *Parser) parsePermissionStatement(entityName string) (ast.Statement, error) {
	// create a new PermissionStatement object and set its Permission field to the currentToken
	stmt := &ast.PermissionStatement{Permission: p.currentToken}

	// expect the next token to be an identifier token, and set the PermissionStatement's Name field to the identifier's value
	if !p.expectAndNext(token.IDENT) {
		return nil, p.Error()
	}
	stmt.Name = p.currentToken

	key := utils.Key(entityName, stmt.Name.Literal)
	// add the permission reference to the parser's references
	err := p.references.AddPermissionReference(key)
	if err != nil {
		p.duplicationError(key) // Generate an error message indicating a duplication error
		return nil, p.Error()
	}

	// expect the next token to be an ASSIGN token, indicating the start of the expression assigned to the permission
	if !p.expectAndNext(token.ASSIGN) {
		return nil, p.Error()
	}

	p.next()

	// parse the expression statement and set it as the PermissionStatement's ExpressionStatement field
	ex, err := p.parseExpressionStatement()
	if err != nil {
		return nil, p.Error()
	}
	stmt.ExpressionStatement = ex

	// return the parsed PermissionStatement and nil for the error value
	return stmt, nil
}

// parseExpressionStatement method parses an expression statement and returns an ExpressionStatement AST node
func (p *Parser) parseExpressionStatement() (*ast.ExpressionStatement, error) {
	// create a new ExpressionStatement object
	stmt := &ast.ExpressionStatement{}
	var err error
	// parse the expression using the lowest precedence value as the initial precedence level
	stmt.Expression, err = p.parseExpression(LOWEST)
	if err != nil {
		return nil, p.Error()
	}

	// return the parsed ExpressionStatement and nil for the error value
	return stmt, nil
}

// expectAndNext method checks if the next token is of the expected type and advances the parser to the next token if it is. It returns true if the next token is of the expected type, and false otherwise.
func (p *Parser) expectAndNext(t token.Type) bool {
	// if the next token is of the expected type, advance the parser to the next token and return true
	if p.peekTokenIs(t) {
		p.next()
		return true
	}
	// otherwise, generate an error message indicating that the expected token type was not found and return false
	p.peekError(t)
	return false
}

// expect method checks if the next token is of the expected type, without advancing the parser. It returns true if the next token is of the expected type, and false otherwise.
func (p *Parser) expect(t token.Type) bool {
	// if the next token is of the expected type, return true
	if p.peekTokenIs(t) {
		return true
	}
	// otherwise, generate an error message indicating that the expected token type was not found and return false
	p.peekError(t)
	return false
}

// parseExpression method parses an expression with a given precedence level and returns the parsed expression as an AST node. It takes an integer value indicating the precedence level.
func (p *Parser) parseExpression(precedence int) (ast.Expression, error) {
	var exp ast.Expression
	var err error

	if p.currentTokenIs(token.NEWLINE) && p.previousTokenIs(token.LP, token.AND, token.OR, token.NOT, token.ASSIGN) {
		p.next() // skip a newline that follows an operator, '(' or '=' so expressions can span multiple lines
	}
	if p.currentTokenIs(token.LP) {
		p.next() // Consume the left parenthesis.
		exp, err = p.parseExpression(LOWEST)
		if err != nil {
			return nil, err
		}

		if !p.expect(token.RP) {
			return nil, p.Error()
		}
		p.next() // Consume the right parenthesis.
	} else {
		// get the prefix parsing function for the current token type
		prefix := p.prefixParseFns[p.currentToken.Type]
		if prefix == nil {
			p.noPrefixParseFnError(p.currentToken.Type)
			return nil, p.Error()
		}

		// parse the prefix expression
		exp, err = prefix()
		if err != nil {
			return nil, p.Error()
		}
	}

	// continue parsing the expression while the next token has a higher precedence level than the current precedence level
	for !p.peekTokenIs(token.NEWLINE) && precedence < p.peekPrecedence() {
		// get the infix parsing function for the next token type
		infix := p.infixParseFunc[p.peekToken.Type]
		if infix == nil {
			return exp, nil
		}
		p.next()
		// parse the infix expression with the current expression as its left-hand side
		exp, err = infix(exp)
		if err != nil {
			return nil, p.Error()
		}
	}

	// return the parsed expression and nil for the error value
	return exp, nil
}

// parseInfixExpression parses an infix expression that has a left operand and an operator followed by
// a right operand, such as "a or b" or "x and y".
// It takes the left operand as an argument, constructs an InfixExpression with the current operator
// and left operand, and parses the right operand at the operator's precedence to construct the final
// expression tree.
// It returns the resulting InfixExpression and any error encountered.
func (p *Parser) parseInfixExpression(left ast.Expression) (ast.Expression, error) {
	// Ensure the current token is a valid infix operator before proceeding.
	if !p.isInfixOperator(p.currentToken.Type) {
		p.currentError(token.AND, token.OR, token.NOT) // the valid infix operator token types
		return nil, p.Error()
	}

	// Create a new InfixExpression with the left operand and the current operator.
	expression := &ast.InfixExpression{
		Op:       p.currentToken,
		Left:     left,
		Operator: ast.Operator(p.currentToken.Literal),
	}

	// Get the precedence of the current operator and consume the operator token.
	precedence := p.currentPrecedence()
	p.next()

	// Parse the right operand at the operator's precedence to construct the final expression tree.
	right, err := p.parseExpression(precedence)
	if err != nil {
		return nil, err
	}

	// Ensure the right operand is not nil.
	if right == nil {
		p.currentError(token.IDENT, token.LP) // token types that can begin a right operand
		return nil, p.Error()
	}

	// Set the right operand of the InfixExpression and return it.
	expression.Right = right
	return expression, nil
}

// isInfixOperator reports whether the given token type is one of the infix operators (AND, OR, NOT).
func (p *Parser) isInfixOperator(tokenType token.Type) bool {
	return tokenType == token.AND || tokenType == token.OR || tokenType == token.NOT
}

// peekPrecedence returns the precedence of the next token in the input, if it is a known
// operator, or the lowest precedence otherwise.
func (p *Parser) peekPrecedence() int {
	if pr, ok := precedences[p.peekToken.Type]; ok {
		return pr
	}
	return LOWEST
}

// currentPrecedence returns the precedence of the current token in the input, if it is a known
// operator, or the lowest precedence otherwise.
func (p *Parser) currentPrecedence() int {
	if pr, ok := precedences[p.currentToken.Type]; ok {
		return pr
	}
	return LOWEST
}

func (p *Parser) parseIdentifierOrCall() (ast.Expression, error) {
	// Ensure the current token is a valid identifier before proceeding.
	if !p.currentTokenIs(token.IDENT) {
		return nil, fmt.Errorf("unexpected token type for identifier expression: %s", p.currentToken.Type)
	}

	if p.peekTokenIs(token.LP) {
		return p.parseCallExpression()
	}

	return p.parseIdentifierExpression()
}

// parseIdentifierExpression parses an identifier expression that may consist of one or more dot-separated
// identifiers, such as "x", "foo.bar", or "a.b.c.d".
// It constructs a new Identifier expression whose first segment is the current token, appends each
// subsequent dot-separated identifier to it, and returns the resulting expression and any error encountered.
func (p *Parser) parseIdentifierExpression() (ast.Expression, error) {
	// Ensure the current token is a valid identifier before proceeding.
	if !p.currentTokenIs(token.IDENT) {
		p.currentError(token.IDENT)
		return nil, p.Error()
	}

	// Create a new Identifier expression with the current token as its first segment.
	ident := &ast.Identifier{Idents: []token.Token{p.currentToken}}

	// If the next token is a dot, consume it and continue parsing the next identifier.
	for p.peekTokenIs(token.DOT) {
		p.next() // Consume the dot token

		// Check if the next token after the dot is a valid identifier
		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}

		ident.Idents = append(ident.Idents, p.currentToken)
	}

	// Return the resulting Identifier expression.
	return ident, nil
}
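
// Example (illustrative): an operand written as "organization.admin" yields an
// Identifier whose Idents slice holds the tokens "organization" and "admin".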

// parseCallExpression parses a call expression of the form call_func(variable1, variable2) and returns a Call AST node.
func (p *Parser) parseCallExpression() (ast.Expression, error) {
	// Ensure the current token is a valid identifier before proceeding.
	if !p.currentTokenIs(token.IDENT) {
		p.currentError(token.IDENT)
		return nil, p.Error()
	}

	// Create a new Call expression with the current token as its name.
	call := &ast.Call{Name: p.currentToken}

	if !p.expectAndNext(token.LP) {
		return nil, p.Error()
	}

	// Check if there are no arguments
	if p.peekTokenIs(token.RP) {
		p.next() // consume the RP token
		return call, nil
	}

	p.next()

	// Parse the first argument
	ident, err := p.parseIdentifierExpression()
	if err != nil {
		return nil, err
	}

	i, ok := ident.(*ast.Identifier)
	if !ok {
		return nil, fmt.Errorf("expected identifier, got %T", ident)
	}
	call.Arguments = append(call.Arguments, *i)

	// Parse remaining arguments
	for p.peekTokenIs(token.COMMA) {
		p.next()

		if !p.expectAndNext(token.IDENT) {
			return nil, p.Error()
		}

		ident, err = p.parseIdentifierExpression()
		if err != nil {
			return nil, err
		}

		i, ok = ident.(*ast.Identifier)
		if !ok {
			return nil, fmt.Errorf("expected identifier, got %T", ident)
		}
		call.Arguments = append(call.Arguments, *i)
	}

	if !p.expectAndNext(token.RP) {
		return nil, p.Error()
	}

	// Return the resulting Call expression.
	return call, nil
}
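
// Example (illustrative; the rule name is hypothetical): an expression such as
// "check_balance(request.amount)" is parsed into a Call named "check_balance" whose
// Arguments hold a single dot-separated Identifier ("request.amount").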

// registerPrefix safely registers a parsing function for a prefix token type in the parser's prefixParseFns map.
// It takes a token type and a prefix parsing function as arguments, and stores the function in the map
// under the given token type key.
func (p *Parser) registerPrefix(tokenType token.Type, fn prefixParseFn) {
	if fn == nil {
		p.duplicationError(fmt.Sprintf("registerPrefix: nil function for token type %s", tokenType))
		return
	}

	if _, exists := p.prefixParseFns[tokenType]; exists {
		p.duplicationError(fmt.Sprintf("registerPrefix: token type %s already registered", tokenType))
		return
	}

	p.prefixParseFns[tokenType] = fn
}

// registerInfix safely registers a parsing function for an infix token type in the parser's infixParseFunc map.
// It takes a token type and an infix parsing function as arguments, and stores the function in the map
// under the given token type key.
func (p *Parser) registerInfix(tokenType token.Type, fn infixParseFn) {
	if fn == nil {
		p.duplicationError(fmt.Sprintf("registerInfix: nil function for token type %s", tokenType))
		return
	}

	if _, exists := p.infixParseFunc[tokenType]; exists {
		p.duplicationError(fmt.Sprintf("registerInfix: token type %s already registered", tokenType))
		return
	}

	p.infixParseFunc[tokenType] = fn
}

// duplicationError adds an error message to the parser's error list indicating that a duplication was found.
// It takes a key string as an argument that is used to identify the source of the duplication in the input.
func (p *Parser) duplicationError(key string) {
	msg := fmt.Sprintf("%v:%v:duplication found for %s", p.l.GetLinePosition(), p.l.GetColumnPosition(), key)
	p.errors = append(p.errors, msg)
}

// noPrefixParseFnError adds an error message to the parser's error list indicating that no prefix parsing
// function was found for a given token type.
// It takes a token type as an argument that indicates the type of the token for which a parsing function is missing.
func (p *Parser) noPrefixParseFnError(t token.Type) {
	msg := fmt.Sprintf("%v:%v:no prefix parse function for %s found", p.l.GetLinePosition(), p.l.GetColumnPosition(), t)
	p.errors = append(p.errors, msg)
}

// peekError adds an error message to the parser's error list indicating that the next token in the input
// did not match the expected type(s).
// It takes one or more token types as arguments that indicate the expected types.
func (p *Parser) peekError(t ...token.Type) {
	expected := strings.Join(tokenTypesToStrings(t), ", ")
	msg := fmt.Sprintf("%v:%v:expected next token to be %s, got %s instead", p.l.GetLinePosition(), p.l.GetColumnPosition(), expected, p.peekToken.Type)
	p.errors = append(p.errors, msg)
}

// currentError adds an error message to the parser's error list indicating that the current token in the input
// did not match the expected type(s).
// It takes one or more token types as arguments that indicate the expected types.
func (p *Parser) currentError(t ...token.Type) {
	expected := strings.Join(tokenTypesToStrings(t), ", ")
	msg := fmt.Sprintf("%v:%v:expected token to be %s, got %s instead", p.l.GetLinePosition(),
		p.l.GetColumnPosition(), expected, p.currentToken.Type)
	p.errors = append(p.errors, msg)
}

// tokenTypesToStrings converts a slice of token types to a slice of their string representations.
func tokenTypesToStrings(types []token.Type) []string {
	strs := make([]string, len(types))
	for i, t := range types {
		strs[i] = t.String()
	}
	return strs
}
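
A minimal usage sketch (illustrative; the schema string assumes the Permify schema language this parser targets):

package main

import (
	"fmt"

	"github.com/Permify/permify/pkg/dsl/parser"
)

func main() {
	// NewParser wraps the raw schema string in a lexer-backed parser.
	p := parser.NewParser(`
entity user {}

entity organization {
	relation member @user
	permission view = member
}
`)

	// Parse walks the token stream and builds the schema's AST.
	schema, err := p.Parse()
	if err != nil {
		// Parse reports the first error recorded while parsing.
		fmt.Println("parse error:", err)
		return
	}

	fmt.Printf("parsed %d top-level statements\n", len(schema.Statements))
}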