package development

import (
	"context"
	"fmt"
	"log/slog"
	"os"
	"sort"
	"strings"

	"google.golang.org/protobuf/types/known/structpb"

	"gopkg.in/yaml.v3"

	"github.com/rs/xid"

	"github.com/Permify/permify/internal/config"
	"github.com/Permify/permify/internal/engines"
	"github.com/Permify/permify/internal/factories"
	"github.com/Permify/permify/internal/invoke"
	"github.com/Permify/permify/internal/servers"
	"github.com/Permify/permify/internal/storage"
	"github.com/Permify/permify/internal/validation"
	"github.com/Permify/permify/pkg/attribute"
	"github.com/Permify/permify/pkg/database"
	"github.com/Permify/permify/pkg/development/file"
	"github.com/Permify/permify/pkg/dsl/compiler"
	"github.com/Permify/permify/pkg/dsl/parser"
	v1 "github.com/Permify/permify/pkg/pb/base/v1"
	"github.com/Permify/permify/pkg/telemetry"
	"github.com/Permify/permify/pkg/token"
	"github.com/Permify/permify/pkg/tuple"
)

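// Development wraps an in-memory Permify server container used to validate
// schemas, relationship tuples, attributes, and scenarios during development.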
type Development struct {
	Container *servers.Container
}

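// NewContainer builds a Development instance backed by an in-memory database,
// wiring together storage, engines, a direct invoker, and a noop watcher.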
func NewContainer() *Development {
	var err error

	// Create a new in-memory database using the factories package
	var db database.Database
	db, err = factories.DatabaseFactory(config.Database{Engine: database.MEMORY.String()})
	if err != nil {
		fmt.Println(err)
	}

	// Create a new logger instance
	logger := slog.New(slog.NewTextHandler(os.Stdout, nil))
	slog.SetDefault(logger)

	// Create instances of storage using the factories package
	dataReader := factories.DataReaderFactory(db)
	dataWriter := factories.DataWriterFactory(db)
	bundleReader := factories.BundleReaderFactory(db)
	bundleWriter := factories.BundleWriterFactory(db)
	schemaReader := factories.SchemaReaderFactory(db)
	schemaWriter := factories.SchemaWriterFactory(db)
	tenantReader := factories.TenantReaderFactory(db)
	tenantWriter := factories.TenantWriterFactory(db)

	// Create instances of engines
	checkEngine := engines.NewCheckEngine(schemaReader, dataReader)
	expandEngine := engines.NewExpandEngine(schemaReader, dataReader)
	lookupEngine := engines.NewLookupEngine(checkEngine, schemaReader, dataReader)
	subjectPermissionEngine := engines.NewSubjectPermission(checkEngine, schemaReader)

	invoker := invoke.NewDirectInvoker(
		schemaReader,
		dataReader,
		checkEngine,
		expandEngine,
		lookupEngine,
		subjectPermissionEngine,
		telemetry.NewNoopMeter(),
	)

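	// Register the invoker back on the check engine, which was itself passed into the invoker above.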
	checkEngine.SetInvoker(invoker)

	// Create a new container instance with engines, storage, and other dependencies
	return &Development{
		Container: servers.NewContainer(
			invoker,
			dataReader,
			dataWriter,
			bundleReader,
			bundleWriter,
			schemaReader,
			schemaWriter,
			tenantReader,
			tenantWriter,
			storage.NewNoopWatcher(),
		),
	}
}

// ReadSchema reads the schema definition stored for tenant "t1" at its head version.
func (c *Development) ReadSchema(ctx context.Context) (sch *v1.SchemaDefinition, err error) {
	// Get the head version of the "t1" schema from the schema repository
	version, err := c.Container.SR.HeadVersion(ctx, "t1")
	if err != nil {
		return nil, err
	}

	// Read the schema definition for that version from the schema repository
	return c.Container.SR.ReadSchema(ctx, "t1", version)
}

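// Error represents a single validation failure, carrying the category of input
// that failed, the offending key, and a human-readable message.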
type Error struct {
	Type    string `json:"type"`
	Key     any    `json:"key"`
	Message string `json:"message"`
}

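// Run marshals the given shape map to YAML, decodes it into a file.Shape, and
// delegates validation to RunWithShape.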
func (c *Development) Run(ctx context.Context, shape map[string]interface{}) (errors []Error) {
	// Marshal the shape map into YAML format
	out, err := yaml.Marshal(shape)
	if err != nil {
		errors = append(errors, Error{
			Type:    "file_validation",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	// Unmarshal the YAML data into a file.Shape object
	s := &file.Shape{}
	err = yaml.Unmarshal(out, &s)
	if err != nil {
		errors = append(errors, Error{
			Type:    "file_validation",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	return c.RunWithShape(ctx, s)
}

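// RunWithShape validates a shape end to end: it parses and compiles the schema,
// writes it for tenant "t1", loads the shape's relationships and attributes, and
// replays every scenario, collecting any errors encountered along the way.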
func (c *Development) RunWithShape(ctx context.Context, shape *file.Shape) (errors []Error) {
	// Parse the schema using the parser library
	sch, err := parser.NewParser(shape.Schema).Parse()
	if err != nil {
		errors = append(errors, Error{
			Type:    "schema",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	// Compile the parsed schema
	_, _, err = compiler.NewCompiler(true, sch).Compile()
	if err != nil {
		errors = append(errors, Error{
			Type:    "schema",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	// Generate a new unique ID for this version of the schema
	version := xid.New().String()

	// Create a slice of SchemaDefinitions, one for each statement in the schema
	cnf := make([]storage.SchemaDefinition, 0, len(sch.Statements))
	for _, st := range sch.Statements {
		cnf = append(cnf, storage.SchemaDefinition{
			TenantID:             "t1",
			Version:              version,
			Name:                 st.GetName(),
			SerializedDefinition: []byte(st.String()),
		})
	}

	// Write the schema definitions into the storage
	err = c.Container.SW.WriteSchema(ctx, cnf)
	if err != nil {
		errors = append(errors, Error{
			Type:    "schema",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	// Each item in the Relationships slice is processed individually
	for _, t := range shape.Relationships {
		tup, err := tuple.Tuple(t)
		if err != nil {
			errors = append(errors, Error{
				Type:    "relationships",
				Key:     t,
				Message: err.Error(),
			})
			continue
		}

		// Read the schema definition for this relationship
		definition, _, err := c.Container.SR.ReadEntityDefinition(ctx, "t1", tup.GetEntity().GetType(), version)
		if err != nil {
			errors = append(errors, Error{
				Type:    "relationships",
				Key:     t,
				Message: err.Error(),
			})
			continue
		}

		// Validate the relationship tuple against the schema definition
		err = validation.ValidateTuple(definition, tup)
		if err != nil {
			errors = append(errors, Error{
				Type:    "relationships",
				Key:     t,
				Message: err.Error(),
			})
			continue
		}

		// Write the relationship to the database
		_, err = c.Container.DW.Write(ctx, "t1", database.NewTupleCollection(tup), database.NewAttributeCollection())
		// Continue to the next relationship if an error occurred
		if err != nil {
			errors = append(errors, Error{
				Type:    "relationships",
				Key:     t,
				Message: err.Error(),
			})
			continue
		}
	}

	// Each item in the Attributes slice is processed individually
	for _, a := range shape.Attributes {
		attr, err := attribute.Attribute(a)
		if err != nil {
			errors = append(errors, Error{
				Type:    "attributes",
				Key:     a,
				Message: err.Error(),
			})
			continue
		}

		// Read the schema definition for this attribute
		definition, _, err := c.Container.SR.ReadEntityDefinition(ctx, "t1", attr.GetEntity().GetType(), version)
		if err != nil {
			errors = append(errors, Error{
				Type:    "attributes",
				Key:     a,
				Message: err.Error(),
			})
			continue
		}

		// Validate the attribute against the schema definition
		err = validation.ValidateAttribute(definition, attr)
		if err != nil {
			errors = append(errors, Error{
				Type:    "attributes",
				Key:     a,
				Message: err.Error(),
			})
			continue
		}

		// Write the attribute to the database
		_, err = c.Container.DW.Write(ctx, "t1", database.NewTupleCollection(), database.NewAttributeCollection(attr))
		// Continue to the next attribute if an error occurred
		if err != nil {
			errors = append(errors, Error{
				Type:    "attributes",
				Key:     a,
				Message: err.Error(),
			})
			continue
		}
	}

	// Each item in the Scenarios slice is processed individually
	for i, scenario := range shape.Scenarios {

		// Each Check in the current scenario is processed
		for _, check := range scenario.Checks {
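			// Parse the check's entity reference (type and ID)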
			entity, err := tuple.E(check.Entity)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

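			// Parse the check's subject into an entity-and-relation pair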
			ear, err := tuple.EAR(check.Subject)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			cont, err := Context(check.Context)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

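			// Build the subject of the permission check from the parsed entity and relation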
			subject := &v1.Subject{
				Type:     ear.GetEntity().GetType(),
				Id:       ear.GetEntity().GetId(),
				Relation: ear.GetRelation(),
			}

			// Each Assertion in the current check is processed
			for permission, expected := range check.Assertions {
				exp := v1.CheckResult_CHECK_RESULT_ALLOWED
				if !expected {
					exp = v1.CheckResult_CHECK_RESULT_DENIED
				}

				// A Permission Check is made for the current entity, permission and subject
				res, err := c.Container.Invoker.Check(ctx, &v1.PermissionCheckRequest{
					TenantId: "t1",
					Metadata: &v1.PermissionCheckRequestMetadata{
						SchemaVersion: version,
						SnapToken:     token.NewNoopToken().Encode().String(),
						Depth:         100,
					},
					Context:    cont,
					Entity:     entity,
					Permission: permission,
					Subject:    subject,
				})
				if err != nil {
					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: err.Error(),
					})
					continue
				}

				query := tuple.SubjectToString(subject) + " " + permission + " " + tuple.EntityToString(entity)

				// Check if the permission check result matches the expected result
				if res.Can != exp {
					var expectedStr, actualStr string
					if exp == v1.CheckResult_CHECK_RESULT_ALLOWED {
						expectedStr = "true"
					} else {
						expectedStr = "false"
					}

					if res.Can == v1.CheckResult_CHECK_RESULT_ALLOWED {
						actualStr = "true"
					} else {
						actualStr = "false"
					}

					// Construct a detailed error message with the expected result, actual result, and the query
					errorMsg := fmt.Sprintf("Query: %s, Expected: %s, Actual: %s", query, expectedStr, actualStr)

					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: errorMsg,
					})
				}
			}
		}

		// Each EntityFilter in the current scenario is processed
		for _, filter := range scenario.EntityFilters {
			ear, err := tuple.EAR(filter.Subject)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			cont, err := Context(filter.Context)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			subject := &v1.Subject{
				Type:     ear.GetEntity().GetType(),
				Id:       ear.GetEntity().GetId(),
				Relation: ear.GetRelation(),
			}

			// Each Assertion in the current filter is processed
			for permission, expected := range filter.Assertions {
				// Perform a lookup for the entity with the given subject and permission
				res, err := c.Container.Invoker.LookupEntity(ctx, &v1.PermissionLookupEntityRequest{
					TenantId: "t1",
					Metadata: &v1.PermissionLookupEntityRequestMetadata{
						SchemaVersion: version,
						SnapToken:     token.NewNoopToken().Encode().String(),
						Depth:         100,
					},
					Context:    cont,
					EntityType: filter.EntityType,
					Permission: permission,
					Subject:    subject,
				})
				if err != nil {
					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: err.Error(),
					})
					continue
				}

				query := tuple.SubjectToString(subject) + " " + permission + " " + filter.EntityType

				// Check if the actual result of the entity lookup does NOT match the expected result
				if !isSameArray(res.GetEntityIds(), expected) {
					expectedStr := strings.Join(expected, ", ")
					actualStr := strings.Join(res.GetEntityIds(), ", ")

					errorMsg := fmt.Sprintf("Query: %s, Expected: [%s], Actual: [%s]", query, expectedStr, actualStr)

					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: errorMsg,
					})
				}
			}
		}

		// Each SubjectFilter in the current scenario is processed
		for _, filter := range scenario.SubjectFilters {

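			// Convert the filter's subject reference string into a relation reference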
			subjectReference := tuple.RelationReference(filter.SubjectReference)

			cont, err := Context(filter.Context)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			var entity *v1.Entity
			entity, err = tuple.E(filter.Entity)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			// Each Assertion in the current filter is processed
			for permission, expected := range filter.Assertions {
				// Perform a lookup for the subject with the given entity and permission
				res, err := c.Container.Invoker.LookupSubject(ctx, &v1.PermissionLookupSubjectRequest{
					TenantId: "t1",
					Metadata: &v1.PermissionLookupSubjectRequestMetadata{
						SchemaVersion: version,
						SnapToken:     token.NewNoopToken().Encode().String(),
						Depth:         100,
					},
					Context:          cont,
					SubjectReference: subjectReference,
					Permission:       permission,
					Entity:           entity,
				})
				if err != nil {
					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: err.Error(),
					})
					continue
				}

				query := tuple.EntityToString(entity) + " " + permission + " " + filter.SubjectReference

				// Check if the actual result of the subject lookup does NOT match the expected result
				if !isSameArray(res.GetSubjectIds(), expected) {
					expectedStr := strings.Join(expected, ", ")
					actualStr := strings.Join(res.GetSubjectIds(), ", ")

					errorMsg := fmt.Sprintf("Query: %s, Expected: [%s], Actual: [%s]", query, expectedStr, actualStr)

					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: errorMsg,
					})
				}
			}
		}
	}

	return
}

// Context converts a file.Context into a base v1.Context.
func Context(fileContext file.Context) (cont *v1.Context, err error) {
	// Initialize an empty base context to be populated from the file context.
	cont = &v1.Context{
		Tuples:     []*v1.Tuple{},
		Attributes: []*v1.Attribute{},
		Data:       nil,
	}

	// Convert the file context's data to a Struct object.
	st, err := structpb.NewStruct(fileContext.Data)
	if err != nil {
		// If an error occurs, return it.
		return nil, err
	}

	// Assign the Struct object to the context's data field.
	cont.Data = st

	// Iterate over the file context's tuples.
	for _, t := range fileContext.Tuples {
		// Convert each tuple to a base tuple.
		tup, err := tuple.Tuple(t)
		if err != nil {
			// If an error occurs, return it.
			return nil, err
		}

		// Add the converted tuple to the context's tuples slice.
		cont.Tuples = append(cont.Tuples, tup)
	}

	// Iterate over the file context's attributes.
	for _, t := range fileContext.Attributes {
		// Convert each attribute to a base attribute.
		attr, err := attribute.Attribute(t)
		if err != nil {
			// If an error occurs, return it.
			return nil, err
		}

		// Add the converted attribute to the context's attributes slice.
		cont.Attributes = append(cont.Attributes, attr)
	}

	// If everything goes well, return the context and a nil error.
	return cont, nil
}

// isSameArray reports whether two string slices contain the same elements, ignoring order.
func isSameArray(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}

	sortedA := make([]string, len(a))
	copy(sortedA, a)
	sort.Strings(sortedA)

	sortedB := make([]string, len(b))
	copy(sortedB, b)
	sort.Strings(sortedB)

	for i := range sortedA {
		if sortedA[i] != sortedB[i] {
			return false
		}
	}

	return true
}