package development

import (
	"context"
	"fmt"
	"log/slog"
	"os"
	"sort"
	"strings"

	"google.golang.org/protobuf/types/known/structpb"

	"gopkg.in/yaml.v3"

	"github.com/rs/xid"

	"github.com/Permify/permify/internal/config"
	"github.com/Permify/permify/internal/engines"
	"github.com/Permify/permify/internal/factories"
	"github.com/Permify/permify/internal/invoke"
	"github.com/Permify/permify/internal/servers"
	"github.com/Permify/permify/internal/storage"
	"github.com/Permify/permify/internal/validation"
	"github.com/Permify/permify/pkg/attribute"
	"github.com/Permify/permify/pkg/database"
	"github.com/Permify/permify/pkg/development/file"
	"github.com/Permify/permify/pkg/dsl/compiler"
	"github.com/Permify/permify/pkg/dsl/parser"
	v1 "github.com/Permify/permify/pkg/pb/base/v1"
	"github.com/Permify/permify/pkg/token"
	"github.com/Permify/permify/pkg/tuple"
)

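// Development wraps a server container wired to an in-memory database so that
// schemas, relationships, attributes, and scenarios can be exercised locally.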
type Development struct {
	Container *servers.Container
}

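// NewContainer - Creates a new Development instance backed by an in-memory
// database, the permission engines, and a server container.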
func NewContainer() *Development {
	var err error

	// Create a new in-memory database using the factories package
	var db database.Database
	db, err = factories.DatabaseFactory(config.Database{Engine: database.MEMORY.String()})
	if err != nil {
		fmt.Println(err)
	}

	// Create a new logger instance
	logger := slog.New(slog.NewTextHandler(os.Stdout, nil))
	slog.SetDefault(logger)

	// Create instances of storage using the factories package
	dataReader := factories.DataReaderFactory(db)
	dataWriter := factories.DataWriterFactory(db)
	bundleReader := factories.BundleReaderFactory(db)
	bundleWriter := factories.BundleWriterFactory(db)
	schemaReader := factories.SchemaReaderFactory(db)
	schemaWriter := factories.SchemaWriterFactory(db)
	tenantReader := factories.TenantReaderFactory(db)
	tenantWriter := factories.TenantWriterFactory(db)

	// Create instances of engines
	checkEngine := engines.NewCheckEngine(schemaReader, dataReader)
	expandEngine := engines.NewExpandEngine(schemaReader, dataReader)
	lookupEngine := engines.NewLookupEngine(checkEngine, schemaReader, dataReader)
	subjectPermissionEngine := engines.NewSubjectPermission(checkEngine, schemaReader)

	invoker := invoke.NewDirectInvoker(
		schemaReader,
		dataReader,
		checkEngine,
		expandEngine,
		lookupEngine,
		subjectPermissionEngine,
	)

	checkEngine.SetInvoker(invoker)

	// Create a new container instance with engines, storage, and other dependencies
	return &Development{
		Container: servers.NewContainer(
			invoker,
			dataReader,
			dataWriter,
			bundleReader,
			bundleWriter,
			schemaReader,
			schemaWriter,
			tenantReader,
			tenantWriter,
			storage.NewNoopWatcher(),
		),
	}
}

// ReadSchema - Reads the head version of the "t1" tenant's schema and returns its definition
func (c *Development) ReadSchema(ctx context.Context) (sch *v1.SchemaDefinition, err error) {
	// Get the head schema version for the "t1" tenant from the schema reader
	version, err := c.Container.SR.HeadVersion(ctx, "t1")
	if err != nil {
		return nil, err
	}

	// Read the schema definition for that tenant and version from the schema reader
	return c.Container.SR.ReadSchema(ctx, "t1", version)
}

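// Error represents a single validation failure collected while running a shape.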
type Error struct {
	Type    string `json:"type"`
	Key     any    `json:"key"`
	Message string `json:"message"`
}

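// Run validates and executes the given shape map. The map is marshaled to
// YAML, decoded into a file.Shape, and handed to RunWithShape; any errors
// collected along the way are returned.
//
// Minimal usage sketch (shapeMap mirrors a Permify validation shape with its
// schema, relationships, attributes, and scenarios sections):
//
//	dev := NewContainer()
//	errs := dev.Run(ctx, shapeMap)
//	for _, e := range errs {
//		fmt.Println(e.Type, e.Message)
//	}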
func (c *Development) Run(ctx context.Context, shape map[string]interface{}) (errors []Error) {
	// Marshal the shape map into YAML format
	out, err := yaml.Marshal(shape)
	if err != nil {
		errors = append(errors, Error{
			Type:    "file_validation",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	// Unmarshal the YAML data into a file.Shape object
	s := &file.Shape{}
	err = yaml.Unmarshal(out, &s)
	if err != nil {
		errors = append(errors, Error{
			Type:    "file_validation",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	return c.RunWithShape(ctx, s)
}

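// RunWithShape compiles and stores the shape's schema under tenant "t1", writes
// its relationships and attributes, and then executes every scenario (checks,
// entity filters, and subject filters), collecting all validation errors rather
// than stopping at the first failure.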
func (c *Development) RunWithShape(ctx context.Context, shape *file.Shape) (errors []Error) {
	// Parse the schema using the parser library
	sch, err := parser.NewParser(shape.Schema).Parse()
	if err != nil {
		errors = append(errors, Error{
			Type:    "schema",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	// Compile the parsed schema
	_, _, err = compiler.NewCompiler(true, sch).Compile()
	if err != nil {
		errors = append(errors, Error{
			Type:    "schema",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	// Generate a new unique ID for this version of the schema
	version := xid.New().String()

	// Create a slice of SchemaDefinitions, one for each statement in the schema
	cnf := make([]storage.SchemaDefinition, 0, len(sch.Statements))
	for _, st := range sch.Statements {
		cnf = append(cnf, storage.SchemaDefinition{
			TenantID:             "t1",
			Version:              version,
			Name:                 st.GetName(),
			SerializedDefinition: []byte(st.String()),
		})
	}

	// Write the schema definitions into the storage
	err = c.Container.SW.WriteSchema(ctx, cnf)
	if err != nil {
		errors = append(errors, Error{
			Type:    "schema",
			Key:     "",
			Message: err.Error(),
		})
		return
	}

	// Each item in the Relationships slice is processed individually
	for _, t := range shape.Relationships {
		tup, err := tuple.Tuple(t)
		if err != nil {
			errors = append(errors, Error{
				Type:    "relationships",
				Key:     t,
				Message: err.Error(),
			})
			continue
		}

		// Read the schema definition for this relationship
		definition, _, err := c.Container.SR.ReadEntityDefinition(ctx, "t1", tup.GetEntity().GetType(), version)
		if err != nil {
			errors = append(errors, Error{
				Type:    "relationships",
				Key:     t,
				Message: err.Error(),
			})
			continue
		}

		// Validate the relationship tuple against the schema definition
		err = validation.ValidateTuple(definition, tup)
		if err != nil {
			errors = append(errors, Error{
				Type:    "relationships",
				Key:     t,
				Message: err.Error(),
			})
			continue
		}

		// Write the relationship to the database
		_, err = c.Container.DW.Write(ctx, "t1", database.NewTupleCollection(tup), database.NewAttributeCollection())
		// Continue to the next relationship if an error occurred
		if err != nil {
			errors = append(errors, Error{
				Type:    "relationships",
				Key:     t,
				Message: err.Error(),
			})
			continue
		}
	}

	// Each item in the Attributes slice is processed individually
	for _, a := range shape.Attributes {
		attr, err := attribute.Attribute(a)
		if err != nil {
			errors = append(errors, Error{
				Type:    "attributes",
				Key:     a,
				Message: err.Error(),
			})
			continue
		}

		// Read the schema definition for this attribute
		definition, _, err := c.Container.SR.ReadEntityDefinition(ctx, "t1", attr.GetEntity().GetType(), version)
		if err != nil {
			errors = append(errors, Error{
				Type:    "attributes",
				Key:     a,
				Message: err.Error(),
			})
			continue
		}

		// Validate the attribute against the schema definition
		err = validation.ValidateAttribute(definition, attr)
		if err != nil {
			errors = append(errors, Error{
				Type:    "attributes",
				Key:     a,
				Message: err.Error(),
			})
			continue
		}

		// Write the attribute to the database
		_, err = c.Container.DW.Write(ctx, "t1", database.NewTupleCollection(), database.NewAttributeCollection(attr))
		// Continue to the next attribute if an error occurred
		if err != nil {
			errors = append(errors, Error{
				Type:    "attributes",
				Key:     a,
				Message: err.Error(),
			})
			continue
		}
	}

	// Each item in the Scenarios slice is processed individually
	for i, scenario := range shape.Scenarios {

		// Each Check in the current scenario is processed
		for _, check := range scenario.Checks {
			entity, err := tuple.E(check.Entity)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			ear, err := tuple.EAR(check.Subject)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			cont, err := Context(check.Context)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

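			// Build the subject from the parsed entity-and-relation reference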
			subject := &v1.Subject{
				Type:     ear.GetEntity().GetType(),
				Id:       ear.GetEntity().GetId(),
				Relation: ear.GetRelation(),
			}

			// Each Assertion in the current check is processed
			for permission, expected := range check.Assertions {
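				// Map the boolean assertion to the expected check result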
				exp := v1.CheckResult_CHECK_RESULT_ALLOWED
				if !expected {
					exp = v1.CheckResult_CHECK_RESULT_DENIED
				}

				// A Permission Check is made for the current entity, permission and subject
				res, err := c.Container.Invoker.Check(ctx, &v1.PermissionCheckRequest{
					TenantId: "t1",
					Metadata: &v1.PermissionCheckRequestMetadata{
						SchemaVersion: version,
						SnapToken:     token.NewNoopToken().Encode().String(),
						Depth:         100,
					},
					Context:    cont,
					Entity:     entity,
					Permission: permission,
					Subject:    subject,
				})
				if err != nil {
					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: err.Error(),
					})
					continue
				}

				query := tuple.SubjectToString(subject) + " " + permission + " " + tuple.EntityToString(entity)

				// Check if the permission check result matches the expected result
				if res.Can != exp {
					var expectedStr, actualStr string
					if exp == v1.CheckResult_CHECK_RESULT_ALLOWED {
						expectedStr = "true"
					} else {
						expectedStr = "false"
					}

					if res.Can == v1.CheckResult_CHECK_RESULT_ALLOWED {
						actualStr = "true"
					} else {
						actualStr = "false"
					}

					// Construct a detailed error message with the expected result, actual result, and the query
					errorMsg := fmt.Sprintf("Query: %s, Expected: %s, Actual: %s", query, expectedStr, actualStr)

					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: errorMsg,
					})
				}
			}
		}

		// Each EntityFilter in the current scenario is processed
		for _, filter := range scenario.EntityFilters {
			ear, err := tuple.EAR(filter.Subject)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			cont, err := Context(filter.Context)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

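			// Build the subject whose accessible entities will be looked up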
			subject := &v1.Subject{
				Type:     ear.GetEntity().GetType(),
				Id:       ear.GetEntity().GetId(),
				Relation: ear.GetRelation(),
			}

			// Each Assertion in the current filter is processed
			for permission, expected := range filter.Assertions {
				// Perform a lookup for the entity with the given subject and permission
				res, err := c.Container.Invoker.LookupEntity(ctx, &v1.PermissionLookupEntityRequest{
					TenantId: "t1",
					Metadata: &v1.PermissionLookupEntityRequestMetadata{
						SchemaVersion: version,
						SnapToken:     token.NewNoopToken().Encode().String(),
						Depth:         100,
					},
					Context:    cont,
					EntityType: filter.EntityType,
					Permission: permission,
					Subject:    subject,
				})
				if err != nil {
					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: err.Error(),
					})
					continue
				}

				query := tuple.SubjectToString(subject) + " " + permission + " " + filter.EntityType

				// Check if the actual result of the entity lookup does NOT match the expected result
				if !isSameArray(res.GetEntityIds(), expected) {
					expectedStr := strings.Join(expected, ", ")
					actualStr := strings.Join(res.GetEntityIds(), ", ")

					errorMsg := fmt.Sprintf("Query: %s, Expected: [%s], Actual: [%s]", query, expectedStr, actualStr)

					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: errorMsg,
					})
				}
			}
		}

		// Each SubjectFilter in the current scenario is processed
		for _, filter := range scenario.SubjectFilters {
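			// Parse the subject reference (an entity type with an optional relation) used for the lookup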
			subjectReference := tuple.RelationReference(filter.SubjectReference)

			cont, err := Context(filter.Context)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			var entity *v1.Entity
			entity, err = tuple.E(filter.Entity)
			if err != nil {
				errors = append(errors, Error{
					Type:    "scenarios",
					Key:     i,
					Message: err.Error(),
				})
				continue
			}

			// Each Assertion in the current filter is processed
			for permission, expected := range filter.Assertions {
				// Perform a lookup for the subject with the given entity and permission
				res, err := c.Container.Invoker.LookupSubject(ctx, &v1.PermissionLookupSubjectRequest{
					TenantId: "t1",
					Metadata: &v1.PermissionLookupSubjectRequestMetadata{
						SchemaVersion: version,
						SnapToken:     token.NewNoopToken().Encode().String(),
						Depth:         100,
					},
					Context:          cont,
					SubjectReference: subjectReference,
					Permission:       permission,
					Entity:           entity,
				})
				if err != nil {
					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: err.Error(),
					})
					continue
				}

				query := tuple.EntityToString(entity) + " " + permission + " " + filter.SubjectReference

				// Check if the actual result of the subject lookup does NOT match the expected result
				if !isSameArray(res.GetSubjectIds(), expected) {
					expectedStr := strings.Join(expected, ", ")
					actualStr := strings.Join(res.GetSubjectIds(), ", ")

					errorMsg := fmt.Sprintf("Query: %s, Expected: [%s], Actual: [%s]", query, expectedStr, actualStr)

					errors = append(errors, Error{
						Type:    "scenarios",
						Key:     i,
						Message: errorMsg,
					})
				}
			}
		}
	}

	return
}

// Context converts a file context into a base (v1) context, translating its data, tuples, and attributes.
func Context(fileContext file.Context) (cont *v1.Context, err error) {
	// Initialize an empty base context to be populated from the file context.
	cont = &v1.Context{
		Tuples:     []*v1.Tuple{},
		Attributes: []*v1.Attribute{},
		Data:       nil,
	}

	// Convert the file context's data to a Struct object.
	st, err := structpb.NewStruct(fileContext.Data)
	if err != nil {
		// If an error occurs, return it.
		return nil, err
	}

	// Assign the Struct object to the context's data field.
	cont.Data = st

	// Iterate over the file context's tuples.
	for _, t := range fileContext.Tuples {
		// Convert each tuple to a base tuple.
		tup, err := tuple.Tuple(t)
		if err != nil {
			// If an error occurs, return it.
			return nil, err
		}

		// Add the converted tuple to the context's tuples slice.
		cont.Tuples = append(cont.Tuples, tup)
	}

	// Iterate over the file context's attributes.
	for _, t := range fileContext.Attributes {
		// Convert each attribute to a base attribute.
		attr, err := attribute.Attribute(t)
		if err != nil {
			// If an error occurs, return it.
			return nil, err
		}

		// Add the converted attribute to the context's attributes slice.
		cont.Attributes = append(cont.Attributes, attr)
	}

	// If everything goes well, return the context and a nil error.
	return cont, nil
}

// isSameArray - checks whether two string slices contain the same elements, regardless of order
func isSameArray(a, b []string) bool {
	if len(a) != len(b) {
		return false
	}

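	// Compare order-insensitively by sorting copies of both slices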
	sortedA := make([]string, len(a))
	copy(sortedA, a)
	sort.Strings(sortedA)

	sortedB := make([]string, len(b))
	copy(sortedB, b)
	sort.Strings(sortedB)

	for i := range sortedA {
		if sortedA[i] != sortedB[i] {
			return false
		}
	}

	return true
}