<?php declare(strict_types = 1);

namespace Apicart\FQL\Tests\Tokenizer;

use Apicart\FQL\Token\Token\GroupBegin as GroupBeginToken;
use Apicart\FQL\Token\Token\Phrase as PhraseToken;
use Apicart\FQL\Token\Token\Word as WordToken;
use Apicart\FQL\Tokenizer\AbstractTokenExtractor;
use Apicart\FQL\Tokenizer\Text;
use Apicart\FQL\Tokenizer\Tokenizer;
use Apicart\FQL\Value\Token;

final class TextTest extends FullTest
{
    /**
     * Fixtures for inputs that the Text extractor tokenizes differently
     * from the parent FullTest expectations, keyed by input string.
     *
     * @var array
     */
    protected static $fixtureOverride = [];


    public static function setUpBeforeClass(): void
    {
        self::$fixtureOverride = [
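            // Hashtags and user mentions stay single word tokens, trailing
            // punctuation included; only whitespace and parentheses split off.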
            '#tag' => [new WordToken('#tag', 0, '', '#tag')],
            '\#tag' => [new WordToken('\#tag', 0, '', '\#tag')],
            '#_tag-tag' => [new WordToken('#_tag-tag', 0, '', '#_tag-tag')],
            '#tag+' => [new WordToken('#tag+', 0, '', '#tag+')],
            '#tag-' => [new WordToken('#tag-', 0, '', '#tag-')],
            '#tag!' => [new WordToken('#tag!', 0, '', '#tag!')],
            "#tag\n" => [new WordToken('#tag', 0, '', '#tag'), new Token(Tokenizer::TOKEN_WHITESPACE, "\n", 4)],
            '#tag ' => [new WordToken('#tag', 0, '', '#tag'), new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 4)],
            '#tag(' => [new WordToken('#tag', 0, '', '#tag'), new GroupBeginToken('(', 4, '(', null)],
            '#tag)' => [new WordToken('#tag', 0, '', '#tag'), new Token(Tokenizer::TOKEN_GROUP_END, ')', 4)],
            '@user' => [new WordToken('@user', 0, '', '@user')],
            '@user.user' => [new WordToken('@user.user', 0, '', '@user.user')],
            '\@user' => [new WordToken('\@user', 0, '', '\@user')],
            '@_user-user' => [new WordToken('@_user-user', 0, '', '@_user-user')],
            '@user+' => [new WordToken('@user+', 0, '', '@user+')],
            '@user-' => [new WordToken('@user-', 0, '', '@user-')],
            '@user!' => [new WordToken('@user!', 0, '', '@user!')],
            "@user\n" => [new WordToken('@user', 0, '', '@user'), new Token(Tokenizer::TOKEN_WHITESPACE, "\n", 5)],
            '@user ' => [new WordToken('@user', 0, '', '@user'), new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 5)],
            '@user(' => [new WordToken('@user', 0, '', '@user'), new GroupBeginToken('(', 5, '(', null)],
            '@user)' => [new WordToken('@user', 0, '', '@user'), new Token(Tokenizer::TOKEN_GROUP_END, ')', 5)],
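            // Range syntax is not recognized here: brackets and braces stay
            // attached to the adjacent words and TO is an ordinary word.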
            '[a TO b]' => [
                new WordToken('[a', 0, '', '[a'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 2),
                new WordToken('TO', 3, '', 'TO'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 5),
                new WordToken('b]', 6, '', 'b]'),
            ],
            '[a TO b}' => [
                new WordToken('[a', 0, '', '[a'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 2),
                new WordToken('TO', 3, '', 'TO'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 5),
                new WordToken('b}', 6, '', 'b}'),
            ],
            '{a TO b}' => [
                new WordToken('{a', 0, '', '{a'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 2),
                new WordToken('TO', 3, '', 'TO'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 5),
                new WordToken('b}', 6, '', 'b}'),
            ],
            '{a TO b]' => [
                new WordToken('{a', 0, '', '{a'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 2),
                new WordToken('TO', 3, '', 'TO'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 5),
                new WordToken('b]', 6, '', 'b]'),
            ],
            '[2017-01-01 TO 2017-01-05]' => [
                new WordToken('[2017-01-01', 0, '', '[2017-01-01'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 11),
                new WordToken('TO', 12, '', 'TO'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 14),
                new WordToken('2017-01-05]', 15, '', '2017-01-05]'),
            ],
            '[20 TO *]' => [
                new WordToken('[20', 0, '', '[20'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 3),
                new WordToken('TO', 4, '', 'TO'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 6),
                new WordToken('*]', 7, '', '*]'),
            ],
            '[* TO 20]' => [
                new WordToken('[*', 0, '', '[*'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 2),
                new WordToken('TO', 3, '', 'TO'),
                new Token(Tokenizer::TOKEN_WHITESPACE, ' ', 5),
                new WordToken('20]', 6, '', '20]'),
            ],
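            // A domain prefix is not special for the Text extractor: it stays
            // part of the word and splits off only before phrases and groups.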
            'domain:domain:' => [new WordToken('domain:domain:', 0, '', 'domain:domain:')],
            'some.domain:some.domain:'
                => [new WordToken('some.domain:some.domain:', 0, '', 'some.domain:some.domain:')],
            'domain:domain:domain:domain' => [
                new WordToken('domain:domain:domain:domain', 0, '', 'domain:domain:domain:domain'),
            ],
            'domain\:' => [new WordToken('domain\:', 0, '', 'domain\:')],
            'domain\::' => [new WordToken('domain\::', 0, '', 'domain\::')],
            'domain:word' => [new WordToken('domain:word', 0, '', 'domain:word')],
            'domain\:word' => [new WordToken('domain\:word', 0, '', 'domain\:word')],
            'domain:"phrase"' => [
                new WordToken('domain:', 0, '', 'domain:'),
                new PhraseToken('"phrase"', 7, '', '"', 'phrase'),
            ],
            'some.domain:"phrase"' => [
                new WordToken('some.domain:', 0, '', 'some.domain:'),
                new PhraseToken('"phrase"', 12, '', '"', 'phrase'),
            ],
            'domain\:"phrase"' => [
                new WordToken('domain\:', 0, '', 'domain\:'),
                new PhraseToken('"phrase"', 8, '', '"', 'phrase'),
            ],
            'domain:(one)' => [
                new WordToken('domain:', 0, '', 'domain:'),
                new GroupBeginToken('(', 7, '(', ''),
                new WordToken('one', 8, '', 'one'),
                new Token(Tokenizer::TOKEN_GROUP_END, ')', 11),
            ],
            'some.domain:(one)' => [
                new WordToken('some.domain:', 0, '', 'some.domain:'),
                new GroupBeginToken('(', 12, '(', ''),
                new WordToken('one', 13, '', 'one'),
                new Token(Tokenizer::TOKEN_GROUP_END, ')', 16),
            ],
        ];
    }


    /**
     * @dataProvider providerForTestTokenize
     *
     * @param string $string
     * @param Token[] $expectedTokens
     */
    public function testTokenize($string, array $expectedTokens): void
    {
        $expectedTokens = $this->getExpectedFixtureWithOverride($string, $expectedTokens);
        parent::testTokenize($string, $expectedTokens);
    }


    /**
     * Returns the override fixture for the given input when one is defined,
     * falling back to the expected tokens from the parent data provider.
     *
     * @param string $string
     * @param Token[] $expectedTokens
     *
     * @return Token[]
     */
    protected function getExpectedFixtureWithOverride($string, array $expectedTokens)
    {
        if (isset(self::$fixtureOverride[$string])) {
            return self::$fixtureOverride[$string];
        }
        return $expectedTokens;
    }


    protected function getTokenExtractor(): AbstractTokenExtractor
    {
        return new Text;
    }

}