| Metric | Value |
|---|---|
| Conditions | 1 |
| Paths | 1 |
| Total Lines | 1645 |
| Code Lines | 1118 |
| Lines | 0 |
| Ratio | 0 % |
| Changes | 1 |
| Bugs | 0 |
| Features | 0 |
Small methods make your code easier to understand, especially when combined with a good name. And if your method is small, finding a good name is usually much easier.

For example, if you find yourself adding comments to a method's body, that is usually a good sign that the commented part should be extracted into a new method, with the comment serving as a starting point for naming it.
Commonly applied refactorings include (see the sketch after this list):

- Extract Method
- If many parameters/temporary variables are present: Replace Method with Method Object
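
As a minimal sketch of the Extract Method idea (the class and method names here are illustrative, not taken from the listing below): a commented block inside a long method becomes a small, well-named private method, and the comment becomes the method name.

```php
<?php declare(strict_types = 1);

class ReportGenerator
{
    /** @param float[] $amounts */
    public function render(array $amounts): string
    {
        // Before the refactoring this was "// compute the total"
        // followed by an inline loop; now the comment is the name.
        return sprintf('Total: %.2f', $this->computeTotal($amounts));
    }

    /** @param float[] $amounts */
    private function computeTotal(array $amounts): float
    {
        $total = 0.0;
        foreach ($amounts as $amount) {
            $total += $amount;
        }

        return $total;
    }
}
```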
```php
<?php declare(strict_types = 1);

// ... (lines 2–659 of the file are not shown in this excerpt) ...

    public function providerForTestQueryCorrected(): array
    {
        // Each data set: [input string, corrected string, token sequence,
        // expected corrected AST, expected corrections].
        return [
            [
                'one"',
                'one',
                [$token1 = new WordToken('one', 0, '', 'one'), $token2 = new Token(Tokenizer::TOKEN_BAILOUT, '"', 3)],
                new Query([new Term($token1)]),
                [new Correction(Parser::CORRECTION_BAILOUT_TOKEN_IGNORED, $token2)],
            ],
            [
                'one AND two AND',
                'one AND two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 4),
                    $token3 = new WordToken('two', 8, '', 'two'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 12),
                ],
                new Query([new LogicalAnd(new Term($token1), new Term($token3), $token2)]),
                [new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_RIGHT_OPERAND_IGNORED, $token4)],
            ],
            [
                'AND one AND two',
                'one AND two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 0),
                    $token2 = new WordToken('one', 4, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 8),
                    $token4 = new WordToken('two', 12, '', 'two'),
                ],
                new Query([new LogicalAnd(new Term($token2), new Term($token4), $token3)]),
                [new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1)],
            ],
            [
                'AND AND one AND AND two',
                'one two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 4),
                    $token3 = new WordToken('one', 8, '', 'one'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 12),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 16),
                    $token6 = new WordToken('two', 20, '', 'two'),
                ],
                new Query([new Term($token3), new Term($token6)]),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token4, $token5),
                ],
            ],
            [
                'OR one OR two',
                'one OR two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 0),
                    $token2 = new WordToken('one', 3, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 7),
                    $token4 = new WordToken('two', 10, '', 'two'),
                ],
                new Query([new LogicalOr(new Term($token2), new Term($token4), $token3)]),
                [new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1)],
            ],
            [
                'OR OR one OR OR two',
                'one two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 3),
                    $token3 = new WordToken('one', 6, '', 'one'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 10),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 13),
                    $token6 = new WordToken('two', 16, '', 'two'),
                ],
                new Query([new Term($token3), new Term($token6)]),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token4, $token5),
                ],
            ],
            [
                'OR OR one OR OR AND two',
                'one two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 3),
                    $token3 = new WordToken('one', 6, '', 'one'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 10),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 13),
                    $token6 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 16),
                    $token7 = new WordToken('two', 20, '', 'two'),
                ],
                new Query([new Term($token3), new Term($token7)]),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                    new Correction(
                        Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED,
                        $token4,
                        $token5,
                        $token6
                    ),
                ],
            ],
            [
                'one OR two AND OR NOT',
                'one OR two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 4),
                    $token3 = new WordToken('two', 7, '', 'two'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 11),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 15),
                    $token6 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 18),
                ],
                new Query([new LogicalOr(new Term($token1), new Term($token3), $token2)]),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token4, $token5),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token6),
                ],
            ],
            [
                'AND OR one AND OR two AND OR three',
                'one two three',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 4),
                    $token3 = new WordToken('one', 7, '', 'one'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 11),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 15),
                    $token6 = new WordToken('two', 18, '', 'two'),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 22),
                    $token8 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 26),
                    $token9 = new WordToken('three', 29, '', 'three'),
                ],
                new Query([new Term($token3), new Term($token6), new Term($token9)]),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token4, $token5),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token7, $token8),
                ],
            ],
            [
                'OR AND one OR AND two OR AND three',
                'one two three',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 3),
                    $token3 = new WordToken('one', 7, '', 'one'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 11),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 14),
                    $token6 = new WordToken('two', 18, '', 'two'),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 22),
                    $token8 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 25),
                    $token9 = new WordToken('three', 29, '', 'three'),
                ],
                new Query([new Term($token3), new Term($token6), new Term($token9)]),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token4, $token5),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token7, $token8),
                ],
            ],
            [
                'one AND NOT AND two',
                'one two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 8),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 12),
                    $token5 = new WordToken('two', 16, '', 'two'),
                ],
                new Query([new Term($token1), new Term($token5)]),
                [
                    new Correction(
                        Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED,
                        $token2,
                        $token3,
                        $token4
                    ),
                ],
            ],
            [
                'one NOT AND two',
                'one two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 8),
                    $token4 = new WordToken('two', 12, '', 'two'),
                ],
                new Query([new Term($token1), new Term($token4)]),
                [new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token2, $token3)],
            ],
            [
                'one NOT AND NOT two',
                'one NOT two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 8),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 12),
                    $token5 = new WordToken('two', 16, '', 'two'),
                ],
                new Query([new Term($token1), new LogicalNot(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token2, $token3)],
            ],
            [
                'one OR NOT OR two',
                'one two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 7),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 11),
                    $token5 = new WordToken('two', 14, '', 'two'),
                ],
                new Query([new Term($token1), new Term($token5)]),
                [
                    new Correction(
                        Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED,
                        $token2,
                        $token3,
                        $token4
                    ),
                ],
            ],
            [
                'one NOT OR two',
                'one two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 8),
                    $token4 = new WordToken('two', 11, '', 'two'),
                ],
                new Query([new Term($token1), new Term($token4)]),
                [new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token2, $token3)],
            ],
            [
                'one NOT OR NOT two',
                'one NOT two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 8),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 11),
                    $token5 = new WordToken('two', 15, '', 'two'),
                ],
                new Query([new Term($token1), new LogicalNot(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token2, $token3)],
            ],
            [
                '(one AND two OR NOT)',
                '(one AND two)',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 5),
                    $token4 = new WordToken('two', 9, '', 'two'),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 13),
                    $token6 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 16),
                    $token7 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 19),
                ],
                new Query([
                    new Group([new LogicalAnd(new Term($token2), new Term($token4), $token3)], $token1, $token7),
                ]),
                [
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token6),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_RIGHT_OPERAND_IGNORED, $token5),
                ],
            ],
            [
                '(AND one OR two)',
                '(one OR two)',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 1),
                    $token3 = new WordToken('one', 5, '', 'one'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 9),
                    $token5 = new WordToken('two', 12, '', 'two'),
                    $token6 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 15),
                ],
                new Query([
                    new Group([new LogicalOr(new Term($token3), new Term($token5), $token4)], $token1, $token6),
                ]),
                [new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2)],
            ],
            [
                'AND (((OR AND one AND NOT OR))) OR NOT',
                '(((one)))',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 0),
                    $token2 = new GroupBeginToken('(', 4, '(', null),
                    $token3 = new GroupBeginToken('(', 5, '(', null),
                    $token4 = new GroupBeginToken('(', 6, '(', null),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 7),
                    $token6 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 10),
                    $token7 = new WordToken('one', 14, '', 'one'),
                    $token8 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 18),
                    $token9 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 22),
                    $token10 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 26),
                    $token11 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 28),
                    $token12 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 29),
                    $token13 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 30),
                    $token14 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 32),
                    $token15 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 35),
                ],
                new Query(
                    [
                        new Group(
                            [new Group([new Group([new Term($token7)], $token4, $token11)], $token3, $token12)],
                            $token2,
                            $token13
                        ),
                    ]
                ),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token5),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token6),
                    new Correction(
                        Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED,
                        $token8,
                        $token9,
                        $token10
                    ),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token15),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_RIGHT_OPERAND_IGNORED, $token14),
                ],
            ],
            [
                'one ()',
                'one',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new GroupBeginToken('(', 4, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 5),
                ],
                new Query([new Term($token1)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token2, $token3)],
            ],
            [
                'one (())',
                'one',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new GroupBeginToken('(', 4, '(', null),
                    $token3 = new GroupBeginToken('(', 5, '(', null),
                    $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 6),
                    $token5 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 7),
                ],
                new Query([new Term($token1)]),
                [
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token3, $token4),
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token2, $token5),
                ],
            ],
            [
                'one AND (()) OR two',
                'one two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 4),
                    $token3 = new GroupBeginToken('(', 8, '(', null),
                    $token4 = new GroupBeginToken('(', 9, '(', null),
                    $token5 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 10),
                    $token6 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 11),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 13),
                    $token8 = new WordToken('two', 16, '', 'two'),
                ],
                new Query([new Term($token1), new Term($token8)]),
                [
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token4, $token5),
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token2, $token3, $token6, $token7),
                ],
            ],
            [
                'one (AND OR NOT)',
                'one',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new GroupBeginToken('(', 4, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 5),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 9),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 12),
                    $token6 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 15),
                ],
                new Query([new Term($token1)]),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token3),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token4),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token5),
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token2, $token6),
                ],
            ],
            [
                'one) (AND)) OR NOT)',
                'one',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 3),
                    $token3 = new GroupBeginToken('(', 5, '(', null),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 6),
                    $token5 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 9),
                    $token6 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 10),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 12),
                    $token8 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 15),
                    $token9 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 18),
                ],
                new Query([new Term($token1)]),
                [
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_RIGHT_DELIMITER_IGNORED, $token9),
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_RIGHT_DELIMITER_IGNORED, $token6),
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_RIGHT_DELIMITER_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token4),
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token3, $token5, $token7),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token8),
                ],
            ],
            [
                '(one( (AND) OR NOT((',
                'one',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new GroupBeginToken('(', 4, '(', null),
                    $token4 = new GroupBeginToken('(', 6, '(', null),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 7),
                    $token6 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 10),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 12),
                    $token8 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 15),
                    $token9 = new GroupBeginToken('(', 18, '(', null),
                    $token10 = new GroupBeginToken('(', 19, '(', null),
                ],
                new Query([new Term($token2)]),
                [
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token10),
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token9),
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token3),
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token5),
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token4, $token6, $token7),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token8),
                ],
            ],
            [
                'OR NOT (one OR two AND OR NOT) OR three AND NOT',
                'NOT (one OR two) OR three',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 3),
                    $token3 = new GroupBeginToken('(', 7, '(', null),
                    $token4 = new WordToken('one', 8, '', 'one'),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 12),
                    $token6 = new WordToken('two', 15, '', 'two'),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 19),
                    $token8 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 23),
                    $token9 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 26),
                    $token10 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 29),
                    $token11 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 31),
                    $token12 = new WordToken('three', 34, '', 'three'),
                    $token13 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 40),
                    $token14 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 44),
                ],
                new Query(
                    [
                        new LogicalOr(
                            new LogicalNot(
                                new Group([
                                    new LogicalOr(new Term($token4), new Term($token6), $token5),
                                ], $token3, $token10),
                                $token2
                            ),
                            new Term($token12),
                            $token11
                        ),
                    ]
                ),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_FOLLOWING_OPERATOR_IGNORED, $token7, $token8),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token9),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token14),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_RIGHT_OPERAND_IGNORED, $token13),
                ],
            ],
            [
                '+ one',
                'one',
                [$token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0), $token2 = new WordToken('one', 2, '', 'one')],
                new Query([new Term($token2)]),
                [new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token1)],
            ],
            [
                '! one',
                'one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 0),
                    $token2 = new WordToken('one', 2, '', 'one'),
                ],
                new Query([new Term($token2)]),
                [new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token1)],
            ],
            [
                '+++one ++two',
                '+one +two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 1),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 2),
                    $token4 = new WordToken('one', 3, '', 'one'),
                    $token5 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 7),
                    $token6 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 8),
                    $token7 = new WordToken('two', 9, '', 'two'),
                ],
                new Query([new Mandatory(new Term($token4), $token3), new Mandatory(new Term($token7), $token6)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token5),
                ],
            ],
            [
                '+one + +AND +++ two',
                '+one AND two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 5),
                    $token4 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 7),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 8),
                    $token6 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 12),
                    $token7 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 13),
                    $token8 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 14),
                    $token9 = new WordToken('two', 16, '', 'two'),
                ],
                new Query([new LogicalAnd(new Mandatory(new Term($token2), $token1), new Term($token9), $token5)]),
                [
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token3),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token4),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token6),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token7),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token8),
                ],
            ],
            [
                '+one + +OR++ +two ++ +',
                '+one OR +two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 5),
                    $token4 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 7),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 8),
                    $token6 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 10),
                    $token7 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 11),
                    $token8 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 13),
                    $token9 = new WordToken('two', 14, '', 'two'),
                    $token10 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 18),
                    $token11 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 19),
                    $token12 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 21),
                ],
                new Query(
                    [
                        new LogicalOr(
                            new Mandatory(new Term($token2), $token1),
                            new Mandatory(new Term($token9), $token8),
                            $token5
                        ),
                    ]
                ),
                [
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token3),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token4),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token6),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token7),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token10),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token11),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token12),
                ],
            ],
            [
                'NOT +one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 4),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token3), $token2)]),
                [new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token1)],
            ],
            [
                '+(+one + +OR++ +two ++ +)',
                '+(+one OR +two)',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 2),
                    $token4 = new WordToken('one', 3, '', 'one'),
                    $token5 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 7),
                    $token6 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 9),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 10),
                    $token8 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 12),
                    $token9 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 13),
                    $token10 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 15),
                    $token11 = new WordToken('two', 16, '', 'two'),
                    $token12 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 20),
                    $token13 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 21),
                    $token14 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 23),
                    $token15 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 24),
                ],
                new Query(
                    [
                        new Mandatory(
                            new Group(
                                [
                                    new LogicalOr(
                                        new Mandatory(new Term($token4), $token3),
                                        new Mandatory(new Term($token11), $token10),
                                        $token7
                                    ),
                                ],
                                $token2,
                                $token15
                            ),
                            $token1
                        ),
                    ]
                ),
                [
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token5),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token6),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token8),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token9),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token12),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token13),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token14),
                ],
            ],
            [
                '- one',
                'one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new WordToken('one', 2, '', 'one'),
                ],
                new Query([new Term($token2)]),
                [new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token1)],
            ],
            [
                '---one --two',
                '-one -two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 1),
                    $token3 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 2),
                    $token4 = new WordToken('one', 3, '', 'one'),
                    $token5 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 7),
                    $token6 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 8),
                    $token7 = new WordToken('two', 9, '', 'two'),
                ],
                new Query([new Prohibited(new Term($token4), $token3), new Prohibited(new Term($token7), $token6)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token5),
                ],
            ],
            [
                '-one - -AND --- two',
                '-one AND two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 5),
                    $token4 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 7),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 8),
                    $token6 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 12),
                    $token7 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 13),
                    $token8 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 14),
                    $token9 = new WordToken('two', 16, '', 'two'),
                ],
                new Query([new LogicalAnd(new Prohibited(new Term($token2), $token1), new Term($token9), $token5)]),
                [
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token3),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token4),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token6),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token7),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token8),
                ],
            ],
            [
                '-one - -OR-- -two -- -',
                '-one OR -two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 5),
                    $token4 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 7),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 8),
                    $token6 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 10),
                    $token7 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 11),
                    $token8 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 13),
                    $token9 = new WordToken('two', 14, '', 'two'),
                    $token10 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 18),
                    $token11 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 19),
                    $token12 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 21),
                ],
                new Query(
                    [
                        new LogicalOr(
                            new Prohibited(new Term($token2), $token1),
                            new Prohibited(new Term($token9), $token8),
                            $token5
                        ),
                    ]
                ),
                [
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token3),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token4),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token6),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token7),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token10),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token11),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token12),
                ],
            ],
            [
                'NOT -one',
                '-one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 4),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new Prohibited(new Term($token3), $token2)]),
                [new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token1)],
            ],
            [
                '-(-one - -OR-- -two --)-',
                '-(-one OR -two)',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 2),
                    $token4 = new WordToken('one', 3, '', 'one'),
                    $token5 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 7),
                    $token6 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 9),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 10),
                    $token8 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 12),
                    $token9 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 13),
                    $token10 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 15),
                    $token11 = new WordToken('two', 16, '', 'two'),
                    $token12 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 20),
                    $token13 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 21),
                    $token15 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 22),
                    $token14 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 23),
                ],
                new Query(
                    [
                        new Prohibited(
                            new Group(
                                [
                                    new LogicalOr(
                                        new Prohibited(new Term($token4), $token3),
                                        new Prohibited(new Term($token11), $token10),
                                        $token7
                                    ),
                                ],
                                $token2,
                                $token15
                            ),
                            $token1
                        ),
                    ]
                ),
                [
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token5),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token6),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token8),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token9),
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token12),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token13),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token14),
                ],
            ],
            [
                '+NOT one',
                'NOT one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 1),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token3), $token2)]),
                [new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token1)],
            ],
            [
                '+AND one',
                'one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 1),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new Term($token3)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                ],
            ],
            [
                '+OR one',
                'one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 1),
                    $token3 = new WordToken('one', 4, '', 'one'),
                ],
                new Query([new Term($token3)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                ],
            ],
            [
                '-NOT one',
                'NOT one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 1),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token3), $token2)]),
                [new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token1)],
            ],
            [
                '-AND one',
                'one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 1),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new Term($token3)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                ],
            ],
            [
                '-OR one',
                'one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 1),
                    $token3 = new WordToken('one', 4, '', 'one'),
                ],
                new Query([new Term($token3)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token1),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_LEFT_OPERAND_IGNORED, $token2),
                ],
            ],
            [
                'NOT (one',
                'NOT one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new GroupBeginToken('(', 4, '(', null),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token3), $token1)]),
                [new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token2)],
            ],
            [
                'NOT (one two',
                'NOT one two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new GroupBeginToken('(', 4, '(', null),
                    $token3 = new WordToken('one', 5, '', 'one'),
                    $token4 = new WordToken('two', 9, '', 'two'),
                ],
                new Query([new LogicalNot(new Term($token3), $token1), new Term($token4)]),
                [new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token2)],
            ],
            [
                '-(one',
                '-one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new WordToken('one', 2, '', 'one'),
                ],
                new Query([new Prohibited(new Term($token3), $token1)]),
                [new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token2)],
            ],
            [
                '-(one two',
                '-one two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new WordToken('one', 2, '', 'one'),
                    $token4 = new WordToken('two', 6, '', 'two'),
                ],
                new Query([new Prohibited(new Term($token3), $token1), new Term($token4)]),
                [new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token2)],
            ],
            [
                '+(one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new WordToken('one', 2, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token3), $token1)]),
                [new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token2)],
            ],
            [
                '+(one two',
                '+one two',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new WordToken('one', 2, '', 'one'),
                    $token4 = new WordToken('two', 6, '', 'two'),
                ],
                new Query([new Mandatory(new Term($token3), $token1), new Term($token4)]),
                [new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token2)],
            ],
            [
                '-(one +(two NOT (three',
                '-one +two NOT three',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new WordToken('one', 2, '', 'one'),
                    $token4 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 6),
                    $token5 = new GroupBeginToken('(', 7, '(', null),
                    $token6 = new WordToken('two', 8, '', 'two'),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 12),
                    $token8 = new GroupBeginToken('(', 16, '(', null),
                    $token9 = new WordToken('three', 17, '', 'three'),
                ],
                new Query(
                    [
                        new Prohibited(new Term($token3), $token1),
                        new Mandatory(new Term($token6), $token4),
                        new LogicalNot(new Term($token9), $token7),
                    ]
                ),
                [
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token8),
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token5),
                    new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token2),
                ],
            ],
            [
                'one AND NOT (two',
                'one AND NOT two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 8),
                    $token4 = new GroupBeginToken('(', 12, '(', null),
                    $token5 = new WordToken('two', 13, '', 'two'),
                ],
                new Query([new LogicalAnd(new Term($token1), new LogicalNot(new Term($token5), $token3), $token2)]),
                [new Correction(Parser::CORRECTION_UNMATCHED_GROUP_LEFT_DELIMITER_IGNORED, $token4)],
            ],
            [
                '(one OR two AND) AND',
                '(one OR two)',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 5),
                    $token4 = new WordToken('two', 8, '', 'two'),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 12),
                    $token6 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 15),
                    $token7 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 17),
                ],
                new Query([
                    new Group([new LogicalOr(new Term($token2), new Term($token4), $token3)], $token1, $token6),
                ]),
                [
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_RIGHT_OPERAND_IGNORED, $token5),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_RIGHT_OPERAND_IGNORED, $token7),
                ],
            ],
            [
                '(one AND NOT +two)',
                '(one AND +two)',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 5),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 9),
                    $token5 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 13),
                    $token6 = new WordToken('two', 14, '', 'two'),
                    $token7 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 17),
                ],
                new Query(
                    [
                        new Group(
                            [new LogicalAnd(new Term($token2), new Mandatory(new Term($token6), $token5), $token3)],
                            $token1,
                            $token7
                        ),
                    ]
                ),
                [new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token4)],
            ],
            [
                '(one AND NOT -two)',
                '(one AND -two)',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 5),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 9),
                    $token5 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 13),
                    $token6 = new WordToken('two', 14, '', 'two'),
                    $token7 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 17),
                ],
                new Query(
                    [
                        new Group(
                            [new LogicalAnd(new Term($token2), new Prohibited(new Term($token6), $token5), $token3)],
                            $token1,
                            $token7
                        ),
                    ]
                ),
                [new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token4)],
            ],
            [
                '(one AND NOT -two three)',
                '(one AND -two three)',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 5),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 9),
                    $token5 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 13),
                    $token6 = new WordToken('two', 14, '', 'two'),
                    $token7 = new WordToken('three', 18, '', 'three'),
                    $token8 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 23),
                ],
                new Query(
                    [
                        new Group(
                            [
                                new LogicalAnd(new Term($token2), new Prohibited(new Term($token6), $token5), $token3),
                                new Term($token7),
                            ],
                            $token1,
                            $token8
                        ),
                    ]
                ),
                [new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token4)],
            ],
            [
                '(one AND NOT +two three)',
                '(one AND +two three)',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new WordToken('one', 1, '', 'one'),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 5),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 9),
                    $token5 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 13),
                    $token6 = new WordToken('two', 14, '', 'two'),
                    $token7 = new WordToken('three', 18, '', 'three'),
                    $token8 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 23),
                ],
                new Query(
                    [
                        new Group(
                            [
                                new LogicalAnd(new Term($token2), new Mandatory(new Term($token6), $token5), $token3),
                                new Term($token7),
                            ],
                            $token1,
                            $token8
                        ),
                    ]
                ),
                [new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token4)],
            ],
            [
                '+()+one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 2),
                    $token4 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 3),
                    $token5 = new WordToken('one', 4, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3)],
            ],
            [
                '+()!one',
                '!one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 2),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 3),
                    $token5 = new WordToken('one', 4, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3)],
            ],
            [
                'one AND +()!two',
                'one !two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 4),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 8),
                    $token4 = new GroupBeginToken('(', 9, '(', null),
                    $token5 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 10),
                    $token6 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 11),
                    $token7 = new WordToken('two', 12, '', 'two'),
                ],
                new Query([new Term($token1), new LogicalNot(new Term($token7), $token6)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token2, $token3, $token4, $token5)],
            ],
            [
                'NOT +()!one',
                '!one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 4),
                    $token3 = new GroupBeginToken('(', 5, '(', null),
                    $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 6),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 7),
                    $token6 = new WordToken('one', 8, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token6), $token5)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3, $token4)],
            ],
            [
                'NOT -()!one',
                '!one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 4),
                    $token3 = new GroupBeginToken('(', 5, '(', null),
                    $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 6),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 7),
                    $token6 = new WordToken('one', 8, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token6), $token5)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3, $token4)],
            ],
            [
                'NOT ++()!one',
                '!one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 4),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 5),
                    $token4 = new GroupBeginToken('(', 6, '(', null),
                    $token5 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 7),
                    $token6 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 8),
                    $token7 = new WordToken('one', 9, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token7), $token6)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token3, $token4, $token5),
                ],
            ],
            [
                'NOT -+()!one',
                '!one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 4),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 5),
                    $token4 = new GroupBeginToken('(', 6, '(', null),
                    $token5 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 7),
                    $token6 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 8),
                    $token7 = new WordToken('one', 9, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token7), $token6)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token3, $token4, $token5),
                    // ... (listing truncated here in the original excerpt) ...
```
||
| 1861 | ], |
||
| 1862 | ], |
||
| 1863 | [ |
||
| 1864 | 'NOT !()!one', |
||
| 1865 | '!one', |
||
| 1866 | [ |
||
| 1867 | $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0), |
||
| 1868 | $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 4), |
||
| 1869 | $token3 = new GroupBeginToken('(', 5, '(', null), |
||
| 1870 | $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 6), |
||
| 1871 | $token5 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 7), |
||
| 1872 | $token6 = new WordToken('one', 8, '', 'one'), |
||
| 1873 | ], |
||
| 1874 | new Query([new LogicalNot(new Term($token6), $token5)]), |
||
| 1875 | [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3, $token4)], |
||
| 1876 | ], |
||
| 1877 | [ |
||
| 1878 | 'NOT +()+()!one', |
||
| 1879 | '!one', |
||
| 1880 | [ |
||
| 1881 | $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0), |
||
| 1882 | $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 4), |
||
| 1883 | $token3 = new GroupBeginToken('(', 5, '(', null), |
||
| 1884 | $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 6), |
||
| 1885 | $token5 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 7), |
||
| 1886 | $token6 = new GroupBeginToken('(', 8, '(', null), |
||
| 1887 | $token7 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 9), |
||
| 1888 | $token8 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 10), |
||
| 1889 | $token9 = new WordToken('one', 11, '', 'one'), |
||
| 1890 | ], |
||
| 1891 | new Query([new LogicalNot(new Term($token9), $token8)]), |
||
| 1892 | [ |
||
| 1893 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3, $token4), |
||
| 1894 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token5, $token6, $token7), |
||
| 1895 | ], |
||
| 1896 | ], |
||
| 1897 | [ |
||
| 1898 | 'NOT NOT +()+()!one', |
||
| 1899 | '!one', |
||
| 1900 | [ |
||
| 1901 | $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0), |
||
| 1902 | $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 4), |
||
| 1903 | $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 8), |
||
| 1904 | $token4 = new GroupBeginToken('(', 9, '(', null), |
||
| 1905 | $token5 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 10), |
||
| 1906 | $token6 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 11), |
||
| 1907 | $token7 = new GroupBeginToken('(', 12, '(', null), |
||
| 1908 | $token8 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 13), |
||
| 1909 | $token9 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 14), |
||
| 1910 | $token10 = new WordToken('one', 15, '', 'one'), |
||
| 1911 | ], |
||
| 1912 | new Query([new LogicalNot(new Term($token10), $token9)]), |
||
| 1913 | [ |
||
| 1914 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3, $token4, $token5), |
||
| 1915 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token6, $token7, $token8), |
||
| 1916 | ], |
||
| 1917 | ], |
||
| 1918 | [ |
||
| 1919 | 'one AND NOT +()+()!two', |
||
| 1920 | 'one !two', |
||
| 1921 | [ |
||
| 1922 | $token1 = new WordToken('one', 0, '', 'one'), |
||
| 1923 | $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 4), |
||
| 1924 | $token3 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 8), |
||
| 1925 | $token4 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 12), |
||
| 1926 | $token5 = new GroupBeginToken('(', 13, '(', null), |
||
| 1927 | $token6 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 14), |
||
| 1928 | $token7 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 15), |
||
| 1929 | $token8 = new GroupBeginToken('(', 16, '(', null), |
||
| 1930 | $token9 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 17), |
||
| 1931 | $token10 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 18), |
||
| 1932 | $token11 = new WordToken('two', 19, '', 'two'), |
||
| 1933 | ], |
||
| 1934 | new Query([new Term($token1), new LogicalNot(new Term($token11), $token10)]), |
||
| 1935 | [ |
||
| 1936 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token2, $token3, $token4, $token5, $token6), |
||
| 1937 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token7, $token8, $token9), |
||
| 1938 | ], |
||
| 1939 | ], |
||
| 1940 | [ |
||
| 1941 | 'one AND NOT NOT +()+()!two', |
||
| 1942 | 'one !two', |
||
| 1943 | [ |
||
| 1944 | $token1 = new WordToken('one', 0, '', 'one'), |
||
| 1945 | $token2 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 4), |
||
| 1946 | $token3 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 8), |
||
| 1947 | $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 12), |
||
| 1948 | $token5 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 16), |
||
| 1949 | $token6 = new GroupBeginToken('(', 17, '(', null), |
||
| 1950 | $token7 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 18), |
||
| 1951 | $token8 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 19), |
||
| 1952 | $token9 = new GroupBeginToken('(', 20, '(', null), |
||
| 1953 | $token10 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 21), |
||
| 1954 | $token11 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 22), |
||
| 1955 | $token12 = new WordToken('two', 23, '', 'two'), |
||
| 1956 | ], |
||
| 1957 | new Query([new Term($token1), new LogicalNot(new Term($token12), $token11)]), |
||
| 1958 | [ |
||
| 1959 | new Correction( |
||
| 1960 | Parser::CORRECTION_EMPTY_GROUP_IGNORED, |
||
| 1961 | $token2, |
||
| 1962 | $token3, |
||
| 1963 | $token4, |
||
| 1964 | $token5, |
||
| 1965 | $token6, |
||
| 1966 | $token7 |
||
| 1967 | ), |
||
| 1968 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token8, $token9, $token10), |
||
| 1969 | ], |
||
| 1970 | ], |
||
| 1971 | [ |
||
| 1972 | 'one -() +() two', |
||
| 1973 | 'one two', |
||
| 1974 | [ |
||
| 1975 | $token1 = new WordToken('one', 0, '', 'one'), |
||
| 1976 | $token2 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 4), |
||
| 1977 | $token3 = new GroupBeginToken('(', 5, '(', null), |
||
| 1978 | $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 6), |
||
| 1979 | $token5 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 8), |
||
| 1980 | $token6 = new GroupBeginToken('(', 9, '(', null), |
||
| 1981 | $token7 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 10), |
||
| 1982 | $token8 = new WordToken('two', 12, '', 'two'), |
||
| 1983 | ], |
||
| 1984 | new Query([new Term($token1), new Term($token8)]), |
||
| 1985 | [ |
||
| 1986 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token2, $token3, $token4), |
||
| 1987 | new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token5, $token6, $token7), |
||
| 1988 | ], |
||
| 1989 | ], |
||
| 1990 | [ |
||
| 1991 | 'one !+ two', |
||
| 1992 | 'one two', |
||
| 1993 | [ |
||
| 1994 | $token1 = new WordToken('one', 0, '', 'one'), |
||
| 1995 | $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 4), |
||
| 1996 | $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 5), |
||
| 1997 | $token4 = new WordToken('two', 7, '', 'two'), |
||
| 1998 | ], |
||
| 1999 | new Query([new Term($token1), new Term($token4)]), |
||
| 2000 | [ |
||
| 2001 | new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2), |
||
| 2002 | new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token3), |
||
| 2003 | ], |
||
| 2004 | ], |
||
            [
                'one +! two',
                'one two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 5),
                    $token4 = new WordToken('two', 7, '', 'two'),
                ],
                new Query([new Term($token1), new Term($token4)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token3),
                ],
            ],
            [
                'one !- two',
                'one two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 4),
                    $token3 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 5),
                    $token4 = new WordToken('two', 7, '', 'two'),
                ],
                new Query([new Term($token1), new Term($token4)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token3),
                ],
            ],
            [
                'one !AND two',
                'one AND two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 5),
                    $token4 = new WordToken('two', 9, '', 'two'),
                ],
                new Query([new LogicalAnd(new Term($token1), new Term($token4), $token3)]),
                [new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2)],
            ],
            [
                'one !OR two',
                'one OR two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 5),
                    $token4 = new WordToken('two', 8, '', 'two'),
                ],
                new Query([new LogicalOr(new Term($token1), new Term($token4), $token3)]),
                [new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2)],
            ],
            [
                'one +! two',
                'one two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 5),
                    $token4 = new WordToken('two', 7, '', 'two'),
                ],
                new Query([new Term($token1), new Term($token4)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token3),
                ],
            ],
            [
                'NOT+ one',
                'NOT one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 3),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token3), $token1)]),
                [new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token2)],
            ],
            [
                'NOT- one',
                'NOT one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 3),
                    $token3 = new WordToken('one', 5, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token3), $token1)]),
                [new Correction(Parser::CORRECTION_UNARY_OPERATOR_MISSING_OPERAND_IGNORED, $token2)],
            ],
            [
                'NOT+one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 3),
                    $token3 = new WordToken('one', 4, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token3), $token2)]),
                [new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token1)],
            ],
            [
                '+()NOT one',
                'NOT one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 2),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 3),
                    $token5 = new WordToken('one', 7, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3)],
            ],
            [
                '-()NOT one',
                'NOT one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_PROHIBITED, '-', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 2),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 3),
                    $token5 = new WordToken('one', 7, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3)],
            ],
            [
                '+()NOT+()one',
                'one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 0),
                    $token2 = new GroupBeginToken('(', 1, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 2),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 3),
                    $token5 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 6),
                    $token6 = new GroupBeginToken('(', 7, '(', null),
                    $token7 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 8),
                    $token8 = new WordToken('one', 9, '', 'one'),
                ],
                new Query([new Term($token8)]),
                [
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3),
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token4, $token5, $token6, $token7),
                ],
            ],
            [
                'NOT()+one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new GroupBeginToken('(', 3, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 4),
                    $token4 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 5),
                    $token5 = new WordToken('one', 6, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3)],
            ],
            [
                'NOT () NOT one',
                'NOT one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new GroupBeginToken('(', 4, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 5),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 7),
                    $token5 = new WordToken('one', 11, '', 'one'),
                ],
                new Query([new LogicalNot(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3)],
            ],
            [
                'NOT () +one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new GroupBeginToken('(', 4, '(', null),
                    $token3 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 5),
                    $token4 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 7),
                    $token5 = new WordToken('one', 8, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token5), $token4)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3)],
            ],
            [
                'NOT +()NOT +one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 4),
                    $token3 = new GroupBeginToken('(', 5, '(', null),
                    $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 6),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 7),
                    $token6 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 11),
                    $token7 = new WordToken('one', 12, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token7), $token6)]),
                [
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3, $token4),
                    new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token5),
                ],
            ],
            [
                'NOT +() NOT +one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 4),
                    $token3 = new GroupBeginToken('(', 5, '(', null),
                    $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 6),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 8),
                    $token6 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 12),
                    $token7 = new WordToken('one', 13, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token7), $token6)]),
                [
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token1, $token2, $token3, $token4),
                    new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token5),
                ],
            ],
            [
                '(+()NOT one)AND',
                '(NOT one)',
                [
                    $token1 = new GroupBeginToken('(', 0, '(', null),
                    $token2 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 1),
                    $token3 = new GroupBeginToken('(', 2, '(', null),
                    $token4 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 3),
                    $token5 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 4),
                    $token6 = new WordToken('one', 8, '', 'one'),
                    $token7 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 11),
                    $token8 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 12),
                ],
                new Query([new Group([new LogicalNot(new Term($token6), $token5)], $token1, $token7)]),
                [
                    new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token2, $token3, $token4),
                    new Correction(Parser::CORRECTION_BINARY_OPERATOR_MISSING_RIGHT_OPERAND_IGNORED, $token8),
                ],
            ],
            [
                'one !NOT two',
                'one NOT two',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 4),
                    $token3 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 5),
                    $token4 = new WordToken('two', 9, '', 'two'),
                ],
                new Query([new Term($token1), new LogicalNot(new Term($token4), $token3)]),
                [new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2)],
            ],
            [
                'NOT NOT +one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 4),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 8),
                    $token4 = new WordToken('one', 9, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token4), $token3)]),
                [
                    new Correction(
                        Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED,
                        $token1,
                        $token2
                    ),
                ],
            ],
            [
                'NOT !+one',
                '+one',
                [
                    $token1 = new Token(Tokenizer::TOKEN_LOGICAL_NOT, 'NOT', 0),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_NOT_2, '!', 4),
                    $token3 = new Token(Tokenizer::TOKEN_MANDATORY, '+', 5),
                    $token4 = new WordToken('one', 6, '', 'one'),
                ],
                new Query([new Mandatory(new Term($token4), $token3)]),
                [
                    new Correction(Parser::CORRECTION_ADJACENT_UNARY_OPERATOR_PRECEDING_OPERATOR_IGNORED, $token2),
                    new Correction(Parser::CORRECTION_LOGICAL_NOT_OPERATORS_PRECEDING_PREFERENCE_IGNORED, $token1),
                ],
            ],
            [
                'one OR two AND () three',
                'one OR two three',
                [
                    $token1 = new WordToken('one', 0, '', 'one'),
                    $token2 = new Token(Tokenizer::TOKEN_LOGICAL_OR, 'OR', 4),
                    $token3 = new WordToken('two', 7, '', 'two'),
                    $token4 = new Token(Tokenizer::TOKEN_LOGICAL_AND, 'AND', 11),
                    $token5 = new GroupBeginToken('(', 15, '(', null),
                    $token6 = new Token(Tokenizer::TOKEN_GROUP_END, ')', 16),
                    $token7 = new WordToken('three', 18, '', 'three'),
                ],
                new Query([new LogicalOr(new Term($token1), new Term($token3), $token2), new Term($token7)]),
                [new Correction(Parser::CORRECTION_EMPTY_GROUP_IGNORED, $token4, $token5, $token6)],
            ],
            // …
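Each data set above follows the same five-element shape: the raw query string, the query string after the parser's corrections are applied, the token sequence produced for the raw string, the expected `Query` root of the resulting syntax tree, and the list of expected `Correction` objects. For orientation, a consuming PHPUnit test method might look roughly like the sketch below; the `TokenSequence` construction, the `$this->parser` fixture, and the `rootNode`/`corrections` properties are assumptions for illustration, not the library's confirmed API.

```php
/**
 * A minimal sketch of a test consuming the data provider above,
 * assuming the five-element data set shape described in the lead-in.
 *
 * @dataProvider providerForTestQueryCorrected
 */
public function testQueryCorrected(
    string $queryString,
    string $correctedQueryString,
    array $tokens,
    Query $expectedQuery,
    array $expectedCorrections
): void {
    // Assumed fixture: a Parser instance prepared in the test's setUp().
    // The expected token sequence is fed to the parser under test.
    $syntaxTree = $this->parser->parse(new TokenSequence($tokens, $queryString));

    // The expected AST and the applied corrections are compared structurally.
    self::assertEquals($expectedQuery, $syntaxTree->rootNode);
    self::assertEquals($expectedCorrections, $syntaxTree->corrections);
}
```

The second element, `$correctedQueryString`, would typically be verified by rendering the corrected syntax tree back to a string; that step is omitted here because the generator used for it is not shown in this listing.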