Completed
Push — master (25ce4f...b9bc4f) by Thomas, 11:58, created

Tokeniser.set_file() (grade B)

Complexity
  Conditions: 7

Size
  Total Lines: 24
  Code Lines: 22

Duplication
  Lines: 0
  Ratio: 0 %

Importance
  Changes: 0
Metric   Value
cc       7
eloc     22
nop      2
dl       0
loc      24
rs       7.952
c        0
b        0
f        0

(The raw metrics mirror the summary above: cc matches the seven conditions, loc and eloc the total and code line counts, and dl the duplicated lines; nop is presumably the number of parameters, two for set_file(self, data).)
# encoding: utf-8
"""
tokeniser.py

Created by Thomas Mangin on 2015-06-05.
Copyright (c) 2009-2017 Exa Networks. All rights reserved.
License: 3-clause BSD. (See the COPYRIGHT file)
"""

from exabgp.configuration.core.format import tokens
from exabgp.protocol.family import AFI
from collections import deque
from exabgp.vendoring import six

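# Tokeniser turns configuration input (a file, a block of text or a single
# API line) into successive lists of words; the nested Iterator replays the
# words of the current line one call at a time.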
class Tokeniser (object):

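	# Iterator hands out the words of the current line one call at a time:
	# __call__ consumes, peek() looks ahead without consuming, and both
	# return '' once the line is exhausted.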
	class Iterator (object):
		fname = ''  # a single shared value is fine here as the API parser does not use files

		def __init__ (self):
			self.next = deque()
			self.tokens = []
			self.generator = iter([])
			self.announce = True

		def replenish (self, content):
			self.next.clear()
			self.tokens = content
			self.generator = iter(content)
			return self

		def clear (self):
			self.replenish([])
			self.announce = True

		def __call__ (self):
			if self.next:
				return self.next.popleft()

			try:
				return six.next(self.generator)
			except StopIteration:
				return ''

		def peek (self):
			try:
				peeked = six.next(self.generator)
				self.next.append(peeked)
				return peeked
			except StopIteration:
				return ''

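	# _off is the "no input" token source: an already exhausted iterator,
	# installed before any input is set and again after a parsing error.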
	@staticmethod
	def _off ():
		return iter([])

	def __init__ (self, scope, error, logger):
		self.scope = scope
		self.error = error
		self.logger = logger
		self.finished = False
		self.number = 0
		self.line = []
		self.iterate = Tokeniser.Iterator()
		self.end = ''
		self.index_column = 0
		self.index_line = 0
		self.fname = ''
		self.type = 'unset'
		self.afi = AFI.undefined

		self._tokens = Tokeniser._off
		self._next = None
		self._data = None

	def clear (self):
		self.finished = False
		self.number = 0
		self.line = []
		self.iterate.clear()
		self.end = ''
		self.index_column = 0
		self.index_line = 0
		self.fname = ''
		self.type = 'unset'
		if self._data:
			self._set(self._data)

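	# params returns the arguments of the current line as a quoted string,
	# dropping the leading keyword and any trailing brace or semicolon.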
	def params (self):
		if len(self.line) <= 2:
			return ''
		if self.end in ('{','}',';'):
			return "'%s'" % "' '".join(self.line[1:-1])
		return "'%s'" % "' '".join(self.line[1:])

	def _tokenise (self, iterator):
		for parsed in tokens(iterator):
			words = [word for y, x, word in parsed]
			self.line = ''.join(words)
			# ignore # lines
			# set Location information
			yield words

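	# _set installs a token generator and pre-fetches its first line,
	# reporting I/O failures and empty input through self.error.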
	def _set (self, function):
		try:
			self._tokens = function
			self._next = six.next(self._tokens)
		except IOError as exc:
			error = str(exc)
			if error.count(']'):
				self.error.set(error.split(']')[1].strip())
			else:
				self.error.set(error)
			self._tokens = Tokeniser._off
			self._next = []
			return self.error.set('issue setting the configuration parser')
		except StopIteration:
			self._tokens = Tokeniser._off
			self._next = []
			return self.error.set('issue setting the configuration parser, no data')
		return True

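	# set_file reads a configuration file, joining any line that ends with a
	# backslash continuation to the line that follows before tokenising.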
	def set_file (self, data):
		def _source (fname):
			with open(fname, 'r') as fileobject:
				def formatted ():
					line = ''
					for current in fileobject:
						self.index_line += 1
						current = current.rstrip()
						if current.endswith('\\'):
							line += current
							continue
						elif line:
							yield line + current
							line = ''
						else:
							yield current
					if line:
						yield line

				for _ in self._tokenise(formatted()):
					yield _
		self.type = 'file'
		self.Iterator.fname = data
		return self._set(_source(data))

	def set_text (self, data):
		def _source (data):
			for _ in self._tokenise(data.split('\n')):
				yield _
		self.type = 'text'
		return self._set(_source(data))

	def set_api (self, line):
		return self._set(self._tokenise(iter([line])))

	def set_action (self, command):
		if command != 'announce':
			self.iterate.announce = False

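	# __call__ advances to the next line: it returns the tokenised words,
	# records the final word in self.end and primes self.iterate with every
	# word but that trailing terminator.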
	def __call__ (self):
		self.number += 1
		try:
			self.line, self._next = self._next, six.next(self._tokens)
			self.end = self.line[-1]
		except StopIteration:
			if not self.finished:
				self.finished = True
				self.line, self._next = self._next, []
				self.end = self.line[-1]
			else:
				self.line = []
				self.end = ''

		# should we raise a Location if called with no more data ?
		self.iterate.replenish(self.line[:-1])

		return self.line
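
For orientation, a minimal sketch of how the class might be exercised; the _Error stub, the None placeholders for scope and logger, and the sample configuration string are illustrative assumptions, not part of the reviewed code:

	# hypothetical stand-in for the error collaborator, which this listing
	# does not show; it only needs a set() method
	class _Error (object):
		def set (self, message):
			self.message = message
			return False

	tokeniser = Tokeniser(scope=None, error=_Error(), logger=None)
	tokeniser.set_text('neighbor 192.0.2.1 { local-as 65000; }')

	while True:
		line = tokeniser()  # words of the next line; [] once input is exhausted
		if not line:
			break
		first = tokeniser.iterate()  # replay the line's words one call at a time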