import re
from typing import ClassVar, Final

from bs4 import BeautifulSoup
from sqlalchemy import (
    Boolean,
    Column,
    Integer,
    MetaData,
    SmallInteger,
    String,
    Table,
    text,
)

from src.kalauz.new_data_processors.common import (
    DataDownloader,
)
from src.kalauz.new_data_processors.common_excel_processors import ExcelProcessorSimple


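# Maps the Hungarian operating-site type labels found in the Kapella export to the
# English identifiers stored in the database.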
def _translate_operating_site_type(operating_site_type: str) -> str:
    dictionary = {
        "állomás": "station",
        "egyéb": "other",
        "elágazás": "spur",
        "eldöntő pont": "decision_point",
        "forgalmi kitérő": "crossover",
        "iparvágány": "industrial_track",
        "iparvágány kiágazás": "industrial_track_spur",
        "keresztezés": "crossing",
        "megálló-elágazóhely": "spur_halt",
        "megálló-rakodóhely": "loading_halt",
        "megállóhely": "halt",
        "megállóhely-iparvágány kiágazás": "industrial_track_spur_halt",
        "nem definiált": "undefined",
        "országhatár": "border_crossing",
        "pályavasúti határpont": "railway_border_crossing",
        "rakodóhely": "loading_point",
        "vágányfonódás-elágazás": "gauntlet_spur",
    }
    return dictionary[operating_site_type]


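# Downloads the current list of operating sites from kapella2.hu as an XLS export
# and upserts it into the `operating_sites` table.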
class OperatingSitesUpdater(ExcelProcessorSimple, DataDownloader):
    TABLE_NAME: ClassVar[str] = "operating_sites"
    database_metadata: ClassVar[MetaData] = MetaData()

    table: ClassVar[Table] = Table(
        TABLE_NAME,
        database_metadata,
        Column(name="name", type_=String(255), nullable=False),
        Column(name="name_shortened", type_=String(255)),
        Column(name="name_short", type_=String(255)),
        Column(name="operator", type_=String(255)),
        Column(name="type", type_=String(255)),
        Column(
            name="code_uic", type_=Integer, nullable=False, index=True, primary_key=True
        ),
        Column(name="code_telegraph", type_=String(4)),
        Column(name="category_passenger", type_=SmallInteger),
        Column(name="category_freight", type_=SmallInteger),
        Column(name="traffic_passenger", type_=Boolean),
        Column(name="traffic_freight", type_=Boolean),
        Column(name="terminus", type_=Boolean),
        Column(name="request_stop", type_=Boolean),
        Column(name="train_meeting", type_=Boolean),
        Column(name="open_to_train_operators", type_=Boolean),
    )

    def __init__(self) -> None:
        super().__init__()

        self.WEBSITE_DOMAIN: Final = "https://www.kapella2.hu"
        self.WEBSITE_URL: Final = (
            f"/ehuszfelulet/szolgalatihelyek?vizsgalt_idopont="
            f"{self.TODAY}&vizsgalt_idoszak_kezdo={self.TODAY}&vizsgalt_idoszak_veg={self.TODAY}"
        )
        self.INFRA_ID: int = NotImplemented
        self.INFRA_ID_URL: str = NotImplemented
        self.XLS_URL: str = NotImplemented

        self._data_to_process = self.get_data(self.WEBSITE_DOMAIN + self.WEBSITE_URL)

        self.logger.info(f"{self.__class__.__name__} initialized!")

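    # The XLS export is not linked directly: it has to be located via the splash page
    # (which yields the infra_id) and the list page (which yields the export URL).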
    def get_data(self, url: str) -> bytes:
        splash_page_soup = self.get_splash_page(url)
        self.get_infra_id(splash_page_soup, url)
        list_page = self.download_list_page(url)
        return self.download_xls_file(list_page)

    def get_splash_page(self, url: str) -> BeautifulSoup:
        splash_page = super().get_data(url)
        splash_page_soup = BeautifulSoup(
            markup=splash_page,
            features="lxml",
        )
        return splash_page_soup

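    # Reads the infra_id from the first <option> of the splash page's
    # `select[name="infra_id"]` element; download_list_page() appends it to the URL.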
    def get_infra_id(self, splash_page_soup: BeautifulSoup, url: str) -> None:
        try:
            select_tag = splash_page_soup.find(
                name="select",
                attrs={"name": "infra_id"},
            )
            if not select_tag:
                self.logger.critical(
                    f"No `select` tag found on the splash page at {url}!"
                )
                raise ValueError
        except ValueError as exception:
            self.logger.critical(exception)
            raise
        # future: report bug (false positive) to mypy developers
        self.INFRA_ID = int(select_tag.find("option")["value"])  # type: ignore

    def download_list_page(self, url: str) -> bytes:
        self.INFRA_ID_URL = f"&infra_id={self.INFRA_ID}"
        list_page = super().get_data(url + self.INFRA_ID_URL)
        return list_page

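    # Scrapes the export link (`/ehuszfelulet/excelexport?id_xls=...`) out of the raw
    # list-page HTML with a regex, then downloads the file it points to.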
    def download_xls_file(self, list_page: bytes) -> bytes:
        self.XLS_URL = re.findall(
            pattern=r"/ehuszfelulet/excelexport\?id_xls=\w+",
            string=str(list_page),
        )[0]
        return super().get_data(self.WEBSITE_DOMAIN + self.XLS_URL)

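    # Renames the Hungarian column headers of the XLS export to the column names used
    # in the `operating_sites` table.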
    def rename_columns_manually(self) -> None:
        # future: report wrong display and copying of hyphen (e.g. Fil'akovo) to pandas and JetBrains developers
        self.data.rename(
            columns={
                "Hosszú név": "name",
                "Rövid név": "name_shortened",
                "Polgári név": "name_short",
                "Társaság": "operator",
                "Szolgálati hely típus": "type",
                "PLC kód": "code_uic",
                "Távíró kód": "code_telegraph",
                "Állomáskategória személyvonatok számára": "category_passenger",
                "Állomáskategória tehervonatok számára": "category_freight",
                "Személy szállításra megnyitva": "traffic_passenger",
                "Áru szállításra megnyitva": "traffic_freight",
                "Menetvonal kezdő/végpontja": "terminus",
                "Feltételes megállás lehetséges": "request_stop",
                "Vonattalálkozásra alkalmas": "train_meeting",
                "Szolg. hely nyílt": "open_to_train_operators",
            },
            inplace=True,
        )

    def correct_data_manually(self) -> None:
        self.data["type"] = self.data["type"].apply(
            lambda x: _translate_operating_site_type(str(x))
        )
        self.replace_code_uic_letters()

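    # In the export, `code_uic` values may start with a two-letter ISO country code;
    # replace those prefixes with the numeric UIC country codes stored in the
    # `countries` table.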
    def replace_code_uic_letters(self) -> None:
        country_codes_iso = ["HU", "AT", "SK", "UA", "RO", "RS", "HR", "SI"]
        for country_code_iso in country_codes_iso:
            country_code_uic = self.get_uic_code(country_code_iso)
            self.data["code_uic"] = self.data["code_uic"].str.replace(
                pat=country_code_iso,
                repl=country_code_uic,
            )

    def get_uic_code(self, country_code_iso: str) -> str:
        with self.database.engine.begin() as connection:
            query = """
            select code_uic
            from countries
            where code_iso = :country_code_iso
            """
            result = connection.execute(
                text(query),
                {"country_code_iso": country_code_iso},
            ).fetchone()

        try:
            assert result is not None
        except AssertionError as exception:
            self.logger.critical(exception)
            raise
        return str(result[0])

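    # The export marks boolean columns with Hungarian words; treat "igen" ("yes") as
    # True and everything else as False.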
    def correct_boolean_values(self) -> None:
        boolean_columns = [
            "traffic_passenger",
            "traffic_freight",
            "terminus",
            "request_stop",
            "train_meeting",
            "open_to_train_operators",
        ]
        for column in boolean_columns:
            self.data[column] = self.data[column].apply(lambda x: x == "igen")

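    # Upserts every row: `insert ignore` adds new operating sites, then the `update`
    # refreshes existing ones, keyed on `code_uic` (MySQL-style SQL).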
    def add_data(self) -> None:
        with self.database.engine.begin() as connection:
            queries = [
                """
                insert ignore into operating_sites (
                    name,
                    name_shortened,
                    name_short,
                    operator,
                    type,
                    code_uic,
                    code_telegraph,
                    category_passenger,
                    category_freight,
                    traffic_passenger,
                    traffic_freight,
                    terminus,
                    request_stop,
                    train_meeting,
                    open_to_train_operators
                )
                values (
                    :name,
                    :name_shortened,
                    :name_short,
                    :operator,
                    :type,
                    :code_uic,
                    :code_telegraph,
                    :category_passenger,
                    :category_freight,
                    :traffic_passenger,
                    :traffic_freight,
                    :terminus,
                    :request_stop,
                    :train_meeting,
                    :open_to_train_operators
                )
                """,
                """
                update operating_sites
                set
                    name = :name,
                    name_shortened = :name_shortened,
                    name_short = :name_short,
                    operator = :operator,
                    type = :type,
                    code_telegraph = :code_telegraph,
                    category_passenger = :category_passenger,
                    category_freight = :category_freight,
                    traffic_passenger = :traffic_passenger,
                    traffic_freight = :traffic_freight,
                    terminus = :terminus,
                    request_stop = :request_stop,
                    train_meeting = :train_meeting,
                    open_to_train_operators = :open_to_train_operators
                where code_uic = :code_uic
                """,
            ]

            for index, row in self.data.iterrows():
                for query in queries:
                    connection.execute(
                        text(query),
                        row.to_dict(),
                    )
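
# A minimal usage sketch (assumption: the ExcelProcessorSimple / DataDownloader base
# classes are responsible for reading the downloaded XLS into `self.data` and for
# driving the steps below; the project's real entry point may differ):
#
#     updater = OperatingSitesUpdater()       # downloads the XLS export on init
#     updater.rename_columns_manually()       # Hungarian headers -> table columns
#     updater.correct_data_manually()         # translate types, normalise code_uic
#     updater.correct_boolean_values()        # "igen" -> True
#     updater.add_data()                      # upsert into `operating_sites`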