backend.fcmapp.ApplicationService.now()   A
last analyzed

Complexity

Conditions 1

Size

Total Lines 3
Code Lines 2

Duplication

Lines 0
Ratio 0 %

Importance

Changes 0
Metric Value
cc 1
eloc 2
nop 1
dl 0
loc 3
rs 10
c 0
b 0
f 0
1
'''Application Service layer for Full Cycle Mining
2
Gateway into most of application functionality'''
3
import sys
4
import os
5
import datetime
6
import logging
7
import json
8
from collections import defaultdict
9
import pika
10
from colorama import init, Fore
11
from sqlalchemy.orm import sessionmaker
12
from sqlalchemy import create_engine
13
import domain.minerstatistics
14
import domain.minerpool
15
from domain.mining import Miner, AvailablePool, MinerStatus
16
from domain.rep import MinerRepository, PoolRepository, LoginRepository, RuleParametersRepository, BaseRepository
17
#from domain.miningrules import RuleParameters
18
from messaging.messages import MinerMessageSchema, ConfigurationMessageSchema
19
from messaging.sensormessages import SensorValueSchema
20
from messaging.schema import MinerSchema, MinerStatsSchema, MinerCurrentPoolSchema
21
from helpers.queuehelper import QueueName, Queue, QueueEntry, QueueType
22
import backend.fcmutils as utils
23
#import backend.fcmcamera
24
from backend.fcmcamera import CameraService
25
from backend.fcmcache import Cache, CacheKeys
26
from backend.fcmbus import Bus
27
from backend.fcmcomponent import ComponentName
28
from backend.fcmservice import BaseService, PoolService, ServiceName, InfrastructureService, Configuration, Telegram
29
from backend.fcmminer import Antminer
30
from backend.fcmsensors import SensorService
31
32
class Component(object):
    '''A component is a unit of execution of FCM'''

    def __init__(self, componentname, option=''):
        '''create the application service layer for the named component'''
        self.app = ApplicationService(component=componentname, option=option)
        #was a queue, now its a channel
        self.listeningqueue = None

    def listen(self):
        '''block listening on the registered channel, if one was set up'''
        if self.listeningqueue:
            self.app.bus.listen(self.listeningqueue)
42
43
class ApplicationService(BaseService):
    '''Application Services'''
    #full path of the running program (sys.argv[0])
    programnamefull = ''
    #base name of the running program
    programname = ''
    component = ComponentName.fullcycle
    loglevel = 0
    #true if user passed in -now command line argument
    isrunnow = False
    #dictionary of queues managed by this app
    #NOTE(review): these class-level mutable attributes are shared across
    #instances — confirm the app is effectively single-instance per process
    _queues = {}
    _channels = []
    #per-level loggers, configured in initlogger()
    __logger = None
    __logger_debug = None
    __logger_error = None

    def __init__(self, component=ComponentName.fullcycle, option=None, announceyourself=False):
        '''Bring up cache, configuration, logging, messaging and services.

        component: which FCM component this process runs as
        option: optional command-line-style override (e.g. '-now')
        announceyourself: broadcast a 'Started' message on the log queue
        '''
        #TODO: call the one in parent before this
        self.homedirectory = os.path.dirname(__file__)
        #order matters: cache and configuration must exist before super().__init__
        self.initcache()
        self.setup_configuration()
        super().__init__(self.configuration, self.__cache)
        #for some reason super fails

        self.component = component
        if self.component == ComponentName.fullcycle:
            self.print('Starting FCM Init')
        self.initargs(option)
        self.startupstuff()
        if self.component == ComponentName.fullcycle:
            self.print('Starting FCM Configuration')
        self.initlogger()
        self.initmessaging()
        #this is slow. should be option to opt out of cache?
        if self.component == ComponentName.fullcycle:
            self.loginfo('Starting FCM Cache')
        self.initbus()
        self.init_application()
        self.init_sensors()

        if announceyourself:
            self.sendqueueitem(QueueEntry(QueueName.Q_LOG, self.stamp('Started {0}'.format(self.component)), QueueType.broadcast))
85
86
    def initargs(self, option):
        '''process command line arguments'''
        if sys.argv:
            self.programnamefull = sys.argv[0]
            self.programname = os.path.basename(self.programnamefull)
        #an explicit command line argument overrides the passed-in option
        firstarg = sys.argv[1] if len(sys.argv) > 1 else option
        if firstarg == '-now':
            self.isrunnow = True
97
98
    def startupstuff(self):
        '''one-time console setup'''
        #used with colorama on windows
        init(autoreset=True)
101
102
    def initcache(self):
        '''start up cache; leaves self.__cache as None when the cache is offline'''
        #guarantee the attribute exists even when cache startup fails, so
        #later reads of self.__cache see None instead of raising AttributeError
        self.__cache = None
        try:
            cachelogin = self.getservice(ServiceName.cache)
            self.__cache = Cache(cachelogin)
        except Exception as ex:
            #cache is offline. try to run in degraded mode
            #NOTE(review): loggers are not configured yet when __init__ calls
            #this, so logexception may itself fail here — confirm
            self.logexception(ex)
110
111
    def startup(self):
        '''prime the caches with configured miners and pools'''
        self.initminercache()
        self.initpoolcache()

    def initbus(self):
        '''start up message bus'''
        login = self.getservice(ServiceName.messagebus)
        self.__bus = Bus(login)

    def init_sensors(self):
        '''create the sensor service (temperature/humidity readings)'''
        self.sensors = SensorService(self.configuration, self.__cache)

    def init_application(self):
        '''wire up application-level services: miner access, telegram, pools, camera'''
        self.antminer = Antminer(self.configuration, self.sshlogin())
        self.telegram = Telegram(self.configuration, self.getservice(ServiceName.telegram))
        self.pools = PoolService(self.configuration, self.__cache)
        self.camera = CameraService(self.configuration, self.__cache)

    @property
    def bus(self):
        '''message bus used for queue and broadcast traffic'''
        return self.__bus

    #@property
    #def cache(self):
    #    return self.__cache

    @property
    def isdebug(self):
        '''True when the interpreter was started with the -d debug flag'''
        return sys.flags.debug

    def setup_configuration(self):
        '''configuration is loaded once at startup'''
        raw = BaseRepository().readrawfile(self.getconfigfilename('config/fullcycle.conf'))
        config = json.loads(raw)

        self.configuration = Configuration(config)
        self.applicationid = self.configuration.get('applicationid')
        self.loglevel = self.configuration.get('loglevel')
149
150
    def initpoolcache(self):
        '''seed the pool cache from config and register every configured pool'''
        if self.__cache.get(CacheKeys.pools) is None:
            spools = PoolRepository().readrawfile(self.getconfigfilename('config/pools.conf'))
            self.__cache.tryputcache(CacheKeys.pools, spools)
        for pool in self.pools.get_all_pools():
            #pool isinstance of Pool
            availablepool = AvailablePool(pool.pool_type, pool, pool.url, pool.user, pool.password, pool.priority)
            minerpool = domain.minerpool.MinerPool(miner=None, priority=0, pool=availablepool)
            self.pools.putpool(pool)
            self.pools.add_pool(minerpool)

    def initminercache(self):
        '''put known miners into cache'''
        if self.__cache.get(CacheKeys.miners) is None:
            sminers = MinerRepository().readrawfile(self.getconfigfilename('config/miners.conf'))
            self.__cache.tryputcache(CacheKeys.miners, sminers)

        for miner in self.miners():
            #status is not persisted yet so init from name
            if miner.is_manually_disabled():
                miner.status = MinerStatus.Disabled
            if self.getminer(miner) is None:
                self.putminer(miner)
173
174
    def initlogger(self):
        '''set up application loggers: info, debug and error, each to its own file'''
        self.__logger = self.setup_logger('fcmapp', 'fcm.log', logging.INFO)

        self.__logger_debug = self.setup_logger('fcmdebug', 'fcm.bug', logging.DEBUG)

        self.__logger_error = self.setup_logger('fcmerror', 'fcm.err', logging.ERROR)
181
182
    def setup_logger(self, logger_name, log_file, level=logging.INFO):
        '''Create (or fetch) a named logger writing to log_file at the given level.

        Only attaches a file handler the first time a given logger name is
        configured, so repeated calls do not stack duplicate handlers.
        (The original also built a StreamHandler that was never attached —
        dead code, removed.)
        '''
        logr = logging.getLogger(logger_name)
        if not logr.handlers:
            formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
            #by default will append. use mode='w' to overwrite
            file_handler = logging.FileHandler(log_file)
            file_handler.setFormatter(formatter)
            logr.addHandler(file_handler)
        logr.setLevel(level)
        return logr
195
196
    def loginfo(self, message):
        '''log informational message and echo it to the console'''
        logmsg = '{0}: {1}'.format(self.programname, message)
        self.__logger.info(logmsg)
        print(message)

    def logerror(self, message):
        '''log error and echo it to the console in red'''
        logmsg = '{0}: {1}'.format(self.programname, message)
        self.__logger_error.error(logmsg)
        print(Fore.RED+logmsg)
207
208
    def logdebug(self, message):
        '''log debug message; no-op unless a non-zero loglevel is configured'''
        if not self.loglevel:
            return
        formatted = '{0}: {1}'.format(self.programname, message)
        self.__logger_debug.debug(formatted)
        print(Fore.GREEN + formatted)
215
216
    def print(self, message):
        '''echo message to screen with a timestamp (intentionally shadows builtin print)'''
        print('{0}: {1}'.format(self.now(), message))

    def now(self):
        '''current time formatted as friendly string'''
        return utils.formattime(datetime.datetime.now())

    #region lookups
    #todo: move to configurations section
    def miners(self):
        '''configured miners, each tagged with the configured customerid'''
        customerid = self.configuration.get('customerid')
        miners = MinerRepository().readminers(self.getconfigfilename('config/miners.conf'))
        for miner in miners:
            miner.customerid = customerid
        return miners
233
234
    def knownminers(self):
        '''for now just return a list of miners
        later should consider returning a list that is easily searched and filtered
        '''
        cached = self.__cache.gethashset(CacheKeys.knownminers)
        if cached:
            #get list of miners from cache
            return utils.deserializelistofstrings(list(cached.values()), MinerSchema())
        #cache empty or unavailable: fall back to the configured miners
        return self.miners()
244
245
    def allminers(self):
        '''combined list of discovered miners and configured miners'''
        combined = self.knownminers()
        seen_keys = {known.key() for known in combined}
        for configured in self.miners():
            if configured.key() not in seen_keys:
                combined.append(configured)
                seen_keys.add(configured.key())
        return combined
253
254
    def minersummary(self, max_number=10):
        '''Return a one-line-per-miner summary of known miners.

        When more than max_number miners are known, collapse the report
        into one line per status group instead.
        (Removed the unused 'summary' mode lookup — its value was never read.)
        '''
        knownminers = self.knownminers()
        if len(knownminers) <= max_number:
            return '\n'.join([m.summary() for m in knownminers])
        groupbystatus = defaultdict(list)
        for miner in knownminers:
            groupbystatus[miner.status].append(miner)
        return '\n'.join(['{0}: {1}'.format(s, self.summary_by_status(s, groupbystatus[s])) for s in groupbystatus])
267
268
    def summary_by_status(self, key, minerlist):
        '''summary text for one status group; online groups also report hash rate'''
        if key == 'online':
            return '{0} miners hash {1}'.format(self.summarize_count(minerlist), self.summarize_hash(minerlist))
        return self.summarize_count(minerlist)

    def summarize_count(self, minerlist):
        '''number of miners in the list'''
        return len(minerlist)

    def summarize_hash(self, minerlist):
        '''total current hash rate across the miners'''
        return sum(miner.minerstats.currenthash for miner in minerlist)

    def addknownminer(self, miner):
        '''add miner to known miners list'''
        val = self.serialize(miner)
        self.__cache.putinhashset(CacheKeys.knownminers, miner.key(), val)

    def updateknownminer(self, miner):
        '''update known miner in cache, merging new values into the cached entry'''
        if miner.is_key_updated:
            #the key changed: drop the stale entry stored under the old key
            self.logdebug('delete cache: {0}'.format(miner.key_original))
            self.__cache.hdel(CacheKeys.knownminers, miner.key_original)
        sminer = self.__cache.getfromhashset(CacheKeys.knownminers, miner.key())
        memminer = utils.deserialize(MinerSchema(), utils.safestring(sminer))
        if memminer is None:
            memminer = miner
        else:
            #merge new values
            memminer.updatefrom(miner)
        val = self.serialize(memminer)
        self.logdebug('put cache: {0}'.format(miner.key()))
        self.__cache.putinhashset(CacheKeys.knownminers, miner.key(), val)
299
300
    def sshlogin(self):
        '''return contents of login file'''
        return self.readlogin('ftp.conf')

    def readlogin(self, filename):
        '''read login file configuration'''
        login = LoginRepository().readlogins(self.getconfigfilename('config/'+filename))
        return login

    def ruleparameters(self):
        '''rules parameters'''
        return RuleParametersRepository().readrules(self.getconfigfilename('config/'+'rules.conf'))
312
313
    def getservice(self, servicename):
        '''get a service by name. should be repository

        Reads config/services.conf and returns the first
        InfrastructureService whose name matches, or None.
        (Removed the dead `svc = None` dummy initializer; parse the file
        directly with json.load instead of loads(read()).)
        '''
        file_name = self.getconfigfilename('config/services.conf')
        with open(file_name, encoding='utf-8-sig') as config_file:
            content = json.load(config_file)
        services = (InfrastructureService(**s) for s in content)
        return next((svc for svc in services if svc.name == servicename), None)
321
322
    def getservice_useroverride(self, servicename):
        '''get a service with its user field overridden to this component name'''
        service = self.getservice(servicename)
        service.user = self.component
        return service
    #endregion lookups
327
328
    def listen(self, qlisten):
        """Goes into listening mode on a queue (blocking).

        Ctrl+C triggers an orderly shutdown; anything else is routed to
        the unhandled-exception handler.
        """
        try:
            self.bus.listen(qlisten)
        except KeyboardInterrupt:
            self.shutdown()
        except BaseException as unhandled:
            #deliberately broad: last-chance handler for the listen loop
            self.unhandledexception(unhandled)
336
337
    def registerqueue(self, qregister: Queue):
        '''register a queue so it can be closed on shutdown'''
        self.logdebug(self.stamp('Registered queue {0}'.format(qregister.queue_name)))
        #membership test on the dict directly (no need for .keys())
        if qregister.queue_name not in self._queues:
            self._queues[qregister.queue_name] = qregister
342
343
    def shutdown(self, exitstatus=0):
        '''shut down app services and exit the process with exitstatus'''
        self.loginfo('Shutting down fcm app...')
        self.close_channels()
        self.closequeues()
        if self.__bus:
            self.bus.close()
        if self.__cache is not None:
            self.__cache.close()
        #terminates the process; this call does not return
        sys.exit(exitstatus)
353
354
    def closequeue(self, thequeue):
        '''close the queue and drop it from the registry; failures are logged'''
        if not thequeue:
            return
        try:
            #the guard above already ensured thequeue is set; the original
            #re-checked `is not None` here redundantly
            self.logdebug(self.stamp('closing queue {0}'.format(thequeue.queue_name)))
            thequeue.close()
            del self._queues[thequeue.queue_name]
        except Exception as ex:
            self.logexception(ex)
365
366
    def closequeues(self):
        '''close a bunch of queues'''
        #iterate a copy of the keys because closequeue mutates the dict
        for k in list(self._queues):
            self.closequeue(self._queues[k])
370
371
    def close_channel(self, chan):
        '''Close a tracked channel and remove it from the tracking list.

        _channels is a list, so removal must use list.remove; the original
        `del self._channels[chan.name]` raised TypeError (list indices must
        be integers), which the except clause silently logged.
        '''
        if not chan:
            return
        try:
            if chan in self._channels:
                self.logdebug(self.stamp('closing channel {0}'.format(chan.name)))
                chan.close()
                self._channels.remove(chan)
        except Exception as ex:
            self.logexception(ex)
381
382
    def close_channels(self):
        '''close all tracked channels'''
        #iterate a copy because close_channel mutates the list; pass the
        #channel itself (the original indexed the list with its own element,
        #which raises TypeError on any non-empty list)
        for chan in list(self._channels):
            self.close_channel(chan)
386
387
    def unhandledexception(self, unhandled):
        '''what to do when there is an exception that app cannot handle'''
        self.logexception(unhandled)

    def exceptionmessage(self, ex):
        '''gets exception message even when it doesnt have one

        Includes the exception type and line number of the currently
        handled exception; call this from within an except block.
        '''
        exc_type, _, exc_tb = sys.exc_info()
        exmsg = getattr(ex, 'message', repr(ex))
        #NOTE(review): exc_tb is None outside an active except block, so
        #tb_lineno would raise — confirm all callers are inside handlers
        return '{0}:{1}:{2}'.format(exc_type, exc_tb.tb_lineno, exmsg)

    def logexception(self, ex):
        '''log an exception'''
        self.logerror(self.exceptionmessage(ex))
400
401
    def sendlog(self, logmessage):
        '''send message to log queue and echo it to the console'''
        item = QueueEntry(QueueName.Q_LOG, logmessage, 'broadcast')
        self.sendqueueitem(item)
        print(logmessage)
406
407
    def subscribe(self, name, callback, no_acknowledge=True, prefetch=1):
        '''subscribe to a queue; returns the channel (caller must listen on it)'''
        chan = self.bus.subscribe(name, callback, no_acknowledge=no_acknowledge, prefetch_count=prefetch)
        print('Waiting for messages on {0}. To exit press CTRL+C'.format(name))
        return chan

    def listen_to_broadcast(self, broadcast_name, callback, no_acknowledge=True):
        '''subscribe to a broadcast exchange and block listening on it'''
        thebroadcast = self.bus.subscribe_broadcast(broadcast_name, callback, no_acknowledge)
        print('Waiting for messages on {0}. To exit press CTRL+C'.format(broadcast_name))
        self.bus.listen(thebroadcast)
        #never returns because listen is blocking
        return thebroadcast
419
420
    def trypublish(self, queue_name, msg: str):
        '''publish a message to the queue; True on success, False when the connection is closed'''
        try:
            self.bus.publish(queue_name, msg)
            return True
        except pika.exceptions.ConnectionClosed as ex:
            #only connection loss is tolerated; other errors propagate
            logmessage = 'Error publishing to {0} {1}'.format(queue_name, self.exceptionmessage(ex))
            self.logerror(logmessage)
            return False

    def trybroadcast(self, exchange_name, msg):
        '''broadcast a message to all queue listeners; True on success'''
        try:
            self.bus.broadcast(exchange_name, msg)
            return True
        except pika.exceptions.ConnectionClosed as conxex:
            self.logerror('Error broadcasting to {0} {1}'.format(exchange_name, self.exceptionmessage(conxex)))
            return False
438
439
    def putminer(self, miner: Miner):
        '''put miner in cache (skipped for miners without a key or unknown miners)'''
        if miner and miner.key() and not miner.is_unknown:
            valu = self.serialize(miner)
            self.__cache.tryputcache('miner.{0}'.format(miner.key()), valu)

    def getminer(self, miner: Miner) -> Miner:
        '''strategies for getting miner from cache
        originally was key=miner.name but that was not good
        changed to key='miner.'+minerid
        '''
        #NOTE(review): uses self.cache (presumably provided by BaseService)
        #rather than the private __cache used elsewhere — confirm
        valu = self.cache.trygetvaluefromcache('miner.{0}'.format(miner.key()))
        if valu is None:
            return None
        minerfromstore = utils.deserialize(MinerSchema(), utils.safestring(valu))
        if not minerfromstore.key():
            #do not allow entry with no key
            return None
        #mark the entity as having come from the in-memory store
        minerfromstore.store = 'mem'
        return minerfromstore

    def getknownminer(self, miner: Miner) -> Miner:
        '''get a known miner'''
        return self.getknownminerbykey(miner.key())
463
464
    def getminerbyname(self, minername):
        '''first configured miner with the given name, or None'''
        return next((candidate for candidate in self.miners() if candidate.name == minername), None)
468
469
    def getknownminerbykey(self, minername):
        '''deserialize a known miner from the cache hashset; None when absent'''
        str_miner = self.__cache.getfromhashset(CacheKeys.knownminers, minername)
        if str_miner is None:
            return None
        return utils.deserialize(MinerSchema(), utils.safestring(str_miner))
474
475
    def getknownminerbyname(self, minername):
        '''this could be slow if there are lots of miners'''
        return next((candidate for candidate in self.knownminers() if candidate.name == minername), None)
482
483
    def putminerandstats(self, miner: Miner, minerstats, minerpool):
        '''put miner and status in cache'''
        self.putminer(miner)
        schema = MinerStatsSchema()
        valstats = schema.dumps(minerstats).data
        self.__cache.tryputcache(miner.key() + '.stats', valstats)
        schema = MinerCurrentPoolSchema()
        valpool = schema.dumps(minerpool).data
        self.__cache.tryputcache(miner.key() + '.pool', valpool)

    def getstats(self, miner: Miner):
        '''get stats entity

        NOTE(review): reads by miner.name while putminerandstats writes by
        miner.key() — confirm these are identical for all miners
        '''
        valu = self.cache.trygetvaluefromcache(miner.name + '.stats')
        if valu is None: return None
        entity = domain.minerstatistics.MinerStatistics(miner, **utils.deserialize(MinerStatsSchema(), valu))
        return entity

    def getminerstatscached(self):
        '''iterator over cached state: yields (miner, stats, pool) per configured miner'''
        for miner in self.miners():
            yield (self.getminer(miner), self.getstats(miner), self.pools.getpool(miner))
504
505
    def messagedecodeminer(self, body) -> Miner:
        '''deserialize a miner message and return just the contained miner entity'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = MinerMessageSchema()
        minermessage_dict = schema.load(message_envelope.bodyjson()).data
        minermessage_entity = schema.make_minermessage(minermessage_dict)
        miner = minermessage_entity.miner
        return miner

    def messagedecodeminerstats(self, body):
        '''deserialize miner stats; returns the full miner message entity'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = MinerMessageSchema()
        minermessage_dict = schema.load(message_envelope.bodyjson()).data
        minermessage_entity = schema.make_minermessage(minermessage_dict)
        return minermessage_entity
521
522
    def messagedecodeminercommand(self, body):
        '''deserialize  miner command

        The wire format is identical to a miner stats message, so delegate
        to messagedecodeminerstats instead of duplicating the
        envelope/schema decoding a third time.
        '''
        return self.messagedecodeminerstats(body)
529
530
    def messagedecodesensor(self, body):
        '''deserialize sensor value; returns (envelope, sensor entity)'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = SensorValueSchema()
        entity = schema.load(message_envelope.bodyjson()).data
        return message_envelope, entity

    def messagedecode_configuration(self, body):
        '''deserialize  configuration command'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = ConfigurationMessageSchema()
        configurationmessage_dict = schema.load(message_envelope.bodyjson()).data
        configurationmessage_entity = schema.make_configurationmessage(configurationmessage_dict)
        return configurationmessage_entity
545
546
    def createmessagestats(self, miner, minerstats, minerpool):
        '''serialize a miner stats message envelope'''
        #always save the miner so the next guy can get latest changes
        #only put in cache if it came from cache
        if miner.store == 'mem':
            self.putminer(miner)
        message = super().createmessageenvelope()
        message = message.make_minerstats(miner, minerstats, minerpool)
        return super().serializemessageenvelope(message)

    def createmessagecommand(self, miner, command):
        '''create message command'''
        if miner.store == 'mem':
            self.putminer(miner)
        message = super().createmessageenvelope()
        message = message.make_minercommand(miner, command)
        return super().serializemessageenvelope(message)

    def messageencode(self, miner: Miner):
        '''command is optional, however should convert this call into minercommand'''
        #always save the miner so the next guy can get latest changes
        if miner.store == 'mem':
            self.putminer(miner)
        message = super().createmessageenvelope()
        message = message.make_minerstats(miner, minerstats=None, minerpool=None)
        #NOTE(review): _schemamsg is presumably supplied by BaseService — confirm
        return self._schemamsg.dumps(message).data
571
572
    def stamp(self, message):
        '''prefix a message with the current time and application id'''
        return '{0}:{1}: {2}'.format(self.now(), self.applicationid, message)

    def alert(self, message):
        '''send alert message as a broadcast on the alert queue'''
        return self.sendqueueitem(QueueEntry(QueueName.Q_ALERT, self.stamp(message), QueueType.broadcast))

    def send(self, q_name, message):
        '''send message to queue; returns True on success'''
        success = self.trypublish(q_name, message)
        return success
583
584
    def enqueue(self, queuelist):
        '''send a list of queue messages; None or empty lists are ignored'''
        if queuelist is None:
            return
        if not queuelist.hasentries():
            return
        #todo: group by queuename
        for entry in queuelist.entries:
            self.sendqueueitem(entry)

    def sendqueueitem(self, entry):
        '''send one queue item; broadcast entries go to all listeners'''
        if entry.eventtype == 'broadcast':
            send_result = self.trybroadcast(entry.queuename, entry.message)
            return send_result
        return self.send(entry.queuename, entry.message)
600
601
    def readtemperature(self):
        '''read temperature and humidity, publish both, return (temp, humid)'''
        temp, humid = self.sensors.readtemperature()
        self.sendsensor(temp)
        self.sendsensor(humid)
        return temp, humid

    def sendsensor(self, reading):
        '''broadcast a single sensor reading; no-op for falsy readings'''
        if not reading:
            return
        message = super().createmessageenvelope()
        sensorjson = message.jsonserialize(SensorValueSchema(), reading)
        self.sendqueueitem(QueueEntry(QueueName.Q_SENSOR, super().serializemessageenvelope(message.make_any('sensorvalue', sensorjson)), QueueType.broadcast))
613
614
    def getsession(self):
        '''create a new SQLAlchemy session against the configured database'''
        service = self.getservice(ServiceName.database)
        #a fresh engine per call; callers are responsible for closing the session
        engine = create_engine(service.connection, echo=False)
        Session = sessionmaker(bind=engine)
        return Session()
619
620
    def log_mineractivity(self, minerlog):
        '''Persist one miner activity record; returns True on success.

        Failures are logged and reported as False rather than raised.
        Fixes: the session is now always closed (the original leaked the
        connection), and the catch is narrowed from BaseException (which
        swallowed KeyboardInterrupt/SystemExit) to Exception.
        '''
        try:
            session = self.getsession()
            try:
                session.add(minerlog)
                session.commit()
                return True
            finally:
                #always release the session/connection, even when commit fails
                session.close()
        except Exception as ex:
            self.logexception(ex)
        return False
629
630
    def save_miner(self, miner: Miner):
        '''add a newly discovered miner, or merge updates into an existing one'''
        found = self.getknownminer(miner)
        if found is None:
            self.addknownminer(miner)
            #todo:add the miner to the json config
        else:
            found.updatefrom(miner)
            self.putminer(found)
639
640
def main():
    '''entry point when the module is run as a script'''
    service = ApplicationService()
    service.loginfo('Full Cycle was run in a script')
    service.shutdown()
644
645
#run main only when executed directly, not on import
if __name__ == "__main__":
    main()
647