Passed
Push — master (93c134...fbe99c) · by Dave · 01:10 · created

ApplicationService.messagedecodesensor() — rating: A

Complexity: Conditions 1
Size: Total Lines 7, Code Lines 5
Duplication: Lines 0, Ratio 0%
Importance: Changes 0
Metric Value
cc 1
eloc 5
nop 2
dl 0
loc 7
rs 10
c 0
b 0
f 0
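
The graded operation, messagedecodesensor(), is defined near the end of the listing below. As a minimal consumer-side sketch (not part of the analyzed file): the module path backend.fcmapp and the pika-style callback signature are assumptions, while ApplicationService, ComponentName.fullcycle, QueueName.Q_SENSOR and listen_to_broadcast() all appear in the code.

#Hypothetical sketch: consume the sensor broadcast and decode each message
#with ApplicationService.messagedecodesensor().
from backend.fcmapp import ApplicationService   #assumed module path
from backend.fcmcomponent import ComponentName
from helpers.queuehelper import QueueName

app = ApplicationService(component=ComponentName.fullcycle)

def on_sensor(channel, method, properties, body):
    #messagedecodesensor returns the message envelope and the deserialized sensor value
    envelope, sensorvalue = app.messagedecodesensor(body)
    app.loginfo('sensor reading received: {0}'.format(sensorvalue))

#blocks; sensor readings are broadcast on Q_SENSOR by sendsensor()
app.listen_to_broadcast(QueueName.Q_SENSOR, on_sensor)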
'''Application Service layer for Full Cycle Mining
Gateway into most of application functionality'''
import sys
import os
import datetime
import logging
import json
from collections import defaultdict
import pika
from colorama import init, Fore
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
import domain.minerstatistics
import domain.minerpool
from domain.mining import Miner, AvailablePool, MinerStatus
from domain.rep import MinerRepository, PoolRepository, LoginRepository, RuleParametersRepository, BaseRepository
#from domain.miningrules import RuleParameters
from messaging.messages import MinerMessageSchema, ConfigurationMessageSchema
from messaging.sensormessages import SensorValueSchema
from messaging.schema import MinerSchema, MinerStatsSchema, MinerCurrentPoolSchema
from helpers.queuehelper import QueueName, Queue, QueueEntry, QueueType
import backend.fcmutils as utils
#import backend.fcmcamera
from backend.fcmcamera import CameraService
from backend.fcmcache import Cache, CacheKeys
from backend.fcmbus import Bus
from backend.fcmcomponent import ComponentName
from backend.fcmservice import BaseService, PoolService, ServiceName, InfrastructureService, Configuration, Telegram
from backend.fcmminer import Antminer
from backend.fcmsensors import SensorService

class Component(object):
    '''A component is a unit of execution of FCM'''
    def __init__(self, componentname, option=''):
        self.app = ApplicationService(component=componentname, option=option)
        #was a queue, now it's a channel
        self.listeningqueue = None

    def listen(self):
        if self.listeningqueue:
            self.app.bus.listen(self.listeningqueue)

class ApplicationService(BaseService):
    '''Application Services'''
    programnamefull = ''
    programname = ''
    component = ComponentName.fullcycle
    loglevel = 0
    #true if user passed in -now command line argument
    isrunnow = False
    #dictionary of queues managed by this app
    _queues = {}
    #channels keyed by channel name (close_channel deletes by chan.name)
    _channels = {}
    #application loggers
    __logger = None
    __logger_debug = None
    __logger_error = None

    def __init__(self, component=ComponentName.fullcycle, option=None, announceyourself=False):
        #TODO: call the one in parent before this
        self.homedirectory = os.path.dirname(__file__)
        self.initcache()
        self.setup_configuration()
        super().__init__(self.configuration, self.__cache)
        #for some reason super fails

        self.component = component
        if self.component == ComponentName.fullcycle:
            self.print('Starting FCM Init')
        self.initargs(option)
        self.startupstuff()
        if self.component == ComponentName.fullcycle:
            self.print('Starting FCM Configuration')
        self.initlogger()
        self.initmessaging()
        #this is slow. should be option to opt out of cache?
        if self.component == ComponentName.fullcycle:
            self.loginfo('Starting FCM Cache')
        self.initbus()
        self.init_application()
        self.init_sensors()

        if announceyourself:
            self.sendqueueitem(QueueEntry(QueueName.Q_LOG, self.stamp('Started {0}'.format(self.component)), QueueType.broadcast))

    def initargs(self, option):
        '''process command line arguments'''
        if sys.argv:
            self.programnamefull = sys.argv[0]
            self.programname = os.path.basename(self.programnamefull)
        firstarg = option
        if len(sys.argv) > 1:
            firstarg = sys.argv[1]
        if firstarg is not None:
            if firstarg == '-now':
                self.isrunnow = True

    def startupstuff(self):
        #used with colorama on windows
        init(autoreset=True)

    def initcache(self):
        '''start up cache'''
        try:
            cachelogin = self.getservice(ServiceName.cache)
            self.__cache = Cache(cachelogin)
        except Exception as ex:
            #cache is offline. try to run in degraded mode
            self.logexception(ex)

    def startup(self):
        self.initminercache()
        self.initpoolcache()

    def initbus(self):
        '''start up message bus'''
        login = self.getservice(ServiceName.messagebus)
        self.__bus = Bus(login)

    def init_sensors(self):
        self.sensors = SensorService(self.configuration, self.__cache)

    def init_application(self):
        self.antminer = Antminer(self.configuration, self.sshlogin())
        self.telegram = Telegram(self.configuration, self.getservice(ServiceName.telegram))
        self.pools = PoolService(self.configuration, self.__cache)
        self.camera = CameraService(self.configuration, self.__cache)

    @property
    def bus(self):
        return self.__bus

    #@property
    #def cache(self):
    #    return self.__cache

    @property
    def isdebug(self):
        return sys.flags.debug

    def setup_configuration(self):
        '''configuration is loaded once at startup'''
        raw = BaseRepository().readrawfile(self.getconfigfilename('config/fullcycle.conf'))
        config = json.loads(raw)

        self.configuration = Configuration(config)
        self.applicationid = self.configuration.get('applicationid')
        self.loglevel = self.configuration.get('loglevel')

    def initpoolcache(self):
        if self.__cache.get(CacheKeys.pools) is None:
            spools = PoolRepository().readrawfile(self.getconfigfilename('config/pools.conf'))
            self.__cache.tryputcache(CacheKeys.pools, spools)
        for pool in self.pools.get_all_pools():
            #pool isinstance of Pool
            availablepool = AvailablePool(pool.pool_type, pool, pool.url, pool.user, pool.password, pool.priority)
            minerpool = domain.minerpool.MinerPool(miner=None, priority=0, pool=availablepool)
            self.pools.putpool(pool)
            self.pools.add_pool(minerpool)

    def initminercache(self):
        '''put known miners into cache'''
        if self.__cache.get(CacheKeys.miners) is None:
            sminers = MinerRepository().readrawfile(self.getconfigfilename('config/miners.conf'))
            self.__cache.tryputcache(CacheKeys.miners, sminers)

        for miner in self.miners():
            #status is not persisted yet so init from name
            if miner.is_manually_disabled():
                miner.status = MinerStatus.Disabled
            if self.getminer(miner) is None:
                self.putminer(miner)

    def initlogger(self):
        '''set up logging application info'''
        self.__logger = self.setup_logger('fcmapp', 'fcm.log', logging.INFO)

        self.__logger_debug = self.setup_logger('fcmdebug', 'fcm.bug', logging.DEBUG)

        self.__logger_error = self.setup_logger('fcmerror', 'fcm.err', logging.ERROR)

    def setup_logger(self, logger_name, log_file, level=logging.INFO):
        '''start logger'''
        logr = logging.getLogger(logger_name)
        formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        #by default will append. use mode='w' to overwrite
        file_handler = logging.FileHandler(log_file)
        file_handler.setFormatter(formatter)
        logr.addHandler(file_handler)
        #note: stream_handler is created but never attached; console output goes through print()
        stream_handler = logging.StreamHandler()
        stream_handler.setFormatter(formatter)
        logr.setLevel(level)
        return logr

    def loginfo(self, message):
        '''log informational message'''
        logmsg = '{0}: {1}'.format(self.programname, message)
        self.__logger.info(logmsg)
        print(message)

    def logerror(self, message):
        '''log error'''
        logmsg = '{0}: {1}'.format(self.programname, message)
        self.__logger_error.error(logmsg)
        print(Fore.RED+logmsg)

    def logdebug(self, message):
        '''log debug message'''
        if not self.loglevel or self.loglevel == 0:
            return
        logmsg = '{0}: {1}'.format(self.programname, message)
        self.__logger_debug.debug(logmsg)
        print(Fore.GREEN+logmsg)

    def print(self, message):
        '''echo message to screen'''
        print('{0}: {1}'.format(self.now(), message))

    def now(self):
        '''current time formatted as friendly string'''
        return utils.formattime(datetime.datetime.now())

    #region lookups
    #todo: move to configurations section
    def miners(self):
        '''configured miners'''
        miners = MinerRepository().readminers(self.getconfigfilename('config/miners.conf'))
        return miners

    def knownminers(self):
        '''for now just return a list of miners
        later should consider returning a list that is easily searched and filtered
        '''
        dknownminers = self.__cache.gethashset(CacheKeys.knownminers)
        if dknownminers is not None and dknownminers:
            #get list of miners from cache
            return utils.deserializelistofstrings(list(dknownminers.values()), MinerSchema())
        knownminers = self.miners()
        return knownminers

    def allminers(self):
        '''combined list of discovered miners and configured miners'''
        allminers = self.knownminers()
        for miner in self.miners():
            foundminer = [x for x in allminers if x.key() == miner.key()]
            if not foundminer:
                allminers.append(miner)
        return allminers

    def minersummary(self, max_number=10):
        '''show a summary of known miners
        '''
        mode = self.configuration.get('summary')
        if not mode:
            mode = 'auto'
        knownminers = self.knownminers()
        if len(knownminers) <= max_number:
            return '\n'.join([m.summary() for m in knownminers])
        groupbystatus = defaultdict(list)
        for miner in knownminers:
            groupbystatus[miner.status].append(miner)
        return '\n'.join(['{0}: {1}'.format(s, self.summary_by_status(s, groupbystatus[s])) for s in groupbystatus])

    def summary_by_status(self, key, minerlist):
        if key == 'online':
            return '{0} miners hash {1}'.format(self.summarize_count(minerlist), self.summarize_hash(minerlist))
        return self.summarize_count(minerlist)

    def summarize_count(self, minerlist):
        return len(minerlist)

    def summarize_hash(self, minerlist):
        return sum(miner.minerstats.currenthash for miner in minerlist)

    def addknownminer(self, miner):
        '''add miner to known miners list'''
        val = self.serialize(miner)
        self.__cache.putinhashset(CacheKeys.knownminers, miner.key(), val)

    def updateknownminer(self, miner):
        '''update known miner in cache'''
        sminer = self.__cache.getfromhashset(CacheKeys.knownminers, miner.key())
        memminer = utils.deserialize(MinerSchema(), utils.safestring(sminer))
        if memminer is None:
            memminer = miner
        else:
            #merge new values
            memminer.updatefrom(miner)
        val = self.serialize(memminer)
        self.__cache.putinhashset(CacheKeys.knownminers, miner.key(), val)

    def sshlogin(self):
        '''return contents of login file'''
        return self.readlogin('ftp.conf')

    def readlogin(self, filename):
        '''read login file configuration'''
        login = LoginRepository().readlogins(self.getconfigfilename('config/'+filename))
        return login

    def ruleparameters(self):
        '''rules parameters'''
        return RuleParametersRepository().readrules(self.getconfigfilename('config/'+'rules.conf'))

    def getservice(self, servicename):
        '''get a service by name. should be repository'''
        file_name = self.getconfigfilename('config/services.conf')
        with open(file_name, encoding='utf-8-sig') as config_file:
            content = json.loads(config_file.read())
        svc = None #dummy initializer to make scrutinize happy
        services = [InfrastructureService(**s) for s in content]
        return next((svc for svc in services if svc.name == servicename), None)

    def getservice_useroverride(self, servicename):
        service = self.getservice(servicename)
        service.user = self.component
        return service
    #endregion lookups

    def listen(self, qlisten):
        """Goes into listening mode on a queue"""
        try:
            self.bus.listen(qlisten)
        except KeyboardInterrupt:
            self.shutdown()
        except BaseException as unhandled:
            self.unhandledexception(unhandled)

    def registerqueue(self, qregister: Queue):
        '''register a queue'''
        self.logdebug(self.stamp('Registered queue {0}'.format(qregister.queue_name)))
        if qregister.queue_name not in self._queues.keys():
            self._queues[qregister.queue_name] = qregister

    def shutdown(self, exitstatus=0):
        '''shut down app services'''
        self.loginfo('Shutting down fcm app...')
        self.close_channels()
        self.closequeues()
        if self.__bus:
            self.bus.close()
        if self.__cache is not None:
            self.__cache.close()
        sys.exit(exitstatus)

    def closequeue(self, thequeue):
        '''close the queue'''
        if not thequeue:
            return
        try:
            if thequeue is not None:
                self.logdebug(self.stamp('closing queue {0}'.format(thequeue.queue_name)))
                thequeue.close()
            del self._queues[thequeue.queue_name]
        except Exception as ex:
            self.logexception(ex)

    def closequeues(self):
        '''close a bunch of queues'''
        for k in list(self._queues):
            self.closequeue(self._queues[k])

    def close_channel(self, chan):
        if not chan:
            return
        try:
            if chan.name in self._channels:
                self.logdebug(self.stamp('closing channel {0}'.format(chan.name)))
                chan.close()
                del self._channels[chan.name]
        except Exception as ex:
            self.logexception(ex)

    def close_channels(self):
        '''close all channels'''
        for chan in list(self._channels):
            self.close_channel(self._channels[chan])

381
        '''what to do when there is an exception that app cannot handle'''
382
        self.logexception(unhandled)
383
384
    def exceptionmessage(self, ex):
385
        '''gets exception message even when it doesnt have one'''
386
        exc_type, _, exc_tb = sys.exc_info()
387
        exmsg = getattr(ex, 'message', repr(ex))
388
        return '{0}:{1}:{2}'.format(exc_type, exc_tb.tb_lineno, exmsg)
389
390
    def logexception(self, ex):
391
        '''log an exception'''
392
        self.logerror(self.exceptionmessage(ex))
393
394
    def sendlog(self, logmessage):
395
        '''send message to log queue'''
396
        item = QueueEntry(QueueName.Q_LOG, logmessage, 'broadcast')
397
        self.sendqueueitem(item)
398
        print(logmessage)
399
400
    def subscribe(self, name, callback, no_acknowledge=True, prefetch=1):
401
        '''subscribe to a queue'''
402
        chan = self.bus.subscribe(name, callback, no_acknowledge=no_acknowledge, prefetch_count=prefetch)
403
        print('Waiting for messages on {0}. To exit press CTRL+C'.format(name))
404
        return chan
405
406
    def listen_to_broadcast(self, broadcast_name, callback, no_acknowledge=True):
407
        thebroadcast = self.bus.subscribe_broadcast(broadcast_name, callback, no_acknowledge)
408
        print('Waiting for messages on {0}. To exit press CTRL+C'.format(broadcast_name))
409
        self.bus.listen(thebroadcast)
410
        #never returns becuase listen is blocking
411
        return thebroadcast
412
413
    def trypublish(self, queue_name, msg: str):
414
        '''publish a message to the queue'''
415
        try:
416
            self.bus.publish(queue_name, msg)
417
            return True
418
        except pika.exceptions.ConnectionClosed as ex:
419
            logmessage = 'Error publishing to {0} {1}'.format(queue_name, self.exceptionmessage(ex))
420
            self.logerror(logmessage)
421
            return False
422
423
    def trybroadcast(self, exchange_name, msg):
424
        '''broadcast a message to all queue listeners'''
425
        try:
426
            self.bus.broadcast(exchange_name, msg)
427
            return True
428
        except pika.exceptions.ConnectionClosed as conxex:
429
            self.logerror('Error broadcasting to {0} {1}'.format(exchange_name, self.exceptionmessage(conxex)))
430
            return False
431
432
    def putminer(self, miner: Miner):
        '''put miner in cache'''
        if miner and miner.key() and not miner.is_unknown:
            valu = self.serialize(miner)
            self.__cache.tryputcache('miner.{0}'.format(miner.key()), valu)

    def getminer(self, miner: Miner) -> Miner:
        '''strategies for getting miner from cache
        originally was key=miner.name but that was not good
        changed to key='miner.'+minerid
        '''
        valu = self.cache.trygetvaluefromcache('miner.{0}'.format(miner.key()))
        if valu is None:
            return None
        minerfromstore = utils.deserialize(MinerSchema(), utils.safestring(valu))
        if not minerfromstore.key():
            #do not allow entry with no key
            return None
        minerfromstore.store = 'mem'
        return minerfromstore

    def getknownminer(self, miner: Miner) -> Miner:
        '''get a known miner'''
        return self.getknownminerbykey(miner.key())

    def getminerbyname(self, minername):
        filtered = [x for x in self.miners() if x.name == minername]
        if filtered: return filtered[0]
        return None

    def getknownminerbykey(self, minername):
        str_miner = self.__cache.getfromhashset(CacheKeys.knownminers, minername)
        if str_miner is None:
            return None
        return utils.deserialize(MinerSchema(), utils.safestring(str_miner))

    def getknownminerbyname(self, minername):
        '''this could be slow if there are lots of miners'''
        known = self.knownminers()
        for miner in known:
            if miner.name == minername:
                return miner
        return None

    def putminerandstats(self, miner: Miner, minerstats, minerpool):
        '''put miner and stats in cache'''
        self.putminer(miner)
        schema = MinerStatsSchema()
        valstats = schema.dumps(minerstats).data
        self.__cache.tryputcache(miner.key() + '.stats', valstats)
        schema = MinerCurrentPoolSchema()
        valpool = schema.dumps(minerpool).data
        self.__cache.tryputcache(miner.key() + '.pool', valpool)

    def getstats(self, miner: Miner):
        '''get stats entity'''
        valu = self.cache.trygetvaluefromcache(miner.name + '.stats')
        if valu is None: return None
        entity = domain.minerstatistics.MinerStatistics(miner, **utils.deserialize(MinerStatsSchema(), valu))
        return entity

    def getminerstatscached(self):
        '''iterator for cached stats'''
        for miner in self.miners():
            yield (self.getminer(miner), self.getstats(miner), self.pools.getpool(miner))

    def messagedecodeminer(self, body) -> Miner:
        '''deserialize a miner message'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = MinerMessageSchema()
        minermessage_dict = schema.load(message_envelope.bodyjson()).data
        minermessage_entity = schema.make_minermessage(minermessage_dict)
        miner = minermessage_entity.miner
        return miner

    def messagedecodeminerstats(self, body):
        '''deserialize miner stats'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = MinerMessageSchema()
        minermessage_dict = schema.load(message_envelope.bodyjson()).data
        minermessage_entity = schema.make_minermessage(minermessage_dict)
        return minermessage_entity

    def messagedecodeminercommand(self, body):
        '''deserialize miner command'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = MinerMessageSchema()
        minermessage_dict = schema.load(message_envelope.bodyjson()).data
        minermessage_entity = schema.make_minermessage(minermessage_dict)
        return minermessage_entity

    def messagedecodesensor(self, body):
        '''deserialize sensor value'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = SensorValueSchema()
        #minermessage_dict = schema.load(message_envelope.bodyjson()).data
        entity = schema.load(message_envelope.bodyjson()).data
        return message_envelope, entity

    def messagedecode_configuration(self, body):
        '''deserialize configuration command'''
        message_envelope = super().deserializemessageenvelope(utils.safestring(body))
        schema = ConfigurationMessageSchema()
        configurationmessage_dict = schema.load(message_envelope.bodyjson()).data
        configurationmessage_entity = schema.make_configurationmessage(configurationmessage_dict)
        return configurationmessage_entity

    def createmessagestats(self, miner, minerstats, minerpool):
        #always save the miner so the next guy can get latest changes
        #only put in cache if it came from cache
        if miner.store == 'mem':
            self.putminer(miner)
        message = super().createmessageenvelope()
        message = message.make_minerstats(miner, minerstats, minerpool)
        return super().serializemessageenvelope(message)

    def createmessagecommand(self, miner, command):
        '''create message command'''
        if miner.store == 'mem':
            self.putminer(miner)
        message = super().createmessageenvelope()
        message = message.make_minercommand(miner, command)
        return super().serializemessageenvelope(message)

    def messageencode(self, miner: Miner):
        '''command is optional, however should convert this call into minercommand'''
        #always save the miner so the next guy can get latest changes
        if miner.store == 'mem':
            self.putminer(miner)
        message = super().createmessageenvelope()
        message = message.make_minerstats(miner, minerstats=None, minerpool=None)
        return self._schemamsg.dumps(message).data

    def stamp(self, message):
        return '{0}:{1}: {2}'.format(self.now(), self.applicationid, message)

    def alert(self, message):
        '''send alert message'''
        return self.sendqueueitem(QueueEntry(QueueName.Q_ALERT, self.stamp(message), QueueType.broadcast))

    def send(self, q_name, message):
        '''send message to queue'''
        success = self.trypublish(q_name, message)
        return success

    def enqueue(self, queuelist):
        '''send a list of queue messages'''
        if queuelist is None:
            return
        if not queuelist.hasentries():
            return
        #todo: group by queuename
        for entry in queuelist.entries:
            self.sendqueueitem(entry)

    def sendqueueitem(self, entry):
        '''send one queue item'''
        if entry.eventtype == 'broadcast':
            send_result = self.trybroadcast(entry.queuename, entry.message)
            return send_result
        return self.send(entry.queuename, entry.message)

    def readtemperature(self):
        temp, humid = self.sensors.readtemperature()
        self.sendsensor(temp)
        self.sendsensor(humid)
        return temp, humid

    def sendsensor(self, reading):
        if not reading:
            return
        message = super().createmessageenvelope()
        sensorjson = message.jsonserialize(SensorValueSchema(), reading)
        self.sendqueueitem(QueueEntry(QueueName.Q_SENSOR, super().serializemessageenvelope(message.make_any('sensorvalue', sensorjson)), QueueType.broadcast))

    def getsession(self):
        service = self.getservice(ServiceName.database)
        engine = create_engine(service.connection, echo=False)
        Session = sessionmaker(bind=engine)
        return Session()

    def log_mineractivity(self, minerlog):
        try:
            session = self.getsession()
            session.add(minerlog)
            session.commit()
            return True
        except BaseException as ex:
            self.logexception(ex)
        return False

    def save_miner(self, miner: Miner):
        found = self.getknownminer(miner)
        if found is None:
            self.addknownminer(miner)
            #miners = MinerRepository()
            #todo:add the miner to the json config
        else:
            found.updatefrom(miner)
            self.putminer(found)

def main():
    full_cycle = ApplicationService()
    full_cycle.loginfo('Full Cycle was run in a script')
    full_cycle.shutdown()

if __name__ == "__main__":
    main()
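
For the producer side, a minimal sketch using only methods shown in the listing above; the module path and the message text are illustrative assumptions.

#Hypothetical sketch: publishing alerts and log entries through the queue helpers.
from backend.fcmapp import ApplicationService   #assumed module path
from backend.fcmcomponent import ComponentName

app = ApplicationService(component=ComponentName.fullcycle, announceyourself=True)
app.alert('temperature threshold exceeded')            #broadcast on Q_ALERT
app.sendlog(app.stamp('maintenance window started'))   #broadcast on Q_LOG
app.shutdown()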