GitHub Access Token became invalid

It seems like the GitHub access token used for retrieving details about this repository from GitHub became invalid. This might prevent certain types of inspections from being run (in particular, everything related to pull requests).
Please ask an admin of your repository to renew the access token on this website.
Passed
Push — master ( d45b0a...2e6b2b )
by dup
02:05
created

configuration.Conf.extract_site_list()   A

Complexity

Conditions 3

Size

Total Lines 12
Code Lines 7

Duplication

Lines 0
Ratio 0 %

Code Coverage

Tests 7
CRAP Score 3

Importance

Changes 0
Metric Value
eloc 7
dl 0
loc 12
ccs 7
cts 7
cp 1
rs 10
c 0
b 0
f 0
cc 3
nop 2
crap 3
1
#!/usr/bin/env python
2
# -*- coding: utf8 -*-
3
#
4
#  versions.py : checks releases and versions of programs through RSS
5
#                or Atom feeds and tells you
6
#
7
#  (C) Copyright 2016 - 2018 Olivier Delhomme
8
#  e-mail : [email protected]
9
#
10
#  This program is free software; you can redistribute it and/or modify
11
#  it under the terms of the GNU General Public License as published by
12
#  the Free Software Foundation; either version 3, or (at your option)
13
#  any later version.
14
#
15
#  This program is distributed in the hope that it will be useful,
16
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
17
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
18
#  GNU General Public License for more details.
19
#
20
#  You should have received a copy of the GNU General Public License
21
#  along with this program; if not, write to the Free Software Foundation,
22
#  Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
23
#
24
25 1
import codecs
26 1
import sys
27 1
import locale
28 1
import argparse
29 1
import os
30 1
import re
31 1
import errno
32 1
import time
33 1
import doctest
34 1
import feedparser
35 1
import yaml
36 1
import operator
37 1
import common
38 1
import caches
39
40 1
# Module metadata; __version__ is shown by the '-v/--version' command line option.
__author__ = "Olivier Delhomme <[email protected]>"
__date__ = "06.11.2018"
__version__ = "1.5.2"
43
44
45 1
def make_directories(path):
    """
    Makes all directories in path if possible. It is not an error if
    path already exists.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Swallow only the "path already exists as a directory" case;
        # any other failure is re-raised to the caller.
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise

# End of make_directories() function
60
61 1
class Conf:
    """
    Class to store configuration of the program and check version.

    Holds the configuration/cache directory paths, the YAML description
    of the sites and projects to watch, and the parsed command line
    options.
    """
    config_dir = ''         # directory holding the YAML configuration file
    local_dir = ''          # directory holding the cache and feed files
    config_filename = ''    # full path of the YAML configuration file
    description = {}        # parsed content of the YAML configuration file
    options = None          # argparse.Namespace of the command line options

    def __init__(self):
        """
        Inits the class
        """
        self.config_dir = os.path.expanduser("~/.config/versions")
        self.local_dir = os.path.expanduser("~/.local/versions")
        self.config_filename = ''  # At this stage we do not know if a filename has been set on the command line
        self.description = {}
        self.options = None

        # Make sure that the directories exists
        make_directories(self.config_dir)
        make_directories(self.local_dir)

        self._get_command_line_arguments()

    # End of init() function

    def load_yaml_from_config_file(self, filename):
        """
        Loads definitions from the YAML config file filename
        >>> conf = Conf()
        >>> conf.load_yaml_from_config_file('./bad_formatted.yaml')
        Error in configuration file ./bad_formatted.yaml at position: 9:1
        """
        # 'with' guarantees the file is closed even when yaml raises an
        # exception that is not a YAMLError (the previous explicit close()
        # leaked the handle in that case).
        with codecs.open(filename, 'r', encoding='utf-8') as config_file:
            try:
                self.description = yaml.safe_load(config_file)
            except yaml.YAMLError as err:
                if hasattr(err, 'problem_mark'):
                    mark = err.problem_mark
                    print(u'Error in configuration file {} at position: {}:{}'.format(filename, mark.line+1, mark.column+1))
                else:
                    print(u'Error in configuration file {}'.format(filename))

    # End of load_yaml_from_config_file() function

    def _get_command_line_arguments(self):
        """
        Defines and gets all the arguments for the command line using
        argparse module. This function is called in the __init__ function
        of this class.
        """
        str_version = 'versions.py - %s' % __version__

        parser = argparse.ArgumentParser(description='This program checks releases and versions of programs through RSS or Atom feeds')

        parser.add_argument('-v', '--version', action='version', version=str_version)
        parser.add_argument('-f', '--file', action='store', dest='filename', help='Configuration file with projects to check', default='')
        parser.add_argument('-l', '--list-cache', action='store_true', dest='list_cache', help='Lists all projects and their version in cache', default=False)
        parser.add_argument('-d', '--debug', action='store_true', dest='debug', help='Starts in debug mode and prints things that may help', default=False)

        self.options = parser.parse_args()

        # Fall back to the default configuration file when none was given
        # on the command line.
        if self.options.filename != '':
            self.config_filename = self.options.filename
        else:
            self.config_filename = os.path.join(self.config_dir, 'versions.yaml')

    # End of get_command_line_arguments() function

    def extract_site_definition(self, site_name):
        """
        extracts whole site definition
        """
        return self.description.get(site_name, dict())

    # End of extract_site_definition()

    def extract_regex_from_site(self, site_name):
        """
        Extracts a regex from a site as defined in the YAML file.
        Returns the regex if it exists or None otherwise.
        """
        return self.extract_variable_from_site(site_name, 'regex', None)

    # End of extract_regex_from_site() function

    def extract_multiproject_from_site(self, site_name):
        """
        Extracts from a site its separator list for its multiple
        projects in one title. It returns None if multiproject
        is not defined and the list of separators instead
        """
        return self.extract_variable_from_site(site_name, 'multiproject', None)

    # End of extract_multiproject_from_site() function

    def extract_variable_from_site(self, site_name, variable, default_return):
        """
        Extracts variable from site site_name if it exists and return
        default_return otherwise
        """
        site_definition = self.extract_site_definition(site_name)

        if variable in site_definition:
            value = site_definition[variable]
            if value is None:
                # The key exists in the YAML but carries no value: warn the
                # user and fall back to the default.
                print(u'Warning: no variable "{}" for site "{}".'.format(variable, site_name))
                value = default_return
        else:
            value = default_return

        return value

    # End of extract_variable_from_site() function

    def extract_project_list_from_site(self, site_name):
        """
        Extracts a project list from a site as defined in the YAML file.
        """
        return self.extract_variable_from_site(site_name, 'projects', [])

    # End of extract_project_list_from_site() function

    def extract_project_url(self, site_name):
        """
        Extracts the url definition where to check project version.
        """
        return self.extract_variable_from_site(site_name, 'url', '')

    # End of extract_project_url() function

    def extract_project_entry(self, site_name):
        """
        Extracts the entry definition (if any) of a site.
        """
        return self.extract_variable_from_site(site_name, 'entry', '')

    # End of extract_project_entry() function.

    def is_site_of_type(self, site_name, site_type):
        """
        Returns True if site_name is of type 'site_type'
        """
        site_definition = self.extract_site_definition(site_name)
        if 'type' in site_definition:
            return (site_definition['type'] == site_type)
        else:
            return False

    # End of is_site_of_type() function

    def extract_site_list(self, site_type):
        """
        Extracts all sites from a specific type (byproject or list)
        """
        # reversed() preserves the historical order produced by the former
        # insert(0, ...) loop.
        return [site_name for site_name in reversed(list(self.description.keys()))
                if self.is_site_of_type(site_name, site_type)]

    # End of extract_site_list() function

    def make_site_cache_list_name(self):
        """
        Formats list of cache filenames for all sites.
        """
        # reversed() preserves the historical order produced by the former
        # insert(0, ...) loop.
        return [u'{}.cache'.format(site_name)
                for site_name in reversed(list(self.description.keys()))]

    # End of make_site_cache_list_name() function

    def print_cache_or_check_versions(self):
        """
        Decide to pretty print projects and their associated version that
        are already in the cache or to check versions of that projects upon
        selections made at the command line
        """
        common.print_debug(self.options.debug, u'Loading yaml config file')
        self.load_yaml_from_config_file(self.config_filename)

        if self.options.list_cache is True:
            # Pretty prints all caches.
            cache_list = self.make_site_cache_list_name()
            caches.print_versions_from_cache(self.local_dir, cache_list)
        else:
            # Checks version from online feeds
            self.check_versions()

    # End of print_list_or_check_versions() function.

    def check_versions(self):
        """
        Checks versions by parsing online feeds.
        """
        # Checks projects from by project sites such as github and sourceforge
        for site_name in self.extract_site_list('byproject'):
            common.print_debug(self.options.debug, u'Checking {} projects'.format(site_name))
            (project_list, project_url, cache_filename, project_entry) = self.get_infos_for_site(site_name)
            feed_filename = u'{}.feed'.format(site_name)
            check_versions_feeds_by_projects(project_list, self.local_dir, self.options.debug, project_url, cache_filename, feed_filename, project_entry)

        # Checks projects from 'list' type sites such as freshcode.club
        for site_name in self.extract_site_list('list'):
            common.print_debug(self.options.debug, u'Checking {} updates'.format(site_name))
            (project_list, project_url, cache_filename, project_entry) = self.get_infos_for_site(site_name)
            regex = self.extract_regex_from_site(site_name)
            multiproject = self.extract_multiproject_from_site(site_name)
            feed_filename = u'{}.feed'.format(site_name)
            check_versions_for_list_sites(project_list, project_url, cache_filename, feed_filename, self.local_dir, self.options.debug, regex, multiproject)

    # End of check_versions() function

    def get_infos_for_site(self, site_name):
        """
        Returns informations about a site as a tuple
        (list of projects, url to check, filename of the cache)
        """
        project_list = self.extract_project_list_from_site(site_name)
        project_url = self.extract_project_url(site_name)
        project_entry = self.extract_project_entry(site_name)
        cache_filename = u'{}.cache'.format(site_name)

        return (project_list, project_url, cache_filename, project_entry)

    # End of get_infos_for_site() function

# End of Conf class
337
338
339
340
341
342
343
344 1
def manage_http_status(feed, url):
    """
    Manages http status code present in feed and prints
    an error in case of a 3xx, 4xx or 5xx and stops
    doing anything for the feed by returning None.
    """
    # Integer division is required: with Python 3 true division a 2xx
    # status such as 204 gave 2.04 > 2 and was wrongly flagged as an error.
    err = feed.status // 100

    if err > 2:
        print(u'Error {} while fetching "{}".'.format(feed.status, url))
        feed = None

    return feed

# End of manage_http_status() function
360
361
362 1
def manage_non_http_errors(feed, url):
    """
    Tries to manage non http errors and gives
    a message to the user.
    """
    # 'bozo' is feedparser's flag for a malformed or unreachable feed.
    if feed.bozo:
        if feed.bozo_exception:
            exc = feed.bozo_exception
            message = exc.reason if hasattr(exc, 'reason') else 'unaddressed'
            print(u'Error {} while fetching "{}".'.format(message, url))
        else:
            print(u'Error while fetching url "{}".'.format(url))

# End of manage_non_http_errors() function
382
383
384 1
def get_feed_entries_from_url(url):
    """
    Gets feed entries from an url that should be an
    RSS or Atom feed.
    >>> get_feed_entries_from_url("http://delhomme.org/notfound.html")
    Error 404 while fetching "http://delhomme.org/notfound.html".
    >>> feed = get_feed_entries_from_url("http://blog.delhomme.org/index.php?feed/atom")
    >>> feed.status
    200
    """
    parsed = feedparser.parse(url)

    if 'status' not in parsed:
        # An error happened such that the feed does not contain an HTTP response
        manage_non_http_errors(parsed, url)
        return None

    # Turn 3xx/4xx/5xx responses into None.
    return manage_http_status(parsed, url)

# End of get_feed_entries_from_url() function
407
408
409 1
def format_project_feed_filename(feed_filename, name):
    """
    Returns a valid filename formatted based on feed_filename (the site name)
    and name the name of the project
    """
    root, ext = os.path.splitext(feed_filename)

    # Project names may contain '/' (e.g. github user/repo): not valid
    # in a filename, so replace with '_'.
    return "{}_{}{}".format(root, name.replace('/', '_'), ext)

# End of format_project_feed_filename() function
423
424
425 1
def is_entry_last_checked(entry):
    """
    Returns true if entry is equal to last checked and
    false otherwise.
    >>> is_entry_last_checked('last checked')
    True
    >>> is_entry_last_checked('')
    False
    >>> is_entry_last_checked('latest')
    False
    """
    # Plain comparison against the 'last checked' sentinel value.
    return 'last checked' == entry

# End of is_entry_last_checked() function
440
441
442 1
def get_values_from_project(project):
    """
    Gets the values of 'regex' and 'name' keys if found and
    returns a tuple (valued, name, regex, entry)
    """
    # Defaults when project is a plain string (or the dict omits a key).
    name = project
    regex = ''
    entry = ''
    valued = False

    if type(project) is dict:
        name = project.get('name', project)

        if 'regex' in project:
            regex = project['regex']
            valued = True

        if 'entry' in project:
            entry = project['entry']
            valued = True

    return (valued, name, regex, entry)

# End of get_values_from_project() function
468
469
470
471
472
473
474
475
476
477 1
def sort_feed_list(feed_list, feed):
    """
    Sorts the feed list with the right attribute which depends on the feed.
    sort is reversed because feed_list is build by inserting ahead when
    parsing the feed from the most recent to the oldest entry.
    Returns a sorted list (by date) the first entry is the newest one.
    """
    first = feed.entries[0]

    if first:
        # Pick whichever date field this feed provides (checked in order).
        for date_field in ('published_parsed', 'updated_parsed'):
            if date_field in first:
                return sorted(feed_list, key=operator.attrgetter(date_field), reverse=True)

    return feed_list

# End of sort_feed_list() function
494
495
496 1
def get_releases_filtering_feed(debug, local_dir, filename, feed, entry):
    """
    Filters the feed and returns a list of releases with one
    or more elements
    """
    if not is_entry_last_checked(entry):
        # 'latest' mode: only the newest entry of the feed is of interest.
        return [feed.entries[0]]

    # 'last checked' mode: keep every entry newer than the cached date.
    feed_info = caches.FeedCache(local_dir, filename)
    feed_info.read_cache_feed()
    feed_list = sort_feed_list(make_list_of_newer_feeds(feed, feed_info, debug), feed)

    # Updating feed_info with the latest parsed feed entry date
    if len(feed_list) >= 1:
        feed_info.update_cache_feed(get_entry_published_date(feed_list[0]))

    feed_info.write_cache_feed()

    return feed_list
521
522
523 1
def get_latest_release_by_title(project, debug, feed_url, local_dir, feed_filename, project_entry):
    """
    Gets the latest release or the releases between the last checked time of
    a program on a site of type 'byproject'.
    project must be a string that represents the project (user/repository in
    github for instance).
    Returns a tuple which contains the name of the project, a list of versions
    and a boolean that indicates if we checked by last checked time (True) or
    by release (False).
    """
    feed_list = []

    (valued, name, regex, entry) = get_values_from_project(project)

    # A site level 'last checked' entry overrides the per-project one.
    if is_entry_last_checked(project_entry):
        last_checked = True
        entry = project_entry
    else:
        last_checked = is_entry_last_checked(entry)

    filename = format_project_feed_filename(feed_filename, name)
    url = feed_url.format(name)
    feed = get_feed_entries_from_url(url)

    if feed is not None and len(feed.entries) > 0:
        feed_list = get_releases_filtering_feed(debug, local_dir, filename, feed, entry)

        if valued and regex != '':
            # Here we match the whole list against the regex and replace the
            # title's entry of the result of that match upon success.
            for feed_entry in feed_list:
                res = re.match(regex, feed_entry.title)
                # Here we should make a new list with the matched entries and leave tho other ones
                if res:
                    feed_entry.title = res.group(1)
                common.print_debug(debug, u'\tname: {}\n\tversion: {}\n\tregex: {} : {}'.format(name, feed_entry.title, regex, res))

        # Bug fix: the former code printed 'entry.title' here, but 'entry'
        # could still be a plain string (whose .title is the str.title bound
        # method) and feed_list may be empty; report the newest entry instead.
        if feed_list:
            common.print_debug(debug, u'\tProject {}: {}'.format(name, feed_list[0].title))

    return (name, feed_list, last_checked)

# End of get_latest_release_by_title() function
566
567
568 1
def check_versions_feeds_by_projects(project_list, local_dir, debug, feed_url, cache_filename, feed_filename, project_entry):
    """
    Checks project's versions on feed_url if any are defined in the yaml
    file under the specified tag that got the project_list passed as an argument.
    """
    site_cache = caches.FileCache(local_dir, cache_filename)

    for project in project_list:
        name, feed_list, last_checked = get_latest_release_by_title(
            project, debug, feed_url, local_dir, feed_filename, project_entry)

        if feed_list:
            # The first entry is the most recent release: cache it.
            latest_version = feed_list[0].title

            if not last_checked:
                # printing only for latest release as last checked is
                # already filtered and to be printed entirely
                site_cache.print_if_newest_version(name, latest_version, debug)

            site_cache.update_cache_dict(name, latest_version, debug)

            if not last_checked:
                # The newest entry was already printed above.
                feed_list = feed_list[1:]

        for release in feed_list:
            common.print_project_version(name, release.title)

    site_cache.write_cache_file()

# End of check_versions_feeds_by_projects() function
601
602
#######################################################################
603
604 1
def cut_title_with_default_method(title):
    """
    Cuts title with a default method and a fallback
    >>> cut_title_with_default_method('versions 1.3.2')
    ('versions', '1.3.2')
    >>> cut_title_with_default_method('no_version_project')
    ('no_version_project', '')
    """
    stripped = title.strip()
    pieces = stripped.split(' ', 1)

    # No space at all means there is no version part in the title.
    if len(pieces) == 2:
        return (pieces[0], pieces[1])

    return (stripped, '')

# End of cut_title_with_default_method() function
623
624
625 1
def cut_title_with_regex_method(title, regex):
    r"""
    Cuts title using a regex. If it does not success
    fallback to default.
    >>> cut_title_with_regex_method('versions 1.3.2', '([\w]+)\s([\d\.]+)')
    ('versions', '1.3.2', False)
    >>> cut_title_with_regex_method('versions 1.3.2', '([\w]+)notgood\s([\d\.]+)')
    ('', '', True)
    """
    res = re.match(regex, title)

    # A failed match asks the caller to fall back to the default method.
    if res is None:
        return ('', '', True)

    return (res.group(1), res.group(2), False)

# End of cut_title_with_regex_method() function
649
650
651 1
def cut_title_in_project_version(title, regex):
    """
    Cuts the title into a tuple (project, version) where possible with a regex
    or if there is no regex or the regex did not match cuts the title with a
    default method
    """
    if regex is not None:
        (project, version, use_default) = cut_title_with_regex_method(title, regex)
        if not use_default:
            return (project, version)

    # No regex given, or the regex did not match: default splitting.
    return cut_title_with_default_method(title)

# End of cut_title_in_project_version() function
670
671
672 1
def get_entry_published_date(entry):
    """
    Returns the published date of an entry.
    Selects the right field to do so
    """
    if 'published_parsed' in entry:
        published_date = entry.published_parsed
    elif 'updated_parsed' in entry:
        published_date = entry.updated_parsed
    elif 'pubDate' in entry:    # rss-0.91.dtd (netscape)
        published_date = entry.pubDate
    else:
        # Bug fix: previously this fell through and raised a confusing
        # UnboundLocalError on the return line; fail explicitly instead.
        raise KeyError('Entry has no published/updated date field: {}'.format(entry))

    return published_date

# End of get_entry_published_date() function
688
689
690 1
def make_list_of_newer_feeds(feed, feed_info, debug):
    """
    Compares feed entries and keep those that are newer than the latest
    check we've done and inserting the newer ones in reverse order in
    a list to be returned
    """
    feed_list = []

    # inserting into a list in reverse order to keep the most recent
    # version in case of multiple release of the same project in the
    # feeds
    for a_feed in feed.entries:
        if not a_feed:
            print(u'Warning: empty feed in {}'.format(feed))
            continue

        published_date = get_entry_published_date(a_feed)
        common.print_debug(debug, u'\tFeed entry ({0}): Feed title: "{1:16}"'.format(time.strftime('%x %X', published_date), a_feed.title))

        if feed_info.is_newer(published_date):
            feed_list.insert(0, a_feed)

    return feed_list

# End of make_list_of_newer_feeds() function
717
718
719 1
def lower_list_of_strings(project_list):
    """
    Lowers every string in the list to ease sorting and comparisons
    """
    # Lower-case copies; the original list is left untouched.
    return [name.lower() for name in project_list]

# End of lower_list_of_strings() function
729
730
731 1
def split_multiproject_title_into_list(title, multiproject):
    """
    Splits title into a list of projects according to multiproject being
    a list of separators
    """
    # No separators configured: the title is a single project.
    if multiproject is None:
        return [title]

    return re.split(multiproject, title)

# End of split_multiproject_title_into_list() function
745
746
747
748
749 1
def check_and_update_feed(feed_list, project_list, cache, debug, regex, multiproject):
    """
    Checks every feed entry in the list against project list cache and
    then updates the dictionnary then writes the cache file to the disk.
     - feed_list    is a list of feed (from feedparser module)
     - project_list is the list of project as read from the yaml
                    configuration file
     - cache is an initialized instance of FileCache
    """
    # Lowers the list before searching in it
    wanted_projects = lower_list_of_strings(project_list)

    # Checking every feed entry that are newer than the last check
    # and updates the dictionnary accordingly
    for entry in feed_list:
        for title in split_multiproject_title_into_list(entry.title, multiproject):
            (project, version) = cut_title_in_project_version(title, regex)
            common.print_debug(debug, u'\tChecking {0:16}: {1}'.format(project, version))
            if project.lower() in wanted_projects:
                cache.print_if_newest_version(project, version, debug)
                cache.update_cache_dict(project, version, debug)

    cache.write_cache_file()

# End of check_and_update_feed() function
778
779
780
781
782 1
def check_versions_for_list_sites(feed_project_list, url, cache_filename, feed_filename, local_dir, debug, regex, multiproject):
    """
    Checks projects of 'list' type sites such as freshcode's web site's RSS
    """
    freshcode_cache = caches.FileCache(local_dir, cache_filename)

    feed_info = caches.FeedCache(local_dir, feed_filename)
    feed_info.read_cache_feed()

    feed = get_feed_entries_from_url(url)

    if feed is not None:
        common.print_debug(debug, u'\tFound {} entries'.format(len(feed.entries)))
        feed_list = make_list_of_newer_feeds(feed, feed_info, debug)
        common.print_debug(debug, u'\tFound {} new entries (relative to {})'.format(len(feed_list), feed_info.date_minutes))

        check_and_update_feed(feed_list, feed_project_list, freshcode_cache, debug, regex, multiproject)

        # Updating feed_info with the latest parsed feed entry date.
        # Bug fix: guard against an empty feed (the former code raised
        # IndexError) and use get_entry_published_date() so feeds that only
        # provide 'updated_parsed' (Atom) do not crash on a missing
        # 'published_parsed' attribute.
        if feed.entries:
            feed_info.update_cache_feed(get_entry_published_date(feed.entries[0]))

    feed_info.write_cache_feed()

# End of check_versions_for_list_sites() function
807
808
809