# sitemap.py - plugin to generate a sitemap
#
# Copyright (C) 1998, 1999 Albert Hopkins (marduk)
# Copyright (C) 2002 Mike W. Meyer
# Copyright (C) 2005, 2006, 2007, 2011 Arthur de Jong
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301 USA
#
# The files produced as output from the software do not automatically fall
# under the copyright of the software, unless explicitly stated otherwise.

"""Present a site map of the checked site."""

__title__ = 'site map'
__author__ = 'Arthur de Jong'
__outputfile__ = 'index.html'

from webcheck import config
from webcheck.db import Session, Link
from webcheck.output import render


def get_children(link, explored):
    """Determine the page children of this link, combining the children of
    embedded items and following redirects."""
    # get all internal children
    qry = link.children.filter(Link.is_internal == True)
    if link.depth:
        qry = qry.filter((Link.depth > link.depth) | (Link.depth == None))
    # follow redirects and return all direct children
    for child in (x.follow_link() for x in qry):
        if child and child.is_page and child.is_internal and child.id not in explored:
            explored.add(child.id)
            yield child
    # add embedded element's pagechildren (think frames)
    for embed in link.embedded.filter(Link.is_internal == True).filter(Link.is_page == True):
        if embed.id not in explored and \
           (embed.depth == None or embed.depth > link.depth):
            for child in get_children(embed, explored):
                yield child


def explore(links, explored=None, depth=0):
    """Recursively do a breadth first traversal of the graph of links on the
    site."""
    if explored is None:
        explored = set(x.id for x in links)
    for link in links:
        children = []
        if depth <= config.REPORT_SITEMAP_LEVEL:
            children = list(get_children(link, explored))
            children.sort(key=lambda x: x.url)
        if children:
            yield link, explore(children, explored, depth + 1)
        else:
            yield link, None
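

# Illustrative sketch (not part of the original plugin): explore() yields
# (link, children) pairs where children is either None or a nested generator
# of the same shape, so a consumer such as the sitemap template can walk the
# structure recursively.  The helper below is hypothetical and only
# demonstrates that traversal pattern.
def _walk_example(pairs, indent=0):
    """Print the tree produced by explore() as an indented list of URLs."""
    for link, children in pairs:
        print('%s%s' % ('  ' * indent, link.url))
        if children is not None:
            _walk_example(children, indent + 1)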


def generate(crawler):
    """Output the sitemap."""
    session = Session()
    # use the crawler's base URLs as the top-level entries of the sitemap
    links = [session.query(Link).filter_by(url=url).first()
             for url in crawler.base_urls]
    # lazily build the (link, children) tree starting from those roots
    links = explore(links)
    render(__outputfile__, crawler=crawler, title=__title__,
           links=links)
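

# Hedged usage note (an assumption drawn from this module's shape, not
# confirmed by the code above): webcheck appears to treat each plugin as a
# module exposing __title__, __author__, __outputfile__ and a
# generate(crawler) hook, so after a crawl the sitemap report would be
# produced with something along these lines:
#
#     from webcheck.plugins import sitemap
#     sitemap.generate(crawler)   # writes index.html via render()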