]>
git.proxmox.com Git - mirror_edk2.git/blob - AppPkg/Applications/Python/Python-2.7.2/Lib/robotparser.py
Copyright (C) 2000 Bastian Kleineidam

You can choose between two licenses when using this package:
1) GNU GPLv2
2) PSF license for Python 2.2

The robots.txt Exclusion Protocol is implemented as specified in
http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
# Public names exported by this module.
__all__ = ["RobotFileParser"]
class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        # entries: Entry objects for specific user agents, in file order.
        self.entries = []
        # default_entry: the first entry whose user-agents include "*";
        # consulted only after all specific entries fail to match.
        self.default_entry = None
        # Set by read() from the HTTP status of the robots.txt fetch.
        self.disallow_all = False
        self.allow_all = False
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        # Keep the network location and path for later use.
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        opener = URLopener()
        f = opener.open(self.url)
        lines = [line.strip() for line in f]
        f.close()
        self.errcode = opener.errcode
        if self.errcode in (401, 403):
            # Unauthorized/forbidden robots.txt: treat everything as disallowed.
            self.disallow_all = True
        elif self.errcode >= 400:
            # Any other client/server error: act as if robots.txt is absent.
            self.allow_all = True
        elif self.errcode == 200 and lines:
            self.parse(lines)

    def _add_entry(self, entry):
        """File an entry under the catch-all slot or the ordered entry list."""
        if "*" in entry.useragents:
            # the default entry is considered last
            if self.default_entry is None:
                # the first default entry wins
                self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """parse the input lines from a robots.txt file.
           We allow that a user-agent: line is not preceded by
           one or more blank lines."""
        # states:
        #   0: start state
        #   1: saw user-agent line
        #   2: saw an allow or disallow line
        state = 0
        linenumber = 0
        entry = Entry()

        for line in lines:
            linenumber += 1
            if not line:
                # A blank line terminates the current record.
                if state == 1:
                    # user-agent line(s) with no rules: discard the entry
                    entry = Entry()
                    state = 0
                elif state == 2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i >= 0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state == 2:
                        # a new record starts; commit the previous one
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state != 0:
                        entry.rulelines.append(RuleLine(line[1], True))
                        state = 2
        if state == 2:
            # commit the final record if the file did not end with a blank line
            self._add_entry(entry)

    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # search for given user agent matches
        # the first match counts
        parsed_url = urlparse.urlparse(urllib.unquote(url))
        url = urlparse.urlunparse(('', '', parsed_url.path,
            parsed_url.params, parsed_url.query, parsed_url.fragment))
        url = urllib.quote(url)
        if not url:
            url = "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True

    def __str__(self):
        return ''.join([str(entry) + "\n" for entry in self.entries])
157 """A rule line is a single "Allow:" (allowance==True) or "Disallow:"
158 (allowance==False) followed by a path."""
159 def __init__(self
, path
, allowance
):
160 if path
== '' and not allowance
:
161 # an empty value means allow all
163 self
.path
= urllib
.quote(path
)
164 self
.allowance
= allowance
166 def applies_to(self
, filename
):
167 return self
.path
== "*" or filename
.startswith(self
.path
)
170 return (self
.allowance
and "Allow" or "Disallow") + ": " + self
.path
174 """An entry has one or more user-agents and zero or more rulelines"""
181 for agent
in self
.useragents
:
182 ret
.extend(["User-agent: ", agent
, "\n"])
183 for line
in self
.rulelines
:
184 ret
.extend([str(line
), "\n"])
187 def applies_to(self
, useragent
):
188 """check if this entry applies to the specified agent"""
189 # split the name token and make it lower case
190 useragent
= useragent
.split("/")[0].lower()
191 for agent
in self
.useragents
:
193 # we have the catch-all agent
195 agent
= agent
.lower()
196 if agent
in useragent
:
200 def allowance(self
, filename
):
202 - our agent applies to this entry
203 - filename is URL decoded"""
204 for line
in self
.rulelines
:
205 if line
.applies_to(filename
):
206 return line
.allowance
209 class URLopener(urllib
.FancyURLopener
):
210 def __init__(self
, *args
):
211 urllib
.FancyURLopener
.__init
__(self
, *args
)
214 def prompt_user_passwd(self
, host
, realm
):
215 ## If robots.txt file is accessible only with a password,
216 ## we act as if the file wasn't there.
219 def http_error_default(self
, url
, fp
, errcode
, errmsg
, headers
):
220 self
.errcode
= errcode
221 return urllib
.FancyURLopener
.http_error_default(self
, url
, fp
, errcode
,