""" robotparser.py
Copyright (C) 2000 Bastian Kleineidam
You can choose between two licenses when using this package:
1) GNU GPLv2
2) PSF license for Python 2.2
The robots.txt Exclusion Protocol is implemented as specified in
http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
"""
import urlparse
import urllib
__all__ = ["RobotFileParser"]
class RobotFileParser:
""" This class provides a set of methods to read, parse and answer
questions about a single robots.txt file.
"""
def __init__(self, url=''):
self.entries = []
self.default_entry = None
self.disallow_all = False
self.allow_all = False
self.set_url(url)
self.last_checked = 0
def mtime(self):
"""Returns the time the robots.txt file was last fetched.
This is useful for long-running web spiders that need to
check for new robots.txt files periodically.
"""
return self.last_checked
def modified(self):
"""Sets the time the robots.txt file was last fetched to the
current time.
"""
import time
self.last_checked = time.time()
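    ## Illustrative re-check pattern for a long-running spider (the
    ## 3600-second threshold is an assumption, not part of this module):
    ##     if time.time() - rp.mtime() > 3600:
    ##         rp.read()       # re-fetch robots.txt
    ##         rp.modified()   # record the fetch time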
def set_url(self, url):
"""Sets the URL referring to a robots.txt file."""
self.url = url
self.host, self.path = urlparse.urlparse(url)[1:3]
def read(self):
"""Reads the robots.txt URL and feeds it to the parser."""
opener = URLopener()
f = opener.open(self.url)
lines = [line.strip() for line in f]
f.close()
self.errcode = opener.errcode
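        # 401/403 mean robots.txt exists but is off limits, so the whole
        # site is treated as disallowed; any other 4xx/5xx error is treated
        # as "no robots.txt", which allows everything.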
if self.errcode in (401, 403):
self.disallow_all = True
elif self.errcode >= 400:
self.allow_all = True
elif self.errcode == 200 and lines:
self.parse(lines)
def _add_entry(self, entry):
if "*" in entry.useragents:
# the default entry is considered last
if self.default_entry is None:
# the first default entry wins
self.default_entry = entry
else:
self.entries.append(entry)
def parse(self, lines):
"""parse the input lines from a robots.txt file.
We allow that a user-agent: line is not preceded by
one or more blank lines."""
# states:
# 0: start state
# 1: saw user-agent line
# 2: saw an allow or disallow line
state = 0
linenumber = 0
entry = Entry()
for line in lines:
linenumber += 1
if not line:
if state == 1:
entry = Entry()
state = 0
elif state == 2:
self._add_entry(entry)
entry = Entry()
state = 0
# remove optional comment and strip line
i = line.find('#')
if i >= 0:
line = line[:i]
line = line.strip()
if not line:
continue
line = line.split(':', 1)
if len(line) == 2:
line[0] = line[0].strip().lower()
line[1] = urllib.unquote(line[1].strip())
if line[0] == "user-agent":
if state == 2:
self._add_entry(entry)
entry = Entry()
entry.useragents.append(line[1])
state = 1
elif line[0] == "disallow":
if state != 0:
entry.rulelines.append(RuleLine(line[1], False))
state = 2
elif line[0] == "allow":
if state != 0:
entry.rulelines.append(RuleLine(line[1], True))
state = 2
if state == 2:
self._add_entry(entry)
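    ## Illustrative trace of the state machine above (not in the original
    ## source): for the input
    ##     User-agent: a      -> state 0 -> 1, agent appended
    ##     Disallow: /x       -> state 1 -> 2, rule appended
    ##     User-agent: b      -> state 2: current entry flushed, new Entry
    ## the trailing "if state == 2" flushes the final entry at end of input.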
def can_fetch(self, useragent, url):
"""using the parsed robots.txt decide if useragent can fetch url"""
if self.disallow_all:
return False
if self.allow_all:
return True
# search for given user agent matches
# the first match counts
parsed_url = urlparse.urlparse(urllib.unquote(url))
url = urlparse.urlunparse(('', '', parsed_url.path,
parsed_url.params, parsed_url.query, parsed_url.fragment))
url = urllib.quote(url)
if not url:
url = "/"
for entry in self.entries:
if entry.applies_to(useragent):
return entry.allowance(url)
# try the default entry last
if self.default_entry:
return self.default_entry.allowance(url)
# agent not found ==> access granted
return True
def __str__(self):
return ''.join([str(entry) + "\n" for entry in self.entries])
class RuleLine:
"""A rule line is a single "Allow:" (allowance==True) or "Disallow:"
(allowance==False) followed by a path."""
def __init__(self, path, allowance):
if path == '' and not allowance:
# an empty value means allow all
allowance = True
self.path = urllib.quote(path)
self.allowance = allowance
def applies_to(self, filename):
return self.path == "*" or filename.startswith(self.path)
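    ## Prefix match, e.g. a path of "/private/" matches "/private/index.html"
    ## but not "/private" itself; a bare "*" matches every path.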
def __str__(self):
return (self.allowance and "Allow" or "Disallow") + ": " + self.path
class Entry:
"""An entry has one or more user-agents and zero or more rulelines"""
def __init__(self):
self.useragents = []
self.rulelines = []
def __str__(self):
ret = []
for agent in self.useragents:
ret.extend(["User-agent: ", agent, "\n"])
for line in self.rulelines:
ret.extend([str(line), "\n"])
return ''.join(ret)
def applies_to(self, useragent):
"""check if this entry applies to the specified agent"""
# split the name token and make it lower case
useragent = useragent.split("/")[0].lower()
for agent in self.useragents:
if agent == '*':
# we have the catch-all agent
return True
agent = agent.lower()
if agent in useragent:
return True
return False
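    ## Note the substring test in applies_to above: a robots.txt token such
    ## as "bot" applies to "Googlebot/2.1", since "bot" in "googlebot" holds.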
def allowance(self, filename):
"""Preconditions:
- our agent applies to this entry
- filename is URL decoded"""
for line in self.rulelines:
if line.applies_to(filename):
return line.allowance
return True
class URLopener(urllib.FancyURLopener):
def __init__(self, *args):
urllib.FancyURLopener.__init__(self, *args)
self.errcode = 200
def prompt_user_passwd(self, host, realm):
## If robots.txt file is accessible only with a password,
## we act as if the file wasn't there.
return None, None
def http_error_default(self, url, fp, errcode, errmsg, headers):
self.errcode = errcode
return urllib.FancyURLopener.http_error_default(self, url, fp, errcode,
errmsg, headers)
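
## A minimal self-contained demo; this block is an illustrative addition,
## not part of the original module. The sample rules and the "CrawlerBot"
## agent name are assumptions. parse() is fed directly, so no network
## access is required.
if __name__ == '__main__':
    sample = [
        "User-agent: CrawlerBot",
        "Disallow: /private/",
        "",
        "User-agent: *",
        "Disallow: /tmp/",
    ]
    rp = RobotFileParser()
    rp.parse(sample)
    # CrawlerBot matches the first entry, which blocks /private/.
    print rp.can_fetch("CrawlerBot/1.0", "http://www.example.com/private/x")   # False
    print rp.can_fetch("CrawlerBot/1.0", "http://www.example.com/index.html")  # True
    # Unknown agents fall through to the catch-all "*" entry.
    print rp.can_fetch("OtherBot", "http://www.example.com/tmp/cache")         # False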