source file: /System/Library/Frameworks/Python.framework/Versions/2.3/lib/python2.3/urllib.py
file stats: 983 lines, 135 executed: 13.7% covered
1. """Open an arbitrary URL. 2. 3. See the following document for more info on URLs: 4. "Names and Addresses, URIs, URLs, URNs, URCs", at 5. http://www.w3.org/pub/WWW/Addressing/Overview.html 6. 7. See also the HTTP spec (from which the error codes are derived): 8. "HTTP - Hypertext Transfer Protocol", at 9. http://www.w3.org/pub/WWW/Protocols/ 10. 11. Related standards and specs: 12. - RFC1808: the "relative URL" spec. (authoritative status) 13. - RFC1738 - the "URL standard". (authoritative status) 14. - RFC1630 - the "URI spec". (informational status) 15. 16. The object returned by URLopener().open(file) will differ per 17. protocol. All you know is that is has methods read(), readline(), 18. readlines(), fileno(), close() and info(). The read*(), fileno() 19. and close() methods work like those of open files. 20. The info() method returns a mimetools.Message object which can be 21. used to query various info about the object, if available. 22. (mimetools.Message objects are queried with the getheader() method.) 23. """ 24. 25. import string 26. import socket 27. import os 28. import time 29. import sys 30. 31. __all__ = ["urlopen", "URLopener", "FancyURLopener", "urlretrieve", 32. "urlcleanup", "quote", "quote_plus", "unquote", "unquote_plus", 33. "urlencode", "url2pathname", "pathname2url", "splittag", 34. "localhost", "thishost", "ftperrors", "basejoin", "unwrap", 35. "splittype", "splithost", "splituser", "splitpasswd", "splitport", 36. "splitnport", "splitquery", "splitattr", "splitvalue", 37. "splitgophertype", "getproxies"] 38. 39. __version__ = '1.15' # XXX This version is not always updated :-( 40. 41. MAXFTPCACHE = 10 # Trim the ftp cache beyond this size 42. 43. # Helper for non-unix systems 44. if os.name == 'mac': 45. from macurl2path import url2pathname, pathname2url 46. elif os.name == 'nt': 47. from nturl2path import url2pathname, pathname2url 48. elif os.name == 'riscos': 49. from rourl2path import url2pathname, pathname2url 50. else: 51. 
def url2pathname(pathname): 52. return unquote(pathname) 53. def pathname2url(pathname): 54. return quote(pathname) 55. 56. # This really consists of two pieces: 57. # (1) a class which handles opening of all sorts of URLs 58. # (plus assorted utilities etc.) 59. # (2) a set of functions for parsing URLs 60. # XXX Should these be separated out into different modules? 61. 62. 63. # Shortcut for basic usage 64. _urlopener = None 65. def urlopen(url, data=None, proxies=None): 66. """urlopen(url [, data]) -> open file-like object""" 67. global _urlopener 68. if proxies is not None: 69. opener = FancyURLopener(proxies=proxies) 70. elif not _urlopener: 71. opener = FancyURLopener() 72. _urlopener = opener 73. else: 74. opener = _urlopener 75. if data is None: 76. return opener.open(url) 77. else: 78. return opener.open(url, data) 79. def urlretrieve(url, filename=None, reporthook=None, data=None): 80. global _urlopener 81. if not _urlopener: 82. _urlopener = FancyURLopener() 83. return _urlopener.retrieve(url, filename, reporthook, data) 84. def urlcleanup(): 85. if _urlopener: 86. _urlopener.cleanup() 87. 88. 89. ftpcache = {} 90. class URLopener: 91. """Class to open URLs. 92. This is a class rather than just a subroutine because we may need 93. more than one set of global protocol-specific options. 94. Note -- this is a base class for those who don't want the 95. automatic handling of errors type 302 (relocated) and 401 96. (authorization needed).""" 97. 98. __tempfiles = None 99. 100. version = "Python-urllib/%s" % __version__ 101. 102. # Constructor 103. def __init__(self, proxies=None, **x509): 104. if proxies is None: 105. proxies = getproxies() 106. assert hasattr(proxies, 'has_key'), "proxies must be a mapping" 107. self.proxies = proxies 108. self.key_file = x509.get('key_file') 109. self.cert_file = x509.get('cert_file') 110. self.addheaders = [('User-agent', self.version)] 111. self.__tempfiles = [] 112. self.__unlink = os.unlink # See cleanup() 113. 
self.tempcache = None 114. # Undocumented feature: if you assign {} to tempcache, 115. # it is used to cache files retrieved with 116. # self.retrieve(). This is not enabled by default 117. # since it does not work for changing documents (and I 118. # haven't got the logic to check expiration headers 119. # yet). 120. self.ftpcache = ftpcache 121. # Undocumented feature: you can use a different 122. # ftp cache by assigning to the .ftpcache member; 123. # in case you want logically independent URL openers 124. # XXX This is not threadsafe. Bah. 125. 126. def __del__(self): 127. self.close() 128. 129. def close(self): 130. self.cleanup() 131. 132. def cleanup(self): 133. # This code sometimes runs when the rest of this module 134. # has already been deleted, so it can't use any globals 135. # or import anything. 136. if self.__tempfiles: 137. for file in self.__tempfiles: 138. try: 139. self.__unlink(file) 140. except OSError: 141. pass 142. del self.__tempfiles[:] 143. if self.tempcache: 144. self.tempcache.clear() 145. 146. def addheader(self, *args): 147. """Add a header to be used by the HTTP interface only 148. e.g. u.addheader('Accept', 'sound/basic')""" 149. self.addheaders.append(args) 150. 151. # External interface 152. def open(self, fullurl, data=None): 153. """Use URLopener().open(file) instead of open(file, 'r').""" 154. fullurl = unwrap(toBytes(fullurl)) 155. if self.tempcache and fullurl in self.tempcache: 156. filename, headers = self.tempcache[fullurl] 157. fp = open(filename, 'rb') 158. return addinfourl(fp, headers, fullurl) 159. urltype, url = splittype(fullurl) 160. if not urltype: 161. urltype = 'file' 162. if urltype in self.proxies: 163. proxy = self.proxies[urltype] 164. urltype, proxyhost = splittype(proxy) 165. host, selector = splithost(proxyhost) 166. url = (host, fullurl) # Signal special case to open_*() 167. else: 168. proxy = None 169. name = 'open_' + urltype 170. self.type = urltype 171. if '-' in name: 172. # replace - with _ 173. 
name = '_'.join(name.split('-')) 174. if not hasattr(self, name): 175. if proxy: 176. return self.open_unknown_proxy(proxy, fullurl, data) 177. else: 178. return self.open_unknown(fullurl, data) 179. try: 180. if data is None: 181. return getattr(self, name)(url) 182. else: 183. return getattr(self, name)(url, data) 184. except socket.error, msg: 185. raise IOError, ('socket error', msg), sys.exc_info()[2] 186. 187. def open_unknown(self, fullurl, data=None): 188. """Overridable interface to open unknown URL type.""" 189. type, url = splittype(fullurl) 190. raise IOError, ('url error', 'unknown url type', type) 191. 192. def open_unknown_proxy(self, proxy, fullurl, data=None): 193. """Overridable interface to open unknown URL type.""" 194. type, url = splittype(fullurl) 195. raise IOError, ('url error', 'invalid proxy for %s' % type, proxy) 196. 197. # External interface 198. def retrieve(self, url, filename=None, reporthook=None, data=None): 199. """retrieve(url) returns (filename, headers) for a local object 200. or (tempfilename, headers) for a remote object.""" 201. url = unwrap(toBytes(url)) 202. if self.tempcache and url in self.tempcache: 203. return self.tempcache[url] 204. type, url1 = splittype(url) 205. if filename is None and (not type or type == 'file'): 206. try: 207. fp = self.open_local_file(url1) 208. hdrs = fp.info() 209. del fp 210. return url2pathname(splithost(url1)[1]), hdrs 211. except IOError, msg: 212. pass 213. fp = self.open(url, data) 214. headers = fp.info() 215. if filename: 216. tfp = open(filename, 'wb') 217. else: 218. import tempfile 219. garbage, path = splittype(url) 220. garbage, path = splithost(path or "") 221. path, garbage = splitquery(path or "") 222. path, garbage = splitattr(path or "") 223. suffix = os.path.splitext(path)[1] 224. (fd, filename) = tempfile.mkstemp(suffix) 225. self.__tempfiles.append(filename) 226. tfp = os.fdopen(fd, 'wb') 227. result = filename, headers 228. if self.tempcache is not None: 229. 
self.tempcache[url] = result 230. bs = 1024*8 231. size = -1 232. blocknum = 1 233. if reporthook: 234. if "content-length" in headers: 235. size = int(headers["Content-Length"]) 236. reporthook(0, bs, size) 237. block = fp.read(bs) 238. if reporthook: 239. reporthook(1, bs, size) 240. while block: 241. tfp.write(block) 242. block = fp.read(bs) 243. blocknum = blocknum + 1 244. if reporthook: 245. reporthook(blocknum, bs, size) 246. fp.close() 247. tfp.close() 248. del fp 249. del tfp 250. return result 251. 252. # Each method named open_<type> knows how to open that type of URL 253. 254. def open_http(self, url, data=None): 255. """Use HTTP protocol.""" 256. import httplib 257. user_passwd = None 258. if isinstance(url, str): 259. host, selector = splithost(url) 260. if host: 261. user_passwd, host = splituser(host) 262. host = unquote(host) 263. realhost = host 264. else: 265. host, selector = url 266. urltype, rest = splittype(selector) 267. url = rest 268. user_passwd = None 269. if urltype.lower() != 'http': 270. realhost = None 271. else: 272. realhost, rest = splithost(rest) 273. if realhost: 274. user_passwd, realhost = splituser(realhost) 275. if user_passwd: 276. selector = "%s://%s%s" % (urltype, realhost, rest) 277. if proxy_bypass(realhost): 278. host = realhost 279. 280. #print "proxy via http:", host, selector 281. if not host: raise IOError, ('http error', 'no host given') 282. if user_passwd: 283. import base64 284. auth = base64.encodestring(user_passwd).strip() 285. else: 286. auth = None 287. h = httplib.HTTP(host) 288. if data is not None: 289. h.putrequest('POST', selector) 290. h.putheader('Content-type', 'application/x-www-form-urlencoded') 291. h.putheader('Content-length', '%d' % len(data)) 292. else: 293. h.putrequest('GET', selector) 294. if auth: h.putheader('Authorization', 'Basic %s' % auth) 295. if realhost: h.putheader('Host', realhost) 296. for args in self.addheaders: h.putheader(*args) 297. h.endheaders() 298. 
if data is not None: 299. h.send(data) 300. errcode, errmsg, headers = h.getreply() 301. fp = h.getfile() 302. if errcode == 200: 303. return addinfourl(fp, headers, "http:" + url) 304. else: 305. if data is None: 306. return self.http_error(url, fp, errcode, errmsg, headers) 307. else: 308. return self.http_error(url, fp, errcode, errmsg, headers, data) 309. 310. def http_error(self, url, fp, errcode, errmsg, headers, data=None): 311. """Handle http errors. 312. Derived class can override this, or provide specific handlers 313. named http_error_DDD where DDD is the 3-digit error code.""" 314. # First check if there's a specific handler for this error 315. name = 'http_error_%d' % errcode 316. if hasattr(self, name): 317. method = getattr(self, name) 318. if data is None: 319. result = method(url, fp, errcode, errmsg, headers) 320. else: 321. result = method(url, fp, errcode, errmsg, headers, data) 322. if result: return result 323. return self.http_error_default(url, fp, errcode, errmsg, headers) 324. 325. def http_error_default(self, url, fp, errcode, errmsg, headers): 326. """Default error handler: close the connection and raise IOError.""" 327. void = fp.read() 328. fp.close() 329. raise IOError, ('http error', errcode, errmsg, headers) 330. 331. if hasattr(socket, "ssl"): 332. def open_https(self, url, data=None): 333. """Use HTTPS protocol.""" 334. import httplib 335. user_passwd = None 336. if isinstance(url, str): 337. host, selector = splithost(url) 338. if host: 339. user_passwd, host = splituser(host) 340. host = unquote(host) 341. realhost = host 342. else: 343. host, selector = url 344. urltype, rest = splittype(selector) 345. url = rest 346. user_passwd = None 347. if urltype.lower() != 'https': 348. realhost = None 349. else: 350. realhost, rest = splithost(rest) 351. if realhost: 352. user_passwd, realhost = splituser(realhost) 353. if user_passwd: 354. selector = "%s://%s%s" % (urltype, realhost, rest) 355. 
#print "proxy via https:", host, selector 356. if not host: raise IOError, ('https error', 'no host given') 357. if user_passwd: 358. import base64 359. auth = base64.encodestring(user_passwd).strip() 360. else: 361. auth = None 362. h = httplib.HTTPS(host, 0, 363. key_file=self.key_file, 364. cert_file=self.cert_file) 365. if data is not None: 366. h.putrequest('POST', selector) 367. h.putheader('Content-type', 368. 'application/x-www-form-urlencoded') 369. h.putheader('Content-length', '%d' % len(data)) 370. else: 371. h.putrequest('GET', selector) 372. if auth: h.putheader('Authorization: Basic %s' % auth) 373. if realhost: h.putheader('Host', realhost) 374. for args in self.addheaders: h.putheader(*args) 375. h.endheaders() 376. if data is not None: 377. h.send(data) 378. errcode, errmsg, headers = h.getreply() 379. fp = h.getfile() 380. if errcode == 200: 381. return addinfourl(fp, headers, "https:" + url) 382. else: 383. if data is None: 384. return self.http_error(url, fp, errcode, errmsg, headers) 385. else: 386. return self.http_error(url, fp, errcode, errmsg, headers, 387. data) 388. 389. def open_gopher(self, url): 390. """Use Gopher protocol.""" 391. import gopherlib 392. host, selector = splithost(url) 393. if not host: raise IOError, ('gopher error', 'no host given') 394. host = unquote(host) 395. type, selector = splitgophertype(selector) 396. selector, query = splitquery(selector) 397. selector = unquote(selector) 398. if query: 399. query = unquote(query) 400. fp = gopherlib.send_query(selector, query, host) 401. else: 402. fp = gopherlib.send_selector(selector, host) 403. return addinfourl(fp, noheaders(), "gopher:" + url) 404. 405. def open_file(self, url): 406. """Use local file or FTP depending on form of URL.""" 407. if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/': 408. return self.open_ftp(url) 409. else: 410. return self.open_local_file(url) 411. 412. def open_local_file(self, url): 413. 
"""Use local file.""" 414. import mimetypes, mimetools, rfc822, StringIO 415. host, file = splithost(url) 416. localname = url2pathname(file) 417. try: 418. stats = os.stat(localname) 419. except OSError, e: 420. raise IOError(e.errno, e.strerror, e.filename) 421. size = stats.st_size 422. modified = rfc822.formatdate(stats.st_mtime) 423. mtype = mimetypes.guess_type(url)[0] 424. headers = mimetools.Message(StringIO.StringIO( 425. 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' % 426. (mtype or 'text/plain', size, modified))) 427. if not host: 428. urlfile = file 429. if file[:1] == '/': 430. urlfile = 'file://' + file 431. return addinfourl(open(localname, 'rb'), 432. headers, urlfile) 433. host, port = splitport(host) 434. if not port \ 435. and socket.gethostbyname(host) in (localhost(), thishost()): 436. urlfile = file 437. if file[:1] == '/': 438. urlfile = 'file://' + file 439. return addinfourl(open(localname, 'rb'), 440. headers, urlfile) 441. raise IOError, ('local file error', 'not on local host') 442. 443. def open_ftp(self, url): 444. """Use FTP protocol.""" 445. import mimetypes, mimetools, StringIO 446. host, path = splithost(url) 447. if not host: raise IOError, ('ftp error', 'no host given') 448. host, port = splitport(host) 449. user, host = splituser(host) 450. if user: user, passwd = splitpasswd(user) 451. else: passwd = None 452. host = unquote(host) 453. user = unquote(user or '') 454. passwd = unquote(passwd or '') 455. host = socket.gethostbyname(host) 456. if not port: 457. import ftplib 458. port = ftplib.FTP_PORT 459. else: 460. port = int(port) 461. path, attrs = splitattr(path) 462. path = unquote(path) 463. dirs = path.split('/') 464. dirs, file = dirs[:-1], dirs[-1] 465. if dirs and not dirs[0]: dirs = dirs[1:] 466. if dirs and not dirs[0]: dirs[0] = '/' 467. key = user, host, port, '/'.join(dirs) 468. # XXX thread unsafe! 469. if len(self.ftpcache) > MAXFTPCACHE: 470. # Prune the cache, rather arbitrarily 471. 
for k in self.ftpcache.keys(): 472. if k != key: 473. v = self.ftpcache[k] 474. del self.ftpcache[k] 475. v.close() 476. try: 477. if not key in self.ftpcache: 478. self.ftpcache[key] = \ 479. ftpwrapper(user, passwd, host, port, dirs) 480. if not file: type = 'D' 481. else: type = 'I' 482. for attr in attrs: 483. attr, value = splitvalue(attr) 484. if attr.lower() == 'type' and \ 485. value in ('a', 'A', 'i', 'I', 'd', 'D'): 486. type = value.upper() 487. (fp, retrlen) = self.ftpcache[key].retrfile(file, type) 488. mtype = mimetypes.guess_type("ftp:" + url)[0] 489. headers = "" 490. if mtype: 491. headers += "Content-Type: %s\n" % mtype 492. if retrlen is not None and retrlen >= 0: 493. headers += "Content-Length: %d\n" % retrlen 494. headers = mimetools.Message(StringIO.StringIO(headers)) 495. return addinfourl(fp, headers, "ftp:" + url) 496. except ftperrors(), msg: 497. raise IOError, ('ftp error', msg), sys.exc_info()[2] 498. 499. def open_data(self, url, data=None): 500. """Use "data" URL.""" 501. # ignore POSTed data 502. # 503. # syntax of data URLs: 504. # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data 505. # mediatype := [ type "/" subtype ] *( ";" parameter ) 506. # data := *urlchar 507. # parameter := attribute "=" value 508. import StringIO, mimetools 509. try: 510. [type, data] = url.split(',', 1) 511. except ValueError: 512. raise IOError, ('data error', 'bad data URL') 513. if not type: 514. type = 'text/plain;charset=US-ASCII' 515. semi = type.rfind(';') 516. if semi >= 0 and '=' not in type[semi:]: 517. encoding = type[semi+1:] 518. type = type[:semi] 519. else: 520. encoding = '' 521. msg = [] 522. msg.append('Date: %s'%time.strftime('%a, %d %b %Y %T GMT', 523. time.gmtime(time.time()))) 524. msg.append('Content-type: %s' % type) 525. if encoding == 'base64': 526. import base64 527. data = base64.decodestring(data) 528. else: 529. data = unquote(data) 530. msg.append('Content-length: %d' % len(data)) 531. msg.append('') 532. 
msg.append(data) 533. msg = '\n'.join(msg) 534. f = StringIO.StringIO(msg) 535. headers = mimetools.Message(f, 0) 536. f.fileno = None # needed for addinfourl 537. return addinfourl(f, headers, url) 538. 539. 540. class FancyURLopener(URLopener): 541. """Derived class with handlers for errors we can handle (perhaps).""" 542. 543. def __init__(self, *args, **kwargs): 544. URLopener.__init__(self, *args, **kwargs) 545. self.auth_cache = {} 546. self.tries = 0 547. self.maxtries = 10 548. 549. def http_error_default(self, url, fp, errcode, errmsg, headers): 550. """Default error handling -- don't raise an exception.""" 551. return addinfourl(fp, headers, "http:" + url) 552. 553. def http_error_302(self, url, fp, errcode, errmsg, headers, data=None): 554. """Error 302 -- relocated (temporarily).""" 555. self.tries += 1 556. if self.maxtries and self.tries >= self.maxtries: 557. if hasattr(self, "http_error_500"): 558. meth = self.http_error_500 559. else: 560. meth = self.http_error_default 561. self.tries = 0 562. return meth(url, fp, 500, 563. "Internal Server Error: Redirect Recursion", headers) 564. result = self.redirect_internal(url, fp, errcode, errmsg, headers, 565. data) 566. self.tries = 0 567. return result 568. 569. def redirect_internal(self, url, fp, errcode, errmsg, headers, data): 570. if 'location' in headers: 571. newurl = headers['location'] 572. elif 'uri' in headers: 573. newurl = headers['uri'] 574. else: 575. return 576. void = fp.read() 577. fp.close() 578. # In case the server sent a relative URL, join with original: 579. newurl = basejoin(self.type + ":" + url, newurl) 580. return self.open(newurl) 581. 582. def http_error_301(self, url, fp, errcode, errmsg, headers, data=None): 583. """Error 301 -- also relocated (permanently).""" 584. return self.http_error_302(url, fp, errcode, errmsg, headers, data) 585. 586. def http_error_303(self, url, fp, errcode, errmsg, headers, data=None): 587. 
"""Error 303 -- also relocated (essentially identical to 302).""" 588. return self.http_error_302(url, fp, errcode, errmsg, headers, data) 589. 590. def http_error_307(self, url, fp, errcode, errmsg, headers, data=None): 591. """Error 307 -- relocated, but turn POST into error.""" 592. if data is None: 593. return self.http_error_302(url, fp, errcode, errmsg, headers, data) 594. else: 595. return self.http_error_default(url, fp, errcode, errmsg, headers) 596. 597. def http_error_401(self, url, fp, errcode, errmsg, headers, data=None): 598. """Error 401 -- authentication required. 599. See this URL for a description of the basic authentication scheme: 600. http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt""" 601. if not 'www-authenticate' in headers: 602. URLopener.http_error_default(self, url, fp, 603. errcode, errmsg, headers) 604. stuff = headers['www-authenticate'] 605. import re 606. match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff) 607. if not match: 608. URLopener.http_error_default(self, url, fp, 609. errcode, errmsg, headers) 610. scheme, realm = match.groups() 611. if scheme.lower() != 'basic': 612. URLopener.http_error_default(self, url, fp, 613. errcode, errmsg, headers) 614. name = 'retry_' + self.type + '_basic_auth' 615. if data is None: 616. return getattr(self,name)(url, realm) 617. else: 618. return getattr(self,name)(url, realm, data) 619. 620. def retry_http_basic_auth(self, url, realm, data=None): 621. host, selector = splithost(url) 622. i = host.find('@') + 1 623. host = host[i:] 624. user, passwd = self.get_user_passwd(host, realm, i) 625. if not (user or passwd): return None 626. host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host 627. newurl = 'http://' + host + selector 628. if data is None: 629. return self.open(newurl) 630. else: 631. return self.open(newurl, data) 632. 633. def retry_https_basic_auth(self, url, realm, data=None): 634. host, selector = splithost(url) 635. 
i = host.find('@') + 1 636. host = host[i:] 637. user, passwd = self.get_user_passwd(host, realm, i) 638. if not (user or passwd): return None 639. host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host 640. newurl = '//' + host + selector 641. return self.open_https(newurl, data) 642. 643. def get_user_passwd(self, host, realm, clear_cache = 0): 644. key = realm + '@' + host.lower() 645. if key in self.auth_cache: 646. if clear_cache: 647. del self.auth_cache[key] 648. else: 649. return self.auth_cache[key] 650. user, passwd = self.prompt_user_passwd(host, realm) 651. if user or passwd: self.auth_cache[key] = (user, passwd) 652. return user, passwd 653. 654. def prompt_user_passwd(self, host, realm): 655. """Override this in a GUI environment!""" 656. import getpass 657. try: 658. user = raw_input("Enter username for %s at %s: " % (realm, 659. host)) 660. passwd = getpass.getpass("Enter password for %s in %s at %s: " % 661. (user, realm, host)) 662. return user, passwd 663. except KeyboardInterrupt: 664. print 665. return None, None 666. 667. 668. # Utility functions 669. 670. _localhost = None 671. def localhost(): 672. """Return the IP address of the magic hostname 'localhost'.""" 673. global _localhost 674. if _localhost is None: 675. _localhost = socket.gethostbyname('localhost') 676. return _localhost 677. 678. _thishost = None 679. def thishost(): 680. """Return the IP address of the current host.""" 681. global _thishost 682. if _thishost is None: 683. _thishost = socket.gethostbyname(socket.gethostname()) 684. return _thishost 685. 686. _ftperrors = None 687. def ftperrors(): 688. """Return the set of errors raised by the FTP class.""" 689. global _ftperrors 690. if _ftperrors is None: 691. import ftplib 692. _ftperrors = ftplib.all_errors 693. return _ftperrors 694. 695. _noheaders = None 696. def noheaders(): 697. """Return an empty mimetools.Message object.""" 698. global _noheaders 699. if _noheaders is None: 700. import mimetools 701. 
import StringIO 702. _noheaders = mimetools.Message(StringIO.StringIO(), 0) 703. _noheaders.fp.close() # Recycle file descriptor 704. return _noheaders 705. 706. 707. # Utility classes 708. 709. class ftpwrapper: 710. """Class used by open_ftp() for cache of open FTP connections.""" 711. 712. def __init__(self, user, passwd, host, port, dirs): 713. self.user = user 714. self.passwd = passwd 715. self.host = host 716. self.port = port 717. self.dirs = dirs 718. self.init() 719. 720. def init(self): 721. import ftplib 722. self.busy = 0 723. self.ftp = ftplib.FTP() 724. self.ftp.connect(self.host, self.port) 725. self.ftp.login(self.user, self.passwd) 726. for dir in self.dirs: 727. self.ftp.cwd(dir) 728. 729. def retrfile(self, file, type): 730. import ftplib 731. self.endtransfer() 732. if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1 733. else: cmd = 'TYPE ' + type; isdir = 0 734. try: 735. self.ftp.voidcmd(cmd) 736. except ftplib.all_errors: 737. self.init() 738. self.ftp.voidcmd(cmd) 739. conn = None 740. if file and not isdir: 741. # Use nlst to see if the file exists at all 742. try: 743. self.ftp.nlst(file) 744. except ftplib.error_perm, reason: 745. raise IOError, ('ftp error', reason), sys.exc_info()[2] 746. # Restore the transfer mode! 747. self.ftp.voidcmd(cmd) 748. # Try to retrieve as a file 749. try: 750. cmd = 'RETR ' + file 751. conn = self.ftp.ntransfercmd(cmd) 752. except ftplib.error_perm, reason: 753. if str(reason)[:3] != '550': 754. raise IOError, ('ftp error', reason), sys.exc_info()[2] 755. if not conn: 756. # Set transfer mode to ASCII! 757. self.ftp.voidcmd('TYPE A') 758. # Try a directory listing 759. if file: cmd = 'LIST ' + file 760. else: cmd = 'LIST' 761. conn = self.ftp.ntransfercmd(cmd) 762. self.busy = 1 763. # Pass back both a suitably decorated object and a retrieval length 764. return (addclosehook(conn[0].makefile('rb'), 765. self.endtransfer), conn[1]) 766. def endtransfer(self): 767. if not self.busy: 768. return 769. 
self.busy = 0 770. try: 771. self.ftp.voidresp() 772. except ftperrors(): 773. pass 774. 775. def close(self): 776. self.endtransfer() 777. try: 778. self.ftp.close() 779. except ftperrors(): 780. pass 781. 782. class addbase: 783. """Base class for addinfo and addclosehook.""" 784. 785. def __init__(self, fp): 786. self.fp = fp 787. self.read = self.fp.read 788. self.readline = self.fp.readline 789. if hasattr(self.fp, "readlines"): self.readlines = self.fp.readlines 790. if hasattr(self.fp, "fileno"): self.fileno = self.fp.fileno 791. if hasattr(self.fp, "__iter__"): 792. self.__iter__ = self.fp.__iter__ 793. if hasattr(self.fp, "next"): 794. self.next = self.fp.next 795. 796. def __repr__(self): 797. return '<%s at %s whose fp = %s>' % (self.__class__.__name__, 798. `id(self)`, `self.fp`) 799. 800. def close(self): 801. self.read = None 802. self.readline = None 803. self.readlines = None 804. self.fileno = None 805. if self.fp: self.fp.close() 806. self.fp = None 807. 808. class addclosehook(addbase): 809. """Class to add a close hook to an open file.""" 810. 811. def __init__(self, fp, closehook, *hookargs): 812. addbase.__init__(self, fp) 813. self.closehook = closehook 814. self.hookargs = hookargs 815. 816. def close(self): 817. addbase.close(self) 818. if self.closehook: 819. self.closehook(*self.hookargs) 820. self.closehook = None 821. self.hookargs = None 822. 823. class addinfo(addbase): 824. """class to add an info() method to an open file.""" 825. 826. def __init__(self, fp, headers): 827. addbase.__init__(self, fp) 828. self.headers = headers 829. 830. def info(self): 831. return self.headers 832. 833. class addinfourl(addbase): 834. """class to add info() and geturl() methods to an open file.""" 835. 836. def __init__(self, fp, headers, url): 837. addbase.__init__(self, fp) 838. self.headers = headers 839. self.url = url 840. 841. def info(self): 842. return self.headers 843. 844. def geturl(self): 845. return self.url 846. 847. 848. 
def basejoin(base, url):
    """Utility to combine a URL with a base URL to form a new URL.

    If *url* is already absolute (has a scheme) it is returned
    unchanged; otherwise scheme, host and leading path components are
    inherited from *base*, with '../' components resolved against the
    base path (roughly per RFC 1808).
    """
    type, path = splittype(url)
    if type:
        # if url is complete (i.e., it contains a type), return it
        return url
    host, path = splithost(path)
    type, basepath = splittype(base) # inherit type from base
    if host:
        # if url contains host, just inherit type
        if type: return type + '://' + host + path
        else:
            # no type inherited, so url must have started with //
            # just return it
            return url
    host, basepath = splithost(basepath) # inherit host
    basepath, basetag = splittag(basepath) # remove extraneous cruft
    basepath, basequery = splitquery(basepath) # idem
    if path[:1] != '/':
        # non-absolute path name
        if path[:1] in ('#', '?'):
            # path is just a tag or query, attach to basepath
            i = len(basepath)
        else:
            # else replace last component
            i = basepath.rfind('/')
        if i < 0:
            # basepath not absolute
            if host:
                # host present, make absolute
                basepath = '/'
            else:
                # else keep non-absolute
                basepath = ''
        else:
            # remove last file component
            basepath = basepath[:i+1]
        # Interpret ../ (important because of symlinks)
        while basepath and path[:3] == '../':
            path = path[3:]
            i = basepath[:-1].rfind('/')
            if i > 0:
                basepath = basepath[:i+1]
            elif i == 0:
                basepath = '/'
                break
            else:
                basepath = ''

        path = basepath + path
    if host and path and path[0] != '/':
        path = '/' + path
    if type and host: return type + '://' + host + path
    elif type: return type + ':' + path
    elif host: return '//' + host + path # don't know what this means
    else: return path


# Utilities to parse URLs (most of these return None for missing parts):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
# splittype('type:opaquestring') --> 'type', 'opaquestring'
# splithost('//host[:port]/path') --> 'host[:port]', '/path' 910. # splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]' 911. # splitpasswd('user:passwd') -> 'user', 'passwd' 912. # splitport('host:port') --> 'host', 'port' 913. # splitquery('/path?query') --> '/path', 'query' 914. # splittag('/path#tag') --> '/path', 'tag' 915. # splitattr('/path;attr1=value1;attr2=value2;...') -> 916. # '/path', ['attr1=value1', 'attr2=value2', ...] 917. # splitvalue('attr=value') --> 'attr', 'value' 918. # splitgophertype('/Xselector') --> 'X', 'selector' 919. # unquote('abc%20def') -> 'abc def' 920. # quote('abc def') -> 'abc%20def') 921. 922. try: 923. unicode 924. except NameError: 925. def _is_unicode(x): 926. return 0 927. else: 928. def _is_unicode(x): 929. return isinstance(x, unicode) 930. 931. def toBytes(url): 932. """toBytes(u"URL") --> 'URL'.""" 933. # Most URL schemes require ASCII. If that changes, the conversion 934. # can be relaxed 935. if _is_unicode(url): 936. try: 937. url = url.encode("ASCII") 938. except UnicodeError: 939. raise UnicodeError("URL " + repr(url) + 940. " contains non-ASCII characters") 941. return url 942. 943. def unwrap(url): 944. """unwrap('<URL:type://host/path>') --> 'type://host/path'.""" 945. url = url.strip() 946. if url[:1] == '<' and url[-1:] == '>': 947. url = url[1:-1].strip() 948. if url[:4] == 'URL:': url = url[4:].strip() 949. return url 950. 951. _typeprog = None 952. def splittype(url): 953. """splittype('type:opaquestring') --> 'type', 'opaquestring'.""" 954. global _typeprog 955. if _typeprog is None: 956. import re 957. _typeprog = re.compile('^([^/:]+):') 958. 959. match = _typeprog.match(url) 960. if match: 961. scheme = match.group(1) 962. return scheme.lower(), url[len(scheme) + 1:] 963. return None, url 964. 965. _hostprog = None 966. def splithost(url): 967. """splithost('//host[:port]/path') --> 'host[:port]', '/path'.""" 968. global _hostprog 969. if _hostprog is None: 970. import re 971. 
_hostprog = re.compile('^//([^/]*)(.*)$') 972. 973. match = _hostprog.match(url) 974. if match: return match.group(1, 2) 975. return None, url 976. 977. _userprog = None 978. def splituser(host): 979. """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'.""" 980. global _userprog 981. if _userprog is None: 982. import re 983. _userprog = re.compile('^(.*)@(.*)$') 984. 985. match = _userprog.match(host) 986. if match: return map(unquote, match.group(1, 2)) 987. return None, host 988. 989. _passwdprog = None 990. def splitpasswd(user): 991. """splitpasswd('user:passwd') -> 'user', 'passwd'.""" 992. global _passwdprog 993. if _passwdprog is None: 994. import re 995. _passwdprog = re.compile('^([^:]*):(.*)$') 996. 997. match = _passwdprog.match(user) 998. if match: return match.group(1, 2) 999. return user, None 1000. 1001. # splittag('/path#tag') --> '/path', 'tag' 1002. _portprog = None 1003. def splitport(host): 1004. """splitport('host:port') --> 'host', 'port'.""" 1005. global _portprog 1006. if _portprog is None: 1007. import re 1008. _portprog = re.compile('^(.*):([0-9]+)$') 1009. 1010. match = _portprog.match(host) 1011. if match: return match.group(1, 2) 1012. return host, None 1013. 1014. _nportprog = None 1015. def splitnport(host, defport=-1): 1016. """Split host and port, returning numeric port. 1017. Return given default port if no ':' found; defaults to -1. 1018. Return numerical port if a valid number are found after ':'. 1019. Return None if ':' but not a valid number.""" 1020. global _nportprog 1021. if _nportprog is None: 1022. import re 1023. _nportprog = re.compile('^(.*):(.*)$') 1024. 1025. match = _nportprog.match(host) 1026. if match: 1027. host, port = match.group(1, 2) 1028. try: 1029. if not port: raise ValueError, "no digits" 1030. nport = int(port) 1031. except ValueError: 1032. nport = None 1033. return host, nport 1034. return host, defport 1035. 1036. _queryprog = None 1037. def splitquery(url): 1038. 
"""splitquery('/path?query') --> '/path', 'query'.""" 1039. global _queryprog 1040. if _queryprog is None: 1041. import re 1042. _queryprog = re.compile('^(.*)\?([^?]*)$') 1043. 1044. match = _queryprog.match(url) 1045. if match: return match.group(1, 2) 1046. return url, None 1047. 1048. _tagprog = None 1049. def splittag(url): 1050. """splittag('/path#tag') --> '/path', 'tag'.""" 1051. global _tagprog 1052. if _tagprog is None: 1053. import re 1054. _tagprog = re.compile('^(.*)#([^#]*)$') 1055. 1056. match = _tagprog.match(url) 1057. if match: return match.group(1, 2) 1058. return url, None 1059. 1060. def splitattr(url): 1061. """splitattr('/path;attr1=value1;attr2=value2;...') -> 1062. '/path', ['attr1=value1', 'attr2=value2', ...].""" 1063. words = url.split(';') 1064. return words[0], words[1:] 1065. 1066. _valueprog = None 1067. def splitvalue(attr): 1068. """splitvalue('attr=value') --> 'attr', 'value'.""" 1069. global _valueprog 1070. if _valueprog is None: 1071. import re 1072. _valueprog = re.compile('^([^=]*)=(.*)$') 1073. 1074. match = _valueprog.match(attr) 1075. if match: return match.group(1, 2) 1076. return attr, None 1077. 1078. def splitgophertype(selector): 1079. """splitgophertype('/Xselector') --> 'X', 'selector'.""" 1080. if selector[:1] == '/' and selector[1:2]: 1081. return selector[1], selector[2:] 1082. return None, selector 1083. 1084. def unquote(s): 1085. """unquote('abc%20def') -> 'abc def'.""" 1086. mychr = chr 1087. myatoi = int 1088. list = s.split('%') 1089. res = [list[0]] 1090. myappend = res.append 1091. del list[0] 1092. for item in list: 1093. if item[1:2]: 1094. try: 1095. myappend(mychr(myatoi(item[:2], 16)) 1096. + item[2:]) 1097. except ValueError: 1098. myappend('%' + item) 1099. else: 1100. myappend('%' + item) 1101. return "".join(res) 1102. 1103. def unquote_plus(s): 1104. """unquote('%7e/abc+def') -> '~/abc def'""" 1105. if '+' in s: 1106. # replace '+' with ' ' 1107. s = ' '.join(s.split('+')) 1108. 
return unquote(s) 1109. 1110. always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ' 1111. 'abcdefghijklmnopqrstuvwxyz' 1112. '0123456789' '_.-') 1113. 1114. _fast_safe_test = always_safe + '/' 1115. _fast_safe = None 1116. 1117. def _fast_quote(s): 1118. global _fast_safe 1119. if _fast_safe is None: 1120. _fast_safe = {} 1121. for c in _fast_safe_test: 1122. _fast_safe[c] = c 1123. res = list(s) 1124. for i in range(len(res)): 1125. c = res[i] 1126. if not c in _fast_safe: 1127. res[i] = '%%%02X' % ord(c) 1128. return ''.join(res) 1129. 1130. def quote(s, safe = '/'): 1131. """quote('abc def') -> 'abc%20def' 1132. 1133. Each part of a URL, e.g. the path info, the query, etc., has a 1134. different set of reserved characters that must be quoted. 1135. 1136. RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists 1137. the following reserved characters. 1138. 1139. reserved = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" | 1140. "$" | "," 1141. 1142. Each of these characters is reserved in some component of a URL, 1143. but not necessarily in all of them. 1144. 1145. By default, the quote function is intended for quoting the path 1146. section of a URL. Thus, it will not encode '/'. This character 1147. is reserved, but in typical usage the quote function is being 1148. called on a path where the existing slash characters are used as 1149. reserved characters. 1150. """ 1151. safe = always_safe + safe 1152. if _fast_safe_test == safe: 1153. return _fast_quote(s) 1154. res = list(s) 1155. for i in range(len(res)): 1156. c = res[i] 1157. if c not in safe: 1158. res[i] = '%%%02X' % ord(c) 1159. return ''.join(res) 1160. 1161. def quote_plus(s, safe = ''): 1162. """Quote the query fragment of a URL; replacing ' ' with '+'""" 1163. if ' ' in s: 1164. l = s.split(' ') 1165. for i in range(len(l)): 1166. l[i] = quote(l[i], safe) 1167. return '+'.join(l) 1168. else: 1169. return quote(s, safe) 1170. 1171. def urlencode(query,doseq=0): 1172. 
"""Encode a sequence of two-element tuples or dictionary into a URL query string. 1173. 1174. If any values in the query arg are sequences and doseq is true, each 1175. sequence element is converted to a separate parameter. 1176. 1177. If the query arg is a sequence of two-element tuples, the order of the 1178. parameters in the output will match the order of parameters in the 1179. input. 1180. """ 1181. 1182. if hasattr(query,"items"): 1183. # mapping objects 1184. query = query.items() 1185. else: 1186. # it's a bother at times that strings and string-like objects are 1187. # sequences... 1188. try: 1189. # non-sequence items should not work with len() 1190. # non-empty strings will fail this 1191. if len(query) and not isinstance(query[0], tuple): 1192. raise TypeError 1193. # zero-length sequences of all types will get here and succeed, 1194. # but that's a minor nit - since the original implementation 1195. # allowed empty dicts that type of behavior probably should be 1196. # preserved for consistency 1197. except TypeError: 1198. ty,va,tb = sys.exc_info() 1199. raise TypeError, "not a valid non-string sequence or mapping object", tb 1200. 1201. l = [] 1202. if not doseq: 1203. # preserve old behavior 1204. for k, v in query: 1205. k = quote_plus(str(k)) 1206. v = quote_plus(str(v)) 1207. l.append(k + '=' + v) 1208. else: 1209. for k, v in query: 1210. k = quote_plus(str(k)) 1211. if isinstance(v, str): 1212. v = quote_plus(v) 1213. l.append(k + '=' + v) 1214. elif _is_unicode(v): 1215. # is there a reasonable way to convert to ASCII? 1216. # encode generates a string, but "replace" or "ignore" 1217. # lose information and "strict" can raise UnicodeError 1218. v = quote_plus(v.encode("ASCII","replace")) 1219. l.append(k + '=' + v) 1220. else: 1221. try: 1222. # is this a sufficient test for sequence-ness? 1223. x = len(v) 1224. except TypeError: 1225. # not a sequence 1226. v = quote_plus(str(v)) 1227. l.append(k + '=' + v) 1228. else: 1229. 
# loop over the sequence 1230. for elt in v: 1231. l.append(k + '=' + quote_plus(str(elt))) 1232. return '&'.join(l) 1233. 1234. # Proxy handling 1235. def getproxies_environment(): 1236. """Return a dictionary of scheme -> proxy server URL mappings. 1237. 1238. Scan the environment for variables named <scheme>_proxy; 1239. this seems to be the standard convention. If you need a 1240. different way, you can pass a proxies dictionary to the 1241. [Fancy]URLopener constructor. 1242. 1243. """ 1244. proxies = {} 1245. for name, value in os.environ.items(): 1246. name = name.lower() 1247. if value and name[-6:] == '_proxy': 1248. proxies[name[:-6]] = value 1249. return proxies 1250. 1251. if os.name == 'mac': 1252. def getproxies(): 1253. """Return a dictionary of scheme -> proxy server URL mappings. 1254. 1255. By convention the mac uses Internet Config to store 1256. proxies. An HTTP proxy, for instance, is stored under 1257. the HttpProxy key. 1258. 1259. """ 1260. try: 1261. import ic 1262. except ImportError: 1263. return {} 1264. 1265. try: 1266. config = ic.IC() 1267. except ic.error: 1268. return {} 1269. proxies = {} 1270. # HTTP: 1271. if 'UseHTTPProxy' in config and config['UseHTTPProxy']: 1272. try: 1273. value = config['HTTPProxyHost'] 1274. except ic.error: 1275. pass 1276. else: 1277. proxies['http'] = 'http://%s' % value 1278. # FTP: XXXX To be done. 1279. # Gopher: XXXX To be done. 1280. return proxies 1281. 1282. def proxy_bypass(x): 1283. return 0 1284. 1285. elif os.name == 'nt': 1286. def getproxies_registry(): 1287. """Return a dictionary of scheme -> proxy server URL mappings. 1288. 1289. Win32 uses the registry to store proxies. 1290. 1291. """ 1292. proxies = {} 1293. try: 1294. import _winreg 1295. except ImportError: 1296. # Std module, so should be around - but you never know! 1297. return proxies 1298. try: 1299. internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 1300. 
r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') 1301. proxyEnable = _winreg.QueryValueEx(internetSettings, 1302. 'ProxyEnable')[0] 1303. if proxyEnable: 1304. # Returned as Unicode but problems if not converted to ASCII 1305. proxyServer = str(_winreg.QueryValueEx(internetSettings, 1306. 'ProxyServer')[0]) 1307. if '=' in proxyServer: 1308. # Per-protocol settings 1309. for p in proxyServer.split(';'): 1310. protocol, address = p.split('=', 1) 1311. # See if address has a type:// prefix 1312. import re 1313. if not re.match('^([^/:]+)://', address): 1314. address = '%s://%s' % (protocol, address) 1315. proxies[protocol] = address 1316. else: 1317. # Use one setting for all protocols 1318. if proxyServer[:5] == 'http:': 1319. proxies['http'] = proxyServer 1320. else: 1321. proxies['http'] = 'http://%s' % proxyServer 1322. proxies['ftp'] = 'ftp://%s' % proxyServer 1323. internetSettings.Close() 1324. except (WindowsError, ValueError, TypeError): 1325. # Either registry key not found etc, or the value in an 1326. # unexpected format. 1327. # proxies already set up to be empty so nothing to do 1328. pass 1329. return proxies 1330. 1331. def getproxies(): 1332. """Return a dictionary of scheme -> proxy server URL mappings. 1333. 1334. Returns settings gathered from the environment, if specified, 1335. or the registry. 1336. 1337. """ 1338. return getproxies_environment() or getproxies_registry() 1339. 1340. def proxy_bypass(host): 1341. try: 1342. import _winreg 1343. import re 1344. except ImportError: 1345. # Std modules, so should be around - but you never know! 1346. return 0 1347. try: 1348. internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, 1349. r'Software\Microsoft\Windows\CurrentVersion\Internet Settings') 1350. proxyEnable = _winreg.QueryValueEx(internetSettings, 1351. 'ProxyEnable')[0] 1352. proxyOverride = str(_winreg.QueryValueEx(internetSettings, 1353. 'ProxyOverride')[0]) 1354. 
# ^^^^ Returned as Unicode but problems if not converted to ASCII 1355. except WindowsError: 1356. return 0 1357. if not proxyEnable or not proxyOverride: 1358. return 0 1359. # try to make a host list from name and IP address. 1360. host = [host] 1361. try: 1362. addr = socket.gethostbyname(host[0]) 1363. if addr != host: 1364. host.append(addr) 1365. except socket.error: 1366. pass 1367. # make a check value list from the registry entry: replace the 1368. # '<local>' string by the localhost entry and the corresponding 1369. # canonical entry. 1370. proxyOverride = proxyOverride.split(';') 1371. i = 0 1372. while i < len(proxyOverride): 1373. if proxyOverride[i] == '<local>': 1374. proxyOverride[i:i+1] = ['localhost', 1375. '127.0.0.1', 1376. socket.gethostname(), 1377. socket.gethostbyname( 1378. socket.gethostname())] 1379. i += 1 1380. # print proxyOverride 1381. # now check if we match one of the registry values. 1382. for test in proxyOverride: 1383. test = test.replace(".", r"\.") # mask dots 1384. test = test.replace("*", r".*") # change glob sequence 1385. test = test.replace("?", r".") # change glob char 1386. for val in host: 1387. # print "%s <--> %s" %( test, val ) 1388. if re.match(test, val, re.I): 1389. return 1 1390. return 0 1391. 1392. else: 1393. # By default use environment variables 1394. getproxies = getproxies_environment 1395. 1396. def proxy_bypass(host): 1397. return 0 1398. 1399. # Test and time quote() and unquote() 1400. def test1(): 1401. s = '' 1402. for i in range(256): s = s + chr(i) 1403. s = s*4 1404. t0 = time.time() 1405. qs = quote(s) 1406. uqs = unquote(qs) 1407. t1 = time.time() 1408. if uqs != s: 1409. print 'Wrong!' 1410. print `s` 1411. print `qs` 1412. print `uqs` 1413. print round(t1 - t0, 3), 'sec' 1414. 1415. 1416. def reporthook(blocknum, blocksize, totalsize): 1417. # Report during remote transfers 1418. print "Block number: %d, Block size: %d, Total size: %d" % ( 1419. blocknum, blocksize, totalsize) 1420. 1421. 
# Test program 1422. def test(args=[]): 1423. if not args: 1424. args = [ 1425. '/etc/passwd', 1426. 'file:/etc/passwd', 1427. 'file://localhost/etc/passwd', 1428. 'ftp://ftp.python.org/pub/python/README', 1429. ## 'gopher://gopher.micro.umn.edu/1/', 1430. 'http://www.python.org/index.html', 1431. ] 1432. if hasattr(URLopener, "open_https"): 1433. args.append('https://synergy.as.cmu.edu/~geek/') 1434. try: 1435. for url in args: 1436. print '-'*10, url, '-'*10 1437. fn, h = urlretrieve(url, None, reporthook) 1438. print fn 1439. if h: 1440. print '======' 1441. for k in h.keys(): print k + ':', h[k] 1442. print '======' 1443. fp = open(fn, 'rb') 1444. data = fp.read() 1445. del fp 1446. if '\r' in data: 1447. table = string.maketrans("", "") 1448. data = data.translate(table, "\r") 1449. print data 1450. fn, h = None, None 1451. print '-'*40 1452. finally: 1453. urlcleanup() 1454. 1455. def main(): 1456. import getopt, sys 1457. try: 1458. opts, args = getopt.getopt(sys.argv[1:], "th") 1459. except getopt.error, msg: 1460. print msg 1461. print "Use -h for help" 1462. return 1463. t = 0 1464. for o, a in opts: 1465. if o == '-t': 1466. t = t + 1 1467. if o == '-h': 1468. print "Usage: python urllib.py [-t] [url ...]" 1469. print "-t runs self-test;", 1470. print "otherwise, contents of urls are printed" 1471. return 1472. if t: 1473. if t > 1: 1474. test1() 1475. test(args) 1476. else: 1477. if not args: 1478. print "Use -h for help" 1479. for url in args: 1480. print urlopen(url).read(), 1481. 1482. # Run test program when run as a script 1483. if __name__ == '__main__': 1484. main()