from __future__ import unicode_literals

import binascii
import collections
import email
import getpass
import io
import optparse
import os
import re
import shlex
import shutil
import socket
import subprocess
import sys
import itertools
import xml.etree.ElementTree
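
# Python 2/3 compatibility layer: each compat_* name below prefers the
# Python 3 location of a module or function and falls back to its Python 2
# equivalent (or to a backported implementation) on ImportError/NameError.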

try:
    import urllib.request as compat_urllib_request
except ImportError:  # Python 2
    import urllib2 as compat_urllib_request

try:
    import urllib.error as compat_urllib_error
except ImportError:  # Python 2
    import urllib2 as compat_urllib_error

try:
    import urllib.parse as compat_urllib_parse
except ImportError:  # Python 2
    import urllib as compat_urllib_parse

try:
    from urllib.parse import urlparse as compat_urllib_parse_urlparse
except ImportError:  # Python 2
    from urlparse import urlparse as compat_urllib_parse_urlparse

try:
    import urllib.parse as compat_urlparse
except ImportError:  # Python 2
    import urlparse as compat_urlparse

try:
    import urllib.response as compat_urllib_response
except ImportError:  # Python 2
    import urllib as compat_urllib_response

try:
    import http.cookiejar as compat_cookiejar
except ImportError:  # Python 2
    import cookielib as compat_cookiejar

try:
    import http.cookies as compat_cookies
except ImportError:  # Python 2
    import Cookie as compat_cookies

try:
    import html.entities as compat_html_entities
except ImportError:  # Python 2
    import htmlentitydefs as compat_html_entities

try:
    import http.client as compat_http_client
except ImportError:  # Python 2
    import httplib as compat_http_client

try:
    from urllib.error import HTTPError as compat_HTTPError
except ImportError:  # Python 2
    from urllib2 import HTTPError as compat_HTTPError

try:
    from urllib.request import urlretrieve as compat_urlretrieve
except ImportError:  # Python 2
    from urllib import urlretrieve as compat_urlretrieve

try:
    from html.parser import HTMLParser as compat_HTMLParser
except ImportError:  # Python 2
    from HTMLParser import HTMLParser as compat_HTMLParser

try:
    from subprocess import DEVNULL
    compat_subprocess_get_DEVNULL = lambda: DEVNULL
except ImportError:
    compat_subprocess_get_DEVNULL = lambda: open(os.path.devnull, 'w')

try:
    import http.server as compat_http_server
except ImportError:
    import BaseHTTPServer as compat_http_server

try:
    compat_str = unicode  # Python 2
except NameError:
    compat_str = str
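
# Note: `unicode` only exists on Python 2, so the NameError branch above
# selects `str` on Python 3; the same probe-and-fallback pattern is reused
# for compat_basestring and compat_chr further down.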

try:
    from urllib.parse import unquote_to_bytes as compat_urllib_parse_unquote_to_bytes
    from urllib.parse import unquote as compat_urllib_parse_unquote
    from urllib.parse import unquote_plus as compat_urllib_parse_unquote_plus
except ImportError:  # Python 2
    _asciire = (compat_urllib_parse._asciire if hasattr(compat_urllib_parse, '_asciire')
                else re.compile('([\x00-\x7f]+)'))

    # HACK: The following are the correct unquote_to_bytes, unquote and unquote_plus
    # implementations from cpython 3.4.3's stdlib. Python 2's version
    # is apparently broken (see https://github.com/rg3/youtube-dl/pull/6244)

    def compat_urllib_parse_unquote_to_bytes(string):
        """unquote_to_bytes('abc%20def') -> b'abc def'."""
        # Note: strings are encoded as UTF-8. This is only an issue if it contains
        # unescaped non-ASCII characters, which URIs should not.
        if not string:
            # Is it a string-like object?
            string.split
            return b''
        if isinstance(string, compat_str):
            string = string.encode('utf-8')
        bits = string.split(b'%')
        if len(bits) == 1:
            return string
        res = [bits[0]]
        append = res.append
        for item in bits[1:]:
            try:
                append(compat_urllib_parse._hextochr[item[:2]])
                append(item[2:])
            except KeyError:
                append(b'%')
                append(item)
        return b''.join(res)

    def compat_urllib_parse_unquote(string, encoding='utf-8', errors='replace'):
        """Replace %xx escapes by their single-character equivalent. The optional
        encoding and errors parameters specify how to decode percent-encoded
        sequences into Unicode characters, as accepted by the bytes.decode()
        method.
        By default, percent-encoded sequences are decoded with UTF-8, and invalid
        sequences are replaced by a placeholder character.

        unquote('abc%20def') -> 'abc def'.
        """
        if '%' not in string:
            string.split
            return string
        if encoding is None:
            encoding = 'utf-8'
        if errors is None:
            errors = 'replace'
        bits = _asciire.split(string)
        res = [bits[0]]
        append = res.append
        for i in range(1, len(bits), 2):
            append(compat_urllib_parse_unquote_to_bytes(bits[i]).decode(encoding, errors))
            append(bits[i + 1])
        return ''.join(res)

    def compat_urllib_parse_unquote_plus(string, encoding='utf-8', errors='replace'):
        """Like unquote(), but also replace plus signs by spaces, as required for
        unquoting HTML form values.

        unquote_plus('%7e/abc+def') -> '~/abc def'
        """
        string = string.replace('+', ' ')
        return compat_urllib_parse_unquote(string, encoding, errors)
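
# Illustrative behaviour of the unquote shims (identical on both Python
# versions), taken from the docstrings above:
#   compat_urllib_parse_unquote('abc%20def')        -> 'abc def'
#   compat_urllib_parse_unquote_plus('%7e/abc+def') -> '~/abc def'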

try:
    from urllib.parse import urlencode as compat_urllib_parse_urlencode
except ImportError:  # Python 2
    # Python 2 will choke in urlencode on a mixture of byte and unicode strings.
    # Possible solutions are to either port it from python 3 with all
    # the friends or manually ensure the input query contains only byte strings.
    # We will stick with the latter, thus recursively encoding the whole query.
    def compat_urllib_parse_urlencode(query, doseq=0, encoding='utf-8'):
        def encode_elem(e):
            if isinstance(e, dict):
                e = encode_dict(e)
            elif isinstance(e, (list, tuple,)):
                e = encode_list(e)
            elif isinstance(e, compat_str):
                e = e.encode(encoding)
            return e

        def encode_dict(d):
            return dict((encode_elem(k), encode_elem(v)) for k, v in d.items())

        def encode_list(l):
            return [encode_elem(e) for e in l]

        return compat_urllib_parse.urlencode(encode_elem(query), doseq=doseq)
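
# Illustrative example (not from the original source): a unicode query such
# as {'q': u'caf\xe9'} is recursively encoded to UTF-8 bytes first, so
# compat_urllib_parse_urlencode({'q': u'caf\xe9'}) yields 'q=caf%C3%A9' on
# both Python 2 and Python 3.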

try:
    from urllib.request import DataHandler as compat_urllib_request_DataHandler
except ImportError:  # Python < 3.4
    # Ported from CPython 98774:1733b3bd46db, Lib/urllib/request.py
    class compat_urllib_request_DataHandler(compat_urllib_request.BaseHandler):
        def data_open(self, req):
            # data URLs as specified in RFC 2397.
            #
            # ignores POSTed data
            #
            # syntax:
            #   dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
            #   mediatype := [ type "/" subtype ] *( ";" parameter )
            #   data      := *urlchar
            #   parameter := attribute "=" value
            url = req.get_full_url()

            scheme, data = url.split(':', 1)
            mediatype, data = data.split(',', 1)

            # even base64 encoded data URLs might be quoted so unquote in any case:
            data = compat_urllib_parse_unquote_to_bytes(data)
            if mediatype.endswith(';base64'):
                data = binascii.a2b_base64(data)
                mediatype = mediatype[:-7]

            if not mediatype:
                mediatype = 'text/plain;charset=US-ASCII'

            headers = email.message_from_string(
                'Content-type: %s\nContent-length: %d\n' % (mediatype, len(data)))

            return compat_urllib_response.addinfourl(io.BytesIO(data), headers, url)
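
# Illustrative data: URL handled by the class above (example value, not from
# the original source):
#   'data:text/plain;base64,SGVsbG8=' -> payload b'Hello', type text/plain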

try:
    compat_basestring = basestring  # Python 2
except NameError:
    compat_basestring = str

try:
    compat_chr = unichr  # Python 2
except NameError:
    compat_chr = chr

try:
    from xml.etree.ElementTree import ParseError as compat_xml_parse_error
except ImportError:  # Python 2.6
    from xml.parsers.expat import ExpatError as compat_xml_parse_error

if sys.version_info[0] >= 3:
    compat_etree_fromstring = xml.etree.ElementTree.fromstring
else:
    # python 2.x tries to encode unicode strings with ascii (see the
    # XMLParser._fixtext method)
    etree = xml.etree.ElementTree

    try:
        _etree_iter = etree.Element.iter
    except AttributeError:  # Python <=2.6
        def _etree_iter(root):
            for el in root.findall('*'):
                yield el
                for sub in _etree_iter(el):
                    yield sub

    # on 2.6 XML doesn't have a parser argument, function copied from CPython
    # 2.7 source
    def _XML(text, parser=None):
        if not parser:
            parser = etree.XMLParser(target=etree.TreeBuilder())
        parser.feed(text)
        return parser.close()

    def _element_factory(*args, **kwargs):
        el = etree.Element(*args, **kwargs)
        for k, v in el.items():
            if isinstance(v, bytes):
                el.set(k, v.decode('utf-8'))
        return el

    def compat_etree_fromstring(text):
        doc = _XML(text, parser=etree.XMLParser(target=etree.TreeBuilder(element_factory=_element_factory)))
        for el in _etree_iter(doc):
            if el.text is not None and isinstance(el.text, bytes):
                el.text = el.text.decode('utf-8')
        return doc

if sys.version_info < (2, 7):
    # Here comes the crazy part: In 2.6, if the xpath is a unicode,
    # .//node does not match if a node is a direct child of . !
    def compat_xpath(xpath):
        if isinstance(xpath, compat_str):
            xpath = xpath.encode('ascii')
        return xpath
else:
    compat_xpath = lambda xpath: xpath
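
# Summary of the block above: Python 3 uses the stock ElementTree parser
# directly; on Python 2 compat_etree_fromstring decodes byte-string text and
# attributes to unicode, and compat_xpath works around the 2.6 quirk where
# unicode xpaths fail to match direct children.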

try:
    from urllib.parse import parse_qs as compat_parse_qs
except ImportError:  # Python 2
    # HACK: The following is the correct parse_qs implementation from cpython 3's stdlib.
    # Python 2's version is apparently totally broken
    def _parse_qsl(qs, keep_blank_values=False, strict_parsing=False,
                   encoding='utf-8', errors='replace'):
        qs, _coerce_result = qs, compat_str
        pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
        r = []
        for name_value in pairs:
            if not name_value and not strict_parsing:
                continue
            nv = name_value.split('=', 1)
            if len(nv) != 2:
                if strict_parsing:
                    raise ValueError('bad query field: %r' % (name_value,))
                # Handle case of a control-name with no equal sign
                if keep_blank_values:
                    nv.append('')
                else:
                    continue
            if len(nv[1]) or keep_blank_values:
                name = nv[0].replace('+', ' ')
                name = compat_urllib_parse_unquote(
                    name, encoding=encoding, errors=errors)
                name = _coerce_result(name)
                value = nv[1].replace('+', ' ')
                value = compat_urllib_parse_unquote(
                    value, encoding=encoding, errors=errors)
                value = _coerce_result(value)
                r.append((name, value))
        return r

    def compat_parse_qs(qs, keep_blank_values=False, strict_parsing=False,
                        encoding='utf-8', errors='replace'):
        parsed_result = {}
        pairs = _parse_qsl(qs, keep_blank_values, strict_parsing,
                           encoding=encoding, errors=errors)
        for name, value in pairs:
            if name in parsed_result:
                parsed_result[name].append(value)
            else:
                parsed_result[name] = [value]
        return parsed_result
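
# Illustrative behaviour (example values, not from the original source):
#   compat_parse_qs('a=1&a=2&b=3') -> {'a': ['1', '2'], 'b': ['3']}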

try:
    from shlex import quote as shlex_quote
except ImportError:  # Python < 3.3
    def shlex_quote(s):
        if re.match(r'^[-_\w./]+$', s):
            return s
        else:
            return "'" + s.replace("'", "'\"'\"'") + "'"

if sys.version_info >= (2, 7, 3):
    compat_shlex_split = shlex.split
else:
    # Working around shlex issue with unicode strings on some python 2
    # versions (see http://bugs.python.org/issue1548891)
    def compat_shlex_split(s, comments=False, posix=True):
        if isinstance(s, compat_str):
            s = s.encode('utf-8')
        return shlex.split(s, comments, posix)


def compat_ord(c):
    if type(c) is int:
        return c
    else:
        return ord(c)
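
# compat_ord exists because indexing a bytes object yields an int on
# Python 3 (b'abc'[0] == 97) but a length-1 str on Python 2, where ord()
# is still needed.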

compat_os_name = os._name if os.name == 'java' else os.name

if sys.version_info >= (3, 0):
    compat_getenv = os.getenv
    compat_expanduser = os.path.expanduser
else:
    # Environment variables should be decoded with filesystem encoding.
    # Otherwise it will fail if any non-ASCII characters present (see #3854 #3217 #2918)
    def compat_getenv(key, default=None):
        from .utils import get_filesystem_encoding
        env = os.getenv(key, default)
        if env:
            env = env.decode(get_filesystem_encoding())
        return env

    # HACK: The default implementations of os.path.expanduser from cpython do not decode
    # environment variables with filesystem encoding. We will work around this by
    # providing adjusted implementations.
    # The following are os.path.expanduser implementations from cpython 2.7.8 stdlib
    # for different platforms with correct environment variables decoding.

    if compat_os_name == 'posix':
        def compat_expanduser(path):
            """Expand ~ and ~user constructions. If user or $HOME is unknown,
            do nothing."""
            if not path.startswith('~'):
                return path
            i = path.find('/', 1)
            if i < 0:
                i = len(path)
            if i == 1:
                if 'HOME' not in os.environ:
                    import pwd
                    userhome = pwd.getpwuid(os.getuid()).pw_dir
                else:
                    userhome = compat_getenv('HOME')
            else:
                import pwd
                try:
                    pwent = pwd.getpwnam(path[1:i])
                except KeyError:
                    return path
                userhome = pwent.pw_dir
            userhome = userhome.rstrip('/')
            return (userhome + path[i:]) or '/'
    elif compat_os_name == 'nt' or compat_os_name == 'ce':
        def compat_expanduser(path):
            """Expand ~ and ~user constructs.

            If user or $HOME is unknown, do nothing."""
            if path[:1] != '~':
                return path
            i, n = 1, len(path)
            while i < n and path[i] not in '/\\':
                i = i + 1

            if 'HOME' in os.environ:
                userhome = compat_getenv('HOME')
            elif 'USERPROFILE' in os.environ:
                userhome = compat_getenv('USERPROFILE')
            elif 'HOMEPATH' not in os.environ:
                return path
            else:
                try:
                    drive = compat_getenv('HOMEDRIVE')
                except KeyError:
                    drive = ''
                userhome = os.path.join(drive, compat_getenv('HOMEPATH'))

            if i != 1:  # ~user
                userhome = os.path.join(os.path.dirname(userhome), path[1:i])

            return userhome + path[i:]
    else:
        compat_expanduser = os.path.expanduser

if sys.version_info < (3, 0):
    def compat_print(s):
        from .utils import preferredencoding
        print(s.encode(preferredencoding(), 'xmlcharrefreplace'))
else:
    def compat_print(s):
        assert isinstance(s, compat_str)
        print(s)

try:
    subprocess_check_output = subprocess.check_output
except AttributeError:
    def subprocess_check_output(*args, **kwargs):
        assert 'input' not in kwargs
        p = subprocess.Popen(*args, stdout=subprocess.PIPE, **kwargs)
        output, _ = p.communicate()
        ret = p.poll()
        if ret:
            raise subprocess.CalledProcessError(ret, p.args, output=output)
        return output

if sys.version_info < (3, 0) and sys.platform == 'win32':
    def compat_getpass(prompt, *args, **kwargs):
        if isinstance(prompt, compat_str):
            from .utils import preferredencoding
            prompt = prompt.encode(preferredencoding())
        return getpass.getpass(prompt, *args, **kwargs)
else:
    compat_getpass = getpass.getpass

# Python < 2.6.5 requires kwargs to be bytes
try:
    def _testfunc(x):
        pass
    _testfunc(**{'x': 0})
except TypeError:
    def compat_kwargs(kwargs):
        return dict((bytes(k), v) for k, v in kwargs.items())
else:
    compat_kwargs = lambda kwargs: kwargs
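
# Hypothetical usage sketch (not from the original source): on interpreters
# older than 2.6.5, unicode keyword names raise TypeError, so a caller might
# wrap its keyword dict like
#   parser.add_option(*opts, **compat_kwargs({'dest': 'format'}))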

if sys.version_info < (2, 7):
    def compat_socket_create_connection(address, timeout, source_address=None):
        host, port = address
        err = None
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            sock = None
            try:
                sock = socket.socket(af, socktype, proto)
                sock.settimeout(timeout)
                if source_address:
                    sock.bind(source_address)
                sock.connect(sa)
                return sock
            except socket.error as _:
                err = _
                if sock is not None:
                    sock.close()
        if err is not None:
            raise err
        else:
            raise socket.error('getaddrinfo returns an empty list')
else:
    compat_socket_create_connection = socket.create_connection
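
# The backport above exists because socket.create_connection() only gained
# its source_address parameter in Python 2.7; on older interpreters the
# loop re-implements it on top of getaddrinfo().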


# Fix https://github.com/rg3/youtube-dl/issues/4223
# See http://bugs.python.org/issue9161 for what is broken
def workaround_optparse_bug9161():
    op = optparse.OptionParser()
    og = optparse.OptionGroup(op, 'foo')
    try:
        og.add_option('-t')
    except TypeError:
        real_add_option = optparse.OptionGroup.add_option

        def _compat_add_option(self, *args, **kwargs):
            enc = lambda v: (
                v.encode('ascii', 'replace') if isinstance(v, compat_str)
                else v)
            bargs = [enc(a) for a in args]
            bkwargs = dict(
                (k, enc(v)) for k, v in kwargs.items())
            return real_add_option(self, *bargs, **bkwargs)
        optparse.OptionGroup.add_option = _compat_add_option

if hasattr(shutil, 'get_terminal_size'):  # Python >= 3.3
    compat_get_terminal_size = shutil.get_terminal_size
else:
    _terminal_size = collections.namedtuple('terminal_size', ['columns', 'lines'])

    def compat_get_terminal_size(fallback=(80, 24)):
        columns = compat_getenv('COLUMNS')
        if columns:
            columns = int(columns)
        else:
            columns = None
        lines = compat_getenv('LINES')
        if lines:
            lines = int(lines)
        else:
            lines = None

        if columns is None or lines is None or columns <= 0 or lines <= 0:
            try:
                sp = subprocess.Popen(
                    ['stty', 'size'],
                    stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                out, err = sp.communicate()
                _lines, _columns = map(int, out.split())
            except Exception:
                _columns, _lines = _terminal_size(*fallback)

            if columns is None or columns <= 0:
                columns = _columns
            if lines is None or lines <= 0:
                lines = _lines
        return _terminal_size(columns, lines)
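
# Fallback chain of the backport above: $COLUMNS/$LINES if set, otherwise
# `stty size`, otherwise the (80, 24) default, e.g.
# compat_get_terminal_size().columns == 80 when no terminal information is
# available.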

try:
    itertools.count(start=0, step=1)
    compat_itertools_count = itertools.count
except TypeError:  # Python 2.6
    def compat_itertools_count(start=0, step=1):
        n = start
        while True:
            yield n
            n += step

if sys.version_info >= (3, 0):
    from tokenize import tokenize as compat_tokenize_tokenize
else:
    from tokenize import generate_tokens as compat_tokenize_tokenize

__all__ = [
    'compat_HTMLParser',
    'compat_HTTPError',
    'compat_basestring',
    'compat_chr',
    'compat_cookiejar',
    'compat_cookies',
    'compat_etree_fromstring',
    'compat_expanduser',
    'compat_get_terminal_size',
    'compat_getenv',
    'compat_getpass',
    'compat_html_entities',
    'compat_http_client',
    'compat_http_server',
    'compat_itertools_count',
    'compat_kwargs',
    'compat_ord',
    'compat_os_name',
    'compat_parse_qs',
    'compat_print',
    'compat_shlex_split',
    'compat_socket_create_connection',
    'compat_str',
    'compat_subprocess_get_DEVNULL',
    'compat_tokenize_tokenize',
    'compat_urllib_error',
    'compat_urllib_parse',
    'compat_urllib_parse_unquote',
    'compat_urllib_parse_unquote_plus',
    'compat_urllib_parse_unquote_to_bytes',
    'compat_urllib_parse_urlencode',
    'compat_urllib_parse_urlparse',
    'compat_urllib_request',
    'compat_urllib_request_DataHandler',
    'compat_urllib_response',
    'compat_urlparse',
    'compat_urlretrieve',
    'compat_xml_parse_error',
    'compat_xpath',
    'shlex_quote',
    'subprocess_check_output',
    'workaround_optparse_bug9161',
]