from __future__ import unicode_literals

from .fragment import FragmentFD
from ..compat import compat_urllib_error
from ..utils import (
    DownloadError,
    urljoin,
)


class DashSegmentsFD(FragmentFD):
    """
    Download segments in a DASH manifest
    """

    FD_NAME = 'dashsegments'

    def real_download(self, filename, info_dict):
        fragment_base_url = info_dict.get('fragment_base_url')
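        # When the 'test' param is set, only the first fragment is downloaded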
        fragments = info_dict['fragments'][:1] if self.params.get(
            'test', False) else info_dict['fragments']

        ctx = {
            'filename': filename,
            'total_frags': len(fragments),
        }

        self._prepare_and_start_frag_download(ctx)

        fragment_retries = self.params.get('fragment_retries', 0)
        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)

        frag_index = 0
        for i, fragment in enumerate(fragments):
            frag_index += 1
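            # Skip fragments already written in a previous (resumed) run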
            if frag_index <= ctx['fragment_index']:
                continue
            # In DASH, the first segment contains necessary headers to
            # generate a valid MP4 file, so always abort for the first segment
            fatal = i == 0 or not skip_unavailable_fragments
            count = 0
            while count <= fragment_retries:
                try:
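                    # A fragment may carry an absolute URL; otherwise its relative
                    # path is resolved against fragment_base_url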
                    fragment_url = fragment.get('url')
                    if not fragment_url:
                        assert fragment_base_url
                        fragment_url = urljoin(fragment_base_url, fragment['path'])
                    success, frag_content = self._download_fragment(ctx, fragment_url, info_dict)
                    if not success:
                        return False
                    self._append_fragment(ctx, frag_content)
                    break
                except compat_urllib_error.HTTPError as err:
                    # YouTube may often return a 404 HTTP error for a fragment, causing
                    # the whole download to fail. However, if the same fragment is
                    # immediately retried with the same request data, this usually
                    # succeeds (1-2 attempts are usually enough), allowing the whole
                    # file to be downloaded successfully. To be future-proof we retry
                    # all fragments that fail with any HTTP error.
                    count += 1
                    if count <= fragment_retries:
                        self.report_retry_fragment(err, frag_index, count, fragment_retries)
                except DownloadError:
                    # Don't retry the fragment if the error occurred during HTTP
                    # downloading itself, since that has its own retry settings
                    if not fatal:
                        self.report_skip_fragment(frag_index)
                        break
                    raise

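            # All retries for this fragment have been exhausted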
            if count > fragment_retries:
                if not fatal:
                    self.report_skip_fragment(frag_index)
                    continue
                self.report_error('giving up after %s fragment retries' % fragment_retries)
                return False

        self._finish_frag_download(ctx)

        return True