from __future__ import unicode_literals

import os
import re

from .fragment import FragmentFD
from ..compat import compat_urllib_error
from ..utils import (
    sanitize_open,
    encodeFilename,
)
class DashSegmentsFD(FragmentFD):
    """
    Download segments in a DASH manifest
    """

    FD_NAME = 'dashsegments'

    def real_download(self, filename, info_dict):
        base_url = info_dict['url']
        segment_urls = [info_dict['segment_urls'][0]] if self.params.get('test', False) else info_dict['segment_urls']
        initialization_url = info_dict.get('initialization_url')

        ctx = {
            'filename': filename,
            'total_frags': len(segment_urls) + (1 if initialization_url else 0),
        }

        self._prepare_and_start_frag_download(ctx)
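        # combine_url resolves a (possibly relative) segment URL against the
        # manifest base URL; absolute http(s) URLs are returned unchanged.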
        def combine_url(base_url, target_url):
            if re.match(r'^https?://', target_url):
                return target_url
            return '%s%s%s' % (base_url, '' if base_url.endswith('/') else '/', target_url)
        segments_filenames = []

        fragment_retries = self.params.get('fragment_retries', 0)
        skip_unavailable_fragments = self.params.get('skip_unavailable_fragments', True)
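        # process_segment downloads a single segment to its own temporary file,
        # appends its contents to the destination stream and retries on HTTP
        # errors up to fragment_retries times; it returns False to abort the
        # whole download and True otherwise.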
        def process_segment(segment, tmp_filename, fatal):
            target_url, segment_name = segment
            target_filename = '%s-%s' % (tmp_filename, segment_name)
            count = 0
            while count <= fragment_retries:
                try:
                    success = ctx['dl'].download(target_filename, {'url': combine_url(base_url, target_url)})
                    if not success:
                        return False
                    down, target_sanitized = sanitize_open(target_filename, 'rb')
                    ctx['dest_stream'].write(down.read())
                    down.close()
                    segments_filenames.append(target_sanitized)
                    break
                except compat_urllib_error.HTTPError as err:
                    # YouTube may often return a 404 HTTP error for a fragment, causing the
                    # whole download to fail. However, if the same fragment is immediately
                    # retried with the same request data, this usually succeeds (1-2 attempts
                    # are usually enough), allowing the whole file to be downloaded successfully.
                    # To be future-proof we retry all fragments that fail with any
                    # HTTP error.
                    count += 1
                    if count <= fragment_retries:
                        self.report_retry_fragment(err, segment_name, count, fragment_retries)
            if count > fragment_retries:
                if not fatal:
                    self.report_skip_fragment(segment_name)
                    return True
                self.report_error('giving up after %s fragment retries' % fragment_retries)
                return False
            return True
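        # Build the ordered list of (url, name) pairs to fetch: the
        # initialization segment (if any) first, then the media segments.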
        segments_to_download = [(initialization_url, 'Init')] if initialization_url else []
        segments_to_download.extend([
            (segment_url, 'Seg%d' % i)
            for i, segment_url in enumerate(segment_urls)])
        for i, segment in enumerate(segments_to_download):
            # In DASH, the first segment contains the necessary headers to
            # generate a valid MP4 file, so always abort for the first segment
            fatal = i == 0 or not skip_unavailable_fragments
            if not process_segment(segment, ctx['tmpfilename'], fatal):
                return False
        self._finish_frag_download(ctx)
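        # The per-segment temporary files have already been written into the
        # destination file, so they can now be removed.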
        for segment_file in segments_filenames:
            os.remove(encodeFilename(segment_file))

        return True