
[utils] Remove sanitize_url_path_consecutive_slashes()

This function is used only in SohuIE, which has been updated to use new
extraction logic.
Yen Chi Hsuan committed 10 years ago
commit d39e0f05db

2 changed files with 0 additions and 28 deletions:
  1. test/test_utils.py (+0, -21)
  2. youtube_dl/utils.py (+0, -7)

test/test_utils.py (+0, -21)

@@ -40,7 +40,6 @@ from youtube_dl.utils import (
     read_batch_urls,
     sanitize_filename,
     sanitize_path,
-    sanitize_url_path_consecutive_slashes,
     prepend_extension,
     replace_extension,
     shell_quote,
@@ -176,26 +175,6 @@ class TestUtil(unittest.TestCase):
         self.assertEqual(sanitize_path('./abc'), 'abc')
         self.assertEqual(sanitize_path('./../abc'), '..\\abc')

-    def test_sanitize_url_path_consecutive_slashes(self):
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname/foo//bar/filename.html'),
-            'http://hostname/foo/bar/filename.html')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname//foo/bar/filename.html'),
-            'http://hostname/foo/bar/filename.html')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname//'),
-            'http://hostname/')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname/foo/bar/filename.html'),
-            'http://hostname/foo/bar/filename.html')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname/'),
-            'http://hostname/')
-        self.assertEqual(
-            sanitize_url_path_consecutive_slashes('http://hostname/abc//'),
-            'http://hostname/abc/')
-
     def test_prepend_extension(self):
         self.assertEqual(prepend_extension('abc.ext', 'temp'), 'abc.temp.ext')
         self.assertEqual(prepend_extension('abc.ext', 'temp', 'ext'), 'abc.temp.ext')


youtube_dl/utils.py (+0, -7)

@@ -327,13 +327,6 @@ def sanitize_path(s):
     return os.path.join(*sanitized_path)


-def sanitize_url_path_consecutive_slashes(url):
-    """Collapses consecutive slashes in URLs' path"""
-    parsed_url = list(compat_urlparse.urlparse(url))
-    parsed_url[2] = re.sub(r'/{2,}', '/', parsed_url[2])
-    return compat_urlparse.urlunparse(parsed_url)
-
-
 def orderedSet(iterable):
     """ Remove all duplicates from the input iterable """
     res = []
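
For reference, here is a minimal standalone sketch of the behavior the removed helper provided, using Python 3's urllib.parse in place of compat_urlparse; the function name collapse_path_slashes is chosen for this example and is not part of the codebase:

import re
from urllib.parse import urlparse, urlunparse

def collapse_path_slashes(url):
    # Collapse runs of consecutive slashes in the path component only;
    # the scheme's '//' and the query/fragment are left untouched.
    parsed = list(urlparse(url))
    parsed[2] = re.sub(r'/{2,}', '/', parsed[2])  # index 2 is the path
    return urlunparse(parsed)

print(collapse_path_slashes('http://hostname/foo//bar/filename.html'))
# http://hostname/foo/bar/filename.html

Any caller that still needs this behavior after the removal can inline something along these lines.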

