# HG changeset patch # User Rémi Cardona # Date 1442907570 -7200 # Node ID 65ad6980976e67c87f191f5611346f38d0798828 # Parent 4845012cfc8e6610a1d686e33753d2d043745f31 [py3k] import URL mangling functions using six.moves diff -r 4845012cfc8e -r 65ad6980976e cwctl.py --- a/cwctl.py Mon Sep 14 12:19:48 2015 +0200 +++ b/cwctl.py Tue Sep 22 09:39:30 2015 +0200 @@ -29,7 +29,6 @@ from warnings import warn, filterwarnings from os import remove, listdir, system, pathsep from os.path import exists, join, isfile, isdir, dirname, abspath -from urlparse import urlparse try: from os import kill, getpgid @@ -39,6 +38,8 @@ def getpgid(): """win32 getpgid implementation""" +from six.moves.urllib.parse import urlparse + from logilab.common.clcommands import CommandLine from logilab.common.shellutils import ASK from logilab.common.configuration import merge_options diff -r 4845012cfc8e -r 65ad6980976e devtools/httptest.py --- a/devtools/httptest.py Mon Sep 14 12:19:48 2015 +0200 +++ b/devtools/httptest.py Tue Sep 22 09:39:30 2015 +0200 @@ -26,7 +26,8 @@ import threading import socket import httplib -from urlparse import urlparse + +from six.moves.urllib.parse import urlparse from twisted.internet import reactor, error diff -r 4845012cfc8e -r 65ad6980976e devtools/testlib.py --- a/devtools/testlib.py Mon Sep 14 12:19:48 2015 +0200 +++ b/devtools/testlib.py Tue Sep 22 09:39:30 2015 +0200 @@ -22,14 +22,14 @@ import sys import re -import urlparse from os.path import dirname, join, abspath -from urllib import unquote from math import log from contextlib import contextmanager from warnings import warn from itertools import chain +from six.moves.urllib.parse import urlparse, parse_qs, unquote as urlunquote + import yams.schema from logilab.common.testlib import TestCase, InnerTest, Tags @@ -736,8 +736,8 @@ req = self.request(url=url) if isinstance(url, unicode): url = url.encode(req.encoding) # req.setup_params() expects encoded strings - querystring = urlparse.urlparse(url)[-2] - params = 
urlparse.parse_qs(querystring) + querystring = urlparse(url)[-2] + params = parse_qs(querystring) req.setup_params(params) return req @@ -750,8 +750,8 @@ with self.admin_access.web_request(url=url) as req: if isinstance(url, unicode): url = url.encode(req.encoding) # req.setup_params() expects encoded strings - querystring = urlparse.urlparse(url)[-2] - params = urlparse.parse_qs(querystring) + querystring = urlparse(url)[-2] + params = parse_qs(querystring) req.setup_params(params) yield req @@ -790,7 +790,7 @@ path = location params = {} else: - cleanup = lambda p: (p[0], unquote(p[1])) + cleanup = lambda p: (p[0], urlunquote(p[1])) params = dict(cleanup(p.split('=', 1)) for p in params.split('&') if p) if path.startswith(req.base_url()): # may be relative path = path[len(req.base_url()):] diff -r 4845012cfc8e -r 65ad6980976e entities/lib.py --- a/entities/lib.py Mon Sep 14 12:19:48 2015 +0200 +++ b/entities/lib.py Tue Sep 22 09:39:30 2015 +0200 @@ -19,9 +19,9 @@ __docformat__ = "restructuredtext en" from warnings import warn +from datetime import datetime -from urlparse import urlsplit, urlunsplit -from datetime import datetime +from six.moves.urllib.parse import urlsplit, urlunsplit from logilab.mtconverter import xml_escape diff -r 4845012cfc8e -r 65ad6980976e etwist/server.py --- a/etwist/server.py Mon Sep 14 12:19:48 2015 +0200 +++ b/etwist/server.py Tue Sep 22 09:39:30 2015 +0200 @@ -22,8 +22,10 @@ import select import traceback import threading -from urlparse import urlsplit, urlunsplit from cgi import FieldStorage, parse_header + +from six.moves.urllib.parse import urlsplit, urlunsplit + from cubicweb.statsd_logger import statsd_timeit from twisted.internet import reactor, task, threads diff -r 4845012cfc8e -r 65ad6980976e ext/rest.py --- a/ext/rest.py Mon Sep 14 12:19:48 2015 +0200 +++ b/ext/rest.py Tue Sep 22 09:39:30 2015 +0200 @@ -37,7 +37,8 @@ from itertools import chain from logging import getLogger from os.path import join -from urlparse import 
urlsplit + +from six.moves.urllib.parse import urlsplit from docutils import statemachine, nodes, utils, io from docutils.core import Publisher diff -r 4845012cfc8e -r 65ad6980976e multipart.py --- a/multipart.py Mon Sep 14 12:19:48 2015 +0200 +++ b/multipart.py Tue Sep 22 09:39:30 2015 +0200 @@ -41,14 +41,12 @@ from wsgiref.headers import Headers import re, sys try: - from urlparse import parse_qs -except ImportError: # pragma: no cover (fallback for Python 2.5) - from cgi import parse_qs -try: from io import BytesIO except ImportError: # pragma: no cover (fallback for Python 2.5) from StringIO import StringIO as BytesIO +from six.moves.urllib.parse import parse_qs + ############################################################################## ################################ Helper & Misc ################################ ############################################################################## diff -r 4845012cfc8e -r 65ad6980976e req.py --- a/req.py Mon Sep 14 12:19:48 2015 +0200 +++ b/req.py Tue Sep 22 09:39:30 2015 +0200 @@ -20,10 +20,9 @@ __docformat__ = "restructuredtext en" from warnings import warn -from urlparse import urlsplit, urlunsplit -from urllib import quote as urlquote, unquote as urlunquote from datetime import time, datetime, timedelta -from cgi import parse_qs, parse_qsl + +from six.moves.urllib.parse import parse_qs, parse_qsl, quote as urlquote, unquote as urlunquote, urlsplit, urlunsplit from logilab.common.decorators import cached from logilab.common.deprecation import deprecated diff -r 4845012cfc8e -r 65ad6980976e server/sources/datafeed.py --- a/server/sources/datafeed.py Mon Sep 14 12:19:48 2015 +0200 +++ b/server/sources/datafeed.py Tue Sep 22 09:39:30 2015 +0200 @@ -24,7 +24,9 @@ from os.path import exists from datetime import datetime, timedelta from cookielib import CookieJar -import urlparse + +from six.moves.urllib.parse import urlparse + from lxml import etree from cubicweb import RegistryNotFound, ObjectNotFound, 
ValidationError, UnknownEid @@ -327,7 +329,7 @@ For http URLs, it will try to find a cwclientlib config entry (if available) and use it as requester. """ - purl = urlparse.urlparse(url) + purl = urlparse(url) if purl.scheme == 'file': return URLLibResponseAdapter(open(url[7:]), url) @@ -529,7 +531,7 @@ self.source.debug(str(exc)) # no chance with cwclientlib, fall back to former implementation - if urlparse.urlparse(url).scheme in ('http', 'https'): + if urlparse(url).scheme in ('http', 'https'): try: _OPENER.open(url, timeout=self.source.http_timeout) except urllib2.HTTPError as ex: diff -r 4845012cfc8e -r 65ad6980976e sobjects/cwxmlparser.py --- a/sobjects/cwxmlparser.py Mon Sep 14 12:19:48 2015 +0200 +++ b/sobjects/cwxmlparser.py Tue Sep 22 09:39:30 2015 +0200 @@ -32,9 +32,10 @@ """ from datetime import datetime, time -import urlparse import urllib +from six.moves.urllib.parse import urlparse, urlunparse, parse_qs + from logilab.common.date import todate, totime from logilab.common.textutils import splitstrip, text_to_dict from logilab.common.decorators import classproperty @@ -242,9 +243,9 @@ def normalize_url(self, url): """overridden to add vid=xml if vid is not set in the qs""" url = super(CWEntityXMLParser, self).normalize_url(url) - purl = urlparse.urlparse(url) + purl = urlparse(url) if purl.scheme in ('http', 'https'): - params = urlparse.parse_qs(purl.query) + params = parse_qs(purl.query) if 'vid' not in params: params['vid'] = ['xml'] purl = list(purl) @@ -263,8 +264,8 @@ If `known_relations` is given, it should be a dictionary of already known relations, so they don't get queried again. 
""" - purl = urlparse.urlparse(url) - params = urlparse.parse_qs(purl.query) + purl = urlparse(url) + params = parse_qs(purl.query) if etype is None: etype = purl.path.split('/')[-1] try: @@ -278,7 +279,7 @@ relations.add('%s-%s' % (rtype, role)) purl = list(purl) purl[4] = urllib.urlencode(params, doseq=True) - return urlparse.urlunparse(purl) + return urlunparse(purl) def complete_item(self, item, rels): try: diff -r 4845012cfc8e -r 65ad6980976e sobjects/test/unittest_cwxmlparser.py --- a/sobjects/test/unittest_cwxmlparser.py Mon Sep 14 12:19:48 2015 +0200 +++ b/sobjects/test/unittest_cwxmlparser.py Tue Sep 22 09:39:30 2015 +0200 @@ -17,7 +17,8 @@ # with CubicWeb. If not, see . from datetime import datetime -from urlparse import urlsplit, parse_qsl + +from six.moves.urllib.parse import urlsplit, parse_qsl from cubicweb.devtools.testlib import CubicWebTC from cubicweb.sobjects.cwxmlparser import CWEntityXMLParser diff -r 4845012cfc8e -r 65ad6980976e test/unittest_rset.py --- a/test/unittest_rset.py Mon Sep 14 12:19:48 2015 +0200 +++ b/test/unittest_rset.py Tue Sep 22 09:39:30 2015 +0200 @@ -18,9 +18,8 @@ # with CubicWeb. If not, see . 
"""unit tests for module cubicweb.utils""" -from urlparse import urlsplit - from six.moves import cPickle as pickle +from six.moves.urllib.parse import urlsplit from rql import parse diff -r 4845012cfc8e -r 65ad6980976e utils.py --- a/utils.py Mon Sep 14 12:19:48 2015 +0200 +++ b/utils.py Tue Sep 22 09:39:30 2015 +0200 @@ -33,9 +33,9 @@ from uuid import uuid4 from warnings import warn from threading import Lock -from urlparse import urlparse +from logging import getLogger -from logging import getLogger +from six.moves.urllib.parse import urlparse from logilab.mtconverter import xml_escape from logilab.common.deprecation import deprecated diff -r 4845012cfc8e -r 65ad6980976e web/__init__.py --- a/web/__init__.py Mon Sep 14 12:19:48 2015 +0200 +++ b/web/__init__.py Tue Sep 22 09:39:30 2015 +0200 @@ -22,8 +22,7 @@ __docformat__ = "restructuredtext en" _ = unicode -from urllib import quote as urlquote - +from six.moves.urllib.parse import quote as urlquote from logilab.common.deprecation import deprecated from cubicweb.web._exceptions import * diff -r 4845012cfc8e -r 65ad6980976e web/cors.py --- a/web/cors.py Mon Sep 14 12:19:48 2015 +0200 +++ b/web/cors.py Tue Sep 22 09:39:30 2015 +0200 @@ -14,7 +14,7 @@ """ -import urlparse +from six.moves.urllib.parse import urlsplit from cubicweb.web import LOGGER info = LOGGER.info @@ -37,7 +37,7 @@ In case of non-compliance, no CORS-related header is set. 
""" - base_url = urlparse.urlsplit(req.base_url()) + base_url = urlsplit(req.base_url()) expected_host = '://'.join((base_url.scheme, base_url.netloc)) if not req.get_header('Origin') or req.get_header('Origin') == expected_host: # not a CORS request, nothing to do @@ -101,7 +101,7 @@ if '*' not in allowed_origins and origin not in allowed_origins: raise CORSFailed('Origin is not allowed') # bit of sanity check; see "6.3 Security" - myhost = urlparse.urlsplit(req.base_url()).netloc + myhost = urlsplit(req.base_url()).netloc host = req.get_header('Host') if host != myhost: info('cross origin resource sharing detected possible ' diff -r 4845012cfc8e -r 65ad6980976e web/http_headers.py --- a/web/http_headers.py Mon Sep 14 12:19:48 2015 +0200 +++ b/web/http_headers.py Tue Sep 22 09:39:30 2015 +0200 @@ -6,7 +6,9 @@ from calendar import timegm import base64 import re -import urlparse + +from six.moves.urllib.parse import urlparse + def dashCapitalize(s): ''' Capitalize a string, making sure to treat - as a word seperator ''' @@ -398,7 +400,7 @@ """Ensure origin is a valid URL-base stuff, or null""" if origin == 'null': return origin - p = urlparse.urlparse(origin) + p = urlparse(origin) if p.params or p.query or p.username or p.path not in ('', '/'): raise ValueError('Incorrect Accept-Control-Allow-Origin value %s' % origin) if p.scheme not in ('http', 'https'): diff -r 4845012cfc8e -r 65ad6980976e web/request.py --- a/web/request.py Mon Sep 14 12:19:48 2015 +0200 +++ b/web/request.py Tue Sep 22 09:39:30 2015 +0200 @@ -22,16 +22,16 @@ import time import random import base64 -import urllib from StringIO import StringIO from hashlib import sha1 # pylint: disable=E0611 from Cookie import SimpleCookie from calendar import timegm from datetime import date, datetime -from urlparse import urlsplit import httplib from warnings import warn +from six.moves.urllib.parse import urlsplit, quote as urlquote + from rql.utils import rqlvar_maker from logilab.common.decorators import 
cached @@ -580,7 +580,7 @@ header.append('filename="%s"' % ascii_filename) if unicode_filename is not None: # encoded filename according RFC5987 - urlquoted_filename = urllib.quote(unicode_filename.encode('utf-8'), '') + urlquoted_filename = urlquote(unicode_filename.encode('utf-8'), '') header.append("filename*=utf-8''" + urlquoted_filename) self.set_header('content-disposition', ';'.join(header)) diff -r 4845012cfc8e -r 65ad6980976e web/test/unittest_views_basecontrollers.py --- a/web/test/unittest_views_basecontrollers.py Mon Sep 14 12:19:48 2015 +0200 +++ b/web/test/unittest_views_basecontrollers.py Tue Sep 22 09:39:30 2015 +0200 @@ -17,12 +17,7 @@ # with CubicWeb. If not, see . """cubicweb.web.views.basecontrollers unit tests""" -from urlparse import urlsplit, urlunsplit, urljoin -# parse_qs is deprecated in cgi and has been moved to urlparse in Python 2.6 -try: - from urlparse import parse_qs as url_parse_query -except ImportError: - from cgi import parse_qs as url_parse_query +from six.moves.urllib.parse import urlsplit, urlunsplit, urljoin, parse_qs import lxml @@ -1042,7 +1037,7 @@ """ with self.admin_access.web_request() as req: scheme, netloc, path, query, fragment = urlsplit(url) - query_dict = url_parse_query(query) + query_dict = parse_qs(query) expected_url = urljoin(req.base_url(), expected_path) self.assertEqual( urlunsplit((scheme, netloc, path, None, None)), expected_url) diff -r 4845012cfc8e -r 65ad6980976e wsgi/request.py --- a/wsgi/request.py Mon Sep 14 12:19:48 2015 +0200 +++ b/wsgi/request.py Tue Sep 22 09:39:30 2015 +0200 @@ -28,7 +28,8 @@ import tempfile from StringIO import StringIO -from urlparse import parse_qs + +from six.moves.urllib.parse import parse_qs from cubicweb.multipart import ( copy_file, parse_form_data, parse_options_header)