# HG changeset patch
# User Rémi Cardona
# Date 1442225988 -7200
# Node ID 4845012cfc8e6610a1d686e33753d2d043745f31
# Parent  73c1c9cf6bda67ff5897471b0f204ccf3db61f91
[py3k] import 'pickle' using six.moves

diff -r 73c1c9cf6bda -r 4845012cfc8e __init__.py
--- a/__init__.py	Wed Sep 16 14:45:15 2015 +0200
+++ b/__init__.py	Mon Sep 14 12:19:48 2015 +0200
@@ -22,7 +22,6 @@
 
 # ignore the pygments UserWarnings
 import warnings
-import cPickle
 import zlib
 warnings.filterwarnings('ignore', category=UserWarning,
                         message='.*was already imported',
@@ -40,6 +39,8 @@
 import sys, os, logging
 from StringIO import StringIO
 
+from six.moves import cPickle as pickle
+
 from logilab.common.deprecation import deprecated
 from logilab.common.logging_ext import set_log_methods
 from yams.constraints import BASE_CONVERTERS, BASE_CHECKERS
@@ -132,12 +133,12 @@
     def zpickle(cls, obj):
         """ return a Binary containing a gzipped pickle of obj """
         retval = cls()
-        retval.write(zlib.compress(cPickle.dumps(obj, protocol=2)))
+        retval.write(zlib.compress(pickle.dumps(obj, protocol=2)))
         return retval
 
     def unzpickle(self):
         """ decompress and loads the stream before returning it """
-        return cPickle.loads(zlib.decompress(self.getvalue()))
+        return pickle.loads(zlib.decompress(self.getvalue()))
 
 
 def check_password(eschema, value):
diff -r 73c1c9cf6bda -r 4845012cfc8e crypto.py
--- a/crypto.py	Wed Sep 16 14:45:15 2015 +0200
+++ b/crypto.py	Mon Sep 14 12:19:48 2015 +0200
@@ -18,9 +18,10 @@
 """Simple cryptographic routines, based on python-crypto."""
 __docformat__ = "restructuredtext en"
 
-from pickle import dumps, loads
 from base64 import b64encode, b64decode
 
+from six.moves import cPickle as pickle
+
 from Crypto.Cipher import Blowfish
 
 
@@ -34,7 +35,7 @@
 
 
 def encrypt(data, seed):
-    string = dumps(data)
+    string = pickle.dumps(data)
     string = string + '*' * (8 - len(string) % 8)
     string = b64encode(_cypherer(seed).encrypt(string))
     return unicode(string)
@@ -43,4 +44,4 @@
 def decrypt(string, seed):
     # pickle ignores trailing characters so we do not need to strip them off
     string = _cypherer(seed).decrypt(b64decode(string))
-    return loads(string)
+    return pickle.loads(string)
diff -r 73c1c9cf6bda -r 4845012cfc8e dataimport/pgstore.py
--- a/dataimport/pgstore.py	Wed Sep 16 14:45:15 2015 +0200
+++ b/dataimport/pgstore.py	Mon Sep 14 12:19:48 2015 +0200
@@ -20,7 +20,6 @@
 
 import threading
 import warnings
-import cPickle
 import os.path as osp
 from StringIO import StringIO
 from time import asctime
@@ -28,6 +27,8 @@
 from collections import defaultdict
 from base64 import b64encode
 
+from six.moves import cPickle as pickle
+
 from cubicweb.utils import make_uid
 from cubicweb.server.sqlutils import SQL_PREFIX
 from cubicweb.dataimport.stores import NoHookRQLObjectStore
@@ -110,7 +111,7 @@
                 try:
                     with open(osp.join(dump_output_dir,
                                        '%s.pickle' % filename), 'wb') as fobj:
-                        cPickle.dump(pdata, fobj)
+                        pickle.dump(pdata, fobj)
                 except IOError:
                     print('ERROR while pickling in', dump_output_dir, filename+'.pickle')
             cnx.rollback()
diff -r 73c1c9cf6bda -r 4845012cfc8e devtools/__init__.py
--- a/devtools/__init__.py	Wed Sep 16 14:45:15 2015 +0200
+++ b/devtools/__init__.py	Mon Sep 14 12:19:48 2015 +0200
@@ -25,7 +25,6 @@
 import errno
 import logging
 import shutil
-import pickle
 import glob
 import subprocess
 import warnings
@@ -36,6 +35,8 @@
 from os.path import (abspath, realpath, join, exists, split, isabs, isdir)
 from functools import partial
 
+from six.moves import cPickle as pickle
+
 from logilab.common.date import strptime
 from logilab.common.decorators import cached, clear_cache
 
diff -r 73c1c9cf6bda -r 4845012cfc8e server/sources/native.py
--- a/server/sources/native.py	Wed Sep 16 14:45:15 2015 +0200
+++ b/server/sources/native.py	Mon Sep 14 12:19:48 2015 +0200
@@ -27,8 +27,6 @@
 
 __docformat__ = "restructuredtext en"
 
-from cPickle import loads, dumps
-import cPickle as pickle
 from threading import Lock
 from datetime import datetime
 from base64 import b64encode
@@ -40,6 +38,8 @@
 import logging
 import sys
 
+from six.moves import cPickle as pickle
+
 from logilab.common.decorators import cached, clear_cache
 from logilab.common.configuration import Method
 from logilab.common.shellutils import getlogin
@@ -623,7 +623,7 @@
             changes = self._save_attrs(cnx, entity, attrs)
             self._record_tx_action(cnx, 'tx_entity_actions', u'U',
                                    etype=unicode(entity.cw_etype), eid=entity.eid,
-                                   changes=self._binary(dumps(changes)))
+                                   changes=self._binary(pickle.dumps(changes)))
         sql = self.sqlgen.update(SQL_PREFIX + entity.cw_etype, attrs,
                                  ['cw_eid'])
         self.doexec(cnx, sql, attrs)
@@ -638,7 +638,7 @@
             changes = self._save_attrs(cnx, entity, attrs)
             self._record_tx_action(cnx, 'tx_entity_actions', u'D',
                                    etype=unicode(entity.cw_etype), eid=entity.eid,
-                                   changes=self._binary(dumps(changes)))
+                                   changes=self._binary(pickle.dumps(changes)))
         attrs = {'cw_eid': entity.eid}
         sql = self.sqlgen.delete(SQL_PREFIX + entity.cw_etype, attrs)
         self.doexec(cnx, sql, attrs)
@@ -1044,7 +1044,7 @@
                                   'etype', 'eid', 'changes'))
         with cnx.ensure_cnx_set:
             cu = self.doexec(cnx, sql, restr)
-            actions = [tx.EntityAction(a,p,o,et,e,c and loads(self.binary_to_str(c)))
+            actions = [tx.EntityAction(a,p,o,et,e,c and pickle.loads(self.binary_to_str(c)))
                        for a,p,o,et,e,c in cu.fetchall()]
         sql = self.sqlgen.select('tx_relation_actions', restr,
                                  ('txa_action', 'txa_public', 'txa_order',
@@ -1708,7 +1708,7 @@
         return tuple(columns), rows
 
     def _serialize(self, name, columns, rows):
-        return dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL)
+        return pickle.dumps((name, columns, rows), pickle.HIGHEST_PROTOCOL)
 
     def restore(self, backupfile):
         archive = zipfile.ZipFile(backupfile, 'r', allowZip64=True)
@@ -1756,7 +1756,7 @@
         return sequences, numranges, tables, table_chunks
 
     def read_sequence(self, archive, seq):
-        seqname, columns, rows = loads(archive.read('sequences/%s' % seq))
+        seqname, columns, rows = pickle.loads(archive.read('sequences/%s' % seq))
         assert seqname == seq
         assert len(rows) == 1
         assert len(rows[0]) == 1
@@ -1766,7 +1766,7 @@
         self.cnx.commit()
 
     def read_numrange(self, archive, numrange):
-        rangename, columns, rows = loads(archive.read('numrange/%s' % numrange))
+        rangename, columns, rows = pickle.loads(archive.read('numrange/%s' % numrange))
        assert rangename == numrange
         assert len(rows) == 1
         assert len(rows[0]) == 1
@@ -1781,7 +1781,7 @@
         self.cnx.commit()
         row_count = 0
         for filename in filenames:
-            tablename, columns, rows = loads(archive.read(filename))
+            tablename, columns, rows = pickle.loads(archive.read(filename))
             assert tablename == table
             if not rows:
                 continue
diff -r 73c1c9cf6bda -r 4845012cfc8e test/unittest_rset.py
--- a/test/unittest_rset.py	Wed Sep 16 14:45:15 2015 +0200
+++ b/test/unittest_rset.py	Mon Sep 14 12:19:48 2015 +0200
@@ -19,7 +19,8 @@
 """unit tests for module cubicweb.utils"""
 
 from urlparse import urlsplit
-import pickle
+
+from six.moves import cPickle as pickle
 
 from rql import parse
 
@@ -100,7 +101,9 @@
 
     def test_pickle(self):
         del self.rset.req
-        self.assertEqual(len(pickle.dumps(self.rset)), 376)
+        # 373 for python 2.7's cPickle
+        # 376 for the old python pickle implementation
+        self.assertIn(len(pickle.dumps(self.rset)), (373, 376))
 
     def test_build_url(self):
         with self.admin_access.web_request() as req:
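
Reviewer note, not part of the changeset: every hunk above applies the same compatibility pattern, importing the pickle module through six.moves so the same code gets the fast cPickle on Python 2 and the standard pickle on Python 3. A minimal, hypothetical sketch of that pattern follows; the zdumps/zloads helpers are illustrative only (they do not exist in CubicWeb) and simply mirror the Binary.zpickle/unzpickle logic touched in __init__.py:

    import zlib

    # six.moves.cPickle resolves to cPickle on Python 2 and to pickle on Python 3.
    from six.moves import cPickle as pickle

    def zdumps(obj):
        """Pickle then compress an object, as Binary.zpickle does."""
        return zlib.compress(pickle.dumps(obj, protocol=2))

    def zloads(data):
        """Decompress then unpickle a payload produced by zdumps()."""
        return pickle.loads(zlib.decompress(data))

    assert zloads(zdumps({'answer': 42})) == {'answer': 42}

Pinning protocol=2, as the patched zpickle() does, presumably keeps the payload readable from Python 2 as well, since protocol 2 is the highest pickle protocol Python 2 understands.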