[py3k] dict.iteritems → dict.items
author Rémi Cardona <remi.cardona@logilab.fr>
Tue, 15 Sep 2015 16:15:03 +0200
changeset 10662:10942ed172de
parent 10661:e6eb0c7c2e98
child 10663:54b8a1f249fb
[py3k] dict.iteritems → dict.items
cwconfig.py
cwctl.py
cwvreg.py
dataimport/pgstore.py
dataimport/stores.py
devtools/__init__.py
devtools/devctl.py
devtools/fake.py
devtools/htmlparser.py
devtools/instrument.py
devtools/repotest.py
devtools/testlib.py
doc/tutorials/dataimport/diseasome_import.py
entities/adapters.py
entities/sources.py
entities/test/unittest_base.py
entity.py
etwist/request.py
ext/test/unittest_rest.py
hooks/__init__.py
hooks/integrity.py
hooks/synccomputed.py
hooks/test/unittest_synccomputed.py
migration.py
misc/scripts/ldapuser2ldapfeed.py
multipart.py
req.py
rqlrewrite.py
rset.py
schema.py
server/cwzmq.py
server/edition.py
server/hook.py
server/migractions.py
server/querier.py
server/repository.py
server/rqlannotation.py
server/schemaserial.py
server/serverctl.py
server/sources/datafeed.py
server/sources/ldapfeed.py
server/sources/native.py
server/sources/rql2sql.py
server/sqlutils.py
server/ssplanner.py
server/test/unittest_ldapsource.py
server/test/unittest_migractions.py
setup.py
skeleton/setup.py
sobjects/cwxmlparser.py
sobjects/ldapparser.py
sobjects/notification.py
sobjects/services.py
spa2rql.py
uilib.py
utils.py
web/form.py
web/htmlwidgets.py
web/http_headers.py
web/propertysheet.py
web/request.py
web/schemaviewer.py
web/test/data/views.py
web/views/ajaxcontroller.py
web/views/cwproperties.py
web/views/cwsources.py
web/views/debug.py
web/views/editcontroller.py
web/views/facets.py
web/views/forms.py
web/views/management.py
web/views/reledit.py
web/views/schema.py
web/views/tableview.py
web/views/treeview.py
web/views/uicfg.py
web/views/undohistory.py
wsgi/request.py
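
The change applied below is mechanical: every `d.iteritems()` call becomes `d.items()`, which exists on both Python 2 (where it builds a list) and Python 3 (where it returns a view), at the cost of an intermediate list on Python 2. A minimal sketch of the pattern follows, using a hypothetical `config` dict (not part of the patch); note that where the dict may be mutated while iterating, as in the `update_feeds` hunk of hooks/__init__.py, the iterable is additionally wrapped in `list(...)`.

    # Sketch of the py3k-compatible iteration pattern used throughout this
    # changeset; `config` is a made-up example dictionary.
    config = {'db-driver': 'postgres', 'db-name': 'cubicweb'}

    # Python 2 only:           for key, value in config.iteritems(): ...
    # Python 2 and Python 3:
    for key, value in config.items():
        print(key, value)

    # When entries may be added or removed during the loop, take a snapshot
    # first, since Python 3 dict views reflect concurrent mutation:
    for key, value in list(config.items()):
        config.setdefault(key + '-copy', value)
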
--- a/cwconfig.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/cwconfig.py	Tue Sep 15 16:15:03 2015 +0200
@@ -506,7 +506,7 @@
                 deps = {}
             else:
                 deps = dict( (x[len('cubicweb-'):], v)
-                             for x, v in gendeps.iteritems()
+                             for x, v in gendeps.items()
                              if x.startswith('cubicweb-'))
         for depcube in deps:
             try:
--- a/cwctl.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/cwctl.py	Tue Sep 15 16:15:03 2015 +0200
@@ -773,7 +773,7 @@
         vcconf = repo.get_versions()
         helper = self.config_helper(config, required=False)
         if self.config.force_cube_version:
-            for cube, version in self.config.force_cube_version.iteritems():
+            for cube, version in self.config.force_cube_version.items():
                 vcconf[cube] = Version(version)
         toupgrade = []
         for cube in config.cubes():
@@ -1049,7 +1049,7 @@
     def configure_instance(self, appid):
         if self.config.param is not None:
             appcfg = cwcfg.config_for(appid)
-            for key, value in self.config.param.iteritems():
+            for key, value in self.config.param.items():
                 try:
                     appcfg.global_set_option(key, value)
                 except KeyError:
--- a/cwvreg.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/cwvreg.py	Tue Sep 15 16:15:03 2015 +0200
@@ -382,7 +382,7 @@
         return [item for item in super(CWRegistryStore, self).items()
                 if not item[0] in ('propertydefs', 'propertyvalues')]
     def iteritems(self):
-        return (item for item in super(CWRegistryStore, self).iteritems()
+        return (item for item in super(CWRegistryStore, self).items()
                 if not item[0] in ('propertydefs', 'propertyvalues'))
 
     def values(self):
@@ -576,7 +576,7 @@
         if withsitewide:
             return sorted(k for k in self['propertydefs']
                           if not k.startswith('sources.'))
-        return sorted(k for k, kd in self['propertydefs'].iteritems()
+        return sorted(k for k, kd in self['propertydefs'].items()
                       if not kd['sitewide'] and not k.startswith('sources.'))
 
     def register_property(self, key, type, help, default=None, vocabulary=None,
--- a/dataimport/pgstore.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/dataimport/pgstore.py	Tue Sep 15 16:15:03 2015 +0200
@@ -348,7 +348,7 @@
             # In that case, simply update the insert dict and remove
             # the need to make the
             # UPDATE statement
-            for statement, datalist in _inlined_relations_sql.iteritems():
+            for statement, datalist in _inlined_relations_sql.items():
                 new_datalist = []
                 # for a given inlined relation,
                 # browse each couple to be inserted
--- a/dataimport/stores.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/dataimport/stores.py	Tue Sep 15 16:15:03 2015 +0200
@@ -168,7 +168,7 @@
         """Given an entity type, attributes and inlined relations, returns the inserted entity's
         eid.
         """
-        for k, v in kwargs.iteritems():
+        for k, v in kwargs.items():
             kwargs[k] = getattr(v, 'eid', v)
         entity, rels = self.metagen.base_etype_dicts(etype)
         # make a copy to keep cached entity pristine
@@ -183,7 +183,7 @@
         kwargs = dict()
         if inspect.getargspec(self.add_relation).keywords:
             kwargs['subjtype'] = entity.cw_etype
-        for rtype, targeteids in rels.iteritems():
+        for rtype, targeteids in rels.items():
             # targeteids may be a single eid or a list of eids
             inlined = self.rschema(rtype).inlined
             try:
--- a/devtools/__init__.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/devtools/__init__.py	Tue Sep 15 16:15:03 2015 +0200
@@ -465,7 +465,7 @@
             dbname, data = data.split('-', 1)
             db_id, filetype = data.split('.', 1)
             entries.setdefault((dbname, db_id), {})[filetype] = filepath
-        for (dbname, db_id), entry in entries.iteritems():
+        for (dbname, db_id), entry in entries.items():
             # apply necessary transformation from the driver
             value = self.process_cache_entry(directory, dbname, db_id, entry)
             assert 'config' in entry
--- a/devtools/devctl.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/devtools/devctl.py	Tue Sep 15 16:15:03 2015 +0200
@@ -733,7 +733,7 @@
                 except Exception as exc:
                     sys.stderr.write('Line %s: %s (%s)\n' % (lineno, exc, line))
         stat = []
-        for rql, times in requests.iteritems():
+        for rql, times in requests.items():
             stat.append( (sum(time[0] for time in times),
                           sum(time[1] for time in times),
                           len(times), rql) )
--- a/devtools/fake.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/devtools/fake.py	Tue Sep 15 16:15:03 2015 +0200
@@ -112,7 +112,7 @@
     def build_url_params(self, **kwargs):
         # overriden to get predictable resultts
         args = []
-        for param, values in sorted(kwargs.iteritems()):
+        for param, values in sorted(kwargs.items()):
             if not isinstance(values, (list, tuple)):
                 values = (values,)
             for value in values:
--- a/devtools/htmlparser.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/devtools/htmlparser.py	Tue Sep 15 16:15:03 2015 +0200
@@ -209,7 +209,7 @@
     def matching_nodes(self, tag, **attrs):
         for elt in self.etree.iterfind(self._iterstr(tag)):
             eltattrs  = elt.attrib
-            for attr, value in attrs.iteritems():
+            for attr, value in attrs.items():
                 try:
                     if eltattrs[attr] != value:
                         break
--- a/devtools/instrument.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/devtools/instrument.py	Tue Sep 15 16:15:03 2015 +0200
@@ -186,7 +186,7 @@
 
     def add_colors_legend(self, graph):
         """Add a legend of used colors to the graph."""
-        for package, color in sorted(_COLORS.iteritems()):
+        for package, color in sorted(_COLORS.items()):
             graph.add_node(package, color=color, fontcolor=color, shape='record')
 
 
--- a/devtools/repotest.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/devtools/repotest.py	Tue Sep 15 16:15:03 2015 +0200
@@ -307,8 +307,8 @@
     variantes = _orig_build_variantes(self, newsolutions)
     sortedvariantes = []
     for variante in variantes:
-        orderedkeys = sorted((k[1], k[2], v) for k, v in variante.iteritems())
-        variante = DumbOrderedDict(sorted(variante.iteritems(),
+        orderedkeys = sorted((k[1], k[2], v) for k, v in variante.items())
+        variante = DumbOrderedDict(sorted(variante.items(),
                                           lambda a, b: cmp((a[0][1],a[0][2],a[1]),
                                                            (b[0][1],b[0][2],b[1]))))
         sortedvariantes.append( (orderedkeys, variante) )
@@ -319,7 +319,7 @@
 
 def _check_permissions(*args, **kwargs):
     res, restricted = _orig_check_permissions(*args, **kwargs)
-    res = DumbOrderedDict(sorted(res.iteritems(), lambda a, b: cmp(a[1], b[1])))
+    res = DumbOrderedDict(sorted(res.items(), lambda a, b: cmp(a[1], b[1])))
     return res, restricted
 
 def _dummy_check_permissions(self, rqlst):
--- a/devtools/testlib.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/devtools/testlib.py	Tue Sep 15 16:15:03 2015 +0200
@@ -521,10 +521,10 @@
         similar to `orig_permissions.update(partial_perms)`.
         """
         torestore = []
-        for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.iteritems()):
+        for erschema, etypeperms in chain(perm_overrides, perm_kwoverrides.items()):
             if isinstance(erschema, string_types):
                 erschema = self.schema[erschema]
-            for action, actionperms in etypeperms.iteritems():
+            for action, actionperms in etypeperms.items():
                 origperms = erschema.permissions[action]
                 erschema.set_action_permissions(action, actionperms)
                 torestore.append([erschema, action, origperms])
@@ -887,7 +887,7 @@
         }
     # maps vid : validator name (override content_type_validators)
     vid_validators = dict((vid, htmlparser.VALMAP[valkey])
-                          for vid, valkey in VIEW_VALIDATORS.iteritems())
+                          for vid, valkey in VIEW_VALIDATORS.items())
 
 
     def view(self, vid, rset=None, req=None, template='main-template',
@@ -1106,7 +1106,7 @@
     # new num for etype = max(current num, sum(num for possible target etypes))
     #
     # XXX we should first check there is no cycle then propagate changes
-    for (rschema, etype), targets in relmap.iteritems():
+    for (rschema, etype), targets in relmap.items():
         relfactor = sum(howmanydict[e] for e in targets)
         howmanydict[str(etype)] = max(relfactor, howmanydict[etype])
     return howmanydict
@@ -1314,7 +1314,7 @@
 
 
 # def print_untested_objects(testclass, skipregs=('hooks', 'etypes')):
-#     for regname, reg in testclass._env.vreg.iteritems():
+#     for regname, reg in testclass._env.vreg.items():
 #         if regname in skipregs:
 #             continue
 #         for appobjects in reg.itervalues():
--- a/doc/tutorials/dataimport/diseasome_import.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/doc/tutorials/dataimport/diseasome_import.py	Tue Sep 15 16:15:03 2015 +0200
@@ -95,7 +95,7 @@
     # Perform a first commit, of the entities
     store.flush()
     kwargs = {}
-    for uri, relations in all_relations.iteritems():
+    for uri, relations in all_relations.items():
         from_eid = uri_to_eid.get(uri)
         # ``subjtype`` should be initialized if ``SQLGenObjectStore`` is used
         # and there are inlined relations in the schema.
@@ -108,7 +108,7 @@
         kwargs['subjtype'] = uri_to_etype.get(uri)
         if not from_eid:
             continue
-        for rtype, rels in relations.iteritems():
+        for rtype, rels in relations.items():
             if rtype in ('classes', 'possible_drugs', 'omim', 'omim_page',
                          'chromosomal_location', 'same_as', 'gene_id',
                          'hgnc_id', 'hgnc_page'):
--- a/entities/adapters.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/entities/adapters.py	Tue Sep 15 16:15:03 2015 +0200
@@ -162,7 +162,7 @@
         return words
 
 def merge_weight_dict(maindict, newdict):
-    for weight, words in newdict.iteritems():
+    for weight, words in newdict.items():
         maindict.setdefault(weight, []).extend(words)
 
 class IDownloadableAdapter(view.EntityAdapter):
--- a/entities/sources.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/entities/sources.py	Tue Sep 15 16:15:03 2015 +0200
@@ -42,7 +42,7 @@
         cfg.update(config)
         options = SOURCE_TYPES[self.type].options
         sconfig = SourceConfiguration(self._cw.vreg.config, options=options)
-        for opt, val in cfg.iteritems():
+        for opt, val in cfg.items():
             try:
                 sconfig.set_option(opt, val)
             except OptionError:
--- a/entities/test/unittest_base.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/entities/test/unittest_base.py	Tue Sep 15 16:15:03 2015 +0200
@@ -60,7 +60,7 @@
         # XXX move to yams
         self.assertEqual(self.schema['CWUser'].meta_attributes(), {})
         self.assertEqual(dict((str(k), v)
-                              for k, v in self.schema['State'].meta_attributes().iteritems()),
+                              for k, v in self.schema['State'].meta_attributes().items()),
                           {'description_format': ('format', 'description')})
 
     def test_fti_rql_method(self):
--- a/entity.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/entity.py	Tue Sep 15 16:15:03 2015 +0200
@@ -108,7 +108,7 @@
     """
     st = cstr.snippet_rqlst.copy()
     # replace relations in ST by eid infos from linkto where possible
-    for (info_rtype, info_role), eids in lt_infos.iteritems():
+    for (info_rtype, info_role), eids in lt_infos.items():
         eid = eids[0] # NOTE: we currently assume a pruned lt_info with only 1 eid
         for rel in st.iget_nodes(RqlRelation):
             targetvar = rel_matches(rel, info_rtype, info_role, evar.name)
@@ -135,7 +135,7 @@
 
 def pruned_lt_info(eschema, lt_infos):
     pruned = {}
-    for (lt_rtype, lt_role), eids in lt_infos.iteritems():
+    for (lt_rtype, lt_role), eids in lt_infos.items():
         # we can only use lt_infos describing relation with a cardinality
         # of value 1 towards the linked entity
         if not len(eids) == 1:
--- a/etwist/request.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/etwist/request.py	Tue Sep 15 16:15:03 2015 +0200
@@ -31,7 +31,7 @@
         self._twreq = req
         super(CubicWebTwistedRequestAdapter, self).__init__(
             vreg, https, req.args, headers=req.received_headers)
-        for key, name_stream_list in req.files.iteritems():
+        for key, name_stream_list in req.files.items():
             for name, stream in name_stream_list:
                 if name is not None:
                     name = unicode(name, self.encoding)
--- a/ext/test/unittest_rest.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/ext/test/unittest_rest.py	Tue Sep 15 16:15:03 2015 +0200
@@ -221,7 +221,7 @@
    %(rql)s
                 """ % {'rql': rql,
                        'colvids': ', '.join(["%d=%s" % (k, v)
-                                             for k, v in colvids.iteritems()])
+                                             for k, v in colvids.items()])
                    })
             view = self.vreg['views'].select('table', req, rset=req.execute(rql))
             view.cellvids = colvids
--- a/hooks/__init__.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/hooks/__init__.py	Tue Sep 15 16:15:03 2015 +0200
@@ -52,7 +52,7 @@
         def update_feeds(repo):
             # take a list to avoid iterating on a dictionary whose size may
             # change
-            for uri, source in list(repo.sources_by_uri.iteritems()):
+            for uri, source in list(repo.sources_by_uri.items()):
                 if (uri == 'system'
                     or not repo.config.source_enabled(source)
                     or not source.config['synchronize']):
@@ -72,7 +72,7 @@
 
     def __call__(self):
         def expire_dataimports(repo=self.repo):
-            for uri, source in repo.sources_by_uri.iteritems():
+            for uri, source in repo.sources_by_uri.items():
                 if (uri == 'system'
                     or not repo.config.source_enabled(source)):
                     continue
--- a/hooks/integrity.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/hooks/integrity.py	Tue Sep 15 16:15:03 2015 +0200
@@ -247,7 +247,7 @@
     def __call__(self):
         entity = self.entity
         eschema = entity.e_schema
-        for attr, val in entity.cw_edited.iteritems():
+        for attr, val in entity.cw_edited.items():
             if eschema.subjrels[attr].final and eschema.has_unique_values(attr):
                 if val is None:
                     continue
@@ -286,7 +286,7 @@
         entity = self.entity
         metaattrs = entity.e_schema.meta_attributes()
         edited = entity.cw_edited
-        for metaattr, (metadata, attr) in metaattrs.iteritems():
+        for metaattr, (metadata, attr) in metaattrs.items():
             if metadata == 'format' and attr in edited:
                 try:
                     value = edited[attr]
--- a/hooks/synccomputed.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/hooks/synccomputed.py	Tue Sep 15 16:15:03 2015 +0200
@@ -40,7 +40,7 @@
             self._container[computed_attribute] = set((eid,))
 
     def precommit_event(self):
-        for computed_attribute_rdef, eids in self.get_data().iteritems():
+        for computed_attribute_rdef, eids in self.get_data().items():
             attr = computed_attribute_rdef.rtype
             formula  = computed_attribute_rdef.formula
             select = self.cnx.repo.vreg.rqlhelper.parse(formula).children[0]
@@ -110,7 +110,7 @@
 
     def __call__(self):
         edited_attributes = frozenset(self.entity.cw_edited)
-        for rdef, used_attributes in self.attributes_computed_attributes.iteritems():
+        for rdef, used_attributes in self.attributes_computed_attributes.items():
             if edited_attributes.intersection(used_attributes):
                 # XXX optimize if the modified attributes belong to the same
                 # entity as the computed attribute
@@ -178,7 +178,7 @@
                             self.computed_attribute_by_relation[depend_on_rdef].append(rdef)
 
     def generate_entity_creation_hooks(self):
-        for etype, computed_attributes in self.computed_attribute_by_etype.iteritems():
+        for etype, computed_attributes in self.computed_attribute_by_etype.items():
             regid = 'computed_attribute.%s_created' % etype
             selector = hook.is_instance(etype)
             yield type('%sCreatedHook' % etype,
@@ -188,7 +188,7 @@
                         'computed_attributes': computed_attributes})
 
     def generate_relation_change_hooks(self):
-        for rdef, computed_attributes in self.computed_attribute_by_relation.iteritems():
+        for rdef, computed_attributes in self.computed_attribute_by_relation.items():
             regid = 'computed_attribute.%s_modified' % rdef.rtype
             selector = hook.match_rtype(rdef.rtype.type,
                                         frometypes=(rdef.subject.type,),
@@ -206,7 +206,7 @@
                         'optimized_computed_attributes': optimized_computed_attributes})
 
     def generate_entity_update_hooks(self):
-        for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.iteritems():
+        for etype, attributes_computed_attributes in self.computed_attribute_by_etype_attrs.items():
             regid = 'computed_attribute.%s_updated' % etype
             selector = hook.is_instance(etype)
             yield type('%sModifiedHook' % etype,
--- a/hooks/test/unittest_synccomputed.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/hooks/test/unittest_synccomputed.py	Tue Sep 15 16:15:03 2015 +0200
@@ -62,7 +62,7 @@
     def test_computed_attribute_by_relation(self):
         comp_by_rdef = self.dependencies.computed_attribute_by_relation
         self.assertEqual(len(comp_by_rdef), 1)
-        key, values = iter(comp_by_rdef.iteritems()).next()
+        key, values = iter(comp_by_rdef.items()).next()
         self.assertEqual(key.rtype, 'works_for')
         self.assertEqual(len(values), 1)
         self.assertEqual(values[0].rtype, 'total_salary')
@@ -73,7 +73,7 @@
         values = comp_by_attr['Person']
         self.assertEqual(len(values), 2)
         values = set((rdef.formula, tuple(v))
-                     for rdef, v in values.iteritems())
+                     for rdef, v in values.items())
         self.assertEquals(values,
                           set((('Any 2014 - D WHERE X birth_year D', tuple(('birth_year',))),
                                ('Any SUM(SA) GROUPBY X WHERE P works_for X, P salary SA', tuple(('salary',)))))
--- a/migration.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/migration.py	Tue Sep 15 16:15:03 2015 +0200
@@ -493,8 +493,8 @@
             self.dependencies[cube] = dict(self.config.cube_dependencies(cube))
             self.dependencies[cube]['cubicweb'] = self.config.cube_depends_cubicweb_version(cube)
         # compute reverse dependencies
-        for cube, dependencies in self.dependencies.iteritems():
-            for name, constraint in dependencies.iteritems():
+        for cube, dependencies in self.dependencies.items():
+            for name, constraint in dependencies.items():
                 self.reverse_dependencies.setdefault(name,set())
                 if constraint:
                     try:
--- a/misc/scripts/ldapuser2ldapfeed.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/misc/scripts/ldapuser2ldapfeed.py	Tue Sep 15 16:15:03 2015 +0200
@@ -80,7 +80,7 @@
     pprint(duplicates)
 
 print(len(todelete), 'entities will be deleted')
-for etype, entities in todelete.iteritems():
+for etype, entities in todelete.items():
     print('deleting', etype, [e.login for e in entities])
     system_source.delete_info_multi(session, entities, source_name)
 
--- a/multipart.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/multipart.py	Tue Sep 15 16:15:03 2015 +0200
@@ -61,7 +61,7 @@
     """ A dict that remembers old values for each key """
     def __init__(self, *a, **k):
         self.dict = dict()
-        for k, v in dict(*a, **k).iteritems():
+        for k, v in dict(*a, **k).items():
             self[k] = v
 
     def __len__(self): return len(self.dict)
@@ -82,7 +82,7 @@
         return self.dict[key][index]
 
     def iterallitems(self):
-        for key, values in self.dict.iteritems():
+        for key, values in self.dict.items():
             for value in values:
                 yield key, value
 
@@ -400,7 +400,7 @@
             if stream.read(1): # These is more that does not fit mem_limit
                 raise MultipartError("Request to big. Increase MAXMEM.")
             data = parse_qs(data, keep_blank_values=True)
-            for key, values in data.iteritems():
+            for key, values in data.items():
                 for value in values:
                     forms[key] = value.decode(charset)
         else:
--- a/req.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/req.py	Tue Sep 15 16:15:03 2015 +0200
@@ -299,7 +299,7 @@
     def build_url_params(self, **kwargs):
         """return encoded params to incorporate them in a URL"""
         args = []
-        for param, values in kwargs.iteritems():
+        for param, values in kwargs.items():
             if not isinstance(values, (list, tuple)):
                 values = (values,)
             for value in values:
@@ -352,7 +352,7 @@
         schema, netloc, path, query, fragment = urlsplit(url)
         query = parse_qs(query)
         # sort for testing predictability
-        for key, val in sorted(newparams.iteritems()):
+        for key, val in sorted(newparams.items()):
             query[key] = (self.url_quote(val),)
         query = '&'.join(u'%s=%s' % (param, value)
                          for param, values in sorted(query.items())
--- a/rqlrewrite.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/rqlrewrite.py	Tue Sep 15 16:15:03 2015 +0200
@@ -56,7 +56,7 @@
     eschema = schema.eschema
     allpossibletypes = {}
     for solution in solutions:
-        for varname, etype in solution.iteritems():
+        for varname, etype in solution.items():
             # XXX not considering aliases by design, right ?
             if varname not in newroot.defined_vars or eschema(etype).final:
                 continue
@@ -332,7 +332,7 @@
             union.replace(select, newselect)
         elif not () in localchecks:
             union.remove(select)
-        for lcheckdef, lchecksolutions in localchecks.iteritems():
+        for lcheckdef, lchecksolutions in localchecks.items():
             if not lcheckdef:
                 continue
             myrqlst = select.copy(solutions=lchecksolutions)
@@ -648,7 +648,7 @@
         # insert "is" where necessary
         varexistsmap = {}
         self.removing_ambiguity = True
-        for (erqlexpr, varmap, oldvarname), etype in variantes[0].iteritems():
+        for (erqlexpr, varmap, oldvarname), etype in variantes[0].items():
             varname = self.rewritten[(erqlexpr, varmap, oldvarname)]
             var = self.select.defined_vars[varname]
             exists = var.references()[0].scope
@@ -657,7 +657,7 @@
         # insert ORED exists where necessary
         for variante in variantes[1:]:
             self.insert_snippets(snippets, varexistsmap)
-            for key, etype in variante.iteritems():
+            for key, etype in variante.items():
                 varname = self.rewritten[key]
                 try:
                     var = self.select.defined_vars[varname]
@@ -676,7 +676,7 @@
         variantes = set()
         for sol in newsolutions:
             variante = []
-            for key, newvar in self.rewritten.iteritems():
+            for key, newvar in self.rewritten.items():
                 variante.append( (key, sol[newvar]) )
             variantes.add(tuple(variante))
         # rebuild variantes as dict
--- a/rset.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/rset.py	Tue Sep 15 16:15:03 2015 +0200
@@ -103,7 +103,7 @@
         if self._rsetactions is None:
             self._rsetactions = {}
         if kwargs:
-            key = tuple(sorted(kwargs.iteritems()))
+            key = tuple(sorted(kwargs.items()))
         else:
             key = None
         try:
@@ -509,9 +509,9 @@
             eschema = entity.e_schema
             eid_col, attr_cols, rel_cols = self._rset_structure(eschema, col)
             entity.eid = rowvalues[eid_col]
-            for attr, col_idx in attr_cols.iteritems():
+            for attr, col_idx in attr_cols.items():
                 entity.cw_attr_cache[attr] = rowvalues[col_idx]
-            for (rtype, role), col_idx in rel_cols.iteritems():
+            for (rtype, role), col_idx in rel_cols.items():
                 value = rowvalues[col_idx]
                 if value is None:
                     if role == 'subject':
--- a/schema.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/schema.py	Tue Sep 15 16:15:03 2015 +0200
@@ -329,7 +329,7 @@
         """
         creating = kwargs.get('creating')
         if not creating and self.eid is not None:
-            key = (self.eid, tuple(sorted(kwargs.iteritems())))
+            key = (self.eid, tuple(sorted(kwargs.items())))
             try:
                 return _cw.local_perm_cache[key]
             except KeyError:
@@ -717,7 +717,7 @@
 def check_permission_definitions(self):
     orig_check_permission_definitions(self)
     schema = self.subject.schema
-    for action, groups in self.permissions.iteritems():
+    for action, groups in self.permissions.items():
         for group_or_rqlexpr in groups:
             if action == 'read' and \
                    isinstance(group_or_rqlexpr, RQLExpression):
--- a/server/cwzmq.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/cwzmq.py	Tue Sep 15 16:15:03 2015 +0200
@@ -65,7 +65,7 @@
 
     def add_subscriber(self, address):
         subscriber = Subscriber(self.ioloop, address)
-        for topic, callback in self._topics.iteritems():
+        for topic, callback in self._topics.items():
             subscriber.subscribe(topic, callback)
         self._subscribers.append(subscriber)
 
--- a/server/edition.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/edition.py	Tue Sep 15 16:15:03 2015 +0200
@@ -96,7 +96,7 @@
             setitem = self.__setitem__
         else:
             setitem = self.edited_attribute
-        for attr, value in values.iteritems():
+        for attr, value in values.items():
             setitem(attr, value)
 
     def edited_attribute(self, attr, value):
--- a/server/hook.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/hook.py	Tue Sep 15 16:15:03 2015 +0200
@@ -372,7 +372,7 @@
         pruned = set()
         cnx.pruned_hooks_cache[cache_key] = pruned
         if look_for_selector is not None:
-            for id, hooks in self.iteritems():
+            for id, hooks in self.items():
                 for hook in hooks:
                     enabled_cat, main_filter = hook.filterable_selectors()
                     if enabled_cat is not None:
--- a/server/migractions.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/migractions.py	Tue Sep 15 16:15:03 2015 +0200
@@ -222,7 +222,7 @@
                 format_file.write('%s\n' % format)
             with open(osp.join(tmpdir, 'versions.txt'), 'w') as version_file:
                 versions = repo.get_versions()
-                for cube, version in versions.iteritems():
+                for cube, version in versions.items():
                     version_file.write('%s %s\n' % (cube, version))
             if not failed:
                 bkup = tarfile.open(backupfile, 'w|gz')
--- a/server/querier.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/querier.py	Tue Sep 15 16:15:03 2015 +0200
@@ -66,7 +66,7 @@
 def check_no_password_selected(rqlst):
     """check that Password entities are not selected"""
     for solution in rqlst.solutions:
-        for var, etype in solution.iteritems():
+        for var, etype in solution.items():
             if etype == 'Password':
                 raise Unauthorized('Password selection is not allowed (%s)' % var)
 
@@ -290,7 +290,7 @@
                 newsolutions.append(solution)
                 # try to benefit of rqlexpr.check cache for entities which
                 # are specified by eid in query'args
-                for varname, eid in varkwargs.iteritems():
+                for varname, eid in varkwargs.items():
                     try:
                         rqlexprs = localcheck.pop(varname)
                     except KeyError:
@@ -308,7 +308,7 @@
                 # mark variables protected by an rql expression
                 restricted_vars.update(localcheck)
                 # turn local check into a dict key
-                localcheck = tuple(sorted(localcheck.iteritems()))
+                localcheck = tuple(sorted(localcheck.items()))
                 localchecks.setdefault(localcheck, []).append(solution)
         # raise Unautorized exception if the user can't access to any solution
         if not newsolutions:
@@ -606,7 +606,7 @@
             if args:
                 # different SQL generated when some argument is None or not (IS
                 # NULL). This should be considered when computing sql cache key
-                cachekey += tuple(sorted([k for k, v in args.iteritems()
+                cachekey += tuple(sorted([k for k, v in args.items()
                                           if v is None]))
         # make an execution plan
         plan = self.plan_factory(rqlst, args, cnx)
--- a/server/repository.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/repository.py	Tue Sep 15 16:15:03 2015 +0200
@@ -571,7 +571,7 @@
         """
         sources = {}
         # remove sensitive information
-        for uri, source in self.sources_by_uri.iteritems():
+        for uri, source in self.sources_by_uri.items():
             sources[uri] = source.public_config
         return sources
 
@@ -1039,7 +1039,7 @@
             except KeyError:
                 data_by_etype[etype] = [entity]
         source = self.system_source
-        for etype, entities in data_by_etype.iteritems():
+        for etype, entities in data_by_etype.items():
             if server.DEBUG & server.DBG_REPO:
                 print('DELETE entities', etype, [entity.eid for entity in entities])
             self.hm.call_hooks('before_delete_entity', cnx, entities=entities)
@@ -1063,7 +1063,7 @@
         subjects_by_types = {}
         objects_by_types = {}
         activintegrity = cnx.is_hook_category_activated('activeintegrity')
-        for rtype, eids_subj_obj in relations.iteritems():
+        for rtype, eids_subj_obj in relations.items():
             if server.DEBUG & server.DBG_REPO:
                 for subjeid, objeid in eids_subj_obj:
                     print('ADD relation', subjeid, rtype, objeid)
@@ -1101,15 +1101,15 @@
                         objects[objeid] = len(relations_by_rtype[rtype])
                         continue
                     objects[objeid] = len(relations_by_rtype[rtype])
-        for rtype, source_relations in relations_by_rtype.iteritems():
+        for rtype, source_relations in relations_by_rtype.items():
             self.hm.call_hooks('before_add_relation', cnx,
                                rtype=rtype, eids_from_to=source_relations)
-        for rtype, source_relations in relations_by_rtype.iteritems():
+        for rtype, source_relations in relations_by_rtype.items():
             source.add_relations(cnx, rtype, source_relations)
             rschema = self.schema.rschema(rtype)
             for subjeid, objeid in source_relations:
                 cnx.update_rel_cache_add(subjeid, rtype, objeid, rschema.symmetric)
-        for rtype, source_relations in relations_by_rtype.iteritems():
+        for rtype, source_relations in relations_by_rtype.items():
             self.hm.call_hooks('after_add_relation', cnx,
                                rtype=rtype, eids_from_to=source_relations)
 
--- a/server/rqlannotation.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/rqlannotation.py	Tue Sep 15 16:15:03 2015 +0200
@@ -318,7 +318,7 @@
 
     def compute(self, rqlst):
         # set domains for each variable
-        for varname, var in rqlst.defined_vars.iteritems():
+        for varname, var in rqlst.defined_vars.items():
             if var.stinfo['uidrel'] is not None or \
                    self.eschema(rqlst.solutions[0][varname]).final:
                 ptypes = var.stinfo['possibletypes']
@@ -356,7 +356,7 @@
 
     def _debug_print(self):
         print('varsols', dict((x, sorted(str(v) for v in values))
-                               for x, values in self.varsols.iteritems()))
+                               for x, values in self.varsols.items()))
         print('ambiguous vars', sorted(self.ambiguousvars))
 
     def set_rel_constraint(self, term, rel, etypes_func):
--- a/server/schemaserial.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/schemaserial.py	Tue Sep 15 16:15:03 2015 +0200
@@ -149,7 +149,7 @@
                     {'x': etype, 'n': netype})
             cnx.commit(False)
             tocleanup = [eid]
-            tocleanup += (eid for eid, cached in repo._type_source_cache.iteritems()
+            tocleanup += (eid for eid, cached in repo._type_source_cache.items()
                           if etype == cached[0])
             repo.clear_caches(tocleanup)
             cnx.commit(False)
@@ -334,7 +334,7 @@
         thispermsdict = permsidx[erschema.eid]
     except KeyError:
         return
-    for action, somethings in thispermsdict.iteritems():
+    for action, somethings in thispermsdict.items():
         erschema.permissions[action] = tuple(
             isinstance(p, tuple) and erschema.rql_expression(*p) or p
             for p in somethings)
@@ -384,7 +384,7 @@
             continue
         execschemarql(execute, rschema, rschema2rql(rschema, addrdef=False))
         if rschema.symmetric:
-            rdefs = [rdef for k, rdef in rschema.rdefs.iteritems()
+            rdefs = [rdef for k, rdef in rschema.rdefs.items()
                      if (rdef.subject, rdef.object) == k]
         else:
             rdefs = rschema.rdefs.itervalues()
--- a/server/serverctl.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/serverctl.py	Tue Sep 15 16:15:03 2015 +0200
@@ -1004,7 +1004,7 @@
                 stats = source.pull_data(cnx, force=True, raise_on_error=True)
         finally:
             repo.shutdown()
-        for key, val in stats.iteritems():
+        for key, val in stats.items():
             if val:
                 print(key, ':', val)
 
@@ -1082,7 +1082,7 @@
     if self.config.db is not None:
         appcfg = ServerConfiguration.config_for(appid)
         srccfg = appcfg.read_sources_file()
-        for key, value in self.config.db.iteritems():
+        for key, value in self.config.db.items():
             if '.' in key:
                 section, key = key.split('.', 1)
             else:
--- a/server/sources/datafeed.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/sources/datafeed.py	Tue Sep 15 16:15:03 2015 +0200
@@ -449,10 +449,10 @@
     def handle_deletion(self, config, cnx, myuris):
         if config['delete-entities'] and myuris:
             byetype = {}
-            for extid, (eid, etype) in myuris.iteritems():
+            for extid, (eid, etype) in myuris.items():
                 if self.is_deleted(extid, etype, eid):
                     byetype.setdefault(etype, []).append(str(eid))
-            for etype, eids in byetype.iteritems():
+            for etype, eids in byetype.items():
                 self.warning('delete %s %s entities', len(eids), etype)
                 cnx.execute('DELETE %s X WHERE X eid IN (%s)'
                             % (etype, ','.join(eids)))
@@ -465,7 +465,7 @@
         self.notify_checked(entity)
         mdate = attrs.get('modification_date')
         if not mdate or mdate > entity.modification_date:
-            attrs = dict( (k, v) for k, v in attrs.iteritems()
+            attrs = dict( (k, v) for k, v in attrs.items()
                           if v != getattr(entity, k))
             if attrs:
                 entity.cw_set(**attrs)
--- a/server/sources/ldapfeed.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/sources/ldapfeed.py	Tue Sep 15 16:15:03 2015 +0200
@@ -185,7 +185,7 @@
         self.user_default_groups = typedconfig['user-default-group']
         self.user_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'}
         self.user_attrs.update(typedconfig['user-attrs-map'])
-        self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.iteritems())
+        self.user_rev_attrs = dict((v, k) for k, v in self.user_attrs.items())
         self.base_filters = [filter_format('(%s=%s)', ('objectClass', o))
                              for o in typedconfig['user-classes']]
         if typedconfig['user-filter']:
@@ -195,7 +195,7 @@
         self.group_attrs = typedconfig['group-attrs-map']
         self.group_attrs = {'dn': 'eid', 'modifyTimestamp': 'modification_date'}
         self.group_attrs.update(typedconfig['group-attrs-map'])
-        self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.iteritems())
+        self.group_rev_attrs = dict((v, k) for k, v in self.group_attrs.items())
         self.group_base_filters = [filter_format('(%s=%s)', ('objectClass', o))
                                    for o in typedconfig['group-classes']]
         if typedconfig['group-filter']:
@@ -349,7 +349,7 @@
             # This appears to be some sort of internal referral, but
             # we can't handle it, so we need to skip over it.
             try:
-                items = rec_dict.iteritems()
+                items = rec_dict.items()
             except AttributeError:
                 continue
             else:
--- a/server/sources/native.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/sources/native.py	Tue Sep 15 16:15:03 2015 +0200
@@ -674,7 +674,7 @@
                     etypes[etype].append((subject, object))
                 else:
                     etypes[etype] = [(subject, object)]
-            for subj_etype, subj_obj_list in etypes.iteritems():
+            for subj_etype, subj_obj_list in etypes.items():
                 attrs = [{'cw_eid': subject, SQL_PREFIX + rtype: object}
                          for subject, object in subj_obj_list]
                 sql.append((self.sqlgen.update(SQL_PREFIX + etype, attrs[0],
@@ -976,7 +976,7 @@
             if actionfilters.pop('public', True):
                 genrestr['txa_public'] = True
             # put additional filters in trarestr and/or tearestr
-            for key, val in actionfilters.iteritems():
+            for key, val in actionfilters.items():
                 if key == 'etype':
                     # filtering on etype implies filtering on entity actions
                     # only, and with no eid specified
--- a/server/sources/rql2sql.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/sources/rql2sql.py	Tue Sep 15 16:15:03 2015 +0200
@@ -174,7 +174,7 @@
     existssols = {}
     unstable = set()
     invariants = {}
-    for vname, var in rqlst.defined_vars.iteritems():
+    for vname, var in rqlst.defined_vars.items():
         vtype = newsols[0][vname]
         if var._q_invariant or vname in varmap:
             # remove invariant variable from solutions to remove duplicates
@@ -207,11 +207,11 @@
         newsols = newsols_
         # reinsert solutions for invariants
         for sol in newsols:
-            for invvar, vartype in invariants[id(sol)].iteritems():
+            for invvar, vartype in invariants[id(sol)].items():
                 sol[invvar] = vartype
         for sol in existssols:
             try:
-                for invvar, vartype in invariants[id(sol)].iteritems():
+                for invvar, vartype in invariants[id(sol)].items():
                     sol[invvar] = vartype
             except KeyError:
                 continue
@@ -602,7 +602,7 @@
             self.outer_chains.remove(lchain)
             rchain += lchain
             self.mark_as_used_in_outer_join(leftalias)
-            for alias, (aouter, aconditions, achain) in outer_tables.iteritems():
+            for alias, (aouter, aconditions, achain) in outer_tables.items():
                 if achain is lchain:
                     outer_tables[alias] = (aouter, aconditions, rchain)
         else:
@@ -1611,7 +1611,7 @@
 
     def _temp_table_scope(self, select, table):
         scope = 9999
-        for var, sql in self._varmap.iteritems():
+        for var, sql in self._varmap.items():
             # skip "attribute variable" in varmap (such 'T.login')
             if not '.' in var and table == sql.split('.', 1)[0]:
                 try:
--- a/server/sqlutils.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/sqlutils.py	Tue Sep 15 16:15:03 2015 +0200
@@ -373,7 +373,7 @@
     def merge_args(self, args, query_args):
         if args is not None:
             newargs = {}
-            for key, val in args.iteritems():
+            for key, val in args.items():
                 # convert cubicweb binary into db binary
                 if isinstance(val, Binary):
                     val = self._binary(val.getvalue())
@@ -444,7 +444,7 @@
         attrs = {}
         eschema = entity.e_schema
         converters = getattr(self.dbhelper, 'TYPE_CONVERTERS', {})
-        for attr, value in entity.cw_edited.iteritems():
+        for attr, value in entity.cw_edited.items():
             if value is not None and eschema.subjrels[attr].final:
                 atype = str(entity.e_schema.destination(attr))
                 if atype in converters:
--- a/server/ssplanner.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/ssplanner.py	Tue Sep 15 16:15:03 2015 +0200
@@ -306,7 +306,7 @@
     if varmap is None:
         return varmap
     maprepr = {}
-    for var, sql in varmap.iteritems():
+    for var, sql in varmap.items():
         table, col = sql.split('.')
         maprepr[var] = '%s.%s' % (tablesinorder[table], col)
     return maprepr
@@ -527,7 +527,7 @@
             result[i] = newrow
         # update entities
         repo.glob_add_relations(cnx, relations)
-        for eid, edited in edefs.iteritems():
+        for eid, edited in edefs.items():
             repo.glob_update_entity(cnx, edited)
         return result
 
--- a/server/test/unittest_ldapsource.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/test/unittest_ldapsource.py	Tue Sep 15 16:15:03 2015 +0200
@@ -153,7 +153,7 @@
         add an LDAP entity
         """
         modcmd = ['dn: %s'%dn, 'changetype: add']
-        for key, values in mods.iteritems():
+        for key, values in mods.items():
             if isinstance(values, string_types):
                 values = [values]
             for value in values:
@@ -172,7 +172,7 @@
         modify one or more attributes of an LDAP entity
         """
         modcmd = ['dn: %s'%dn, 'changetype: modify']
-        for (kind, key), values in mods.iteritems():
+        for (kind, key), values in mods.items():
             modcmd.append('%s: %s' % (kind, key))
             if isinstance(values, string_types):
                 values = [values]
--- a/server/test/unittest_migractions.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/server/test/unittest_migractions.py	Tue Sep 15 16:15:03 2015 +0200
@@ -151,7 +151,7 @@
             orderdict2 = dict(mh.rqlexec('Any RTN, O WHERE X name "Note", RDEF from_entity X, '
                                          'RDEF relation_type RT, RDEF ordernum O, RT name RTN'))
             whateverorder = migrschema['whatever'].rdef('Note', 'Int').order
-            for k, v in orderdict.iteritems():
+            for k, v in orderdict.items():
                 if v >= whateverorder:
                     orderdict[k] = v+1
             orderdict['whatever'] = whateverorder
--- a/setup.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/setup.py	Tue Sep 15 16:15:03 2015 +0200
@@ -51,7 +51,7 @@
     for entry in ("__depends__",): # "__recommends__"):
         requires.update(getattr(__pkginfo__, entry, {}))
     install_requires = [("%s %s" % (d, v and v or "")).strip()
-                       for d, v in requires.iteritems()]
+                       for d, v in requires.items()]
 else:
     install_requires = []
 
--- a/skeleton/setup.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/skeleton/setup.py	Tue Sep 15 16:15:03 2015 +0200
@@ -55,7 +55,7 @@
     for entry in ("__depends__",):  # "__recommends__"):
         requires.update(getattr(__pkginfo__, entry, {}))
     install_requires = [("%s %s" % (d, v and v or "")).strip()
-                        for d, v in requires.iteritems()]
+                        for d, v in requires.items()]
 else:
     install_requires = []
 
--- a/sobjects/cwxmlparser.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/sobjects/cwxmlparser.py	Tue Sep 15 16:15:03 2015 +0200
@@ -125,7 +125,7 @@
 
     def list_actions(self):
         reg = self._cw.vreg['components']
-        return sorted(clss[0].action for rid, clss in reg.iteritems()
+        return sorted(clss[0].action for rid, clss in reg.items()
                       if rid.startswith('cw.entityxml.action.'))
 
     # mapping handling #########################################################
@@ -221,7 +221,7 @@
 
     def process_relations(self, entity, rels):
         etype = entity.cw_etype
-        for (rtype, role, action), rules in self.source.mapping.get(etype, {}).iteritems():
+        for (rtype, role, action), rules in self.source.mapping.get(etype, {}).items():
             try:
                 related_items = rels[role][rtype]
             except KeyError:
--- a/sobjects/ldapparser.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/sobjects/ldapparser.py	Tue Sep 15 16:15:03 2015 +0200
@@ -99,11 +99,11 @@
             return
         if myuris:
             byetype = {}
-            for extid, (eid, etype) in myuris.iteritems():
+            for extid, (eid, etype) in myuris.items():
                 if self.is_deleted(extid, etype, eid):
                     byetype.setdefault(etype, []).append(str(eid))
 
-            for etype, eids in byetype.iteritems():
+            for etype, eids in byetype.items():
                 if etype != 'CWUser':
                     continue
                 self.info('deactivate %s %s entities', len(eids), etype)
@@ -123,7 +123,7 @@
                 self.info('user %s reactivated', entity.login)
         mdate = attrs.get('modification_date')
         if not mdate or mdate > entity.modification_date:
-            attrs = dict( (k, v) for k, v in attrs.iteritems()
+            attrs = dict( (k, v) for k, v in attrs.items()
                           if v != getattr(entity, k))
             if attrs:
                 entity.cw_set(**attrs)
@@ -135,9 +135,9 @@
         if tdict is None:
             tdict = {}
         if etype == 'CWUser':
-            items = self.source.user_attrs.iteritems()
+            items = self.source.user_attrs.items()
         elif etype == 'CWGroup':
-            items = self.source.group_attrs.iteritems()
+            items = self.source.group_attrs.items()
         for sattr, tattr in items:
             if tattr not in self.non_attribute_keys:
                 try:
--- a/sobjects/notification.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/sobjects/notification.py	Tue Sep 15 16:15:03 2015 +0200
@@ -181,7 +181,7 @@
 
     def context(self, **kwargs):
         entity = self.cw_rset.get_entity(self.cw_row or 0, self.cw_col or 0)
-        for key, val in kwargs.iteritems():
+        for key, val in kwargs.items():
             if val and isinstance(val, unicode) and val.strip():
                kwargs[key] = self._cw._(val)
         kwargs.update({'user': self.user_data['login'],
--- a/sobjects/services.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/sobjects/services.py	Tue Sep 15 16:15:03 2015 +0200
@@ -94,9 +94,9 @@
         results = {}
         counters, ocounters, garbage = gc_info(lookupclasses,
                                                viewreferrersclasses=())
-        values = sorted(counters.iteritems(), key=lambda x: x[1], reverse=True)
+        values = sorted(counters.items(), key=lambda x: x[1], reverse=True)
         results['lookupclasses'] = values
-        values = sorted(ocounters.iteritems(), key=lambda x: x[1], reverse=True)[:nmax]
+        values = sorted(ocounters.items(), key=lambda x: x[1], reverse=True)[:nmax]
         results['referenced'] = values
         results['unreachable'] = garbage
         return results
--- a/spa2rql.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/spa2rql.py	Tue Sep 15 16:15:03 2015 +0200
@@ -146,7 +146,7 @@
 
     def finalize(self):
         """return corresponding rql query (string) / args (dict)"""
-        for varname, ptypes in self.possible_types.iteritems():
+        for varname, ptypes in self.possible_types.items():
             if len(ptypes) == 1:
                 self.restrictions.append('%s is %s' % (varname, iter(ptypes).next()))
         unions = []
--- a/uilib.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/uilib.py	Tue Sep 15 16:15:03 2015 +0200
@@ -505,7 +505,7 @@
             local_context = tcbk.tb_frame.f_locals
             html_info = []
             chars = 0
-            for name, value in local_context.iteritems():
+            for name, value in local_context.items():
                 value = xml_escape(repr(value))
                 info = u'<span class="name">%s</span>=%s, ' % (name, value)
                 line_length = len(name) + len(value)
--- a/utils.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/utils.py	Tue Sep 15 16:15:03 2015 +0200
@@ -555,9 +555,9 @@
 
 def _dict2js(d, predictable=False):
     if predictable:
-        it = sorted(d.iteritems())
+        it = sorted(d.items())
     else:
-        it = d.iteritems()
+        it = d.items()
     res = [key + ': ' + js_dumps(val, predictable)
            for key, val in it]
     return '{%s}' % ', '.join(res)
--- a/web/form.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/form.py	Tue Sep 15 16:15:03 2015 +0200
@@ -120,7 +120,7 @@
         extrakw = {}
         # search for navigation parameters and customization of existing
         # attributes; remaining stuff goes in extrakwargs
-        for key, val in kwargs.iteritems():
+        for key, val in kwargs.items():
             if key in controller.NAV_FORM_PARAMETERS:
                 hiddens.append( (key, val) )
             elif key == 'redirect_path':
--- a/web/htmlwidgets.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/htmlwidgets.py	Tue Sep 15 16:15:03 2015 +0200
@@ -341,7 +341,7 @@
         self.w(u'<thead>')
         self.w(u'<tr class="header">')
         for column in self.columns:
-            attrs = ('%s="%s"' % (name, value) for name, value in column.cell_attrs.iteritems())
+            attrs = ('%s="%s"' % (name, value) for name, value in column.cell_attrs.items())
             self.w(u'<th %s>%s</th>' % (' '.join(attrs), column.name or u''))
         self.w(u'</tr>')
         self.w(u'</thead><tbody>')
@@ -351,7 +351,7 @@
             for column, sortvalue in self.itercols(rowindex):
                 attrs = dict(column.cell_attrs)
                 attrs["cubicweb:sortvalue"] = sortvalue
-                attrs = ('%s="%s"' % (name, value) for name, value in attrs.iteritems())
+                attrs = ('%s="%s"' % (name, value) for name, value in attrs.items())
                 self.w(u'<td %s>' % (' '.join(attrs)))
                 for cellvid, colindex in column.cellrenderers:
                     self.model.render_cell(cellvid, rowindex, colindex, w=self.w)
--- a/web/http_headers.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/http_headers.py	Tue Sep 15 16:15:03 2015 +0200
@@ -502,7 +502,7 @@
         return "MimeType(%r, %r, %r)" % (self.mediaType, self.mediaSubtype, self.params)
 
     def __hash__(self):
-        return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.iteritems()))
+        return hash(self.mediaType)^hash(self.mediaSubtype)^hash(tuple(self.params.items()))
 
 ##### Specific header parsers.
 def parseAccept(field):
@@ -733,7 +733,7 @@
 
     out ="%s/%s"%(mimeType.mediaType, mimeType.mediaSubtype)
     if mimeType.params:
-        out+=';'+generateKeyValues(mimeType.params.iteritems())
+        out+=';'+generateKeyValues(mimeType.params.items())
 
     if q != 1.0:
         out+=(';q=%.3f' % (q,)).rstrip('0').rstrip('.')
@@ -837,7 +837,7 @@
 def generateContentType(mimeType):
     out = "%s/%s" % (mimeType.mediaType, mimeType.mediaSubtype)
     if mimeType.params:
-        out += ';' + generateKeyValues(mimeType.params.iteritems())
+        out += ';' + generateKeyValues(mimeType.params.items())
     return out
 
 def generateIfRange(dateOrETag):
@@ -858,7 +858,7 @@
 
         try:
             l = []
-            for k, v in dict(challenge).iteritems():
+            for k, v in dict(challenge).items():
                 l.append("%s=%s" % (k, quoteString(v)))
 
             _generated.append("%s %s" % (scheme, ", ".join(l)))
@@ -1330,10 +1330,10 @@
         self._headers = {}
         self.handler = handler
         if headers is not None:
-            for key, value in headers.iteritems():
+            for key, value in headers.items():
                 self.setHeader(key, value)
         if rawHeaders is not None:
-            for key, value in rawHeaders.iteritems():
+            for key, value in rawHeaders.items():
                 self.setRawHeaders(key, value)
 
     def _setRawHeaders(self, headers):
@@ -1462,7 +1462,7 @@
         """Return an iterator of key, value pairs of all headers
         contained in this object, as strings. The keys are capitalized
         in canonical capitalization."""
-        for k, v in self._raw_headers.iteritems():
+        for k, v in self._raw_headers.items():
             if v is _RecalcNeeded:
                 v = self._toRaw(k)
             yield self.canonicalNameCaps(k), v
@@ -1484,7 +1484,7 @@
    is strictly an error, but we're nice.).
    """
 
-iteritems = lambda x: x.iteritems()
+iteritems = lambda x: x.items()
 
 
 parser_general_headers = {
--- a/web/propertysheet.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/propertysheet.py	Tue Sep 15 16:15:03 2015 +0200
@@ -72,7 +72,7 @@
         for rid, (adirectory, rdirectory, mtime) in self._cache.items():
             if os.stat(osp.join(rdirectory, rid)).st_mtime > mtime:
                 del self._cache[rid]
-        for fpath, mtime in self._propfile_mtime.iteritems():
+        for fpath, mtime in self._propfile_mtime.items():
             if os.stat(fpath).st_mtime > mtime:
                 return True
         return False
--- a/web/request.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/request.py	Tue Sep 15 16:15:03 2015 +0200
@@ -138,7 +138,7 @@
         #: received headers
         self._headers_in = Headers()
         if headers is not None:
-            for k, v in headers.iteritems():
+            for k, v in headers.items():
                 self._headers_in.addRawHeader(k, v)
         #: form parameters
         self.setup_params(form)
@@ -251,7 +251,7 @@
         if params is None:
             return
         encoding = self.encoding
-        for param, val in params.iteritems():
+        for param, val in params.items():
             if isinstance(val, (tuple, list)):
                 val = [unicode(x, encoding) for x in val]
                 if len(val) == 1:
@@ -801,7 +801,7 @@
     def header_accept_language(self):
         """returns an ordered list of preferred languages"""
         acceptedlangs = self.get_header('Accept-Language', raw=False) or {}
-        for lang, _ in sorted(acceptedlangs.iteritems(), key=lambda x: x[1],
+        for lang, _ in sorted(acceptedlangs.items(), key=lambda x: x[1],
                               reverse=True):
             lang = lang.split('-')[0]
             yield lang
--- a/web/schemaviewer.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/schemaviewer.py	Tue Sep 15 16:15:03 2015 +0200
@@ -220,7 +220,7 @@
                     elif prop == 'constraints':
                         val = ', '.join([c.expression for c in val])
                     elif isinstance(val, dict):
-                        for key, value in val.iteritems():
+                        for key, value in val.items():
                             if isinstance(value, (list, tuple)):
                                 val[key] = ', '.join(sorted( str(v) for v in value))
                         val = str(val)
--- a/web/test/data/views.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/test/data/views.py	Tue Sep 15 16:15:03 2015 +0200
@@ -42,7 +42,7 @@
     """
     try:
         result_dict = {}
-        for key, value in self._cw.form.iteritems():
+        for key, value in self._cw.form.items():
             result_dict[key] = _recursive_replace_stream_by_content(value)
         return result_dict
     except Exception as ex:
--- a/web/views/ajaxcontroller.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/ajaxcontroller.py	Tue Sep 15 16:15:03 2015 +0200
@@ -84,7 +84,7 @@
     if extraargs is None:
         return {}
     # we receive unicode keys which is not supported by the **syntax
-    return dict((str(key), value) for key, value in extraargs.iteritems())
+    return dict((str(key), value) for key, value in extraargs.items())
 
 
 class AjaxController(Controller):
--- a/web/views/cwproperties.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/cwproperties.py	Tue Sep 15 16:15:03 2015 +0200
@@ -119,10 +119,10 @@
         _ = self._cw._
         self.w(u'<h1>%s</h1>\n' % _(self.title))
         for label, group, form in sorted((_(g), g, f)
-                                         for g, f in mainforms.iteritems()):
+                                         for g, f in mainforms.items()):
             self.wrap_main_form(group, label, form)
         for label, group, objects in sorted((_(g), g, o)
-                                            for g, o in groupedforms.iteritems()):
+                                            for g, o in groupedforms.items()):
             self.wrap_grouped_form(group, label, objects)
 
     @property
@@ -224,7 +224,7 @@
           (make_togglable_link('fieldset_' + group, label)))
         self.w(u'<div id="fieldset_%s" %s>' % (group, status))
         sorted_objects = sorted((self._cw.__('%s_%s' % (group, o)), o, f)
-                                for o, f in objects.iteritems())
+                                for o, f in objects.items())
         for label, oid, form in sorted_objects:
             self.wrap_object_form(group, oid, label, form)
         self.w(u'</div>')
--- a/web/views/cwsources.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/cwsources.py	Tue Sep 15 16:15:03 2015 +0200
@@ -189,7 +189,7 @@
                     warning(_('relation %(rtype)s with %(etype)s as %(role)s is '
                               'supported but no target type supported') %
                             {'rtype': rschema, 'role': role, 'etype': etype})
-        for rtype, rdefs in self.srelations.iteritems():
+        for rtype, rdefs in self.srelations.items():
             if rdefs is None:
                 rschema = self.schema[rtype]
                 for subj, obj in rschema.rdefs:
--- a/web/views/debug.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/debug.py	Tue Sep 15 16:15:03 2015 +0200
@@ -170,7 +170,7 @@
                 continue
             self.w(u'<h3 id="%s">%s</h3>' % (key, key))
             if self._cw.vreg[key]:
-                values = sorted(self._cw.vreg[key].iteritems())
+                values = sorted(self._cw.vreg[key].items())
                 self.wview('pyvaltable', pyvalue=[(key, xml_escape(repr(val)))
                                                   for key, val in values])
             else:
--- a/web/views/editcontroller.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/editcontroller.py	Tue Sep 15 16:15:03 2015 +0200
@@ -143,7 +143,7 @@
         values_by_eid = dict((eid, req.extract_entity_params(eid, minparams=2))
                              for eid in req.edited_eids())
         # iterate over all the edited entities
-        for eid, values in values_by_eid.iteritems():
+        for eid, values in values_by_eid.items():
             # add eid to the dependency graph
             graph.setdefault(eid, set())
             # search entity's edited fields for mandatory inlined relation
@@ -355,7 +355,7 @@
         for eid, etype in eidtypes:
             entity = self._cw.entity_from_eid(eid, etype)
             path, params = entity.cw_adapt_to('IEditControl').after_deletion_path()
-            redirect_info.add( (path, tuple(params.iteritems())) )
+            redirect_info.add( (path, tuple(params.items())) )
             entity.cw_delete()
         if len(redirect_info) > 1:
             # In the face of ambiguity, refuse the temptation to guess.
--- a/web/views/facets.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/facets.py	Tue Sep 15 16:15:03 2015 +0200
@@ -168,7 +168,7 @@
                  DeprecationWarning, stacklevel=2)
         else:
             vidargs = {}
-        vidargs = dict((k, v) for k, v in vidargs.iteritems() if v)
+        vidargs = dict((k, v) for k, v in vidargs.items() if v)
         facetargs = xml_escape(json_dumps([divid, vid, paginate, vidargs]))
         w(u'<form id="%sForm" class="%s" method="post" action="" '
           'cubicweb:facetargs="%s" >' % (divid, cssclass, facetargs))
--- a/web/views/forms.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/forms.py	Tue Sep 15 16:15:03 2015 +0200
@@ -377,7 +377,7 @@
 
         Warning: this method must be called only when all form fields are setup
         """
-        for (rtype, role), eids in self.linked_to.iteritems():
+        for (rtype, role), eids in self.linked_to.items():
             # if the relation is already setup by a form field, do not add it
             # in a __linkto hidden to avoid setting it twice in the controller
             try:
--- a/web/views/management.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/management.py	Tue Sep 15 16:15:03 2015 +0200
@@ -169,7 +169,7 @@
     binfo += u'\n\n:URL: %s\n' % req.url()
     if not '__bugreporting' in req.form:
         binfo += u'\n:form params:\n'
-        binfo += u'\n'.join(u'  * %s = %s' % (k, v) for k, v in req.form.iteritems())
+        binfo += u'\n'.join(u'  * %s = %s' % (k, v) for k, v in req.form.items())
     binfo += u'\n\n:CubicWeb version: %s\n'  % (eversion,)
     for pkg, pkgversion in cubes:
         binfo += u":Cube %s version: %s\n" % (pkg, pkgversion)
--- a/web/views/reledit.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/reledit.py	Tue Sep 15 16:15:03 2015 +0200
@@ -292,7 +292,7 @@
             cwtarget='eformframe', cssclass='releditForm',
             **formargs)
         # pass reledit arguments
-        for pname, pvalue in event_args.iteritems():
+        for pname, pvalue in event_args.items():
             form.add_hidden('__reledit|' + pname, pvalue)
         # handle buttons
         if form.form_buttons: # edition, delete
--- a/web/views/schema.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/schema.py	Tue Sep 15 16:15:03 2015 +0200
@@ -131,7 +131,7 @@
         _ = self._cw._
         w(u'<div style="margin: 0px 1.5em">')
         tmpl = u'<strong>%s</strong> %s <strong>%s</strong>'
-        for perm, rdefs in perms.iteritems():
+        for perm, rdefs in perms.items():
             w(u'<div>%s</div>' % u', '.join(
                 tmpl % (_(s.type), _(rschema.type), _(o.type)) for s, o in rdefs))
             # accessing rdef from previous loop by design: only used to get
--- a/web/views/tableview.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/tableview.py	Tue Sep 15 16:15:03 2015 +0200
@@ -1146,7 +1146,7 @@
             else:
                 column.append_renderer(subvid or 'incontext', colindex)
             if cellattrs and colindex in cellattrs:
-                for name, value in cellattrs[colindex].iteritems():
+                for name, value in cellattrs[colindex].items():
                     column.add_attr(name, value)
             # add column
             columns.append(column)
--- a/web/views/treeview.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/treeview.py	Tue Sep 15 16:15:03 2015 +0200
@@ -140,7 +140,7 @@
             ajaxargs = json.loads(form.pop('morekwargs'))
             # got unicode & python keywords must be strings
             morekwargs.update(dict((str(k), v)
-                                   for k, v in ajaxargs.iteritems()))
+                                   for k, v in ajaxargs.items()))
         toplevel_thru_ajax = form.pop('treeview_top', False) or initial_thru_ajax
         toplevel = toplevel_thru_ajax or (initial_load and not form.get('fname'))
         return subvid, treeid, toplevel_thru_ajax, toplevel
--- a/web/views/uicfg.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/uicfg.py	Tue Sep 15 16:15:03 2015 +0200
@@ -269,7 +269,7 @@
         if not 'inlined' in sectdict:
             sectdict['inlined'] = sectdict['main']
         # recompute formsections and set it to avoid recomputing
-        for formtype, section in sectdict.iteritems():
+        for formtype, section in sectdict.items():
             formsections.add('%s_%s' % (formtype, section))
 
     def tag_relation(self, key, formtype, section):
@@ -304,7 +304,7 @@
                 rtags[section] = value
         cls = self.tag_container_cls
         rtags = cls('_'.join([section,value])
-                    for section,value in rtags.iteritems())
+                    for section,value in rtags.items())
         return rtags
 
     def get(self, *key):
--- a/web/views/undohistory.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/web/views/undohistory.py	Tue Sep 15 16:15:03 2015 +0200
@@ -46,7 +46,7 @@
 
     def __str__(self):
         return '%s(%s)' % (self.__class__.__name__, ', '.join(
-            "%s=%v" % (str(k), str(v)) for k, v in kwargs.iteritems() ))
+            "%s=%v" % (str(k), str(v)) for k, v in kwargs.items() ))
 
     def __call__(self, cls, req, tx_action=None, **kwargs):
         # tx_action is expected to be a transaction.AbstractAction
--- a/wsgi/request.py	Tue Sep 08 18:04:57 2015 +0200
+++ b/wsgi/request.py	Tue Sep 15 16:15:03 2015 +0200
@@ -81,7 +81,7 @@
                                                   headers= headers_in)
         self.content = environ['wsgi.input']
         if files is not None:
-            for key, part in files.iteritems():
+            for key, part in files.items():
                 self.form[key] = (part.filename, part.file)
 
     def __repr__(self):
@@ -148,7 +148,7 @@
         if params is None:
             return
         encoding = self.encoding
-        for param, val in params.iteritems():
+        for param, val in params.items():
             if isinstance(val, (tuple, list)):
                 val = [
                     unicode(x, encoding) if isinstance(x, str) else x