修订版 | 2e88a215ddd1d695dfa975d195be3524ea352097 (tree) |
---|---|
时间 | 2019-09-07 06:57:45 |
作者 | H. Turgut Uyar <uyar@teki...> |
Committer | H. Turgut Uyar |
Run 2to3 on u1db sources
@@ -65,7 +65,7 @@ | ||
65 | 65 | # Ask the database for currently existing indexes. |
66 | 66 | db_indexes = dict(self.db.list_indexes()) |
67 | 67 | # Loop through the indexes we expect to find. |
68 | - for name, expression in INDEXES.items(): | |
68 | + for name, expression in list(INDEXES.items()): | |
69 | 69 | if name not in db_indexes: |
70 | 70 | # The index does not yet exist. |
71 | 71 | self.db.create_index(name, *expression) |
@@ -101,7 +101,7 @@ | ||
101 | 101 | # Get the ids of all documents with this tag. |
102 | 102 | ids = [ |
103 | 103 | doc.doc_id for doc in self.db.get_from_index(TAGS_INDEX, tag)] |
104 | - for key in results.keys(): | |
104 | + for key in list(results.keys()): | |
105 | 105 | if key not in ids: |
106 | 106 | # Remove the document from result, because it does not have |
107 | 107 | # this particular tag. |
@@ -110,7 +110,7 @@ | ||
110 | 110 | # If results is empty, we're done: there are no |
111 | 111 | # documents with all tags. |
112 | 112 | return [] |
113 | - return results.values() | |
113 | + return list(results.values()) | |
114 | 114 | |
115 | 115 | def get_task(self, doc_id): |
116 | 116 | """Get a task from the database.""" |
@@ -17,7 +17,7 @@ | ||
17 | 17 | """Tests for cosas example application.""" |
18 | 18 | |
19 | 19 | from testtools import TestCase |
20 | -from cosas import ( | |
20 | +from .cosas import ( | |
21 | 21 | Task, TodoStore, INDEXES, TAGS_INDEX, get_empty_task, extract_tags) |
22 | 22 | from u1db.backends import inmemory |
23 | 23 |
@@ -22,7 +22,7 @@ | ||
22 | 22 | import sys |
23 | 23 | from PyQt4 import QtGui, QtCore, uic |
24 | 24 | |
25 | -from cosas import TodoStore, get_database, extract_tags | |
25 | +from .cosas import TodoStore, get_database, extract_tags | |
26 | 26 | from u1db.errors import DatabaseDoesNotExist |
27 | 27 | from u1db.remote.http_database import HTTPDatabase |
28 | 28 | from ubuntuone.platform.credentials import CredentialsManagementTool |
@@ -63,7 +63,7 @@ | ||
63 | 63 | self.task.done = False |
64 | 64 | self.store.save_task(self.task) |
65 | 65 | if role == QtCore.Qt.EditRole: |
66 | - text = unicode(value.toString(), 'utf-8') | |
66 | + text = str(value.toString(), 'utf-8') | |
67 | 67 | if not text: |
68 | 68 | # There was no text in the edit field so do nothing. |
69 | 69 | return |
@@ -186,7 +186,7 @@ | ||
186 | 186 | if value: |
187 | 187 | self.other.sync_target = U1_URL |
188 | 188 | else: |
189 | - text = unicode(self.url_edit.text(), 'utf-8') | |
189 | + text = str(self.url_edit.text(), 'utf-8') | |
190 | 190 | if not text: |
191 | 191 | # There was no text in the edit field so do nothing. |
192 | 192 | self.other.sync_target = None |
@@ -195,7 +195,7 @@ | ||
195 | 195 | |
196 | 196 | def toggle_url(self, value): |
197 | 197 | if value: |
198 | - text = unicode(self.url_edit.text(), 'utf-8') | |
198 | + text = str(self.url_edit.text(), 'utf-8') | |
199 | 199 | if not text: |
200 | 200 | # There was no text in the edit field so do nothing. |
201 | 201 | self.other.sync_target = None |
@@ -214,7 +214,7 @@ | ||
214 | 214 | def url_changed(self): |
215 | 215 | if not self.url_radio.isChecked(): |
216 | 216 | return |
217 | - text = unicode(self.url_edit.text(), 'utf-8') | |
217 | + text = str(self.url_edit.text(), 'utf-8') | |
218 | 218 | if not text: |
219 | 219 | # There was no text in the edit field so do nothing. |
220 | 220 | self.other.sync_target = None |
@@ -348,7 +348,7 @@ | ||
348 | 348 | |
349 | 349 | def update(self): |
350 | 350 | """Either add a new task or update an existing one.""" |
351 | - text = unicode(self.title_edit.text(), 'utf-8') | |
351 | + text = str(self.title_edit.text(), 'utf-8') | |
352 | 352 | if not text: |
353 | 353 | # There was no text in the edit field so do nothing. |
354 | 354 | return |
@@ -68,7 +68,7 @@ | ||
68 | 68 | try: |
69 | 69 | from Cython.Distutils import build_ext |
70 | 70 | except ImportError: |
71 | - print "Unable to import Cython, to test the C implementation" | |
71 | + print("Unable to import Cython, to test the C implementation") | |
72 | 72 | else: |
73 | 73 | kwargs["cmdclass"] = {"build_ext": build_ext} |
74 | 74 | extra_libs = [] |
@@ -121,7 +121,7 @@ | ||
121 | 121 | return new_rev |
122 | 122 | |
123 | 123 | def _put_and_update_indexes(self, old_doc, doc): |
124 | - for index in self._indexes.itervalues(): | |
124 | + for index in self._indexes.values(): | |
125 | 125 | if old_doc is not None and not old_doc.is_tombstone(): |
126 | 126 | index.remove_json(old_doc.doc_id, old_doc.get_json()) |
127 | 127 | if not doc.is_tombstone(): |
@@ -155,7 +155,7 @@ | ||
155 | 155 | """Return all documents in the database.""" |
156 | 156 | generation = self._get_generation() |
157 | 157 | results = [] |
158 | - for doc_id, (doc_rev, content) in self._docs.items(): | |
158 | + for doc_id, (doc_rev, content) in list(self._docs.items()): | |
159 | 159 | if content is None and not include_deleted: |
160 | 160 | continue |
161 | 161 | doc = self._factory(doc_id, doc_rev, content) |
@@ -234,7 +234,7 @@ | ||
234 | 234 | return |
235 | 235 | raise errors.IndexNameTakenError |
236 | 236 | index = InMemoryIndex(index_name, list(index_expressions)) |
237 | - for doc_id, (doc_rev, doc) in self._docs.iteritems(): | |
237 | + for doc_id, (doc_rev, doc) in self._docs.items(): | |
238 | 238 | if doc is not None: |
239 | 239 | index.add_json(doc_id, doc) |
240 | 240 | self._indexes[index_name] = index |
@@ -244,7 +244,7 @@ | ||
244 | 244 | |
245 | 245 | def list_indexes(self): |
246 | 246 | definitions = [] |
247 | - for idx in self._indexes.itervalues(): | |
247 | + for idx in self._indexes.values(): | |
248 | 248 | definitions.append((idx._name, idx._definition)) |
249 | 249 | return definitions |
250 | 250 |
@@ -266,9 +266,9 @@ | ||
266 | 266 | index = self._indexes[index_name] |
267 | 267 | except KeyError: |
268 | 268 | raise errors.IndexDoesNotExist |
269 | - if isinstance(start_value, basestring): | |
269 | + if isinstance(start_value, str): | |
270 | 270 | start_value = (start_value,) |
271 | - if isinstance(end_value, basestring): | |
271 | + if isinstance(end_value, str): | |
272 | 272 | end_value = (end_value,) |
273 | 273 | doc_ids = index.lookup_range(start_value, end_value) |
274 | 274 | result = [] |
@@ -281,7 +281,7 @@ | ||
281 | 281 | index = self._indexes[index_name] |
282 | 282 | except KeyError: |
283 | 283 | raise errors.IndexDoesNotExist |
284 | - keys = index.keys() | |
284 | + keys = list(index.keys()) | |
285 | 285 | # XXX inefficiency warning |
286 | 286 | return list(set([tuple(key.split('\x01')) for key in keys])) |
287 | 287 |
@@ -415,7 +415,7 @@ | ||
415 | 415 | exact = False |
416 | 416 | end_values = get_prefix(end_values) |
417 | 417 | found = [] |
418 | - for key, doc_ids in sorted(self._values.iteritems()): | |
418 | + for key, doc_ids in sorted(self._values.items()): | |
419 | 419 | if start_values and start_values > key: |
420 | 420 | continue |
421 | 421 | if end_values and end_values < key: |
@@ -429,7 +429,7 @@ | ||
429 | 429 | |
430 | 430 | def keys(self): |
431 | 431 | """Find the indexed keys.""" |
432 | - return self._values.keys() | |
432 | + return list(self._values.keys()) | |
433 | 433 | |
434 | 434 | def _lookup_prefix(self, value): |
435 | 435 | """Find docs that match the prefix string in values.""" |
@@ -437,7 +437,7 @@ | ||
437 | 437 | # some sort of sorted list would work, but a plain dict doesn't. |
438 | 438 | key_prefix = get_prefix(value) |
439 | 439 | all_doc_ids = [] |
440 | - for key, doc_ids in sorted(self._values.iteritems()): | |
440 | + for key, doc_ids in sorted(self._values.items()): | |
441 | 441 | if key.startswith(key_prefix): |
442 | 442 | all_doc_ids.extend(doc_ids) |
443 | 443 | return all_doc_ids |
@@ -61,7 +61,7 @@ | ||
61 | 61 | try: |
62 | 62 | c.execute("SELECT value FROM u1db_config" |
63 | 63 | " WHERE name = 'index_storage'") |
64 | - except dbapi2.OperationalError, e: | |
64 | + except dbapi2.OperationalError as e: | |
65 | 65 | # The table does not exist yet |
66 | 66 | return None, e |
67 | 67 | else: |
@@ -390,14 +390,14 @@ | ||
390 | 390 | """ |
391 | 391 | # TODO: Handle lists |
392 | 392 | values = [] |
393 | - for field_name, value in raw_doc.iteritems(): | |
393 | + for field_name, value in raw_doc.items(): | |
394 | 394 | if value is None and not save_none: |
395 | 395 | continue |
396 | 396 | if base_field: |
397 | 397 | full_name = base_field + '.' + field_name |
398 | 398 | else: |
399 | 399 | full_name = field_name |
400 | - if value is None or isinstance(value, (int, float, basestring)): | |
400 | + if value is None or isinstance(value, (int, float, str)): | |
401 | 401 | values.append((doc_id, full_name, value, len(values))) |
402 | 402 | else: |
403 | 403 | subvalues = self._expand_to_fields(doc_id, full_name, value, |
@@ -671,7 +671,7 @@ | ||
671 | 671 | c = self._db_handle.cursor() |
672 | 672 | try: |
673 | 673 | c.execute(statement, tuple(args)) |
674 | - except dbapi2.OperationalError, e: | |
674 | + except dbapi2.OperationalError as e: | |
675 | 675 | raise dbapi2.OperationalError(str(e) + |
676 | 676 | '\nstatement: %s\nargs: %s\n' % (statement, args)) |
677 | 677 | res = c.fetchall() |
@@ -703,7 +703,7 @@ | ||
703 | 703 | args = [] |
704 | 704 | where = [] |
705 | 705 | if start_value: |
706 | - if isinstance(start_value, basestring): | |
706 | + if isinstance(start_value, str): | |
707 | 707 | start_value = (start_value,) |
708 | 708 | if len(start_value) != len(definition): |
709 | 709 | raise errors.InvalidValueForIndex() |
@@ -728,7 +728,7 @@ | ||
728 | 728 | where.append(range_where_lower[idx]) |
729 | 729 | args.append(value) |
730 | 730 | if end_value: |
731 | - if isinstance(end_value, basestring): | |
731 | + if isinstance(end_value, str): | |
732 | 732 | end_value = (end_value,) |
733 | 733 | if len(end_value) != len(definition): |
734 | 734 | raise errors.InvalidValueForIndex() |
@@ -770,7 +770,7 @@ | ||
770 | 770 | c = self._db_handle.cursor() |
771 | 771 | try: |
772 | 772 | c.execute(statement, tuple(args)) |
773 | - except dbapi2.OperationalError, e: | |
773 | + except dbapi2.OperationalError as e: | |
774 | 774 | raise dbapi2.OperationalError(str(e) + |
775 | 775 | '\nstatement: %s\nargs: %s\n' % (statement, args)) |
776 | 776 | res = c.fetchall() |
@@ -799,7 +799,7 @@ | ||
799 | 799 | value_fields)) |
800 | 800 | try: |
801 | 801 | c.execute(statement, tuple(definition)) |
802 | - except dbapi2.OperationalError, e: | |
802 | + except dbapi2.OperationalError as e: | |
803 | 803 | raise dbapi2.OperationalError(str(e) + |
804 | 804 | '\nstatement: %s\nargs: %s\n' % (statement, tuple(definition))) |
805 | 805 | return c.fetchall() |
@@ -893,7 +893,7 @@ | ||
893 | 893 | stored_def = self._get_index_definition(index_name) |
894 | 894 | if stored_def == [x[-1] for x in definition]: |
895 | 895 | return |
896 | - raise errors.IndexNameTakenError, e, sys.exc_info()[2] | |
896 | + raise errors.IndexNameTakenError(e).with_traceback(sys.exc_info()[2]) | |
897 | 897 | new_fields = set( |
898 | 898 | [f for f in index_expressions if f not in cur_fields]) |
899 | 899 | if new_fields: |
@@ -35,7 +35,7 @@ | ||
35 | 35 | """Create an argparse.ArgumentParser""" |
36 | 36 | parser = argparse.ArgumentParser(description=self.description) |
37 | 37 | subs = parser.add_subparsers(title='commands') |
38 | - for name, cmd in sorted(self.commands.iteritems()): | |
38 | + for name, cmd in sorted(self.commands.items()): | |
39 | 39 | sub = subs.add_parser(name, help=cmd.__doc__) |
40 | 40 | sub.set_defaults(subcommand=cmd) |
41 | 41 | cmd._populate_subparser(sub) |
@@ -178,7 +178,7 @@ | ||
178 | 178 | |
179 | 179 | # mapping wire (transimission) descriptions/tags for errors to the exceptions |
180 | 180 | wire_description_to_exc = dict( |
181 | - (x.wire_description, x) for x in globals().values() | |
181 | + (x.wire_description, x) for x in list(globals().values()) | |
182 | 182 | if getattr(x, 'wire_description', None) not in (None, "error") |
183 | 183 | ) |
184 | 184 | wire_description_to_exc["error"] = U1DBError |
@@ -141,7 +141,7 @@ | ||
141 | 141 | name = "lower" |
142 | 142 | |
143 | 143 | def _can_transform(self, val): |
144 | - return isinstance(val, basestring) | |
144 | + return isinstance(val, str) | |
145 | 145 | |
146 | 146 | def transform(self, values): |
147 | 147 | if not values: |
@@ -202,7 +202,7 @@ | ||
202 | 202 | name = "split_words" |
203 | 203 | |
204 | 204 | def _can_transform(self, val): |
205 | - return isinstance(val, basestring) | |
205 | + return isinstance(val, str) | |
206 | 206 | |
207 | 207 | def transform(self, values): |
208 | 208 | if not values: |
@@ -322,7 +322,7 @@ | ||
322 | 322 | else: |
323 | 323 | try: |
324 | 324 | inner = arg_type(arg) |
325 | - except ValueError, e: | |
325 | + except ValueError as e: | |
326 | 326 | raise errors.IndexDefinitionParseError( |
327 | 327 | "Invalid value %r for argument type %r " |
328 | 328 | "(%r)." % (arg, arg_type, e)) |
@@ -14,7 +14,7 @@ | ||
14 | 14 | # You should have received a copy of the GNU Lesser General Public License |
15 | 15 | # along with u1db. If not, see <http://www.gnu.org/licenses/>. |
16 | 16 | """U1DB Basic Auth authorisation WSGI middleware.""" |
17 | -import httplib | |
17 | +import http.client | |
18 | 18 | try: |
19 | 19 | import simplejson as json |
20 | 20 | except ImportError: |
@@ -34,7 +34,7 @@ | ||
34 | 34 | self.prefix = prefix |
35 | 35 | |
36 | 36 | def _error(self, start_response, status, description, message=None): |
37 | - start_response("%d %s" % (status, httplib.responses[status]), | |
37 | + start_response("%d %s" % (status, http.client.responses[status]), | |
38 | 38 | [('content-type', 'application/json')]) |
39 | 39 | err = {"error": description} |
40 | 40 | if message: |
@@ -17,14 +17,14 @@ | ||
17 | 17 | """HTTP Application exposing U1DB.""" |
18 | 18 | |
19 | 19 | import functools |
20 | -import httplib | |
20 | +import http.client | |
21 | 21 | import inspect |
22 | 22 | try: |
23 | 23 | import simplejson as json |
24 | 24 | except ImportError: |
25 | 25 | import json # noqa |
26 | 26 | import sys |
27 | -import urlparse | |
27 | +import urllib.parse | |
28 | 28 | |
29 | 29 | import routes.mapper |
30 | 30 |
@@ -140,7 +140,7 @@ | ||
140 | 140 | """ |
141 | 141 | content_as_args = control.pop('content_as_args', False) |
142 | 142 | no_query = control.pop('no_query', False) |
143 | - conversions = control.items() | |
143 | + conversions = list(control.items()) | |
144 | 144 | |
145 | 145 | def wrap(f): |
146 | 146 | argspec = inspect.getargspec(f) |
@@ -431,11 +431,11 @@ | ||
431 | 431 | if self._started: |
432 | 432 | return |
433 | 433 | self._started = True |
434 | - status_text = httplib.responses[status] | |
434 | + status_text = http.client.responses[status] | |
435 | 435 | self._write = self._start_response('%d %s' % (status, status_text), |
436 | 436 | [('content-type', self.content_type), |
437 | 437 | ('cache-control', 'no-cache')] + |
438 | - headers.items()) | |
438 | + list(headers.items())) | |
439 | 439 | # xxx version in headers |
440 | 440 | if obj_dic is not None: |
441 | 441 | self._no_initial_obj = False |
@@ -500,7 +500,7 @@ | ||
500 | 500 | raise BadRequest() |
501 | 501 | |
502 | 502 | def __call__(self): |
503 | - args = urlparse.parse_qsl(self.environ['QUERY_STRING'], | |
503 | + args = urllib.parse.parse_qsl(self.environ['QUERY_STRING'], | |
504 | 504 | strict_parsing=False) |
505 | 505 | try: |
506 | 506 | args = dict( |
@@ -527,7 +527,7 @@ | ||
527 | 527 | content_type = self.environ.get('CONTENT_TYPE') |
528 | 528 | if content_type == 'application/json': |
529 | 529 | meth = self._lookup(method) |
530 | - body = reader.read_chunk(sys.maxint) | |
530 | + body = reader.read_chunk(sys.maxsize) | |
531 | 531 | return meth(args, body) |
532 | 532 | elif content_type == 'application/x-u1db-sync-stream': |
533 | 533 | meth_args = self._lookup('%s_args' % method) |
@@ -579,7 +579,7 @@ | ||
579 | 579 | try: |
580 | 580 | resource = self._lookup_resource(environ, responder) |
581 | 581 | HTTPInvocationByMethodWithBody(resource, environ, self)() |
582 | - except errors.U1DBError, e: | |
582 | + except errors.U1DBError as e: | |
583 | 583 | self.request_u1db_error(environ, e) |
584 | 584 | status = http_errors.wire_description_to_status.get( |
585 | 585 | e.wire_description, 500) |
@@ -16,7 +16,7 @@ | ||
16 | 16 | |
17 | 17 | """Base class to make requests to a remote HTTP server.""" |
18 | 18 | |
19 | -import httplib | |
19 | +import http.client | |
20 | 20 | from oauth import oauth |
21 | 21 | try: |
22 | 22 | import simplejson as json |
@@ -25,8 +25,8 @@ | ||
25 | 25 | import socket |
26 | 26 | import ssl |
27 | 27 | import sys |
28 | -import urlparse | |
29 | -import urllib | |
28 | +import urllib.parse | |
29 | +import urllib.request, urllib.parse, urllib.error | |
30 | 30 | |
31 | 31 | from time import sleep |
32 | 32 | from u1db import ( |
@@ -53,10 +53,10 @@ | ||
53 | 53 | value = 'true' |
54 | 54 | else: |
55 | 55 | value = 'false' |
56 | - return unicode(value).encode('utf-8') | |
56 | + return str(value).encode('utf-8') | |
57 | 57 | |
58 | 58 | |
59 | -class _VerifiedHTTPSConnection(httplib.HTTPSConnection): | |
59 | +class _VerifiedHTTPSConnection(http.client.HTTPSConnection): | |
60 | 60 | """HTTPSConnection verifying server side certificates.""" |
61 | 61 | # derived from httplib.py |
62 | 62 |
@@ -99,13 +99,13 @@ | ||
99 | 99 | _delays = (1, 1, 2, 4, 0) |
100 | 100 | |
101 | 101 | def __init__(self, url, creds=None): |
102 | - self._url = urlparse.urlsplit(url) | |
102 | + self._url = urllib.parse.urlsplit(url) | |
103 | 103 | self._conn = None |
104 | 104 | self._creds = {} |
105 | 105 | if creds is not None: |
106 | 106 | if len(creds) != 1: |
107 | 107 | raise errors.UnknownAuthMethod() |
108 | - auth_meth, credentials = creds.items()[0] | |
108 | + auth_meth, credentials = list(creds.items())[0] | |
109 | 109 | try: |
110 | 110 | set_creds = getattr(self, 'set_%s_credentials' % auth_meth) |
111 | 111 | except AttributeError: |
@@ -124,7 +124,7 @@ | ||
124 | 124 | if self._url.scheme == 'https': |
125 | 125 | connClass = _VerifiedHTTPSConnection |
126 | 126 | else: |
127 | - connClass = httplib.HTTPConnection | |
127 | + connClass = http.client.HTTPConnection | |
128 | 128 | self._conn = connClass(self._url.hostname, self._url.port) |
129 | 129 | |
130 | 130 | def close(self): |
@@ -173,7 +173,7 @@ | ||
173 | 173 | oauth_req.sign_request( |
174 | 174 | self.oauth_signature_method, consumer, token) |
175 | 175 | # Authorization: OAuth ... |
176 | - return oauth_req.to_header().items() | |
176 | + return list(oauth_req.to_header().items()) | |
177 | 177 | else: |
178 | 178 | return [] |
179 | 179 |
@@ -185,17 +185,17 @@ | ||
185 | 185 | if not url_query.endswith('/'): |
186 | 186 | url_query += '/' |
187 | 187 | unquoted_url = url_query |
188 | - url_query += '/'.join(urllib.quote(part, safe='') | |
188 | + url_query += '/'.join(urllib.parse.quote(part, safe='') | |
189 | 189 | for part in url_parts) |
190 | 190 | # oauth performs its own quoting |
191 | 191 | unquoted_url += '/'.join(url_parts) |
192 | 192 | encoded_params = {} |
193 | 193 | if params: |
194 | - for key, value in params.items(): | |
195 | - key = unicode(key).encode('utf-8') | |
194 | + for key, value in list(params.items()): | |
195 | + key = str(key).encode('utf-8') | |
196 | 196 | encoded_params[key] = _encode_query_parameter(value) |
197 | - url_query += ('?' + urllib.urlencode(encoded_params)) | |
198 | - if body is not None and not isinstance(body, basestring): | |
197 | + url_query += ('?' + urllib.parse.urlencode(encoded_params)) | |
198 | + if body is not None and not isinstance(body, str): | |
199 | 199 | body = json.dumps(body) |
200 | 200 | content_type = 'application/json' |
201 | 201 | headers = {} |
@@ -207,7 +207,7 @@ | ||
207 | 207 | try: |
208 | 208 | self._conn.request(method, url_query, body, headers) |
209 | 209 | return self._response() |
210 | - except errors.Unavailable, e: | |
210 | + except errors.Unavailable as e: | |
211 | 211 | sleep(delay) |
212 | 212 | raise e |
213 | 213 |
@@ -92,7 +92,7 @@ | ||
92 | 92 | 'GET', ['doc', doc_id], {"include_deleted": include_deleted}) |
93 | 93 | except errors.DocumentDoesNotExist: |
94 | 94 | return None |
95 | - except errors.HTTPError, e: | |
95 | + except errors.HTTPError as e: | |
96 | 96 | if (e.status == DOCUMENT_DELETED_STATUS and |
97 | 97 | 'x-u1db-rev' in e.headers): |
98 | 98 | res = None |
@@ -14,13 +14,13 @@ | ||
14 | 14 | # You should have received a copy of the GNU Lesser General Public License |
15 | 15 | # along with u1db. If not, see <http://www.gnu.org/licenses/>. |
16 | 16 | """U1DB OAuth authorisation WSGI middleware.""" |
17 | -import httplib | |
17 | +import http.client | |
18 | 18 | from oauth import oauth |
19 | 19 | try: |
20 | 20 | import simplejson as json |
21 | 21 | except ImportError: |
22 | 22 | import json # noqa |
23 | -from urllib import quote | |
23 | +from urllib.parse import quote | |
24 | 24 | from wsgiref.util import shift_path_info |
25 | 25 | |
26 | 26 |
@@ -45,7 +45,7 @@ | ||
45 | 45 | raise NotImplementedError(self.get_oauth_data_store) |
46 | 46 | |
47 | 47 | def _error(self, start_response, status, description, message=None): |
48 | - start_response("%d %s" % (status, httplib.responses[status]), | |
48 | + start_response("%d %s" % (status, http.client.responses[status]), | |
49 | 49 | [('content-type', 'application/json')]) |
50 | 50 | err = {"error": description} |
51 | 51 | if message: |
@@ -69,7 +69,7 @@ | ||
69 | 69 | "Missing OAuth.") |
70 | 70 | try: |
71 | 71 | self.verify(environ, oauth_req) |
72 | - except oauth.OAuthError, e: | |
72 | + except oauth.OAuthError as e: | |
73 | 73 | return self._error(start_response, 401, "unauthorized", |
74 | 74 | e.message) |
75 | 75 | shift_path_info(environ) |
@@ -85,5 +85,5 @@ | ||
85 | 85 | # filter out oauth bits |
86 | 86 | environ['QUERY_STRING'] = '&'.join("%s=%s" % (quote(k, safe=''), |
87 | 87 | quote(v, safe='')) |
88 | - for k, v in parameters.iteritems()) | |
88 | + for k, v in parameters.items()) | |
89 | 89 | return consumer, token |
@@ -15,7 +15,7 @@ | ||
15 | 15 | # along with u1db. If not, see <http://www.gnu.org/licenses/>. |
16 | 16 | |
17 | 17 | """The synchronization utilities for U1DB.""" |
18 | -from itertools import izip | |
18 | + | |
19 | 19 | |
20 | 20 | import u1db |
21 | 21 | from u1db import errors |
@@ -131,9 +131,9 @@ | ||
131 | 131 | docs_to_send = self.source.get_docs(changed_doc_ids, |
132 | 132 | check_for_conflicts=False, include_deleted=True) |
133 | 133 | # TODO: there must be a way to not iterate twice |
134 | - docs_by_generation = zip( | |
134 | + docs_by_generation = list(zip( | |
135 | 135 | docs_to_send, (gen for _, gen, _ in changes), |
136 | - (trans for _, _, trans in changes)) | |
136 | + (trans for _, _, trans in changes))) | |
137 | 137 | |
138 | 138 | # exchange documents and try to insert the returned ones with |
139 | 139 | # the target, return target synced-up-to gen |
@@ -261,7 +261,7 @@ | ||
261 | 261 | docs = self._db.get_docs( |
262 | 262 | changed_doc_ids, check_for_conflicts=False, include_deleted=True) |
263 | 263 | |
264 | - docs_by_gen = izip( | |
264 | + docs_by_gen = zip( | |
265 | 265 | docs, (gen for _, gen, _ in changes_to_return), |
266 | 266 | (trans_id for _, _, trans_id in changes_to_return)) |
267 | 267 | _outgoing_trace = [] # for tests |
@@ -31,7 +31,7 @@ | ||
31 | 31 | |
32 | 32 | from oauth import oauth |
33 | 33 | from sqlite3 import dbapi2 |
34 | -from StringIO import StringIO | |
34 | +from io import StringIO | |
35 | 35 | |
36 | 36 | import testscenarios |
37 | 37 | import testtools |
@@ -51,7 +51,7 @@ | ||
51 | 51 | try: |
52 | 52 | from u1db.tests import c_backend_wrapper |
53 | 53 | c_backend_error = None |
54 | -except ImportError, e: | |
54 | +except ImportError as e: | |
55 | 55 | c_backend_wrapper = None # noqa |
56 | 56 | c_backend_error = e |
57 | 57 |
@@ -102,7 +102,7 @@ | ||
102 | 102 | database, however the rest can be returned in any order. |
103 | 103 | """ |
104 | 104 | if conflicts: |
105 | - conflicts = [(rev, (json.loads(cont) if isinstance(cont, basestring) | |
105 | + conflicts = [(rev, (json.loads(cont) if isinstance(cont, str) | |
106 | 106 | else cont)) for (rev, cont) in conflicts] |
107 | 107 | conflicts = conflicts[:1] + sorted(conflicts[1:]) |
108 | 108 | actual = db.get_doc_conflicts(doc_id) |
@@ -29,7 +29,7 @@ | ||
29 | 29 | return |
30 | 30 | try: |
31 | 31 | process.terminate() |
32 | - except OSError, e: | |
32 | + except OSError as e: | |
33 | 33 | if e.errno in (errno.ESRCH,): |
34 | 34 | # Process has exited |
35 | 35 | return |
@@ -40,7 +40,7 @@ | ||
40 | 40 | time.sleep(0.01) |
41 | 41 | try: |
42 | 42 | process.kill() |
43 | - except OSError, e: | |
43 | + except OSError as e: | |
44 | 44 | if e.errno in (errno.ESRCH,): |
45 | 45 | # Process has exited |
46 | 46 | return |
@@ -14,7 +14,7 @@ | ||
14 | 14 | # You should have received a copy of the GNU Lesser General Public License |
15 | 15 | # along with u1db. If not, see <http://www.gnu.org/licenses/>. |
16 | 16 | |
17 | -import cStringIO | |
17 | +import io | |
18 | 18 | import os |
19 | 19 | import sys |
20 | 20 | try: |
@@ -190,9 +190,9 @@ | ||
190 | 190 | self.addCleanup(self.db.close) |
191 | 191 | |
192 | 192 | def make_command(self, cls, stdin_content=''): |
193 | - inf = cStringIO.StringIO(stdin_content) | |
194 | - out = cStringIO.StringIO() | |
195 | - err = cStringIO.StringIO() | |
193 | + inf = io.StringIO(stdin_content) | |
194 | + out = io.StringIO() | |
195 | + err = io.StringIO() | |
196 | 196 | return cls(inf, out, err) |
197 | 197 | |
198 | 198 |
@@ -200,7 +200,7 @@ | ||
200 | 200 | |
201 | 201 | def test_create(self): |
202 | 202 | cmd = self.make_command(client.CmdCreate) |
203 | - inf = cStringIO.StringIO(tests.simple_doc) | |
203 | + inf = io.StringIO(tests.simple_doc) | |
204 | 204 | cmd.run(self.db_path, inf, 'test-id') |
205 | 205 | doc = self.db.get_doc('test-id') |
206 | 206 | self.assertEqual(tests.simple_doc, doc.get_json()) |
@@ -363,7 +363,7 @@ | ||
363 | 363 | |
364 | 364 | def test_put_simple(self): |
365 | 365 | cmd = self.make_command(client.CmdPut) |
366 | - inf = cStringIO.StringIO(tests.nested_doc) | |
366 | + inf = io.StringIO(tests.nested_doc) | |
367 | 367 | cmd.run(self.db_path, 'my-test-doc', self.doc.rev, inf) |
368 | 368 | doc = self.db.get_doc('my-test-doc') |
369 | 369 | self.assertNotEqual(self.doc.rev, doc.rev) |
@@ -375,7 +375,7 @@ | ||
375 | 375 | |
376 | 376 | def test_put_no_db(self): |
377 | 377 | cmd = self.make_command(client.CmdPut) |
378 | - inf = cStringIO.StringIO(tests.nested_doc) | |
378 | + inf = io.StringIO(tests.nested_doc) | |
379 | 379 | retval = cmd.run(self.db_path + "__DOES_NOT_EXIST", |
380 | 380 | 'my-test-doc', self.doc.rev, inf) |
381 | 381 | self.assertEqual(retval, 1) |
@@ -384,7 +384,7 @@ | ||
384 | 384 | |
385 | 385 | def test_put_no_doc(self): |
386 | 386 | cmd = self.make_command(client.CmdPut) |
387 | - inf = cStringIO.StringIO(tests.nested_doc) | |
387 | + inf = io.StringIO(tests.nested_doc) | |
388 | 388 | retval = cmd.run(self.db_path, 'no-such-doc', 'wut:1', inf) |
389 | 389 | self.assertEqual(1, retval) |
390 | 390 | self.assertEqual('', cmd.stdout.getvalue()) |
@@ -395,7 +395,7 @@ | ||
395 | 395 | doc = self.make_document('my-test-doc', rev, '{}', False) |
396 | 396 | self.db.put_doc(doc) |
397 | 397 | cmd = self.make_command(client.CmdPut) |
398 | - inf = cStringIO.StringIO(tests.nested_doc) | |
398 | + inf = io.StringIO(tests.nested_doc) | |
399 | 399 | retval = cmd.run(self.db_path, 'my-test-doc', rev, inf) |
400 | 400 | self.assertEqual(1, retval) |
401 | 401 | self.assertEqual('', cmd.stdout.getvalue()) |
@@ -408,7 +408,7 @@ | ||
408 | 408 | doc, save_conflict=True, replica_uid='r', replica_gen=1, |
409 | 409 | replica_trans_id='foo') |
410 | 410 | cmd = self.make_command(client.CmdPut) |
411 | - inf = cStringIO.StringIO(tests.nested_doc) | |
411 | + inf = io.StringIO(tests.nested_doc) | |
412 | 412 | retval = cmd.run(self.db_path, 'my-test-doc', 'other:1', inf) |
413 | 413 | self.assertEqual(1, retval) |
414 | 414 | self.assertEqual('', cmd.stdout.getvalue()) |
@@ -431,7 +431,7 @@ | ||
431 | 431 | def test_resolve_simple(self): |
432 | 432 | self.assertTrue(self.db.get_doc('my-doc').has_conflicts) |
433 | 433 | cmd = self.make_command(client.CmdResolve) |
434 | - inf = cStringIO.StringIO(tests.nested_doc) | |
434 | + inf = io.StringIO(tests.nested_doc) | |
435 | 435 | cmd.run(self.db_path, 'my-doc', [self.doc1.rev, self.doc2.rev], inf) |
436 | 436 | doc = self.db.get_doc('my-doc') |
437 | 437 | vec = vectorclock.VectorClockRev(doc.rev) |
@@ -451,7 +451,7 @@ | ||
451 | 451 | doc3, save_conflict=True, replica_uid='r', replica_gen=1, |
452 | 452 | replica_trans_id='foo') |
453 | 453 | cmd = self.make_command(client.CmdResolve) |
454 | - inf = cStringIO.StringIO(tests.nested_doc) | |
454 | + inf = io.StringIO(tests.nested_doc) | |
455 | 455 | cmd.run(self.db_path, 'my-doc', [self.doc1.rev, self.doc2.rev], inf) |
456 | 456 | doc = self.db.get_doc('my-doc') |
457 | 457 | self.assertGetDoc(self.db, 'my-doc', doc.rev, moar, True) |
@@ -642,7 +642,7 @@ | ||
642 | 642 | |
643 | 643 | def test_get_index_keys_nonascii(self): |
644 | 644 | self.db.create_index("foo", "bar") |
645 | - self.db.create_doc_from_json('{"bar": "\u00a4"}') | |
645 | + self.db.create_doc_from_json('{"bar": "\\u00a4"}') | |
646 | 646 | cmd = self.make_command(client.CmdGetIndexKeys) |
647 | 647 | retval = cmd.run(self.db_path, "foo") |
648 | 648 | self.assertEqual(retval, None) |
@@ -770,14 +770,14 @@ | ||
770 | 770 | |
771 | 771 | def run_main(self, args, stdin=None): |
772 | 772 | if stdin is not None: |
773 | - self.patch(sys, 'stdin', cStringIO.StringIO(stdin)) | |
774 | - stdout = cStringIO.StringIO() | |
775 | - stderr = cStringIO.StringIO() | |
773 | + self.patch(sys, 'stdin', io.StringIO(stdin)) | |
774 | + stdout = io.StringIO() | |
775 | + stderr = io.StringIO() | |
776 | 776 | self.patch(sys, 'stdout', stdout) |
777 | 777 | self.patch(sys, 'stderr', stderr) |
778 | 778 | try: |
779 | 779 | ret = client.main(args) |
780 | - except SystemExit, e: | |
780 | + except SystemExit as e: | |
781 | 781 | self.fail("Intercepted SystemExit: %s" % (e,)) |
782 | 782 | if ret is None: |
783 | 783 | ret = 0 |
@@ -819,7 +819,7 @@ | ||
819 | 819 | # When run under python-dbg, it prints out the refs after the |
820 | 820 | # actual content, so match it if we need to. |
821 | 821 | expected_re = expected + '\[\d+ refs\]\n' |
822 | - self.assertRegexpMatches(stripped, expected_re) | |
822 | + self.assertRegex(stripped, expected_re) | |
823 | 823 | |
824 | 824 | def test_get(self): |
825 | 825 | doc = self.db.create_doc_from_json(tests.simple_doc, doc_id='test-id') |
@@ -14,7 +14,7 @@ | ||
14 | 14 | # You should have received a copy of the GNU Lesser General Public License |
15 | 15 | # along with u1db. If not, see <http://www.gnu.org/licenses/>. |
16 | 16 | |
17 | -import cStringIO | |
17 | +import io | |
18 | 18 | import argparse |
19 | 19 | |
20 | 20 | from u1db import ( |
@@ -41,7 +41,7 @@ | ||
41 | 41 | |
42 | 42 | |
43 | 43 | def make_stdin_out_err(): |
44 | - return cStringIO.StringIO(), cStringIO.StringIO(), cStringIO.StringIO() | |
44 | + return io.StringIO(), io.StringIO(), io.StringIO() | |
45 | 45 | |
46 | 46 | |
47 | 47 | class TestCommandGroup(tests.TestCase): |
@@ -49,7 +49,7 @@ | ||
49 | 49 | def trap_system_exit(self, func, *args, **kwargs): |
50 | 50 | try: |
51 | 51 | return func(*args, **kwargs) |
52 | - except SystemExit, e: | |
52 | + except SystemExit as e: | |
53 | 53 | self.fail('Got SystemExit trying to run: %s' % (func,)) |
54 | 54 | |
55 | 55 | def parse_args(self, parser, args): |
@@ -49,7 +49,7 @@ | ||
49 | 49 | if stderr != '': |
50 | 50 | # stderr should normally be empty, but if we are running under |
51 | 51 | # python-dbg, it contains the following string |
52 | - self.assertRegexpMatches(stderr, r'\[\d+ refs\]') | |
52 | + self.assertRegex(stderr, r'\[\d+ refs\]') | |
53 | 53 | self.assertEqual(0, p.returncode) |
54 | 54 | self.assertIn('Run the U1DB server', stdout) |
55 | 55 |
@@ -79,7 +79,7 @@ | ||
79 | 79 | password = "correct_password" |
80 | 80 | params = {'old_rev': 'old-rev'} |
81 | 81 | url = BASE_URL + '/pfx/foo/doc/doc-id?%s' % ( |
82 | - '&'.join("%s=%s" % (k, v) for k, v in params.items())) | |
82 | + '&'.join("%s=%s" % (k, v) for k, v in list(params.items()))) | |
83 | 83 | auth = '%s:%s' % (user, password) |
84 | 84 | headers = { |
85 | 85 | 'Authorization': 'Basic %s' % (auth.encode('base64'),)} |
@@ -93,7 +93,7 @@ | ||
93 | 93 | password = "incorrect_password" |
94 | 94 | params = {'old_rev': 'old-rev'} |
95 | 95 | url = BASE_URL + '/pfx/foo/doc/doc-id?%s' % ( |
96 | - '&'.join("%s=%s" % (k, v) for k, v in params.items())) | |
96 | + '&'.join("%s=%s" % (k, v) for k, v in list(params.items()))) | |
97 | 97 | auth = '%s:%s' % (user, password) |
98 | 98 | headers = { |
99 | 99 | 'Authorization': 'Basic %s' % (auth.encode('base64'),)} |
@@ -147,7 +147,7 @@ | ||
147 | 147 | http_method='DELETE' |
148 | 148 | ) |
149 | 149 | url = oauth_req.get_normalized_http_url() + '?' + ( |
150 | - '&'.join("%s=%s" % (k, v) for k, v in params.items())) | |
150 | + '&'.join("%s=%s" % (k, v) for k, v in list(params.items()))) | |
151 | 151 | oauth_req.sign_request(tests.sign_meth_HMAC_SHA1, |
152 | 152 | tests.consumer2, tests.token2) |
153 | 153 | resp = self.app.delete(url, headers=oauth_req.to_header()) |
@@ -262,7 +262,7 @@ | ||
262 | 262 | http_method='DELETE' |
263 | 263 | ) |
264 | 264 | url = oauth_req.get_normalized_http_url() + '?' + ( |
265 | - '&'.join("%s=%s" % (k, v) for k, v in params.items())) | |
265 | + '&'.join("%s=%s" % (k, v) for k, v in list(params.items()))) | |
266 | 266 | oauth_req.sign_request(tests.sign_meth_HMAC_SHA1, |
267 | 267 | tests.consumer2, tests.token2) |
268 | 268 | resp = self.app.delete(url, headers=oauth_req.to_header()) |
@@ -147,12 +147,12 @@ | ||
147 | 147 | self.assertEqual(doc.rev, new_rev) |
148 | 148 | |
149 | 149 | def test_put_non_ascii_key(self): |
150 | - content = json.dumps({u'key\xe5': u'val'}) | |
150 | + content = json.dumps({'key\xe5': 'val'}) | |
151 | 151 | doc = self.db.create_doc_from_json(content, doc_id='my_doc') |
152 | 152 | self.assertGetDoc(self.db, 'my_doc', doc.rev, content, False) |
153 | 153 | |
154 | 154 | def test_put_non_ascii_value(self): |
155 | - content = json.dumps({'key': u'\xe5'}) | |
155 | + content = json.dumps({'key': '\xe5'}) | |
156 | 156 | doc = self.db.create_doc_from_json(content, doc_id='my_doc') |
157 | 157 | self.assertGetDoc(self.db, 'my_doc', doc.rev, content, False) |
158 | 158 |
@@ -1035,14 +1035,14 @@ | ||
1035 | 1035 | self.db.list_indexes()) |
1036 | 1036 | |
1037 | 1037 | def test_create_index_on_non_ascii_field_name(self): |
1038 | - doc = self.db.create_doc_from_json(json.dumps({u'\xe5': 'value'})) | |
1039 | - self.db.create_index('test-idx', u'\xe5') | |
1038 | + doc = self.db.create_doc_from_json(json.dumps({'\xe5': 'value'})) | |
1039 | + self.db.create_index('test-idx', '\xe5') | |
1040 | 1040 | self.assertEqual([doc], self.db.get_from_index('test-idx', 'value')) |
1041 | 1041 | |
1042 | 1042 | def test_list_indexes_with_non_ascii_field_names(self): |
1043 | - self.db.create_index('test-idx', u'\xe5') | |
1043 | + self.db.create_index('test-idx', '\xe5') | |
1044 | 1044 | self.assertEqual( |
1045 | - [('test-idx', [u'\xe5'])], self.db.list_indexes()) | |
1045 | + [('test-idx', ['\xe5'])], self.db.list_indexes()) | |
1046 | 1046 | |
1047 | 1047 | def test_create_index_evaluates_it(self): |
1048 | 1048 | doc = self.db.create_doc_from_json(simple_doc) |
@@ -1050,15 +1050,15 @@ | ||
1050 | 1050 | self.assertEqual([doc], self.db.get_from_index('test-idx', 'value')) |
1051 | 1051 | |
1052 | 1052 | def test_wildcard_matches_unicode_value(self): |
1053 | - doc = self.db.create_doc_from_json(json.dumps({"key": u"valu\xe5"})) | |
1053 | + doc = self.db.create_doc_from_json(json.dumps({"key": "valu\xe5"})) | |
1054 | 1054 | self.db.create_index('test-idx', 'key') |
1055 | 1055 | self.assertEqual([doc], self.db.get_from_index('test-idx', '*')) |
1056 | 1056 | |
1057 | 1057 | def test_retrieve_unicode_value_from_index(self): |
1058 | - doc = self.db.create_doc_from_json(json.dumps({"key": u"valu\xe5"})) | |
1058 | + doc = self.db.create_doc_from_json(json.dumps({"key": "valu\xe5"})) | |
1059 | 1059 | self.db.create_index('test-idx', 'key') |
1060 | 1060 | self.assertEqual( |
1061 | - [doc], self.db.get_from_index('test-idx', u"valu\xe5")) | |
1061 | + [doc], self.db.get_from_index('test-idx', "valu\xe5")) | |
1062 | 1062 | |
1063 | 1063 | def test_create_index_fails_if_name_taken(self): |
1064 | 1064 | self.db.create_index('test-idx', 'key') |
@@ -1161,11 +1161,11 @@ | ||
1161 | 1161 | '"key2": ["value2-1", "value2-2", "value2-3"]}') |
1162 | 1162 | self.db.create_index('test-idx', 'split_words(key)', 'key2') |
1163 | 1163 | self.assertEqual( |
1164 | - [(u'value1-1', u'value2-1'), (u'value1-1', u'value2-2'), | |
1165 | - (u'value1-1', u'value2-3'), (u'value1-2', u'value2-1'), | |
1166 | - (u'value1-2', u'value2-2'), (u'value1-2', u'value2-3'), | |
1167 | - (u'value1-3', u'value2-1'), (u'value1-3', u'value2-2'), | |
1168 | - (u'value1-3', u'value2-3')], | |
1164 | + [('value1-1', 'value2-1'), ('value1-1', 'value2-2'), | |
1165 | + ('value1-1', 'value2-3'), ('value1-2', 'value2-1'), | |
1166 | + ('value1-2', 'value2-2'), ('value1-2', 'value2-3'), | |
1167 | + ('value1-3', 'value2-1'), ('value1-3', 'value2-2'), | |
1168 | + ('value1-3', 'value2-3')], | |
1169 | 1169 | sorted(self.db.get_index_keys('test-idx'))) |
1170 | 1170 | |
1171 | 1171 | def test_get_from_index_multi_ordered(self): |
@@ -117,15 +117,15 @@ | ||
117 | 117 | |
118 | 118 | def test_create_index_list_on_non_ascii_field_name(self): |
119 | 119 | self.db = c_backend_wrapper.CDatabase(':memory:') |
120 | - doc = self.db.create_doc_from_json(json.dumps({u'\xe5': 'value'})) | |
121 | - self.db.create_index_list('test-idx', [u'\xe5']) | |
120 | + doc = self.db.create_doc_from_json(json.dumps({'\xe5': 'value'})) | |
121 | + self.db.create_index_list('test-idx', ['\xe5']) | |
122 | 122 | self.assertEqual([doc], self.db.get_from_index('test-idx', 'value')) |
123 | 123 | |
124 | 124 | def test_list_indexes_with_non_ascii_field_names(self): |
125 | 125 | self.db = c_backend_wrapper.CDatabase(':memory:') |
126 | - self.db.create_index_list('test-idx', [u'\xe5']) | |
126 | + self.db.create_index_list('test-idx', ['\xe5']) | |
127 | 127 | self.assertEqual( |
128 | - [('test-idx', [u'\xe5'])], self.db.list_indexes()) | |
128 | + [('test-idx', ['\xe5'])], self.db.list_indexes()) | |
129 | 129 | |
130 | 130 | def test_create_index_evaluates_it(self): |
131 | 131 | self.db = c_backend_wrapper.CDatabase(':memory:') |
@@ -135,7 +135,7 @@ | ||
135 | 135 | |
136 | 136 | def test_wildcard_matches_unicode_value(self): |
137 | 137 | self.db = c_backend_wrapper.CDatabase(':memory:') |
138 | - doc = self.db.create_doc_from_json(json.dumps({"key": u"valu\xe5"})) | |
138 | + doc = self.db.create_doc_from_json(json.dumps({"key": "valu\xe5"})) | |
139 | 139 | self.db.create_index_list('test-idx', ['key']) |
140 | 140 | self.assertEqual([doc], self.db.get_from_index('test-idx', '*')) |
141 | 141 |
@@ -624,7 +624,7 @@ | ||
624 | 624 | self.assertIsInstance(uuid, str) |
625 | 625 | self.assertEqual(32, len(uuid)) |
626 | 626 | # This will raise ValueError if it isn't a valid hex string |
627 | - long(uuid, 16) | |
627 | + int(uuid, 16) | |
628 | 628 | # Version 4 uuids have 2 other requirements, the high 4 bits of the |
629 | 629 | # seventh byte are always '0x4', and the middle bits of byte 9 are |
630 | 630 | # always set |
@@ -22,7 +22,7 @@ | ||
22 | 22 | import simplejson as json |
23 | 23 | except ImportError: |
24 | 24 | import json # noqa |
25 | -import StringIO | |
25 | +import io | |
26 | 26 | |
27 | 27 | from u1db import ( |
28 | 28 | __version__ as _u1db_version, |
@@ -40,11 +40,11 @@ | ||
40 | 40 | class TestFencedReader(tests.TestCase): |
41 | 41 | |
42 | 42 | def test_init(self): |
43 | - reader = http_app._FencedReader(StringIO.StringIO(""), 25, 100) | |
43 | + reader = http_app._FencedReader(io.StringIO(""), 25, 100) | |
44 | 44 | self.assertEqual(25, reader.remaining) |
45 | 45 | |
46 | 46 | def test_read_chunk(self): |
47 | - inp = StringIO.StringIO("abcdef") | |
47 | + inp = io.StringIO("abcdef") | |
48 | 48 | reader = http_app._FencedReader(inp, 5, 10) |
49 | 49 | data = reader.read_chunk(2) |
50 | 50 | self.assertEqual("ab", data) |
@@ -52,7 +52,7 @@ | ||
52 | 52 | self.assertEqual(3, reader.remaining) |
53 | 53 | |
54 | 54 | def test_read_chunk_remaining(self): |
55 | - inp = StringIO.StringIO("abcdef") | |
55 | + inp = io.StringIO("abcdef") | |
56 | 56 | reader = http_app._FencedReader(inp, 4, 10) |
57 | 57 | data = reader.read_chunk(9999) |
58 | 58 | self.assertEqual("abcd", data) |
@@ -60,7 +60,7 @@ | ||
60 | 60 | self.assertEqual(0, reader.remaining) |
61 | 61 | |
62 | 62 | def test_read_chunk_nothing_left(self): |
63 | - inp = StringIO.StringIO("abc") | |
63 | + inp = io.StringIO("abc") | |
64 | 64 | reader = http_app._FencedReader(inp, 2, 10) |
65 | 65 | reader.read_chunk(2) |
66 | 66 | self.assertEqual(2, inp.tell()) |
@@ -71,7 +71,7 @@ | ||
71 | 71 | self.assertEqual(0, reader.remaining) |
72 | 72 | |
73 | 73 | def test_read_chunk_kept(self): |
74 | - inp = StringIO.StringIO("abcde") | |
74 | + inp = io.StringIO("abcde") | |
75 | 75 | reader = http_app._FencedReader(inp, 4, 10) |
76 | 76 | reader._kept = "xyz" |
77 | 77 | data = reader.read_chunk(2) # atmost ignored |
@@ -81,7 +81,7 @@ | ||
81 | 81 | self.assertIsNone(reader._kept) |
82 | 82 | |
83 | 83 | def test_getline(self): |
84 | - inp = StringIO.StringIO("abc\r\nde") | |
84 | + inp = io.StringIO("abc\r\nde") | |
85 | 85 | reader = http_app._FencedReader(inp, 6, 10) |
86 | 86 | reader.MAXCHUNK = 6 |
87 | 87 | line = reader.getline() |
@@ -89,7 +89,7 @@ | ||
89 | 89 | self.assertEqual("d", reader._kept) |
90 | 90 | |
91 | 91 | def test_getline_exact(self): |
92 | - inp = StringIO.StringIO("abcd\r\nef") | |
92 | + inp = io.StringIO("abcd\r\nef") | |
93 | 93 | reader = http_app._FencedReader(inp, 6, 10) |
94 | 94 | reader.MAXCHUNK = 6 |
95 | 95 | line = reader.getline() |
@@ -97,14 +97,14 @@ | ||
97 | 97 | self.assertIs(None, reader._kept) |
98 | 98 | |
99 | 99 | def test_getline_no_newline(self): |
100 | - inp = StringIO.StringIO("abcd") | |
100 | + inp = io.StringIO("abcd") | |
101 | 101 | reader = http_app._FencedReader(inp, 4, 10) |
102 | 102 | reader.MAXCHUNK = 6 |
103 | 103 | line = reader.getline() |
104 | 104 | self.assertEqual("abcd", line) |
105 | 105 | |
106 | 106 | def test_getline_many_chunks(self): |
107 | - inp = StringIO.StringIO("abcde\r\nf") | |
107 | + inp = io.StringIO("abcde\r\nf") | |
108 | 108 | reader = http_app._FencedReader(inp, 8, 10) |
109 | 109 | reader.MAXCHUNK = 4 |
110 | 110 | line = reader.getline() |
@@ -114,7 +114,7 @@ | ||
114 | 114 | self.assertEqual("f", line) |
115 | 115 | |
116 | 116 | def test_getline_empty(self): |
117 | - inp = StringIO.StringIO("") | |
117 | + inp = io.StringIO("") | |
118 | 118 | reader = http_app._FencedReader(inp, 0, 10) |
119 | 119 | reader.MAXCHUNK = 4 |
120 | 120 | line = reader.getline() |
@@ -123,7 +123,7 @@ | ||
123 | 123 | self.assertEqual("", line) |
124 | 124 | |
125 | 125 | def test_getline_just_newline(self): |
126 | - inp = StringIO.StringIO("\r\n") | |
126 | + inp = io.StringIO("\r\n") | |
127 | 127 | reader = http_app._FencedReader(inp, 2, 10) |
128 | 128 | reader.MAXCHUNK = 4 |
129 | 129 | line = reader.getline() |
@@ -132,13 +132,13 @@ | ||
132 | 132 | self.assertEqual("", line) |
133 | 133 | |
134 | 134 | def test_getline_too_large(self): |
135 | - inp = StringIO.StringIO("x" * 50) | |
135 | + inp = io.StringIO("x" * 50) | |
136 | 136 | reader = http_app._FencedReader(inp, 50, 25) |
137 | 137 | reader.MAXCHUNK = 4 |
138 | 138 | self.assertRaises(http_app.BadRequest, reader.getline) |
139 | 139 | |
140 | 140 | def test_getline_too_large_complete(self): |
141 | - inp = StringIO.StringIO("x" * 25 + "\r\n") | |
141 | + inp = io.StringIO("x" * 25 + "\r\n") | |
142 | 142 | reader = http_app._FencedReader(inp, 50, 25) |
143 | 143 | reader.MAXCHUNK = 4 |
144 | 144 | self.assertRaises(http_app.BadRequest, reader.getline) |
@@ -267,7 +267,7 @@ | ||
267 | 267 | resource = TestResource() |
268 | 268 | body = '{"body": true}' |
269 | 269 | environ = {'QUERY_STRING': 'a=1', 'REQUEST_METHOD': 'PUT', |
270 | - 'wsgi.input': StringIO.StringIO(body), | |
270 | + 'wsgi.input': io.StringIO(body), | |
271 | 271 | 'CONTENT_LENGTH': str(len(body)), |
272 | 272 | 'CONTENT_TYPE': 'application/json'} |
273 | 273 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -287,7 +287,7 @@ | ||
287 | 287 | ']' |
288 | 288 | ) |
289 | 289 | environ = {'QUERY_STRING': 'a=1', 'REQUEST_METHOD': 'PUT', |
290 | - 'wsgi.input': StringIO.StringIO(body), | |
290 | + 'wsgi.input': io.StringIO(body), | |
291 | 291 | 'CONTENT_LENGTH': str(len(body)), |
292 | 292 | 'CONTENT_TYPE': 'application/x-u1db-sync-stream'} |
293 | 293 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -302,7 +302,7 @@ | ||
302 | 302 | def _put_sync_stream(self, body): |
303 | 303 | resource = TestResource() |
304 | 304 | environ = {'QUERY_STRING': 'a=1&b=2', 'REQUEST_METHOD': 'PUT', |
305 | - 'wsgi.input': StringIO.StringIO(body), | |
305 | + 'wsgi.input': io.StringIO(body), | |
306 | 306 | 'CONTENT_LENGTH': str(len(body)), |
307 | 307 | 'CONTENT_TYPE': 'application/x-u1db-sync-stream'} |
308 | 308 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -343,7 +343,7 @@ | ||
343 | 343 | def test_bad_request_decode_failure(self): |
344 | 344 | resource = TestResource() |
345 | 345 | environ = {'QUERY_STRING': 'a=\xff', 'REQUEST_METHOD': 'PUT', |
346 | - 'wsgi.input': StringIO.StringIO('{}'), | |
346 | + 'wsgi.input': io.StringIO('{}'), | |
347 | 347 | 'CONTENT_LENGTH': '2', |
348 | 348 | 'CONTENT_TYPE': 'application/json'} |
349 | 349 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -353,7 +353,7 @@ | ||
353 | 353 | def test_bad_request_unsupported_content_type(self): |
354 | 354 | resource = TestResource() |
355 | 355 | environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT', |
356 | - 'wsgi.input': StringIO.StringIO('{}'), | |
356 | + 'wsgi.input': io.StringIO('{}'), | |
357 | 357 | 'CONTENT_LENGTH': '2', |
358 | 358 | 'CONTENT_TYPE': 'text/plain'} |
359 | 359 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -363,12 +363,12 @@ | ||
363 | 363 | def test_bad_request_content_length_too_large(self): |
364 | 364 | resource = TestResource() |
365 | 365 | environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT', |
366 | - 'wsgi.input': StringIO.StringIO('{}'), | |
366 | + 'wsgi.input': io.StringIO('{}'), | |
367 | 367 | 'CONTENT_LENGTH': '10000', |
368 | 368 | 'CONTENT_TYPE': 'text/plain'} |
369 | 369 | |
370 | 370 | resource.max_request_size = 5000 |
371 | - resource.max_entry_size = sys.maxint # we don't get to use this | |
371 | + resource.max_entry_size = sys.maxsize # we don't get to use this | |
372 | 372 | |
373 | 373 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
374 | 374 | parameters) |
@@ -377,7 +377,7 @@ | ||
377 | 377 | def test_bad_request_no_content_length(self): |
378 | 378 | resource = TestResource() |
379 | 379 | environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT', |
380 | - 'wsgi.input': StringIO.StringIO('a'), | |
380 | + 'wsgi.input': io.StringIO('a'), | |
381 | 381 | 'CONTENT_TYPE': 'application/json'} |
382 | 382 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
383 | 383 | parameters) |
@@ -386,7 +386,7 @@ | ||
386 | 386 | def test_bad_request_invalid_content_length(self): |
387 | 387 | resource = TestResource() |
388 | 388 | environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT', |
389 | - 'wsgi.input': StringIO.StringIO('abc'), | |
389 | + 'wsgi.input': io.StringIO('abc'), | |
390 | 390 | 'CONTENT_LENGTH': '1unk', |
391 | 391 | 'CONTENT_TYPE': 'application/json'} |
392 | 392 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -396,7 +396,7 @@ | ||
396 | 396 | def test_bad_request_empty_body(self): |
397 | 397 | resource = TestResource() |
398 | 398 | environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT', |
399 | - 'wsgi.input': StringIO.StringIO(''), | |
399 | + 'wsgi.input': io.StringIO(''), | |
400 | 400 | 'CONTENT_LENGTH': '0', |
401 | 401 | 'CONTENT_TYPE': 'application/json'} |
402 | 402 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -413,7 +413,7 @@ | ||
413 | 413 | def test_bad_request_unsupported_method_put_like(self): |
414 | 414 | resource = TestResource() |
415 | 415 | environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT', |
416 | - 'wsgi.input': StringIO.StringIO('{}'), | |
416 | + 'wsgi.input': io.StringIO('{}'), | |
417 | 417 | 'CONTENT_LENGTH': '2', |
418 | 418 | 'CONTENT_TYPE': 'application/json'} |
419 | 419 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -424,7 +424,7 @@ | ||
424 | 424 | resource = TestResource() |
425 | 425 | body = '{}\r\n{}\r\n' |
426 | 426 | environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'POST', |
427 | - 'wsgi.input': StringIO.StringIO(body), | |
427 | + 'wsgi.input': io.StringIO(body), | |
428 | 428 | 'CONTENT_LENGTH': str(len(body)), |
429 | 429 | 'CONTENT_TYPE': 'application/x-u1db-multi-json'} |
430 | 430 | invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ, |
@@ -81,7 +81,7 @@ | ||
81 | 81 | # is a 'str' object. However error['status'] returns a unicode |
82 | 82 | # object. |
83 | 83 | status = str(error['status']) |
84 | - if isinstance(response, unicode): | |
84 | + if isinstance(response, str): | |
85 | 85 | response = str(response) |
86 | 86 | if isinstance(response, str): |
87 | 87 | start_response(status, [('Content-Type', 'text/plain')]) |
@@ -99,7 +99,7 @@ | ||
99 | 99 | # is a 'str' object. However error['status'] returns a unicode |
100 | 100 | # object. |
101 | 101 | status = str(error['status']) |
102 | - if isinstance(response, unicode): | |
102 | + if isinstance(response, str): | |
103 | 103 | response = str(response) |
104 | 104 | if isinstance(response, str): |
105 | 105 | start_response(status, [('Content-Type', 'text/plain')]) |
@@ -120,7 +120,7 @@ | ||
120 | 120 | try: |
121 | 121 | consumer, token, params = oauth_server.verify_request( |
122 | 122 | oauth_req) |
123 | - except oauth.OAuthError, e: | |
123 | + except oauth.OAuthError as e: | |
124 | 124 | start_response("401 Unauthorized", |
125 | 125 | [('Content-Type', 'application/json')]) |
126 | 126 | return [json.dumps({"error": "unauthorized", |
@@ -213,7 +213,7 @@ | ||
213 | 213 | cli._request_json('POST', ['error'], {}, |
214 | 214 | {'status': "500 Internal Error", |
215 | 215 | 'response': "Fail."}) |
216 | - except errors.HTTPError, e: | |
216 | + except errors.HTTPError as e: | |
217 | 217 | pass |
218 | 218 | |
219 | 219 | self.assertEqual(500, e.status) |
@@ -258,7 +258,7 @@ | ||
258 | 258 | cli._request_json('POST', ['error'], {}, |
259 | 259 | {'status': "503 Service Unavailable", |
260 | 260 | 'response': "random unavailable."}) |
261 | - except errors.Unavailable, e: | |
261 | + except errors.Unavailable as e: | |
262 | 262 | pass |
263 | 263 | |
264 | 264 | self.assertEqual(503, e.status) |
@@ -297,7 +297,7 @@ | ||
297 | 297 | cli._request_json('POST', ['error'], {}, |
298 | 298 | {'status': "400 Bad Request", |
299 | 299 | 'response': {"error": "error"}}) |
300 | - except errors.U1DBError, e: | |
300 | + except errors.U1DBError as e: | |
301 | 301 | pass |
302 | 302 | self.assertIs(e.__class__, errors.U1DBError) |
303 | 303 |
@@ -311,7 +311,7 @@ | ||
311 | 311 | cli._request_json('POST', ['error'], {}, |
312 | 312 | {'status': "400 Bad Request", |
313 | 313 | 'response': "<Bad Request>"}) |
314 | - except errors.HTTPError, e: | |
314 | + except errors.HTTPError as e: | |
315 | 315 | pass |
316 | 316 | |
317 | 317 | self.assertEqual(400, e.status) |
@@ -322,13 +322,13 @@ | ||
322 | 322 | cli = self.getClient() |
323 | 323 | cli.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret, |
324 | 324 | tests.token1.key, tests.token1.secret) |
325 | - params = {'x': u'\xf0', 'y': "foo"} | |
325 | + params = {'x': '\xf0', 'y': "foo"} | |
326 | 326 | res, headers = cli._request('GET', ['doc', 'oauth'], params) |
327 | 327 | self.assertEqual( |
328 | 328 | ['/dbase/doc/oauth', tests.token1.key, params], json.loads(res)) |
329 | 329 | |
330 | 330 | # oauth does its own internal quoting |
331 | - params = {'x': u'\xf0', 'y': "foo"} | |
331 | + params = {'x': '\xf0', 'y': "foo"} | |
332 | 332 | res, headers = cli._request('GET', ['doc', 'oauth', 'foo bar'], params) |
333 | 333 | self.assertEqual( |
334 | 334 | ['/dbase/doc/oauth/foo bar', tests.token1.key, params], |
@@ -341,7 +341,7 @@ | ||
341 | 341 | 'token_key': tests.token1.key, |
342 | 342 | 'token_secret': tests.token1.secret, |
343 | 343 | }}) |
344 | - params = {'x': u'\xf0', 'y': "foo"} | |
344 | + params = {'x': '\xf0', 'y': "foo"} | |
345 | 345 | res, headers = cli._request('GET', ['doc', 'oauth'], params) |
346 | 346 | self.assertEqual( |
347 | 347 | ['/dbase/doc/oauth', tests.token1.key, params], json.loads(res)) |
@@ -96,7 +96,7 @@ | ||
96 | 96 | remote_target = self.getSyncTarget('localhost', 'test') |
97 | 97 | try: |
98 | 98 | remote_target.record_sync_info('other-id', 2, 'T-id') |
99 | - except ssl.SSLError, e: | |
99 | + except ssl.SSLError as e: | |
100 | 100 | self.assertIn("certificate verify failed", str(e)) |
101 | 101 | else: |
102 | 102 | self.fail("certificate verification should have failed.") |
@@ -99,7 +99,7 @@ | ||
99 | 99 | def test_keys(self): |
100 | 100 | idx = inmemory.InMemoryIndex('idx-name', ['key']) |
101 | 101 | idx.add_json('doc-id', simple_doc) |
102 | - self.assertEqual(['value'], idx.keys()) | |
102 | + self.assertEqual(['value'], list(idx.keys())) | |
103 | 103 | |
104 | 104 | def test_lookup(self): |
105 | 105 | idx = inmemory.InMemoryIndex('idx-name', ['key']) |
@@ -16,7 +16,7 @@ | ||
16 | 16 | |
17 | 17 | """Tests for the remote sync targets""" |
18 | 18 | |
19 | -import cStringIO | |
19 | +import io | |
20 | 20 | |
21 | 21 | from u1db import ( |
22 | 22 | errors, |
@@ -181,7 +181,7 @@ | ||
181 | 181 | self.startServer() |
182 | 182 | |
183 | 183 | def blackhole_getstderr(inst): |
184 | - return cStringIO.StringIO() | |
184 | + return io.StringIO() | |
185 | 185 | |
186 | 186 | self.patch(self.server.RequestHandlerClass, 'get_stderr', |
187 | 187 | blackhole_getstderr) |
@@ -238,7 +238,7 @@ | ||
238 | 238 | self.startServer() |
239 | 239 | |
240 | 240 | def blackhole_getstderr(inst): |
241 | - return cStringIO.StringIO() | |
241 | + return io.StringIO() | |
242 | 242 | |
243 | 243 | self.patch(self.server.RequestHandlerClass, 'get_stderr', |
244 | 244 | blackhole_getstderr) |
@@ -66,7 +66,7 @@ | ||
66 | 66 | def second_try(): |
67 | 67 | try: |
68 | 68 | db2 = SQLiteDatabaseTesting(dbname, 2) |
69 | - except Exception, e: | |
69 | + except Exception as e: | |
70 | 70 | outcome2.append(e) |
71 | 71 | else: |
72 | 72 | outcome2.append(db2) |
@@ -405,8 +405,8 @@ | ||
405 | 405 | |
406 | 406 | def test__set_trace_hook_shallow(self): |
407 | 407 | if (self.st._set_trace_hook_shallow == self.st._set_trace_hook |
408 | - or self.st._set_trace_hook_shallow.im_func == | |
409 | - SyncTarget._set_trace_hook_shallow.im_func): | |
408 | + or self.st._set_trace_hook_shallow.__func__ == | |
409 | + SyncTarget._set_trace_hook_shallow.__func__): | |
410 | 410 | # shallow same as full |
411 | 411 | expected = ['before whats_changed', |
412 | 412 | 'after whats_changed', |
@@ -469,7 +469,7 @@ | ||
469 | 469 | except AttributeError: |
470 | 470 | http_at = test._http_at = {} |
471 | 471 | path = db._replica_uid |
472 | - while path in http_at.values(): | |
472 | + while path in list(http_at.values()): | |
473 | 473 | path += 'copy' |
474 | 474 | http_at[new_db] = path |
475 | 475 | return new_db |
@@ -59,7 +59,7 @@ | ||
59 | 59 | return True |
60 | 60 | this_is_newer = False |
61 | 61 | other_expand = dict(other._values) |
62 | - for key, value in self._values.iteritems(): | |
62 | + for key, value in self._values.items(): | |
63 | 63 | if key in other_expand: |
64 | 64 | other_value = other_expand.pop(key) |
65 | 65 | if other_value > value: |
@@ -80,7 +80,7 @@ | ||
80 | 80 | self._values[replica_uid] = self._values.get(replica_uid, 0) + 1 |
81 | 81 | |
82 | 82 | def maximize(self, other_vcr): |
83 | - for replica_uid, counter in other_vcr._values.iteritems(): | |
83 | + for replica_uid, counter in other_vcr._values.items(): | |
84 | 84 | if replica_uid not in self._values: |
85 | 85 | self._values[replica_uid] = counter |
86 | 86 | else: |