From 862c8441062a752a6c3921cd3805a61cd6e4878b Mon Sep 17 00:00:00 2001 From: David Basden Date: Mon, 14 Oct 2013 16:00:09 +1100 Subject: [PATCH 001/324] Make sure a buffer is added to socket file object --- riak/transports/http/connection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/transports/http/connection.py b/riak/transports/http/connection.py index 59a13ea9..384842e1 100644 --- a/riak/transports/http/connection.py +++ b/riak/transports/http/connection.py @@ -35,7 +35,7 @@ def _request(self, method, uri, headers={}, body='', stream=False): 'multipart/mixed, application/json, */*;q=0.5') try: self._connection.request(method, uri, body, headers) - response = self._connection.getresponse() + response = self._connection.getresponse(buffering=True) if stream: # The caller is responsible for fully reading the From 85978f49b4f2aca74046ec21427cfd87b0a1f9fb Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Thu, 18 Dec 2014 17:04:55 -0700 Subject: [PATCH 002/324] Add explicitly the supported versions for PyPI: 2.6, 2.7, 2.8 and 2.9 --- setup.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/setup.py b/setup.py index 0935057e..09093ea8 100755 --- a/setup.py +++ b/setup.py @@ -46,5 +46,9 @@ classifiers=['License :: OSI Approved :: Apache Software License', 'Intended Audience :: Developers', 'Operating System :: OS Independent', + 'Programming Language :: Python :: 2.6', + 'Programming Language :: Python :: 2.7', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', 'Topic :: Database'] ) From f9acd6e17ba6cb08d6fee1ca9fe4630c20028460 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Thu, 13 Nov 2014 11:03:21 -0700 Subject: [PATCH 003/324] Initial pass at building multiple versions under tox and pyenv --- buildbot/Makefile | 15 +++++++++++--- buildbot/tox_runner.sh | 17 ++++++++++++++++ buildbot/tox_setup.sh | 46 ++++++++++++++++++++++++++++++++++++++++++ commands.py | 3 +++ tox.ini | 11 ++++++++++ 5 files changed, 89 insertions(+), 3 deletions(-) create mode 100755 buildbot/tox_runner.sh create mode 100755 buildbot/tox_setup.sh create mode 100644 tox.ini diff --git a/buildbot/Makefile b/buildbot/Makefile index e11c13f4..aa1f678f 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -21,14 +21,23 @@ lint: @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/client.crt @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/server.crt -test: test_normal test_security +test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @../setup.py disable_security --riak-admin=${RIAK_ADMIN} - @RUN_YZ=1 SKIP_DATATYPES=0 SKIP_INDEXES=0 ../setup.py test + @RUN_YZ=1 SKIP_DATATYPES=0 SKIP_INDEXES=0 ./tox_runner.sh .. test_security: @echo "Testing Riak Python Client (with security)" @../setup.py enable_security --riak-admin=${RIAK_ADMIN} - (cd ..; RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 RIAK_TEST_HTTP_PORT=18098 ./setup.py test) + @RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. 
+ +# These are required to actually build all the Python versions: +# * pip install tox +# * pyenv - https://github.com/yyuu/pyenv +# And two pyenv plugins: +# * pyenv virtualenv - https://github.com/yyuu/pyenv-virtualenv +# * pyenv alias - https://github.com/s1341/pyenv-alias +setup: + ./tox_setup.sh diff --git a/buildbot/tox_runner.sh b/buildbot/tox_runner.sh new file mode 100755 index 00000000..2a1ec737 --- /dev/null +++ b/buildbot/tox_runner.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +# pyenv root +export PYENV_ROOT="$HOME/.pyenv" + +# Add pyenv root to PATH +# and initialize pyenv +PATH="$PYENV_ROOT/bin:$PATH" +# initialize pyenv +eval "$(pyenv init -)" +# initialize pyenv virtualenv +eval "$(pyenv virtualenv-init -)" + +# Change directory if an argument is passed in +if [[ ! -z "$1" ]]; then + cd "$1" +fi +tox diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh new file mode 100755 index 00000000..46a1aee9 --- /dev/null +++ b/buildbot/tox_setup.sh @@ -0,0 +1,46 @@ +#!/usr/bin/env bash +# pyenv root +export PYENV_ROOT="$HOME/.pyenv" + +# Install pyenv if it's missing +if [[ ! -d $PYENV_ROOT ]]; then + git clone git://github.com/yyuu/pyenv.git ${PYENV_ROOT} + cd ${PYENV_ROOT} + # Get the latest tagged version + git checkout `git tag | tail -1` + git clone https://github.com/yyuu/pyenv-virtualenv.git ${PYENV_ROOT}/plugins/pyenv-virtualenv + cd plugins/pyenv-virtualenv + git checkout `git tag | tail -1` + git clone https://github.com/s1341/pyenv-alias.git ${PYENV_ROOT}/plugins/pyenv-alias + + # Add pyenv root to PATH + # and initialize pyenv + PATH="$PYENV_ROOT/bin:$PATH" + # initialize pyenv + eval "$(pyenv init -)" + # initialize pyenv virtualenv + eval "$(pyenv virtualenv-init -)" + + # Now load up (allthethings) + VERSION_ALIAS="riak_2.6.9" pyenv install 2.6.9 + VERSION_ALIAS="riak_2.7.9" pyenv install 2.7.9 + VERSION_ALIAS="riak_3.3.6" pyenv install 3.3.6 + VERSION_ALIAS="riak_3.4.2" pyenv install 3.4.2 + + pyenv virtualenv riak_2.6.9 riak-py26 + pyenv virtualenv riak_2.7.9 riak-py27 + pyenv virtualenv riak_3.3.6 riak-py33 + pyenv virtualenv riak_3.4.2 riak-py34 + pyenv global riak-py26 riak-py27 riak-py33 riak-py34 + pyenv versions +fi + +# Now install tox +if [ -z "`pip show tox`" ]; then + pip install tox + if [ -z "`pip show tox`" ]; then + echo "ERROR: Install of tox failed" + exit 1 + fi + pyenv rehash +fi diff --git a/commands.py b/commands.py index e90f074c..dce89fff 100644 --- a/commands.py +++ b/commands.py @@ -421,6 +421,9 @@ def _update_riak_conf(self): r'listener.protobuf.internal = ' + pb_host, conf) conf += 'check_crl = off\n' + # Older versions of OpenSSL client library need to match on the server + conf += 'tls_protocols.tlsv1 = on\n' + conf += 'tls_protocols.tlsv1.1 = on\n' f = open(self.riak_conf, 'w', buffering=1) f.write(conf) f.close() diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..d4f59be0 --- /dev/null +++ b/tox.ini @@ -0,0 +1,11 @@ +# Tox (http://tox.testrun.org/) is a tool for running tests +# in multiple virtualenvs. This configuration file will run the +# test suite on all supported python versions. To use it, "pip install tox" +# and then run "tox" from this directory. 
+ +[tox] +envlist = py26, py27, py33, py34 + +[testenv] +commands = {envpython} setup.py test +deps = six From ede2838fb1014444f69fc57004c95bf6eb6c99b5 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Tue, 20 Jan 2015 15:00:09 -0700 Subject: [PATCH 004/324] Remove pip uninstall to allow buildbot to build on builder --- buildbot/Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index aa1f678f..6f78453c 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -11,7 +11,6 @@ configure: @../setup.py configure --riak-admin=${RIAK_ADMIN} compile: - -@yes y | pip uninstall riak-pb protobuf pyOpenSSL @../setup.py develop lint: From 21b378c5e5fee253e532f64250a1b919f5955fd3 Mon Sep 17 00:00:00 2001 From: Alessio Caprari Date: Mon, 26 Jan 2015 16:26:24 +0100 Subject: [PATCH 005/324] Use ssl module from the standard library also on Python >= 2.7.9 --- riak/security.py | 86 +++++++++-------- riak/tests/test_security.py | 11 +-- riak/transports/http/__init__.py | 16 ++-- riak/transports/pbc/connection.py | 6 +- riak/transports/security.py | 152 +++++++++++++++--------------- 5 files changed, 140 insertions(+), 131 deletions(-) diff --git a/riak/security.py b/riak/security.py index 7da79ea7..542ff225 100644 --- a/riak/security.py +++ b/riak/security.py @@ -16,34 +16,25 @@ under the License. """ +import ssl import warnings -from six import PY2 from riak import RiakError from riak.util import str_to_long -OPENSSL_VERSION_101G = 268439679 -if PY2: +if hasattr(ssl, 'SSLContext'): + # For Python >= 2.7.9 and Python 3.x + USE_STDLIB_SSL = True +else: + # For Python 2.6 and <= 2.7.8 + USE_STDLIB_SSL = False + +if not USE_STDLIB_SSL: import OpenSSL.SSL from OpenSSL import crypto - sslver = OpenSSL.SSL.OPENSSL_VERSION_NUMBER - # Be sure to use at least OpenSSL 1.0.1g - if (sslver < OPENSSL_VERSION_101G) or \ - not hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): - verstring = OpenSSL.SSL.SSLeay_version(OpenSSL.SSL.SSLEAY_VERSION) - msg = "Found {0} version, but expected at least OpenSSL 1.0.1g. " \ - "Security may not support TLS 1.2.".format(verstring) - warnings.warn(msg, UserWarning) - if hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): - DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_2_METHOD - elif hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): - DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_1_METHOD - elif hasattr(OpenSSL.SSL, 'TLSv1_METHOD'): - DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_METHOD - else: - DEFAULT_TLS_VERSION = OpenSSL.SSL.SSLv23_METHOD -else: - import ssl +OPENSSL_VERSION_101G = 268439679 +if hasattr(ssl, 'OPENSSL_VERSION_NUMBER'): + # For Python 2.7 and Python 3.x sslver = ssl.OPENSSL_VERSION_NUMBER # Be sure to use at least OpenSSL 1.0.1g if sslver < OPENSSL_VERSION_101G or \ @@ -61,6 +52,25 @@ else: DEFAULT_TLS_VERSION = ssl.PROTOCOL_SSLv23 +else: + # For Python 2.6 + sslver = OpenSSL.SSL.OPENSSL_VERSION_NUMBER + # Be sure to use at least OpenSSL 1.0.1g + if (sslver < OPENSSL_VERSION_101G) or \ + not hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): + verstring = OpenSSL.SSL.SSLeay_version(OpenSSL.SSL.SSLEAY_VERSION) + msg = "Found {0} version, but expected at least OpenSSL 1.0.1g. 
" \ + "Security may not support TLS 1.2.".format(verstring) + warnings.warn(msg, UserWarning) + if hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): + DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_2_METHOD + elif hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'): + DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_1_METHOD + elif hasattr(OpenSSL.SSL, 'TLSv1_METHOD'): + DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_METHOD + else: + DEFAULT_TLS_VERSION = OpenSSL.SSL.SSLv23_METHOD + class SecurityError(RiakError): """ @@ -197,7 +207,7 @@ def ssl_version(self): """ return self._ssl_version - if PY2: + if not USE_STDLIB_SSL: @property def pkey(self): """ @@ -266,20 +276,20 @@ def _has_credential(self, key): return (getattr(self, internal_key) is not None) or \ (getattr(self, internal_key + "_file") is not None) - def _check_revoked_cert(self, ssl_socket): - """ - Checks whether the server certificate on the passed socket has been - revoked by checking the CRL. + def _check_revoked_cert(self, ssl_socket): + """ + Checks whether the server certificate on the passed socket has been + revoked by checking the CRL. - :param ssl_socket: the SSL/TLS socket - :rtype: bool - :raises SecurityError: when the certificate has been revoked - """ - if not self._has_credential('crl'): - return True - - servcert = ssl_socket.get_peer_certificate() - servserial = servcert.get_serial_number() - for rev in self.crl.get_revoked(): - if servserial == str_to_long(rev.get_serial(), 16): - raise SecurityError("Server certificate has been revoked") + :param ssl_socket: the SSL/TLS socket + :rtype: bool + :raises SecurityError: when the certificate has been revoked + """ + if not self._has_credential('crl'): + return True + + servcert = ssl_socket.get_peer_certificate() + servserial = servcert.get_serial_number() + for rev in self.crl.get_revoked(): + if servserial == str_to_long(rev.get_serial(), 16): + raise SecurityError("Server certificate has been revoked") diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index b036a94b..ffcada84 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -17,8 +17,8 @@ under the License. 
""" -import platform -if platform.python_version() < '2.7': +import sys +if sys.version_info < (2, 7): unittest = __import__('unittest2') else: import unittest @@ -26,7 +26,6 @@ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ SECURITY_CERT_USER, SECURITY_CERT_PASSWD, SECURITY_BAD_CERT from riak.security import SecurityCreds -from six import PY3 class SecurityTests(object): @@ -110,9 +109,9 @@ def test_security_revoked_cert(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, cacert_file=SECURITY_CACERT, crl_file=SECURITY_REVOKED) - # Curenly Python 3.x native CRL doesn't seem to work - # as advertised - if PY3: + # Currently Python >= 2.7.9 and Python 3.x native CRL doesn't seem to + # work as advertised + if sys.version_info >= (2, 7, 9): return client = self.create_client(credentials=creds) with self.assertRaises(Exception): diff --git a/riak/transports/http/__init__.py b/riak/transports/http/__init__.py index 1d073604..deb334a2 100644 --- a/riak/transports/http/__init__.py +++ b/riak/transports/http/__init__.py @@ -19,16 +19,21 @@ import socket import select from six import PY2 -if PY2: +from riak.security import SecurityError, USE_STDLIB_SSL +if USE_STDLIB_SSL: + import ssl + from riak.transports.security import configure_ssl_context +else: import OpenSSL.SSL + from riak.transports.security import RiakWrappedSocket,\ + configure_pyopenssl_context +if PY2: from httplib import HTTPConnection, \ NotConnected, \ IncompleteRead, \ ImproperConnectionState, \ BadStatusLine, \ HTTPSConnection - from riak.transports.security import RiakWrappedSocket,\ - configure_pyopenssl_context else: from http.client import HTTPConnection, \ HTTPSConnection, \ @@ -36,10 +41,7 @@ IncompleteRead, \ ImproperConnectionState, \ BadStatusLine - import ssl - from riak.transports.security import configure_ssl_context -from riak.security import SecurityError from riak.transports.pool import Pool from riak.transports.http.transport import RiakHttpTransport @@ -106,7 +108,7 @@ def connect(self): Connect to a host on a given (SSL) port using PyOpenSSL. """ sock = socket.create_connection((self.host, self.port), self.timeout) - if PY2: + if not USE_STDLIB_SSL: ssl_ctx = configure_pyopenssl_context(self.credentials) # attempt to upgrade the socket to TLS diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 6f4ee95a..293d05c3 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -19,7 +19,7 @@ import socket import struct import riak_pb -from riak.security import SecurityError +from riak.security import SecurityError, USE_STDLIB_SSL from riak import RiakError from riak_pb.messages import ( MESSAGE_CLASSES, @@ -30,7 +30,7 @@ ) from riak.util import bytes_to_str, str_to_bytes from six import PY2 -if PY2: +if not USE_STDLIB_SSL: from OpenSSL.SSL import Connection from riak.transports.security import configure_pyopenssl_context else: @@ -113,7 +113,7 @@ def _auth(self): else: return False - if PY2: + if not USE_STDLIB_SSL: def _ssl_handshake(self): """ Perform an SSL handshake w/ the server. 
diff --git a/riak/transports/security.py b/riak/transports/security.py index 8a098449..b108e427 100644 --- a/riak/transports/security.py +++ b/riak/transports/security.py @@ -17,16 +17,15 @@ """ import socket -from six import PY2 -if PY2: +from riak.security import SecurityError, USE_STDLIB_SSL +if USE_STDLIB_SSL: + import ssl +else: import OpenSSL.SSL try: from cStringIO import StringIO except ImportError: from StringIO import StringIO -else: - import ssl -from riak.security import SecurityError def verify_cb(conn, cert, errnum, depth, ok): @@ -39,42 +38,10 @@ def verify_cb(conn, cert, errnum, depth, ok): return ok -if PY2: - def configure_pyopenssl_context(credentials): - """ - Set various options on the SSL context for Python 2.x. - - :param credentials: Riak Security Credentials - :type credentials: :class:`~riak.security.SecurityCreds` - :rtype ssl_ctx: :class:`~OpenSSL.SSL.Context` - """ - - ssl_ctx = OpenSSL.SSL.Context(credentials.ssl_version) - if credentials._has_credential('pkey'): - ssl_ctx.use_privatekey(credentials.pkey) - if credentials._has_credential('cert'): - ssl_ctx.use_certificate(credentials.cert) - if credentials._has_credential('cacert'): - store = ssl_ctx.get_cert_store() - cacerts = credentials.cacert - if not isinstance(cacerts, list): - cacerts = [cacerts] - for cacert in cacerts: - store.add_cert(cacert) - else: - raise SecurityError("cacert_file is required in SecurityCreds") - ciphers = credentials.ciphers - if ciphers is not None: - ssl_ctx.set_cipher_list(ciphers) - # Demand a certificate - ssl_ctx.set_verify(OpenSSL.SSL.VERIFY_PEER | - OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, - verify_cb) - return ssl_ctx -else: +if USE_STDLIB_SSL: def configure_ssl_context(credentials): """ - Set various options on the SSL context for Python 3.x. + Set various options on the SSL context for Python >= 2.7.9 and 3.x. N.B. versions earlier than 3.4 may not support all security measures, e.g., hostname check. @@ -121,49 +88,80 @@ def configure_ssl_context(credentials): return ssl_ctx - -# Inspired by -# https://github.com/shazow/urllib3/blob/master/urllib3/contrib/pyopenssl.py -class RiakWrappedSocket(socket.socket): - def __init__(self, connection, socket): +else: + def configure_pyopenssl_context(credentials): """ - API-compatibility wrapper for Python OpenSSL's Connection-class. + Set various options on the SSL context for Python <= 2.7.8. 
- :param connection: OpenSSL connection - :type connection: OpenSSL.SSL.Connection - :param socket: Underlying already connected socket - :type socket: socket + :param credentials: Riak Security Credentials + :type credentials: :class:`~riak.security.SecurityCreds` + :rtype ssl_ctx: :class:`~OpenSSL.SSL.Context` """ - self.connection = connection - self.socket = socket - - def fileno(self): - return self.socket.fileno() - - def makefile(self, mode, bufsize=-1): - return fileobject(self.connection, mode, bufsize) - - def settimeout(self, timeout): - return self.socket.settimeout(timeout) - - def sendall(self, data): - # SSL seems to need bytes, so force the data to byte encoding - return self.connection.sendall(bytes(data)) - - def close(self): - try: - return self.connection.shutdown() - except OpenSSL.SSL.Error as err: - if err.args == ([],): - return False - else: - raise err + ssl_ctx = OpenSSL.SSL.Context(credentials.ssl_version) + if credentials._has_credential('pkey'): + ssl_ctx.use_privatekey(credentials.pkey) + if credentials._has_credential('cert'): + ssl_ctx.use_certificate(credentials.cert) + if credentials._has_credential('cacert'): + store = ssl_ctx.get_cert_store() + cacerts = credentials.cacert + if not isinstance(cacerts, list): + cacerts = [cacerts] + for cacert in cacerts: + store.add_cert(cacert) + else: + raise SecurityError("cacert_file is required in SecurityCreds") + ciphers = credentials.ciphers + if ciphers is not None: + ssl_ctx.set_cipher_list(ciphers) + # Demand a certificate + ssl_ctx.set_verify(OpenSSL.SSL.VERIFY_PEER | + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, + verify_cb) + return ssl_ctx -# Blatantly Stolen from -# https://github.com/shazow/urllib3/blob/master/urllib3/contrib/pyopenssl.py -# which is basically a port of the `socket._fileobject` class -if PY2: + # Inspired by + # https://github.com/shazow/urllib3/blob/master/urllib3/contrib/pyopenssl.py + class RiakWrappedSocket(socket.socket): + def __init__(self, connection, socket): + """ + API-compatibility wrapper for Python OpenSSL's Connection-class. + + :param connection: OpenSSL connection + :type connection: OpenSSL.SSL.Connection + :param socket: Underlying already connected socket + :type socket: socket + """ + self.connection = connection + self.socket = socket + + def fileno(self): + return self.socket.fileno() + + def makefile(self, mode, bufsize=-1): + return fileobject(self.connection, mode, bufsize) + + def settimeout(self, timeout): + return self.socket.settimeout(timeout) + + def sendall(self, data): + # SSL seems to need bytes, so force the data to byte encoding + return self.connection.sendall(bytes(data)) + + def close(self): + try: + return self.connection.shutdown() + except OpenSSL.SSL.Error as err: + if err.args == ([],): + return False + else: + raise err + + + # Blatantly Stolen from + # https://github.com/shazow/urllib3/blob/master/urllib3/contrib/pyopenssl.py + # which is basically a port of the `socket._fileobject` class class fileobject(socket._fileobject): """ Extension of the socket module's fileobject to use PyOpenSSL. 
From 8a127a6b28ca54864e4d5a0d3c4957ad811bc83a Mon Sep 17 00:00:00 2001 From: Alessio Caprari Date: Mon, 26 Jan 2015 16:57:06 +0100 Subject: [PATCH 006/324] Include dependency on pyOpenSSL only on Python < 2.7.9 --- setup.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/setup.py b/setup.py index 09093ea8..549f2799 100755 --- a/setup.py +++ b/setup.py @@ -1,5 +1,5 @@ #!/usr/bin/env python -import platform +import sys from setuptools import setup, find_packages from version import get_version from commands import preconfigure, configure, create_bucket_types, \ @@ -7,16 +7,17 @@ install_requires = ['six >= 1.8.0'] requires = ['six(>=1.8.0)'] -if platform.python_version() < '3.0': +if sys.version_info < (2, 7, 9): install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") +if sys.version_info < (3, ): install_requires.append("riak_pb >=2.0.0") requires.append("riak_pb(>=2.0.0)") else: install_requires.append("python3_riak_pb >=2.0.0") requires.append("python3_riak_pb(>=2.0.0)") tests_require = [] -if platform.python_version() < '2.7': +if sys.version_info < (2, 7): tests_require.append("unittest2") setup( From 3b4403400f58b5202c33ef8cde46313e40e5305c Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Mon, 15 Jun 2015 13:50:14 -0600 Subject: [PATCH 007/324] Fix all PEP8 warnings for CLIENTS-361 --- commands.py | 8 +++--- riak/__init__.py | 32 ++++------------------ riak/bucket.py | 7 ++--- riak/client/__init__.py | 3 +- riak/client/operations.py | 37 +++++++++++++++++++------ riak/datatypes/__init__.py | 22 ++++++++++++--- riak/datatypes/counter.py | 20 +++++++++++++- riak/datatypes/datatype.py | 22 +++++++++++++-- riak/datatypes/errors.py | 4 +-- riak/datatypes/flag.py | 20 +++++++++++++- riak/datatypes/map.py | 23 ++++++++++++++-- riak/datatypes/register.py | 20 +++++++++++++- riak/datatypes/set.py | 20 +++++++++++++- riak/datatypes/types.py | 22 +++++++++++++++ riak/mapreduce.py | 11 ++++---- riak/resolver.py | 4 +-- riak/riak_error.py | 38 ++++++++++++++++++++++++++ riak/riak_object.py | 3 +- riak/tests/pool-grinder.py | 27 ++++++++++++++---- riak/tests/test_2i.py | 23 ++++++++++++++-- riak/tests/test_all.py | 35 ++++++++++++++++++------ riak/tests/test_btypes.py | 25 ++++++++++++++--- riak/tests/test_comparison.py | 25 ++++++++++++++--- riak/tests/test_datatypes.py | 28 +++++++++++++++---- riak/tests/test_feature_detection.py | 5 ++-- riak/tests/test_filters.py | 18 ++++++++++++ riak/tests/test_kv.py | 41 ++++++++++++++++++++-------- riak/tests/test_mapreduce.py | 21 ++++++++++++-- riak/tests/test_pool.py | 15 +++++----- riak/tests/test_search.py | 21 ++++++++++++-- riak/tests/test_security.py | 10 +++---- riak/tests/test_yokozuna.py | 21 ++++++++++++-- riak/transports/http/__init__.py | 7 ++--- riak/transports/http/codec.py | 16 +++++------ riak/transports/http/connection.py | 6 ++-- riak/transports/http/resources.py | 6 ++-- riak/transports/http/transport.py | 14 +++++----- riak/transports/pbc/transport.py | 7 +++-- riak/transports/security.py | 3 +- riak/util.py | 4 +-- version.py | 5 ++-- 41 files changed, 535 insertions(+), 164 deletions(-) create mode 100644 riak/datatypes/types.py create mode 100644 riak/riak_error.py diff --git a/commands.py b/commands.py index dce89fff..1cf60eb5 100644 --- a/commands.py +++ b/commands.py @@ -1,10 +1,6 @@ """ distutils commands for riak-python-client """ - -__all__ = ['create_bucket_types', 'setup_security', 'enable_security', - 'disable_security', 'preconfigure', 'configure'] - from distutils import 
log from distutils.core import Command from distutils.errors import DistutilsOptionError @@ -15,6 +11,10 @@ import os.path +__all__ = ['create_bucket_types', 'setup_security', 'enable_security', + 'disable_security', 'preconfigure', 'configure'] + + # Exception classes used by this module. class CalledProcessError(Exception): """This exception is raised when a process run by check_call() or diff --git a/riak/__init__.py b/riak/__init__.py index 3806af49..eddc69bc 100644 --- a/riak/__init__.py +++ b/riak/__init__.py @@ -30,38 +30,18 @@ @author Jay Baird (@skatterbean) (jay@mochimedia.com) """ -__all__ = ['RiakBucket', 'BucketType', 'RiakNode', 'RiakObject', 'RiakClient', - 'RiakMapReduce', 'RiakKeyFilter', 'RiakLink', 'RiakError', - 'ConflictError', 'ONE', 'ALL', 'QUORUM', 'key_filter'] - - -class RiakError(Exception): - """ - Base class for exceptions generated in the Riak API. - """ - def __init__(self, value): - self.value = value - - def __str__(self): - return repr(self.value) - - -class ConflictError(RiakError): - """ - Raised when an operation is attempted on a - :class:`~riak.riak_object.RiakObject` that has more than one - sibling. - """ - def __init__(self, message="Object in conflict"): - super(ConflictError, self).__init__(message) - - +from riak.riak_error import RiakError, ConflictError from riak.client import RiakClient from riak.bucket import RiakBucket, BucketType from riak.node import RiakNode from riak.riak_object import RiakObject from riak.mapreduce import RiakKeyFilter, RiakMapReduce, RiakLink + +__all__ = ['RiakBucket', 'BucketType', 'RiakNode', 'RiakObject', 'RiakClient', + 'RiakMapReduce', 'RiakKeyFilter', 'RiakLink', 'RiakError', + 'ConflictError', 'ONE', 'ALL', 'QUORUM', 'key_filter'] + ONE = "one" ALL = "all" QUORUM = "quorum" diff --git a/riak/bucket.py b/riak/bucket.py index d7bbd9fb..a6b7b192 100644 --- a/riak/bucket.py +++ b/riak/bucket.py @@ -20,6 +20,7 @@ from six import string_types, PY2 import mimetypes from riak.util import lazy_property +from riak.datatypes import TYPES def bucket_property(name, doc=None): @@ -172,6 +173,7 @@ def new(self, key=None, data=None, content_type='application/json', :class:`~riak.datatypes.Datatype` """ + from riak import RiakObject if self.bucket_type.datatype: return TYPES[self.bucket_type.datatype](bucket=self, key=key) @@ -217,6 +219,7 @@ def get(self, key, r=None, pr=None, timeout=None, include_context=None, :class:`~riak.datatypes.Datatype` """ + from riak import RiakObject if self.bucket_type.datatype: return self._client.fetch_datatype(self, key, r=r, pr=pr, timeout=timeout, @@ -736,7 +739,3 @@ def __ne__(self, other): return hash(self) != hash(other) else: return True - - -from riak.riak_object import RiakObject -from riak.datatypes import TYPES diff --git a/riak/client/__init__.py b/riak/client/__init__.py index 1d7cfa68..002991d8 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -36,6 +36,7 @@ from riak.security import SecurityCreds from riak.util import lazy_property, bytes_to_str, str_to_bytes from six import string_types, PY2 +from riak.client.multiget import MultiGetPool def default_encoder(obj): @@ -371,5 +372,3 @@ def __ne__(self, other): return hash(self) != hash(other) else: return True - -from riak.client.multiget import MultiGetPool diff --git a/riak/client/operations.py b/riak/client/operations.py index 07109846..991c5f67 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -58,11 +58,11 @@ def get_buckets(self, transport, bucket_type=None, timeout=None): """ 
_validate_timeout(timeout) if bucket_type: - bucketfn = lambda name: bucket_type.bucket(name) + bucketfn = self._bucket_type_bucket_builder else: - bucketfn = lambda name: self.bucket(name) + bucketfn = self._default_type_bucket_builder - return [bucketfn(bytes_to_str(name)) for name in + return [bucketfn(bytes_to_str(name), bucket_type) for name in transport.get_buckets(bucket_type=bucket_type, timeout=timeout)] @@ -103,9 +103,9 @@ def stream_buckets(self, bucket_type=None, timeout=None): """ _validate_timeout(timeout) if bucket_type: - bucketfn = lambda name: bucket_type.bucket(name) + bucketfn = self._bucket_type_bucket_builder else: - bucketfn = lambda name: self.bucket(name) + bucketfn = self._default_type_bucket_builder resource = self._acquire() transport = resource.object @@ -114,7 +114,7 @@ def stream_buckets(self, bucket_type=None, timeout=None): stream.attach(resource) try: for bucket_list in stream: - bucket_list = [bucketfn(bytes_to_str(name)) + bucket_list = [bucketfn(bytes_to_str(name), bucket_type) for name in bucket_list] if len(bucket_list) > 0: yield bucket_list @@ -1000,6 +1000,27 @@ def update_datatype(self, datatype, w=None, dw=None, pw=None, timeout=timeout, include_context=include_context) + def _bucket_type_bucket_builder(self, name, bucket_type): + """ + Build a bucket from a bucket type + + :param name: Bucket name + :param bucket_type: A bucket type + :return: A bucket object + """ + return bucket_type.bucket(name) + + def _default_type_bucket_builder(self, name, unused): + """ + Build a bucket for the default bucket type + + :param name: Default bucket name + :param unused: Unused + :return: A bucket object + """ + del unused # Ignored parameters. + return self.bucket(name) + @retryable def _fetch_datatype(self, transport, bucket, key, r=None, pr=None, basic_quorum=None, notfound_ok=None, @@ -1052,6 +1073,6 @@ def _validate_timeout(timeout): Raises an exception if the given timeout is an invalid value. """ if not (timeout is None or - ((type(timeout) == int or (PY2 and type(timeout) == long)) - and timeout > 0)): + ((type(timeout) == int or (PY2 and type(timeout) == long)) and + timeout > 0)): raise ValueError("timeout must be a positive integer") diff --git a/riak/datatypes/__init__.py b/riak/datatypes/__init__.py index 8ffd49cf..21235ce6 100644 --- a/riak/datatypes/__init__.py +++ b/riak/datatypes/__init__.py @@ -1,8 +1,22 @@ -#: A dict from :attr:`type names ` to the -#: class that implements them. This is used inside :class:`Map` to -#: initialize new values. -TYPES = {} +""" +Copyright 2015 Basho Technologies, Inc. +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + +from .types import TYPES from .datatype import Datatype from .counter import Counter from .flag import Flag diff --git a/riak/datatypes/counter.py b/riak/datatypes/counter.py index af08df2f..1fad5ea8 100644 --- a/riak/datatypes/counter.py +++ b/riak/datatypes/counter.py @@ -1,4 +1,23 @@ +""" +Copyright 2015 Basho Technologies, Inc. 
+ +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + from riak.datatypes.datatype import Datatype +from riak.datatypes import TYPES class Counter(Datatype): @@ -57,5 +76,4 @@ def _check_type(self, new_value): isinstance(new_value, long)) -from riak.datatypes import TYPES TYPES['counter'] = Counter diff --git a/riak/datatypes/datatype.py b/riak/datatypes/datatype.py index 4aed67e4..a28d11cd 100644 --- a/riak/datatypes/datatype.py +++ b/riak/datatypes/datatype.py @@ -1,4 +1,24 @@ +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + + from .errors import ContextRequired +from . import TYPES class Datatype(object): @@ -212,5 +232,3 @@ def _require_context(self): """ if not self._context: raise ContextRequired() - -from . import TYPES diff --git a/riak/datatypes/errors.py b/riak/datatypes/errors.py index 4e68707f..71353f8f 100644 --- a/riak/datatypes/errors.py +++ b/riak/datatypes/errors.py @@ -12,5 +12,5 @@ class ContextRequired(RiakError): "fetch the datatype first") def __init__(self, message=None): - super(ContextRequired, self).__init__(message - or self._default_message) + super(ContextRequired, self).__init__(message or + self._default_message) diff --git a/riak/datatypes/flag.py b/riak/datatypes/flag.py index 0b55f472..494dd799 100644 --- a/riak/datatypes/flag.py +++ b/riak/datatypes/flag.py @@ -1,4 +1,23 @@ +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + from riak.datatypes.datatype import Datatype +from riak.datatypes import TYPES class Flag(Datatype): @@ -49,5 +68,4 @@ def _check_type(self, new_value): return isinstance(new_value, bool) -from riak.datatypes import TYPES TYPES['flag'] = Flag diff --git a/riak/datatypes/map.py b/riak/datatypes/map.py index e9460101..4ea64f67 100644 --- a/riak/datatypes/map.py +++ b/riak/datatypes/map.py @@ -1,6 +1,25 @@ +""" +Copyright 2015 Basho Technologies, Inc. 
+ +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + from collections import Mapping from riak.util import lazy_property from .datatype import Datatype +from riak.datatypes import TYPES class TypedMapView(Mapping): @@ -238,8 +257,7 @@ def modified(self): """ Whether the map has staged local modifications. """ - is_modified = lambda x: x.modified - values_modified = [is_modified(self._value[v]) for v in self._value] + values_modified = [self._value[v].modified for v in self._value] modified = (any(values_modified) or self._removes or self._updates) if modified: return True @@ -282,5 +300,4 @@ def _extract_updates(self, d): yield ('update', key, d[key].to_op()) -from riak.datatypes import TYPES TYPES['map'] = Map diff --git a/riak/datatypes/register.py b/riak/datatypes/register.py index fe231e64..1d6813b8 100644 --- a/riak/datatypes/register.py +++ b/riak/datatypes/register.py @@ -1,6 +1,25 @@ +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + from collections import Sized from riak.datatypes.datatype import Datatype from six import string_types +from riak.datatypes import TYPES class Register(Sized, Datatype): @@ -61,5 +80,4 @@ def _check_type(self, new_value): return isinstance(new_value, string_types) -from riak.datatypes import TYPES TYPES['register'] = Register diff --git a/riak/datatypes/set.py b/riak/datatypes/set.py index a2d5b1d9..a055020a 100644 --- a/riak/datatypes/set.py +++ b/riak/datatypes/set.py @@ -1,6 +1,25 @@ +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. 
+""" + import collections from .datatype import Datatype from six import string_types +from riak.datatypes import TYPES __all__ = ['Set'] @@ -113,5 +132,4 @@ def _check_element(element): raise TypeError("Set elements can only be strings") -from riak.datatypes import TYPES TYPES['set'] = Set diff --git a/riak/datatypes/types.py b/riak/datatypes/types.py new file mode 100644 index 00000000..f349de25 --- /dev/null +++ b/riak/datatypes/types.py @@ -0,0 +1,22 @@ +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + +#: A dict from :attr:`type names ` to the +#: class that implements them. This is used inside :class:`Map` to +#: initialize new values. +TYPES = {} diff --git a/riak/mapreduce.py b/riak/mapreduce.py index c2b797e0..920b7d3d 100644 --- a/riak/mapreduce.py +++ b/riak/mapreduce.py @@ -22,6 +22,8 @@ from collections import Iterable, namedtuple from riak import RiakError from six import string_types, PY2 +from riak.bucket import RiakBucket + #: Links are just bucket/key/tag tuples, this class provides a #: backwards-compatible format: ``RiakLink(bucket, key, tag)`` @@ -66,6 +68,7 @@ def add(self, arg1, arg2=None, arg3=None, bucket_type=None): :type bucket_type: string, None :rtype: :class:`RiakMapReduce` """ + from riak.riak_object import RiakObject if (arg2 is None) and (arg3 is None): if isinstance(arg1, RiakObject): return self.add_object(arg1) @@ -82,6 +85,7 @@ def add_object(self, obj): :type obj: RiakObject :rtype: :class:`RiakMapReduce` """ + from riak.riak_object import RiakObject return self.add_bucket_key_data(obj._bucket._name, obj._key, None) def add_bucket_key_data(self, bucket, key, data, bucket_type=None): @@ -319,8 +323,8 @@ def run(self, timeout=None): raise e # If the last phase is NOT a link phase, then return the result. - if not (link_results_flag - or isinstance(self._phases[-1], RiakLinkPhase)): + if not (link_results_flag or + isinstance(self._phases[-1], RiakLinkPhase)): return result # If there are no results, then return an empty list. @@ -780,6 +784,3 @@ def reduce(self, *args): """ mr = RiakMapReduce(self) return mr.reduce(*args) - -from riak.riak_object import RiakObject -from riak.bucket import RiakBucket diff --git a/riak/resolver.py b/riak/resolver.py index c54779ca..d56ae5f5 100644 --- a/riak/resolver.py +++ b/riak/resolver.py @@ -40,5 +40,5 @@ def last_written_resolver(riak_object): :param riak_object: an object-in-conflict that will be resolved :type riak_object: :class:`RiakObject ` """ - lm = lambda x: x.last_modified - riak_object.siblings = [max(riak_object.siblings, key=lm), ] + riak_object.siblings = [max(riak_object.siblings, + key=lambda x: x.last_modified), ] diff --git a/riak/riak_error.py b/riak/riak_error.py new file mode 100644 index 00000000..ce582bbb --- /dev/null +++ b/riak/riak_error.py @@ -0,0 +1,38 @@ +""" +Copyright 2015 Basho Technologies, Inc. 
+ +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + + +class RiakError(Exception): + """ + Base class for exceptions generated in the Riak API. + """ + def __init__(self, value): + self.value = value + + def __str__(self): + return repr(self.value) + + +class ConflictError(RiakError): + """ + Raised when an operation is attempted on a + :class:`~riak.riak_object.RiakObject` that has more than one + sibling. + """ + def __init__(self, message="Object in conflict"): + super(ConflictError, self).__init__(message) diff --git a/riak/riak_object.py b/riak/riak_object.py index 800822d9..2db8b5e8 100644 --- a/riak/riak_object.py +++ b/riak/riak_object.py @@ -22,6 +22,7 @@ from riak.content import RiakContent import base64 from six import string_types, PY2 +from riak.mapreduce import RiakMapReduce def content_property(name, doc=None): @@ -410,5 +411,3 @@ def reduce(self, *args): mr = RiakMapReduce(self.client) mr.add(self.bucket.name, self.key) return mr.reduce(*args) - -from riak.mapreduce import RiakMapReduce diff --git a/riak/tests/pool-grinder.py b/riak/tests/pool-grinder.py index 09bef278..6bf0f2d4 100755 --- a/riak/tests/pool-grinder.py +++ b/riak/tests/pool-grinder.py @@ -1,17 +1,34 @@ #!/usr/bin/env python +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" from __future__ import print_function from six import PY2 -if PY2: - from Queue import Queue -else: - from queue import Queue from threading import Thread import sys -sys.path.append("../transports/") from pool import Pool from random import SystemRandom from time import sleep +if PY2: + from Queue import Queue +else: + from queue import Queue +sys.path.append("../transports/") class SimplePool(Pool): diff --git a/riak/tests/test_2i.py b/riak/tests/test_2i.py index 66dd8bee..a5559031 100644 --- a/riak/tests/test_2i.py +++ b/riak/tests/test_2i.py @@ -1,13 +1,30 @@ # -*- coding: utf-8 -*- +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. 
+""" + import platform +from riak import RiakError +from . import SKIP_INDEXES if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -from riak import RiakError -from . import SKIP_INDEXES - class TwoITests(object): def is_2i_supported(self): diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index 48130e7e..b1794291 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -1,16 +1,25 @@ # -*- coding: utf-8 -*- +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" import random import platform from six import PY2 from threading import Thread -if PY2: - from Queue import Queue -else: - from queue import Queue -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest from riak import RiakError from riak.client import RiakClient @@ -32,6 +41,16 @@ HAVE_PROTO, DUMMY_HTTP_PORT, DUMMY_PB_PORT, \ SKIP_SEARCH, RUN_YZ, SECURITY_CREDS, SKIP_POOL, test_six +if PY2: + from Queue import Queue +else: + from queue import Queue + +if platform.python_version() < '2.7': + unittest = __import__('unittest2') +else: + import unittest + testrun_search_bucket = None testrun_props_bucket = None testrun_sibs_bucket = None diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index c55b3e18..89d298b3 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -1,14 +1,31 @@ +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + import platform +from . import SKIP_BTYPES +from riak.bucket import RiakBucket, BucketType +from riak import RiakError, RiakObject if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -from . import SKIP_BTYPES -from riak.bucket import RiakBucket, BucketType -from riak import RiakError, RiakObject - class BucketTypeTests(object): def test_btype_init(self): diff --git a/riak/tests/test_comparison.py b/riak/tests/test_comparison.py index 3d30f4fd..38a1ef9f 100644 --- a/riak/tests/test_comparison.py +++ b/riak/tests/test_comparison.py @@ -1,14 +1,31 @@ +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. 
You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + import platform +from riak.riak_object import RiakObject +from riak.bucket import RiakBucket, BucketType +from riak.tests.test_all import BaseTestCase if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -from riak.riak_object import RiakObject -from riak.bucket import RiakBucket, BucketType -from riak.tests.test_all import BaseTestCase - class BucketTypeRichComparisonTest(unittest.TestCase): def test_btype_eq(self): diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 76a3e132..9c6b3a0b 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -1,15 +1,33 @@ # -*- coding: utf-8 -*- -import platform -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + +import platform from riak import RiakBucket, BucketType, RiakObject import riak.datatypes as datatypes from . import SKIP_DATATYPES from riak.tests import test_six +if platform.python_version() < '2.7': + unittest = __import__('unittest2') +else: + import unittest + class DatatypeUnitTests(object): dtype = None diff --git a/riak/tests/test_feature_detection.py b/riak/tests/test_feature_detection.py index 8efc43f6..11dadc75 100644 --- a/riak/tests/test_feature_detection.py +++ b/riak/tests/test_feature_detection.py @@ -1,5 +1,5 @@ """ -Copyright 2012-2014 Basho Technologies, Inc. +Copyright 2012-2015 Basho Technologies, Inc. This file is provided to you under the Apache License, Version 2.0 (the "License"); you may not use this file @@ -17,14 +17,13 @@ """ import platform +from riak.transports.feature_detect import FeatureDetection if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -from riak.transports.feature_detect import FeatureDetection - class IncompleteTransport(FeatureDetection): pass diff --git a/riak/tests/test_filters.py b/riak/tests/test_filters.py index 73d4771c..00e9d0af 100644 --- a/riak/tests/test_filters.py +++ b/riak/tests/test_filters.py @@ -1,3 +1,21 @@ +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. 
See the License for the +specific language governing permissions and limitations +under the License. +""" + import platform from riak.mapreduce import RiakKeyFilter from riak import key_filter diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index e443412e..822dd9ab 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -1,19 +1,32 @@ # -*- coding: utf-8 -*- +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + import os import platform from six import string_types, PY2, PY3 -if PY2: - import cPickle - test_pickle_dumps = cPickle.dumps - test_pickle_loads = cPickle.loads -else: - import pickle - test_pickle_dumps = pickle.dumps - test_pickle_loads = pickle.loads + import copy from time import sleep from riak import ConflictError, RiakBucket, RiakError from riak.resolver import default_resolver, last_written_resolver +from . import SKIP_RESOLVE + try: import simplejson as json except ImportError: @@ -24,7 +37,14 @@ else: import unittest -from . import SKIP_RESOLVE +if PY2: + import cPickle + test_pickle_dumps = cPickle.dumps + test_pickle_loads = cPickle.loads +else: + import pickle + test_pickle_dumps = pickle.dumps + test_pickle_loads = pickle.loads class NotJsonSerializable(object): @@ -426,8 +446,7 @@ def test_resolution(self): # Define our own custom resolver on the object that returns # the maximum value, overriding the bucket and client resolvers def max_value_resolver(obj): - datafun = lambda s: s.data - obj.siblings = [max(obj.siblings, key=datafun), ] + obj.siblings = [max(obj.siblings, key=lambda s: s.data), ] obj.resolver = max_value_resolver obj.reload() diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index f22a24f6..c15ff7b1 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -1,4 +1,21 @@ # -*- coding: utf-8 -*- +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" from __future__ import print_function from six import PY2 @@ -7,13 +24,13 @@ from riak.tests.test_yokozuna import wait_for_yz_index from riak.tests import RUN_SECURITY import platform + +from . import RUN_YZ if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -from . 
import RUN_YZ - class LinkTests(object): def test_store_and_get_links(self): diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index 7984d436..6355eee0 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -1,5 +1,5 @@ """ -Copyright 2012 Basho Technologies, Inc. +Copyright 2015 Basho Technologies, Inc. This file is provided to you under the Apache License, Version 2.0 (the "License"); you may not use this file @@ -18,21 +18,22 @@ from six import PY2 import platform -if PY2: - from Queue import Queue -else: - from queue import Queue from threading import Thread, currentThread from riak.transports.pool import Pool, BadResource from random import SystemRandom from time import sleep +from . import SKIP_POOL +from riak.tests import test_six if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -from . import SKIP_POOL -from riak.tests import test_six + +if PY2: + from Queue import Queue +else: + from queue import Queue class SimplePool(Pool): diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index fe8a23bd..eed22e2c 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -1,13 +1,30 @@ # -*- coding: utf-8 -*- +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + from __future__ import print_function import platform +from . import SKIP_SEARCH if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -from . import SKIP_SEARCH - class EnableSearchTests(object): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index ffcada84..c76662aa 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- """ -Copyright 2014 Basho Technologies, Inc. +Copyright 2015 Basho Technologies, Inc. This file is provided to you under the Apache License, Version 2.0 (the "License"); you may not use this file @@ -18,14 +18,14 @@ """ import sys -if sys.version_info < (2, 7): - unittest = __import__('unittest2') -else: - import unittest from riak.tests import RUN_SECURITY, SECURITY_USER, SECURITY_PASSWD, \ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ SECURITY_CERT_USER, SECURITY_CERT_PASSWD, SECURITY_BAD_CERT from riak.security import SecurityCreds +if sys.version_info < (2, 7): + unittest = __import__('unittest2') +else: + import unittest class SecurityTests(object): diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index 1439373e..4310784a 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -1,12 +1,29 @@ # -*- coding: utf-8 -*- +""" +Copyright 2015 Basho Technologies, Inc. + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. 
You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + import platform +from . import RUN_YZ if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -from . import RUN_YZ - def wait_for_yz_index(bucket, key, index=None): """ diff --git a/riak/transports/http/__init__.py b/riak/transports/http/__init__.py index deb334a2..c28a0034 100644 --- a/riak/transports/http/__init__.py +++ b/riak/transports/http/__init__.py @@ -1,5 +1,5 @@ """ -Copyright 2014 Basho Technologies, Inc. +Copyright 2015 Basho Technologies, Inc. This file is provided to you under the Apache License, Version 2.0 (the "License"); you may not use this file @@ -20,6 +20,8 @@ import select from six import PY2 from riak.security import SecurityError, USE_STDLIB_SSL +from riak.transports.pool import Pool +from riak.transports.http.transport import RiakHttpTransport if USE_STDLIB_SSL: import ssl from riak.transports.security import configure_ssl_context @@ -42,9 +44,6 @@ ImproperConnectionState, \ BadStatusLine -from riak.transports.pool import Pool -from riak.transports.http.transport import RiakHttpTransport - class NoNagleHTTPConnection(HTTPConnection): """ diff --git a/riak/transports/http/codec.py b/riak/transports/http/codec.py index 0a9db54f..9f040220 100644 --- a/riak/transports/http/codec.py +++ b/riak/transports/http/codec.py @@ -19,17 +19,9 @@ under the License. """ -# subtract length of "Link: " header string and newline -MAX_LINK_HEADER_SIZE = 8192 - 8 - - import re import csv from six import PY2, PY3 -if PY2: - from urllib import unquote_plus -else: - from urllib.parse import unquote_plus from cgi import parse_header from email import message_from_string from email.utils import parsedate_tz, mktime_tz @@ -40,6 +32,14 @@ from riak.multidict import MultiDict from riak.transports.http.search import XMLSearchResult from riak.util import decode_index_value, bytes_to_str +if PY2: + from urllib import unquote_plus +else: + from urllib.parse import unquote_plus + + +# subtract length of "Link: " header string and newline +MAX_LINK_HEADER_SIZE = 8192 - 8 class RiakHttpCodec(object): diff --git a/riak/transports/http/connection.py b/riak/transports/http/connection.py index db7689e8..2912f9b1 100644 --- a/riak/transports/http/connection.py +++ b/riak/transports/http/connection.py @@ -1,5 +1,5 @@ """ -Copyright 2012 Basho Technologies, Inc. +Copyright 2015 Basho Technologies, Inc. This file is provided to you under the Apache License, Version 2.0 (the "License"); you may not use this file @@ -17,12 +17,12 @@ """ from six import PY2 +import base64 +from riak.util import str_to_bytes if PY2: from httplib import NotConnected, HTTPConnection else: from http.client import NotConnected, HTTPConnection -import base64 -from riak.util import str_to_bytes class RiakHttpConnection(object): diff --git a/riak/transports/http/resources.py b/riak/transports/http/resources.py index 7075952a..ac975109 100644 --- a/riak/transports/http/resources.py +++ b/riak/transports/http/resources.py @@ -1,5 +1,5 @@ """ -Copyright 2012 Basho Technologies, Inc. +Copyright 2015 Basho Technologies, Inc. 
This file is provided to you under the Apache License, Version 2.0 (the "License"); you may not use this file @@ -18,12 +18,12 @@ import re from six import PY2 +from riak import RiakError +from riak.util import lazy_property, bytes_to_str if PY2: from urllib import quote_plus, urlencode else: from urllib.parse import quote_plus, urlencode -from riak import RiakError -from riak.util import lazy_property, bytes_to_str class RiakHttpResources(object): diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index 7214fbe0..f534310a 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -1,5 +1,5 @@ """ -Copyright 2012 Basho Technologies, Inc. +Copyright 2015 Basho Technologies, Inc. Copyright 2010 Rusty Klophaus Copyright 2010 Justin Sheehy Copyright 2009 Jay Baird @@ -25,10 +25,6 @@ import json from six import PY2 -if PY2: - from httplib import HTTPConnection -else: - from http.client import HTTPConnection from xml.dom.minidom import Document from riak.transports.transport import RiakTransport from riak.transports.http.resources import RiakHttpResources @@ -42,6 +38,10 @@ from riak import RiakError from riak.security import SecurityError from riak.util import decode_index_value, bytes_to_str, str_to_long +if PY2: + from httplib import HTTPConnection +else: + from http.client import HTTPConnection class RiakHttpTransport(RiakHttpConnection, RiakHttpResources, RiakHttpCodec, @@ -188,8 +188,8 @@ def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, url = self.object_path(robj.bucket.name, robj.key, bucket_type=bucket_type, **params) - use_vclocks = (self.tombstone_vclocks() and hasattr(robj, 'vclock') - and robj.vclock is not None) + use_vclocks = (self.tombstone_vclocks() and hasattr(robj, 'vclock') and + robj.vclock is not None) if use_vclocks: headers['X-Riak-Vclock'] = robj.vclock.encode('base64') response = self._request('DELETE', url, headers) diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index c021e56c..83bbf8e1 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -1,5 +1,5 @@ """ -Copyright 2012 Basho Technologies, Inc. +Copyright 2015 Basho Technologies, Inc. Copyright 2010 Rusty Klophaus Copyright 2010 Justin Sheehy Copyright 2009 Jay Baird @@ -20,6 +20,7 @@ """ import riak_pb +import sys from riak import RiakError from riak.transports.transport import RiakTransport from riak.riak_object import VClock @@ -252,8 +253,8 @@ def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, if self.client_timeouts() and timeout: req.timeout = timeout - use_vclocks = (self.tombstone_vclocks() and hasattr(robj, 'vclock') - and robj.vclock) + use_vclocks = (self.tombstone_vclocks() and + hasattr(robj, 'vclock') and robj.vclock) if use_vclocks: req.vclock = robj.vclock.encode('binary') diff --git a/riak/transports/security.py b/riak/transports/security.py index b108e427..dfd4cdc1 100644 --- a/riak/transports/security.py +++ b/riak/transports/security.py @@ -1,5 +1,5 @@ """ -Copyright 2014 Basho Technologies, Inc. +Copyright 2015 Basho Technologies, Inc. 
This file is provided to you under the Apache License, Version 2.0 (the "License"); you may not use this file @@ -158,7 +158,6 @@ def close(self): else: raise err - # Blatantly Stolen from # https://github.com/shazow/urllib3/blob/master/urllib3/contrib/pyopenssl.py # which is basically a port of the `socket._fileobject` class diff --git a/riak/util.py b/riak/util.py index f083a053..9be8ec4a 100644 --- a/riak/util.py +++ b/riak/util.py @@ -48,8 +48,8 @@ def deep_merge(a, b): if key not in current_dst: current_dst[key] = current_src[key] else: - if (quacks_like_dict(current_src[key]) - and quacks_like_dict(current_dst[key])): + if (quacks_like_dict(current_src[key]) and + quacks_like_dict(current_dst[key])): stack.append((current_dst[key], current_src[key])) else: current_dst[key] = current_src[key] diff --git a/version.py b/version.py index 90f856a0..ff30e22f 100644 --- a/version.py +++ b/version.py @@ -16,9 +16,6 @@ """ from __future__ import print_function - -__all__ = ['get_version'] - from os.path import dirname, isdir, join import re from subprocess import CalledProcessError, Popen, PIPE @@ -62,6 +59,8 @@ def check_output(*popenargs, **kwargs): version_re = re.compile('^Version: (.+)$', re.M) +__all__ = ['get_version'] + def get_version(): d = dirname(__file__) From 574945505b2702d52823c19ea52cd480f6ca5bef Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Mon, 15 Jun 2015 16:27:53 -0600 Subject: [PATCH 008/324] Clean up new pyflakes warnings --- riak/mapreduce.py | 1 - riak/transports/pbc/transport.py | 1 - 2 files changed, 2 deletions(-) diff --git a/riak/mapreduce.py b/riak/mapreduce.py index 920b7d3d..fe8cd6e4 100644 --- a/riak/mapreduce.py +++ b/riak/mapreduce.py @@ -85,7 +85,6 @@ def add_object(self, obj): :type obj: RiakObject :rtype: :class:`RiakMapReduce` """ - from riak.riak_object import RiakObject return self.add_bucket_key_data(obj._bucket._name, obj._key, None) def add_bucket_key_data(self, bucket, key, data, bucket_type=None): diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 83bbf8e1..74386fce 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -20,7 +20,6 @@ """ import riak_pb -import sys from riak import RiakError from riak.transports.transport import RiakTransport from riak.riak_object import VClock From ae4d106934f7246e2ef7433f9b99a692549dc5b5 Mon Sep 17 00:00:00 2001 From: vagrant Date: Mon, 13 Jul 2015 18:53:51 +0000 Subject: [PATCH 009/324] Limit the version of tox to be < 2.0 and neuter test_index_timeout since it's racy --- buildbot/tox_setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index 46a1aee9..a9581cdc 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -37,7 +37,7 @@ fi # Now install tox if [ -z "`pip show tox`" ]; then - pip install tox + pip install -Iv tox=1.9.0 if [ -z "`pip show tox`" ]; then echo "ERROR: Install of tox failed" exit 1 From aee07d1f5dda05352d855440c72ab6e458808de7 Mon Sep 17 00:00:00 2001 From: vagrant Date: Tue, 14 Jul 2015 02:05:23 +0000 Subject: [PATCH 010/324] Tweak tests to pass in 2.1.x - Limit links in test_index_timeout - Reduce the number of links in test_too_many_link_headers_shouldnt_break_http --- riak/tests/test_2i.py | 13 +++++++------ riak/tests/test_all.py | 4 ++-- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/riak/tests/test_2i.py b/riak/tests/test_2i.py index a5559031..86d14999 100644 --- a/riak/tests/test_2i.py +++ b/riak/tests/test_2i.py @@ 
-455,12 +455,13 @@ def test_index_timeout(self): bucket, o1, o2, o3, o4 = self._create_index_objects() - with self.assertRaises(RiakError): - bucket.get_index('field1_bin', 'val1', timeout=1) - - with self.assertRaises(RiakError): - for i in bucket.stream_index('field1_bin', 'val1', timeout=1): - pass + # Disable timeouts since they are too racy + # with self.assertRaises(RiakError): + # bucket.get_index('field1_bin', 'val1', timeout=1) + # + # with self.assertRaises(RiakError): + # for i in bucket.stream_index('field1_bin', 'val1', timeout=1): + # pass # This should not raise self.assertEqual([o1.key], bucket.get_index('field1_bin', 'val1', diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index b1794291..34c9ac8f 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -429,13 +429,13 @@ def test_no_returnbody(self): def test_too_many_link_headers_shouldnt_break_http(self): bucket = self.client.bucket(self.bucket_name) o = bucket.new("lots_of_links", "My god, it's full of links!") - for i in range(0, 400): + for i in range(0, 300): link = ("other", "key%d" % i, "next") o.add_link(link) o.store() stored_object = bucket.get("lots_of_links") - self.assertEqual(len(stored_object.links), 400) + self.assertEqual(len(stored_object.links), 300) if __name__ == '__main__': From 96ad2d448b946a6ff72ac9f1d8f7dcd6ac6bec78 Mon Sep 17 00:00:00 2001 From: vagrant Date: Fri, 11 Sep 2015 00:10:17 +0000 Subject: [PATCH 011/324] Upgrade tox version and Python versions --- buildbot/tox_setup.sh | 19 +++++++++++-------- riak/bucket.py | 9 +++++---- riak/tests/test_kv.py | 8 +++++--- riak/tests/test_six.py | 6 +++--- tox.ini | 3 ++- 5 files changed, 26 insertions(+), 19 deletions(-) diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index a9581cdc..86ecdebe 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -22,22 +22,25 @@ if [[ ! 
-d $PYENV_ROOT ]]; then eval "$(pyenv virtualenv-init -)" # Now load up (allthethings) - VERSION_ALIAS="riak_2.6.9" pyenv install 2.6.9 - VERSION_ALIAS="riak_2.7.9" pyenv install 2.7.9 + VERSION_ALIAS="riak_3.4.3" pyenv install 3.4.3 VERSION_ALIAS="riak_3.3.6" pyenv install 3.3.6 - VERSION_ALIAS="riak_3.4.2" pyenv install 3.4.2 + VERSION_ALIAS="riak_2.7.10" pyenv install 2.7.10 + VERSION_ALIAS="riak_2.7.9" pyenv install 2.7.9 + VERSION_ALIAS="riak_2.6.9" pyenv install 2.6.9 - pyenv virtualenv riak_2.6.9 riak-py26 - pyenv virtualenv riak_2.7.9 riak-py27 + pyenv virtualenv riak_3.4.3 riak-py34 pyenv virtualenv riak_3.3.6 riak-py33 - pyenv virtualenv riak_3.4.2 riak-py34 - pyenv global riak-py26 riak-py27 riak-py33 riak-py34 + pyenv virtualenv riak_2.7.10 riak-py27 + pyenv virtualenv riak_2.7.9 riak-py279 + pyenv virtualenv riak_2.6.9 riak-py26 + pyenv global riak-py34 riak-py33 riak-py27 riak-py279 riak-py26 pyenv versions fi # Now install tox +pip install --upgrade pip if [ -z "`pip show tox`" ]; then - pip install -Iv tox=1.9.0 + pip install -Iv tox if [ -z "`pip show tox`" ]; then echo "ERROR: Install of tox failed" exit 1 diff --git a/riak/bucket.py b/riak/bucket.py index a6b7b192..bb95726d 100644 --- a/riak/bucket.py +++ b/riak/bucket.py @@ -52,12 +52,13 @@ def __init__(self, client, name, bucket_type): :param bucket_type: The parent bucket type of this bucket :type bucket_type: :class:`BucketType` """ + + if not isinstance(name, string_types): + raise TypeError('Bucket name must be a string') + if PY2: try: - if isinstance(name, string_types): - name = name.encode('ascii') - else: - raise TypeError('Bucket name must be a string') + name = name.encode('ascii') except UnicodeError: raise TypeError('Unicode bucket names are not supported.') diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 822dd9ab..b355ab81 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -147,9 +147,8 @@ def test_string_bucket_name(self): with self.assert_raises_regex(TypeError, 'must be a string'): self.client.bucket(bad) - if PY2: - with self.assert_raises_regex(TypeError, 'must be a string'): - RiakBucket(self.client, bad, None) + with self.assert_raises_regex(TypeError, 'must be a string'): + RiakBucket(self.client, bad, None) # Unicode bucket names are not supported in Python 2.x, # if they can't be encoded to ASCII. This should be changed in a @@ -192,6 +191,9 @@ def test_stream_keys(self): def test_stream_keys_timeout(self): bucket = self.client.bucket('random_key_bucket') + for key in range(1,1000): + o = bucket.new(None, data={}) + o.store() streamed_keys = [] with self.assertRaises(RiakError): for keylist in self.client.stream_keys(bucket, timeout=1): diff --git a/riak/tests/test_six.py b/riak/tests/test_six.py index c83f2b1e..c68dd150 100644 --- a/riak/tests/test_six.py +++ b/riak/tests/test_six.py @@ -129,8 +129,8 @@ def assertItemsEqual(self, expected_seq, actual_seq, msg=None): diffMsg = '\n'.join(lines) standardMsg = self._truncateMessage(standardMsg, diffMsg) - def assert_raises_regex(self, exception, regexp, msg=None): + def assert_raises_regex(self, exception, regexp): if PY2: - return self.assertRaisesRegexp(exception, regexp, msg) + return self.assertRaisesRegexp(exception, regexp) else: - return self.assertRaisesRegex(exception, regexp, msg) + return self.assertRaisesRegex(exception, regexp) diff --git a/tox.ini b/tox.ini index d4f59be0..a658d160 100644 --- a/tox.ini +++ b/tox.ini @@ -4,8 +4,9 @@ # and then run "tox" from this directory. 
[tox] -envlist = py26, py27, py33, py34 +envlist = py26, py279, py27, py33, py34 [testenv] commands = {envpython} setup.py test deps = six +passenv = RUN_* SKIP_* RIAK_* From 89b69d1541e09db14795dac7215870e960c1f16f Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Fri, 11 Sep 2015 19:16:13 +0000 Subject: [PATCH 012/324] Update from pyflakes to flake8 to lint test under Python 3 --- buildbot/Makefile | 4 ++-- buildbot/tox_setup.sh | 1 - riak/client/operations.py | 6 +++--- riak/datatypes/counter.py | 2 +- riak/test_server.py | 4 ++-- riak/tests/test_all.py | 2 +- riak/tests/test_kv.py | 4 ++-- riak/transports/http/resources.py | 2 +- riak/transports/pbc/codec.py | 4 ++-- riak/transports/pbc/transport.py | 2 +- riak/util.py | 2 +- 11 files changed, 16 insertions(+), 17 deletions(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index 6f78453c..57edaddf 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -14,9 +14,9 @@ compile: @../setup.py develop lint: - @pip install --upgrade pep8 pyflakes + @pip install --upgrade pip pep8 flake8 @cd ..; pep8 riak *.py - @cd ..; pyflakes riak *.py + @cd ..; flake8 riak *.py @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/client.crt @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/server.crt diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index 86ecdebe..a6e3af78 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -33,7 +33,6 @@ if [[ ! -d $PYENV_ROOT ]]; then pyenv virtualenv riak_2.7.10 riak-py27 pyenv virtualenv riak_2.7.9 riak-py279 pyenv virtualenv riak_2.6.9 riak-py26 - pyenv global riak-py34 riak-py33 riak-py27 riak-py279 riak-py26 pyenv versions fi diff --git a/riak/client/operations.py b/riak/client/operations.py index 991c5f67..ecda64af 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -914,7 +914,7 @@ def update_counter(self, bucket, key, value, w=None, dw=None, pw=None, :type returnvalue: bool """ if PY2: - valid_types = (int, long) + valid_types = (int, long) # noqa else: valid_types = (int,) if type(value) not in valid_types: @@ -1073,6 +1073,6 @@ def _validate_timeout(timeout): Raises an exception if the given timeout is an invalid value. 
""" if not (timeout is None or - ((type(timeout) == int or (PY2 and type(timeout) == long)) and - timeout > 0)): + ((type(timeout) == int or + (PY2 and type(timeout) == long)) and timeout > 0)): # noqa raise ValueError("timeout must be a positive integer") diff --git a/riak/datatypes/counter.py b/riak/datatypes/counter.py index 1fad5ea8..7d2de17b 100644 --- a/riak/datatypes/counter.py +++ b/riak/datatypes/counter.py @@ -73,7 +73,7 @@ def decrement(self, amount=1): def _check_type(self, new_value): return (isinstance(new_value, int) or - isinstance(new_value, long)) + isinstance(new_value, long)) # noqa TYPES['counter'] = Counter diff --git a/riak/test_server.py b/riak/test_server.py index da727fec..545f7e5e 100644 --- a/riak/test_server.py +++ b/riak/test_server.py @@ -31,8 +31,8 @@ def __repr__(self): def __eq__(self, other): return self.str == other - def __cmp__(self, other): - return cmp(self.str, other) + def __lt__(self, other): + return self.str < other def erlang_config(hash, depth=1): diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index 34c9ac8f..992e4997 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -298,7 +298,7 @@ def test_multiget_errors(self): self.assertEqual(failure[1], self.bucket_name) self.assertIn(failure[2], keys) if PY2: - self.assertIsInstance(failure[3], StandardError) + self.assertIsInstance(failure[3], StandardError) # noqa else: self.assertIsInstance(failure[3], Exception) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index b355ab81..b2aa9f92 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -89,7 +89,7 @@ def test_store_and_get(self): # unicode objects are fine, as long as they don't # contain any non-ASCII chars if PY2: - self.client.bucket(unicode(self.bucket_name)) + self.client.bucket(unicode(self.bucket_name)) # noqa else: self.client.bucket(self.bucket_name) if PY2: @@ -191,7 +191,7 @@ def test_stream_keys(self): def test_stream_keys_timeout(self): bucket = self.client.bucket('random_key_bucket') - for key in range(1,1000): + for key in range(1, 1000): o = bucket.new(None, data={}) o.store() streamed_keys = [] diff --git a/riak/transports/http/resources.py b/riak/transports/http/resources.py index ac975109..f30cb36e 100644 --- a/riak/transports/http/resources.py +++ b/riak/transports/http/resources.py @@ -264,7 +264,7 @@ def mkpath(*segments, **query): if query[key] in [False, True]: _query[key] = str(query[key]).lower() elif query[key] is not None: - if PY2 and isinstance(query[key], unicode): + if PY2 and isinstance(query[key], unicode): # noqa _query[key] = query[key].encode('utf-8') else: _query[key] = query[key] diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index ca48ff59..a3f26e8a 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -510,8 +510,8 @@ def _decode_search_doc(self, doc): resultdoc = MultiDict() for pair in doc.fields: if PY2: - ukey = unicode(pair.key, 'utf-8') - uval = unicode(pair.value, 'utf-8') + ukey = unicode(pair.key, 'utf-8') # noqa + uval = unicode(pair.value, 'utf-8') # noqa else: ukey = bytes_to_str(pair.key) uval = bytes_to_str(pair.value) diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 74386fce..c77dab2b 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -565,7 +565,7 @@ def search(self, index, query, **params): if not self.pb_search(): return self._search_mapred_emu(index, query) - if PY2 and isinstance(query, unicode): + if PY2 and 
isinstance(query, unicode): # noqa query = query.encode('utf8') req = riak_pb.RpbSearchQueryReq(index=str_to_bytes(index), diff --git a/riak/util.py b/riak/util.py index 9be8ec4a..9a5b5a14 100644 --- a/riak/util.py +++ b/riak/util.py @@ -113,6 +113,6 @@ def str_to_long(value, base=10): if value is None: return None elif PY2: - return long(value, base) + return long(value, base) # noqa else: return int(value, base) From 6c061732ba3871b8eadb6504dfe888eb802a42fe Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Sun, 13 Sep 2015 21:47:10 +0000 Subject: [PATCH 013/324] Install each version of Pyenv and Python independently for buildbot --- buildbot/Makefile | 3 ++- buildbot/tox_setup.sh | 56 +++++++++++++++++++++++++++++++------------ tox.ini | 2 ++ 3 files changed, 45 insertions(+), 16 deletions(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index 57edaddf..1323a276 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -3,6 +3,7 @@ RIAK_CONF = ${RIAK_DIR}/etc/riak.conf # RIAK = ${RIAK_DIR}/bin/riak RIAK_ADMIN = ${RIAK_DIR}/bin/riak-admin CERTS_DIR = $(shell pwd)/../riak/tests/resources +unexport PYENV_VERSION preconfigure: @../setup.py preconfigure --riak-conf=${RIAK_CONF} @@ -14,7 +15,7 @@ compile: @../setup.py develop lint: - @pip install --upgrade pip pep8 flake8 + @pip install --upgrade pep8 flake8 @cd ..; pep8 riak *.py @cd ..; flake8 riak *.py @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/client.crt diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index a6e3af78..1dc3f72c 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash # pyenv root export PYENV_ROOT="$HOME/.pyenv" +TEST_ROOT=$PWD/.. # Install pyenv if it's missing if [[ ! -d $PYENV_ROOT ]]; then @@ -8,33 +9,58 @@ if [[ ! -d $PYENV_ROOT ]]; then cd ${PYENV_ROOT} # Get the latest tagged version git checkout `git tag | tail -1` +fi + +# Upgrade it, if it's too old +if [[ -z $(pyenv install --list | grep 3.4.3) ]]; then + cd ${PYENV_ROOT} + git pull origin master + git pull -u origin master + # Get the latest tagged version + git checkout `git tag | tail -1` +fi + +if [[ ! -d ${PYENV_ROOT}/plugins/pyenv-virtualenv ]]; then git clone https://github.com/yyuu/pyenv-virtualenv.git ${PYENV_ROOT}/plugins/pyenv-virtualenv - cd plugins/pyenv-virtualenv + cd ${PYENV_ROOT}/plugins/pyenv-virtualenv git checkout `git tag | tail -1` +fi + +if [[ ! 
-d ${PYENV_ROOT}/plugins/pyenv-alias ]]; then git clone https://github.com/s1341/pyenv-alias.git ${PYENV_ROOT}/plugins/pyenv-alias +fi - # Add pyenv root to PATH - # and initialize pyenv - PATH="$PYENV_ROOT/bin:$PATH" - # initialize pyenv - eval "$(pyenv init -)" - # initialize pyenv virtualenv - eval "$(pyenv virtualenv-init -)" +# Add pyenv root to PATH +# and initialize pyenv +PATH="$PYENV_ROOT/bin:$PATH" +# initialize pyenv +eval "$(pyenv init -)" +# initialize pyenv virtualenv +eval "$(pyenv virtualenv-init -)" - # Now load up (allthethings) +# Now install (allthethings) versions for testing +if [[ -z $(pyenv versions | grep riak_3.4.3) ]]; then VERSION_ALIAS="riak_3.4.3" pyenv install 3.4.3 - VERSION_ALIAS="riak_3.3.6" pyenv install 3.3.6 - VERSION_ALIAS="riak_2.7.10" pyenv install 2.7.10 - VERSION_ALIAS="riak_2.7.9" pyenv install 2.7.9 - VERSION_ALIAS="riak_2.6.9" pyenv install 2.6.9 - pyenv virtualenv riak_3.4.3 riak-py34 +fi +if [[ -z $(pyenv versions | grep riak_3.3.6) ]]; then + VERSION_ALIAS="riak_3.3.6" pyenv install 3.3.6 pyenv virtualenv riak_3.3.6 riak-py33 +fi +if [[ -z $(pyenv versions | grep riak_2.7.10) ]]; then + VERSION_ALIAS="riak_2.7.10" pyenv install 2.7.10 pyenv virtualenv riak_2.7.10 riak-py27 +fi +if [[ -z $(pyenv versions | grep riak_2.7.9) ]]; then + VERSION_ALIAS="riak_2.7.9" pyenv install 2.7.9 pyenv virtualenv riak_2.7.9 riak-py279 +fi +if [[ -z $(pyenv versions | grep riak_2.6.9) ]]; then + VERSION_ALIAS="riak_2.6.9" pyenv install 2.6.9 pyenv virtualenv riak_2.6.9 riak-py26 - pyenv versions fi +pyenv global riak-py34 riak-py33 riak-py27 riak-py279 riak-py26 +pyenv versions # Now install tox pip install --upgrade pip diff --git a/tox.ini b/tox.ini index a658d160..1bb27de4 100644 --- a/tox.ini +++ b/tox.ini @@ -7,6 +7,8 @@ envlist = py26, py279, py27, py33, py34 [testenv] +install_command = pip install --upgrade {packages} commands = {envpython} setup.py test deps = six + pip passenv = RUN_* SKIP_* RIAK_* From b3c4c03315d27e128f8814f67ee7d2447fbc76e5 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Mon, 5 Oct 2015 15:14:50 +0000 Subject: [PATCH 014/324] Add support for fetching the bucket/key preflist --- riak/bucket.py | 10 ++++++++++ riak/client/operations.py | 17 +++++++++++++++++ riak/tests/test_kv.py | 11 +++++++++++ riak/transports/feature_detect.py | 12 +++++++++++- riak/transports/http/resources.py | 28 ++++++++++++++++++++++++++++ riak/transports/http/transport.py | 20 ++++++++++++++++++++ riak/transports/pbc/codec.py | 13 +++++++++++++ riak/transports/pbc/transport.py | 23 +++++++++++++++++++++++ riak/transports/transport.py | 6 ++++++ 9 files changed, 139 insertions(+), 1 deletion(-) diff --git a/riak/bucket.py b/riak/bucket.py index bb95726d..4342d7ad 100644 --- a/riak/bucket.py +++ b/riak/bucket.py @@ -586,6 +586,16 @@ def update_counter(self, key, value, **kwargs): increment_counter = update_counter + def get_preflist(self, key): + """ + Retrieve the preflist associated with a given bucket/key + + :param key: Name of the key. 
+ :type key: string + :rtype: list of dict() + """ + return self._client.get_preflist(self, key) + def __str__(self): if self.bucket_type.is_default(): return ''.format(self.name) diff --git a/riak/client/operations.py b/riak/client/operations.py index ecda64af..8239e4ac 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -1000,6 +1000,23 @@ def update_datatype(self, datatype, w=None, dw=None, pw=None, timeout=timeout, include_context=include_context) + @retryable + def get_preflist(self, transport, bucket, key): + """ + Fetch the preflist for a given bucket and key. + + .. note:: This request is automatically retried :attr:`retries` + times if it fails due to network error. + + :param bucket: the bucket whose index will be queried + :type bucket: RiakBucket + :param key: the key of the preflist + :type key: string + + :return: list of dicts (partition, node, primary) + """ + return transport.get_preflist(bucket, key) + def _bucket_type_bucket_builder(self, name, bucket_type): """ Build a bucket from a bucket type diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index b2aa9f92..4306cc30 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -579,6 +579,17 @@ def test_get_params(self): basic_quorum=True) self.assertFalse(missing.exists) + def test_preflist(self): + bucket = self.client.bucket(self.bucket_name) + bucket.new(self.key_name, data={"foo": "one", + "bar": "baz"}).store() + preflist = bucket.get_preflist(self.key_name) + preflist2 = self.client.get_preflist(bucket, self.key_name) + for pref in (preflist, preflist2): + self.assertEqual(len(pref), 3) + self.assertEqual(pref[0]['node'], 'riak@127.0.0.1') + [self.assertTrue(node['primary']) for node in pref] + def generate_siblings(self, original, count=5, delay=None): vals = [] for _ in range(count): diff --git a/riak/transports/feature_detect.py b/riak/transports/feature_detect.py index e860a4c6..642fac96 100644 --- a/riak/transports/feature_detect.py +++ b/riak/transports/feature_detect.py @@ -26,7 +26,9 @@ 1.2: LooseVersion("1.2.0"), 1.4: LooseVersion("1.4.0"), 1.44: LooseVersion("1.4.4"), - 2.0: LooseVersion("2.0.0") + 2.0: LooseVersion("2.0.0"), + 2.1: LooseVersion("2.1.0"), + 2.12: LooseVersion("2.1.2") } @@ -192,6 +194,14 @@ def datatypes(self): """ return self.server_version >= versions[2.0] + def preflists(self): + """ + Whether bucket/key preflists are supported. 
+ + :rtype: bool + """ + return self.server_version >= versions[2.1] + @lazy_property def server_version(self): return LooseVersion(self._server_version()) diff --git a/riak/transports/http/resources.py b/riak/transports/http/resources.py index f30cb36e..c13925bc 100644 --- a/riak/transports/http/resources.py +++ b/riak/transports/http/resources.py @@ -172,6 +172,30 @@ def datatypes_path(self, bucket_type, bucket, key=None, **options): return mkpath("/types", quote_plus(bucket_type), "buckets", quote_plus(bucket), "datatypes", key, **options) + def preflist_path(self, bucket, key, bucket_type=None, **options): + """ + Generate the URL for bucket/key preflist information + + :param bucket: Name of a Riak bucket + :type bucket: string + :param key: Name of a Key + :type key: string + :param bucket_type: Optional Riak Bucket Type + :type bucket_type: None or string + :rtype URL string + """ + if not self.riak_kv_wm_preflist: + raise RiakError("Preflists are unsupported by this Riak node") + if self.riak_kv_wm_bucket_type and bucket_type: + return mkpath("/types", quote_plus(bucket_type), + "buckets", quote_plus(bucket), + "keys", quote_plus(key), + "preflist", **options) + else: + return mkpath("/buckets", quote_plus(bucket), + "keys", quote_plus(key), + "preflist", **options) + # Feature detection overrides def bucket_types(self): return self.riak_kv_wm_bucket_type is not None @@ -225,6 +249,10 @@ def riak_solr_indexer_wm(self): def riak_kv_wm_counter(self): return self.resources.get('riak_kv_wm_counter') + @lazy_property + def riak_kv_wm_preflist(self): + return self.resources.get('riak_kv_wm_preflist') + @lazy_property def yz_wm_search(self): return self.resources.get('yz_wm_search') diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index f534310a..aaac3f92 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -775,6 +775,26 @@ def update_datatype(self, datatype, **options): return True + def get_preflist(self, bucket, key): + """ + Get the preflist for a bucket/key + + :param bucket: Riak Bucket + :type bucket: :class:`~riak.bucket.RiakBucket` + :param key: Riak Key + :type key: string + :rtype: list of dicts + """ + bucket_type = self._get_bucket_type(bucket.bucket_type) + url = self.preflist_path(bucket.name, key, bucket_type=bucket_type) + status, headers, body = self._request('GET', url) + + if status == 200: + preflist = json.loads(bytes_to_str(body)) + return preflist['preflist'] + else: + raise RiakError('Error getting bucket/key preflist.') + def check_http_code(self, status, expected_statuses): if status not in expected_statuses: raise RiakError('Expected status %s, received %s' % diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index a3f26e8a..1a2bdd1b 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -622,3 +622,16 @@ def _encode_map_update(self, dtype, msg, op): msg.flag_op = riak_pb.MapUpdate.ENABLE else: msg.flag_op = riak_pb.MapUpdate.DISABLE + + def _decode_preflist(self, item): + """ + Decodes a preflist response + + :param preflist: a bucket/key preflist + :type preflist: list of riak_pb.RpbBucketKeyPreflistItem + :rtype dict + """ + result = {'partition': item.partition, + 'node': bytes_to_str(item.node), + 'primary': item. 
primary} + return result diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index c77dab2b..5e527c51 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -56,6 +56,8 @@ MSG_CODE_SET_BUCKET_RESP, MSG_CODE_GET_BUCKET_TYPE_REQ, MSG_CODE_SET_BUCKET_TYPE_REQ, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP, MSG_CODE_MAP_RED_REQ, MSG_CODE_INDEX_REQ, MSG_CODE_INDEX_RESP, @@ -699,3 +701,24 @@ def update_datatype(self, datatype, **options): datatype._set_value(self._decode_dt_value(type_name, resp)) return True + + def get_preflist(self, bucket, key): + """ + Get the preflist for a bucket/key + + :param bucket: Riak Bucket + :type bucket: :class:`~riak.bucket.RiakBucket` + :param key: Riak Key + :type key: string + :rtype: list of dicts + """ + req = riak_pb.RpbGetBucketKeyPreflistReq() + req.bucket = str_to_bytes(bucket.name) + req.key = str_to_bytes(key) + req.type = str_to_bytes(bucket.bucket_type.name) + + msg_code, resp = self._request(MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, + req, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP) + + return [self._decode_preflist(item) for item in resp.preflist] diff --git a/riak/transports/transport.py b/riak/transports/transport.py index 85dcae43..be598511 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -270,6 +270,12 @@ def update_datatype(self, datatype, w=None, dw=None, pw=None, """ raise NotImplementedError + def get_preflist(self, bucket, key): + """ + Fetches the preflist for a bucket/key. + """ + raise NotImplementedError + def _search_mapred_emu(self, index, query): """ Emulates a search request via MapReduce. Used in the case From 315535220994bac197707e340beefe7a3349859e Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Sun, 11 Oct 2015 17:30:12 +0000 Subject: [PATCH 015/324] Add support for write-once bucket type property --- commands.py | 2 ++ riak/tests/test_btypes.py | 21 +++++++++++++++++++++ riak/transports/feature_detect.py | 8 ++++++++ riak/transports/pbc/codec.py | 3 ++- 4 files changed, 33 insertions(+), 1 deletion(-) diff --git a/commands.py b/commands.py index 1cf60eb5..c2afab0e 100644 --- a/commands.py +++ b/commands.py @@ -81,6 +81,7 @@ class create_bucket_types(Command): * `pytest-sets` with ``{"datatype":"set"}`` * `pytest-counters` with ``{"datatype":"counter"}`` * `pytest-consistent` with ``{"consistent":true}`` + * `pytest-write-once` with ``{"write_once": true}`` * `pytest-mr` * `pytest` with ``{"allow_mult":false}`` """ @@ -96,6 +97,7 @@ class create_bucket_types(Command): 'pytest-sets': {'datatype': 'set'}, 'pytest-counters': {'datatype': 'counter'}, 'pytest-consistent': {'consistent': True}, + 'pytest-write-once': {'write_once': True}, 'pytest-mr': {}, 'pytest': {'allow_mult': False} } diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index 89d298b3..b3ea5db2 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -176,3 +176,24 @@ def test_multiget_bucket_types(self): self.assertIsInstance(mobj, RiakObject) self.assertEqual(bucket, mobj.bucket) self.assertEqual(btype, mobj.bucket.bucket_type) + + @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") + def test_write_once_bucket_type(self): + btype = self.client.bucket_type('pytest-write-once') + btype.set_property('write_once', True) + bucket = btype.bucket(self.bucket_name) + + for i in range(100): + obj = bucket.new(self.key_name + str(i)) + obj.data = {'id': i} + obj.store() + + mget = bucket.multiget([self.key_name + 
str(i) for i in range(100)]) + for mobj in mget: + self.assertIsInstance(mobj, RiakObject) + self.assertEqual(bucket, mobj.bucket) + self.assertEqual(btype, mobj.bucket.bucket_type) + + props = btype.get_properties() + self.assertIn('write_once', props) + self.assertEqual(True, props['write_once']) diff --git a/riak/transports/feature_detect.py b/riak/transports/feature_detect.py index 642fac96..c73ba37d 100644 --- a/riak/transports/feature_detect.py +++ b/riak/transports/feature_detect.py @@ -202,6 +202,14 @@ def preflists(self): """ return self.server_version >= versions[2.1] + def write_once(self): + """ + Whether write-once operations are supported. + + :rtype: bool + """ + return self.server_version >= versions[2.1] + @lazy_property def server_version(self): return LooseVersion(self._server_version()) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 1a2bdd1b..02c53ca5 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -51,7 +51,8 @@ def _invert(d): NORMAL_PROPS = ['n_val', 'allow_mult', 'last_write_wins', 'old_vclock', 'young_vclock', 'big_vclock', 'small_vclock', 'basic_quorum', - 'notfound_ok', 'search', 'backend', 'search_index', 'datatype'] + 'notfound_ok', 'search', 'backend', 'search_index', 'datatype', + 'write_once'] COMMIT_HOOK_PROPS = ['precommit', 'postcommit'] MODFUN_PROPS = ['chash_keyfun', 'linkfun'] QUORUM_PROPS = ['r', 'pr', 'w', 'pw', 'dw', 'rw'] From 63a205b67dd778eab248987bde4b093d96504b10 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Sun, 11 Oct 2015 19:48:52 +0000 Subject: [PATCH 016/324] Update the feature detection tests for Riak 2.1 --- riak/tests/test_feature_detection.py | 35 ++++++++++++++++++++++++++++ 1 file changed, 35 insertions(+) diff --git a/riak/tests/test_feature_detection.py b/riak/tests/test_feature_detection.py index 11dadc75..682c5ac2 100644 --- a/riak/tests/test_feature_detection.py +++ b/riak/tests/test_feature_detection.py @@ -60,6 +60,8 @@ def test_pre_10(self): self.assertFalse(t.index_term_regex()) self.assertFalse(t.bucket_types()) self.assertFalse(t.datatypes()) + self.assertFalse(t.preflists()) + self.assertFalse(t.write_once()) def test_10(self): t = DummyTransport("1.0.3") @@ -77,6 +79,8 @@ def test_10(self): self.assertFalse(t.index_term_regex()) self.assertFalse(t.bucket_types()) self.assertFalse(t.datatypes()) + self.assertFalse(t.preflists()) + self.assertFalse(t.write_once()) def test_11(self): t = DummyTransport("1.1.4") @@ -94,6 +98,8 @@ def test_11(self): self.assertFalse(t.index_term_regex()) self.assertFalse(t.bucket_types()) self.assertFalse(t.datatypes()) + self.assertFalse(t.preflists()) + self.assertFalse(t.write_once()) def test_12(self): t = DummyTransport("1.2.0") @@ -111,6 +117,8 @@ def test_12(self): self.assertFalse(t.index_term_regex()) self.assertFalse(t.bucket_types()) self.assertFalse(t.datatypes()) + self.assertFalse(t.preflists()) + self.assertFalse(t.write_once()) def test_12_loose(self): t = DummyTransport("1.2.1p3") @@ -128,6 +136,8 @@ def test_12_loose(self): self.assertFalse(t.index_term_regex()) self.assertFalse(t.bucket_types()) self.assertFalse(t.datatypes()) + self.assertFalse(t.preflists()) + self.assertFalse(t.write_once()) def test_14(self): t = DummyTransport("1.4.0rc1") @@ -145,6 +155,8 @@ def test_14(self): self.assertFalse(t.index_term_regex()) self.assertFalse(t.bucket_types()) self.assertFalse(t.datatypes()) + self.assertFalse(t.preflists()) + self.assertFalse(t.write_once()) def test_144(self): t = DummyTransport("1.4.6") @@ 
-162,6 +174,8 @@ def test_144(self): self.assertTrue(t.index_term_regex()) self.assertFalse(t.bucket_types()) self.assertFalse(t.datatypes()) + self.assertFalse(t.preflists()) + self.assertFalse(t.write_once()) def test_20(self): t = DummyTransport("2.0.1") @@ -179,6 +193,27 @@ def test_20(self): self.assertTrue(t.index_term_regex()) self.assertTrue(t.bucket_types()) self.assertTrue(t.datatypes()) + self.assertFalse(t.preflists()) + self.assertFalse(t.write_once()) + + def test_21(self): + t = DummyTransport("2.1.0") + self.assertTrue(t.phaseless_mapred()) + self.assertTrue(t.pb_indexes()) + self.assertTrue(t.pb_search()) + self.assertTrue(t.pb_conditionals()) + self.assertTrue(t.quorum_controls()) + self.assertTrue(t.tombstone_vclocks()) + self.assertTrue(t.pb_head()) + self.assertTrue(t.pb_clear_bucket_props()) + self.assertTrue(t.pb_all_bucket_props()) + self.assertTrue(t.counters()) + self.assertTrue(t.stream_indexes()) + self.assertTrue(t.index_term_regex()) + self.assertTrue(t.bucket_types()) + self.assertTrue(t.datatypes()) + self.assertTrue(t.preflists()) + self.assertTrue(t.write_once()) if __name__ == '__main__': unittest.main() From 30f8ae09173aa7a50b83372f954b4878afc77f4c Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Tue, 13 Oct 2015 15:30:22 +0000 Subject: [PATCH 017/324] Add optional timeout to YZ create index command --- riak/client/operations.py | 7 +++++-- riak/tests/test_all.py | 2 +- riak/transports/http/transport.py | 7 ++++++- riak/transports/pbc/transport.py | 5 ++++- riak/transports/transport.py | 3 ++- 5 files changed, 18 insertions(+), 6 deletions(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index 8239e4ac..b5972f69 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -685,7 +685,8 @@ def stream_mapred(self, inputs, query, timeout): stream.close() @retryable - def create_search_index(self, transport, index, schema=None, n_val=None): + def create_search_index(self, transport, index, schema=None, n_val=None, + timeout=None): """ create_search_index(index, schema=None, n_val=None) @@ -698,8 +699,10 @@ def create_search_index(self, transport, index, schema=None, n_val=None): :type schema: string, None :param n_val: this indexes N value :type n_val: integer, None + :param timeout: optional timeout (in ms) + :type timeout: integer, None """ - return transport.create_search_index(index, schema, n_val) + return transport.create_search_index(index, schema, n_val, timeout) @retryable def get_search_index(self, transport, index): diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index 992e4997..2a6ef8cc 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -87,7 +87,7 @@ def setUpModule(): 'index': 'mrbucket'} for yz in (testrun_yz, testrun_yz_index, testrun_yz_mr): - c.create_search_index(yz['index']) + c.create_search_index(yz['index'], timeout=30000) if yz['btype'] is not None: t = c.bucket_type(yz['btype']) b = t.bucket(yz['bucket']) diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index aaac3f92..2b197966 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -444,7 +444,8 @@ def stream_index(self, bucket, index, startkey, endkey=None, else: raise RiakError('Error streaming secondary index.') - def create_search_index(self, index, schema=None, n_val=None): + def create_search_index(self, index, schema=None, n_val=None, + timeout=None): """ Create a Solr search index for Yokozuna. 
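(For illustration only, not part of the patch: a minimal sketch of how a caller would use the new optional timeout. The index name below is hypothetical; the 30000 ms value mirrors the one now used in riak/tests/test_all.py, and omitting timeout keeps the previous behaviour.)

    from riak import RiakClient

    client = RiakClient()  # defaults to a node on 127.0.0.1

    # Ask Riak/Yokozuna to wait up to 30 seconds for the Solr index to be
    # created before giving up; leaving timeout unset behaves as before.
    client.create_search_index('my_hypothetical_index', timeout=30000)
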
@@ -454,6 +455,8 @@ def create_search_index(self, index, schema=None, n_val=None): :type schema: string :param n_val: N value of the write :type n_val: int + :param timeout: optional timeout (in ms) + :type timeout: integer, None :rtype boolean """ @@ -468,6 +471,8 @@ def create_search_index(self, index, schema=None, n_val=None): content_dict['schema'] = schema if n_val: content_dict['n_val'] = n_val + if timeout: + content_dict['timeout'] = timeout content = json.dumps(content_dict) # Run the request... diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 5e527c51..e385c698 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -487,7 +487,8 @@ def stream_index(self, bucket, index, startkey, endkey=None, return RiakPbcIndexStream(self, index, return_terms) - def create_search_index(self, index, schema=None, n_val=None): + def create_search_index(self, index, schema=None, n_val=None, + timeout=None): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") @@ -498,6 +499,8 @@ def create_search_index(self, index, schema=None, n_val=None): if n_val: idx.n_val = n_val req = riak_pb.RpbYokozunaIndexPutReq(index=idx) + if timeout is not None: + req.timeout = timeout self._request(MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, req, MSG_CODE_PUT_RESP) diff --git a/riak/transports/transport.py b/riak/transports/transport.py index be598511..a7428359 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -172,7 +172,8 @@ def get_client_id(self): """ raise NotImplementedError - def create_search_index(self, index, schema=None, n_val=None): + def create_search_index(self, index, schema=None, n_val=None, + timeout=None): """ Creates a yokozuna search index. 
""" From 6e4f11a4a42b04b87ad0a85cae81b63740af5d54 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Fri, 16 Oct 2015 03:29:50 +0000 Subject: [PATCH 018/324] Update preflist test --- riak/tests/test_kv.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 4306cc30..d1b28298 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -585,9 +585,10 @@ def test_preflist(self): "bar": "baz"}).store() preflist = bucket.get_preflist(self.key_name) preflist2 = self.client.get_preflist(bucket, self.key_name) + nodes = ['riak@127.0.0.1', 'dev1@127.0.0.1'] for pref in (preflist, preflist2): self.assertEqual(len(pref), 3) - self.assertEqual(pref[0]['node'], 'riak@127.0.0.1') + self.assertIn(pref[0]['node'], nodes) [self.assertTrue(node['primary']) for node in pref] def generate_siblings(self, original, count=5, delay=None): From b01bf1b27d7b9c52dc832aa71c20fe1e4ab723bf Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Fri, 16 Oct 2015 15:33:29 +0000 Subject: [PATCH 019/324] Add in correct permissions for riak_kv.get_preflist --- commands.py | 1 + 1 file changed, 1 insertion(+) diff --git a/commands.py b/commands.py index c2afab0e..06ee3039 100644 --- a/commands.py +++ b/commands.py @@ -238,6 +238,7 @@ class setup_security(Command, security_commands): _grants = { "riak_kv.get": ["any"], + "riak_kv.get_preflist": ["any"], "riak_kv.put": ["any"], "riak_kv.delete": ["any"], "riak_kv.index": ["any"], From d527055121a66220e5d2473ebfaadba2d937f0bf Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sun, 18 Oct 2015 18:16:34 -0700 Subject: [PATCH 020/324] Implementing timeseries types and code --- .gitignore | 1 + docs/client.rst | 7 ++++ riak/__init__.py | 8 +++-- riak/bucket.py | 2 +- riak/client/operations.py | 18 ++++++++++ riak/table.py | 57 ++++++++++++++++++++++++++++++++ riak/tests/test_all.py | 3 ++ riak/transports/pbc/codec.py | 17 ++++++++++ riak/transports/pbc/transport.py | 17 +++++++++- riak/transports/transport.py | 6 ++++ riak/ts_object.py | 54 ++++++++++++++++++++++++++++++ 11 files changed, 185 insertions(+), 5 deletions(-) create mode 100644 riak/table.py create mode 100644 riak/ts_object.py diff --git a/.gitignore b/.gitignore index 34e7a5bb..553047bf 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ build/ dist/ riak.egg-info/ *.egg +.eggs/ #*# *~ diff --git a/docs/client.rst b/docs/client.rst index edf9d14a..a2a3b135 100644 --- a/docs/client.rst +++ b/docs/client.rst @@ -123,6 +123,13 @@ Key-level Operations .. automethod:: RiakClient.fetch_datatype .. automethod:: RiakClient.update_datatype +-------------------- +Timeseries Operations +-------------------- + +.. automethod:: RiakClient.ts_put +.. 
automethod:: RiakClient.ts_query + ---------------- Query Operations ---------------- diff --git a/riak/__init__.py b/riak/__init__.py index eddc69bc..3ba87f64 100644 --- a/riak/__init__.py +++ b/riak/__init__.py @@ -33,14 +33,16 @@ from riak.riak_error import RiakError, ConflictError from riak.client import RiakClient from riak.bucket import RiakBucket, BucketType +from riak.table import Table from riak.node import RiakNode from riak.riak_object import RiakObject from riak.mapreduce import RiakKeyFilter, RiakMapReduce, RiakLink -__all__ = ['RiakBucket', 'BucketType', 'RiakNode', 'RiakObject', 'RiakClient', - 'RiakMapReduce', 'RiakKeyFilter', 'RiakLink', 'RiakError', - 'ConflictError', 'ONE', 'ALL', 'QUORUM', 'key_filter'] +__all__ = ['RiakBucket', 'Table', 'BucketType', 'RiakNode', + 'RiakObject', 'RiakClient', 'RiakMapReduce', 'RiakKeyFilter', + 'RiakLink', 'RiakError', 'ConflictError', + 'ONE', 'ALL', 'QUORUM', 'key_filter'] ONE = "one" ALL = "all" diff --git a/riak/bucket.py b/riak/bucket.py index bb95726d..2df7faca 100644 --- a/riak/bucket.py +++ b/riak/bucket.py @@ -196,7 +196,7 @@ def new(self, key=None, data=None, content_type='application/json', def get(self, key, r=None, pr=None, timeout=None, include_context=None, basic_quorum=None, notfound_ok=None): """ - Retrieve an :class:`~riak.riak_object.RiakObject` or + Retrieve a :class:`~riak.riak_object.RiakObject` or :class:`~riak.datatypes.Datatype`, based on the presence and value of the :attr:`datatype ` bucket property. diff --git a/riak/client/operations.py b/riak/client/operations.py index ecda64af..41bee63a 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -553,6 +553,24 @@ def put(self, transport, robj, w=None, dw=None, pw=None, return_body=None, if_none_match=if_none_match, timeout=timeout) + @retryable + def ts_put(self, transport, tsobj, timeout=None): + """ + ts_put(tsobj, timeout=None) + + Stores time series data in the Riak cluster. + + .. note:: This request is automatically retried :attr:`retries` + times if it fails due to network error. + + :param tsobj: the time series object to store + :type tsobj: RiakTsObject + :param timeout: a timeout value in milliseconds + :type timeout: int + """ + _validate_timeout(timeout) + return transport.ts_put(tsobj, timeout=timeout) + @retryable def get(self, transport, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None): diff --git a/riak/table.py b/riak/table.py new file mode 100644 index 00000000..5259b2cb --- /dev/null +++ b/riak/table.py @@ -0,0 +1,57 @@ +""" +Copyright 2015 Basho Technologies + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" +from six import string_types, PY2 + + +class RiakTable(object): + """ + The ``RiakTable`` object allows you to access properties on a Riak table + (bucket type) and query timeseries data. + """ + def __init__(self, client, name): + """ + Returns a new ``Table`` instance. 
+ + :param client: A :class:`RiakClient ` + instance + :type client: :class:`RiakClient ` + :param name: The tables's name + :type name: string + """ + + if not isinstance(name, string_types): + raise TypeError('Bucket name must be a string') + + if PY2: + try: + name = name.encode('ascii') + except UnicodeError: + raise TypeError('Unicode table names are not supported.') + + self._client = client + self.name = name + + def query(self, key): + """ + Retrieve a bucket-type property. + + :param key: The property to retrieve. + :type key: string + :rtype: mixed + """ + return self.get_properties()[key] diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index 992e4997..de4b6f62 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -238,6 +238,9 @@ def test_timeout_validation(self): with self.assertRaises(ValueError): self.client.put(obj, timeout=bad) + with self.assertRaises(ValueError): + self.client.ts_put(obj, timeout=bad) + with self.assertRaises(ValueError): self.client.get(obj, timeout=bad) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index a3f26e8a..0b457376 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -622,3 +622,20 @@ def _encode_map_update(self, dtype, msg, op): msg.flag_op = riak_pb.MapUpdate.ENABLE else: msg.flag_op = riak_pb.MapUpdate.DISABLE + + def _encode_timeseries(self, tsobj, ts_put_req): + """ + Fills an TsPutReq message with the appropriate data and + metadata from a RiakTsObject. + + :param tsobj: a RiakTsObject + :type tsobj: RiakTsObject + :param ts_put_req: the protobuf message to fill + :type ts_put_req: riak_pb.TsPutReq + """ + ts_put_req.table = str_to_bytes(tsobj.table) + # TODO RTS-367 columns / rows + if tsobj.columns: + if tsobj.rows: + else: + raise RiakError("RiakTsObject requires rows") diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index c77dab2b..867a5e40 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -77,7 +77,9 @@ MSG_CODE_DT_FETCH_REQ, MSG_CODE_DT_FETCH_RESP, MSG_CODE_DT_UPDATE_REQ, - MSG_CODE_DT_UPDATE_RESP + MSG_CODE_DT_UPDATE_RESP, + MSG_CODE_TS_PUT_REQ, + MSG_CODE_TS_PUT_RESP ) @@ -231,6 +233,19 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, return robj + def ts_put(self, tsobj): + req = riak_pb.TsPutReq() + + self._encode_timeseries(tsobj, req) + + msg_code, resp = self._request(MSG_CODE_TS_PUT_REQ, req, + MSG_CODE_TS_PUT_RESP) + + if resp is not None: + return True + elif not robj.key: + raise RiakError("missing response object") + def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): req = riak_pb.RpbDelReq() diff --git a/riak/transports/transport.py b/riak/transports/transport.py index 85dcae43..e58f2ddf 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -85,6 +85,12 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=None, """ raise NotImplementedError + def ts_put(self, tsobj, timeout=None): + """ + Stores a time series object. 
+ """ + raise NotImplementedError + def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): """ diff --git a/riak/ts_object.py b/riak/ts_object.py new file mode 100644 index 00000000..2b855f2e --- /dev/null +++ b/riak/ts_object.py @@ -0,0 +1,54 @@ +""" +Copyright 2015 Basho Technologies + +This file is provided to you under the Apache License, +Version 2.0 (the "License"); you may not use this file +except in compliance with the License. You may obtain +a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, +software distributed under the License is distributed on an +"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, either express or implied. See the License for the +specific language governing permissions and limitations +under the License. +""" + +# TODO RTS-367 +# Should the table parameter be its own object that has a query method on it? +# Like Bucket? +class TsObject(object): + """ + The TsObject holds meta information about Timeseries data, + plus the data itself. + """ + def __init__(self, client, table, rows, columns=None): + """ + Construct a new TsObject. + + :param client: A RiakClient object. + :type client: :class:`RiakClient ` + :param table: The table for the timeseries data as a Table object. + :type table: :class:`Table` + :param rows: An array of arrays with timeseries data + :type rows: array + :param columns: An array Column names and types. Optional. + :type columns: array + """ + + if table is None or len(table) == 0: + raise ValueError('Table must either be a non-empty string.') + + self.client = client + self.table = table + # TODO RTS-367 rows, columns + + def store(self): + """ + Store the timeseries data in Riak. + :rtype: boolean + """ + + return self.client.ts_put(self) From a75843bab72331a997e3065bdfd309144ab784a0 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 19 Oct 2015 08:20:39 -0700 Subject: [PATCH 021/324] Table object, timeseries ops do not use timeout --- riak/__init__.py | 1 + riak/client/operations.py | 9 +++------ riak/table.py | 20 ++++++++++---------- riak/tests/test_all.py | 3 --- riak/transports/pbc/codec.py | 6 ++++++ riak/transports/transport.py | 16 +++++++++++----- 6 files changed, 31 insertions(+), 24 deletions(-) diff --git a/riak/__init__.py b/riak/__init__.py index 3ba87f64..415f6660 100644 --- a/riak/__init__.py +++ b/riak/__init__.py @@ -1,4 +1,5 @@ """ +Copyright 2015 Basho Technologies Copyright 2010 Rusty Klophaus Copyright 2010 Justin Sheehy Copyright 2009 Jay Baird diff --git a/riak/client/operations.py b/riak/client/operations.py index 41bee63a..063eca9e 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -554,9 +554,9 @@ def put(self, transport, robj, w=None, dw=None, pw=None, return_body=None, timeout=timeout) @retryable - def ts_put(self, transport, tsobj, timeout=None): + def ts_put(self, transport, tsobj): """ - ts_put(tsobj, timeout=None) + ts_put(tsobj) Stores time series data in the Riak cluster. 
@@ -565,11 +565,8 @@ def ts_put(self, transport, tsobj, timeout=None): :param tsobj: the time series object to store :type tsobj: RiakTsObject - :param timeout: a timeout value in milliseconds - :type timeout: int """ - _validate_timeout(timeout) - return transport.ts_put(tsobj, timeout=timeout) + return transport.ts_put(tsobj) @retryable def get(self, transport, robj, r=None, pr=None, timeout=None, diff --git a/riak/table.py b/riak/table.py index 5259b2cb..a739e9bb 100644 --- a/riak/table.py +++ b/riak/table.py @@ -18,9 +18,9 @@ from six import string_types, PY2 -class RiakTable(object): +class Table(object): """ - The ``RiakTable`` object allows you to access properties on a Riak table + The ``Table`` object allows you to access properties on a Riak table (bucket type) and query timeseries data. """ def __init__(self, client, name): @@ -30,12 +30,12 @@ def __init__(self, client, name): :param client: A :class:`RiakClient ` instance :type client: :class:`RiakClient ` - :param name: The tables's name + :param name: The table's name :type name: string """ if not isinstance(name, string_types): - raise TypeError('Bucket name must be a string') + raise TypeError('Table name must be a string') if PY2: try: @@ -46,12 +46,12 @@ def __init__(self, client, name): self._client = client self.name = name - def query(self, key): + def query(self, query, interpolations=None): """ - Retrieve a bucket-type property. + Queries a timeseries table. - :param key: The property to retrieve. - :type key: string - :rtype: mixed + :param query: The timeseries query. + :type query: string + :rtype: :class:`TsObject ` """ - return self.get_properties()[key] + return self.client.ts_query(query, interpolations) diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index de4b6f62..992e4997 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -238,9 +238,6 @@ def test_timeout_validation(self): with self.assertRaises(ValueError): self.client.put(obj, timeout=bad) - with self.assertRaises(ValueError): - self.client.ts_put(obj, timeout=bad) - with self.assertRaises(ValueError): self.client.get(obj, timeout=bad) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 0b457376..e67a4780 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -639,3 +639,9 @@ def _encode_timeseries(self, tsobj, ts_put_req): if tsobj.rows: else: raise RiakError("RiakTsObject requires rows") + + def _decode_timeseries(self, ts_put_resp, tsobj): + """ + TODO RTS-367 + """ + raise NotImplementedError diff --git a/riak/transports/transport.py b/riak/transports/transport.py index e58f2ddf..4089bbda 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -85,16 +85,22 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=None, """ raise NotImplementedError - def ts_put(self, tsobj, timeout=None): + def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, + pw=None, timeout=None): """ - Stores a time series object. + Deletes an object. """ raise NotImplementedError - def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, - pw=None, timeout=None): + def ts_put(self, tsobj): """ - Deletes an object. + Stores a timeseries object. + """ + raise NotImplementedError + + def ts_query(self, query, interpolations=None): + """ + Query timeseries data. 
""" raise NotImplementedError From 26dfdfcadd8ad1ec8d1c1084f0c6dc8a6e5902f0 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 19 Oct 2015 14:13:00 -0700 Subject: [PATCH 022/324] Use known working list of ciphers for tests. Tweaks to SecurityError exceptions to fix bug and show more info. --- riak/tests/__init__.py | 5 ++++- riak/tests/test_security.py | 37 +++++++++++++++++++------------ riak/transports/pbc/connection.py | 4 ++-- 3 files changed, 29 insertions(+), 17 deletions(-) diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index c4d64bf1..d85447ff 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -60,9 +60,12 @@ SECURITY_CERT_PASSWD = os.environ.get('RIAK_TEST_SECURITY_CERT_PASSWD', 'certpass') +SECURITY_CIPHERS = 'DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:AES128-SHA256:AES128-SHA:AES256-SHA256:AES256-SHA:RC4-SHA' + SECURITY_CREDS = None if RUN_SECURITY: SECURITY_CREDS = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, - cacert_file=SECURITY_CACERT) + cacert_file=SECURITY_CACERT, + ciphers=SECURITY_CIPHERS) SKIP_DATATYPES = int(os.environ.get('SKIP_DATATYPES', '0')) diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index c76662aa..f0489039 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -20,7 +20,8 @@ import sys from riak.tests import RUN_SECURITY, SECURITY_USER, SECURITY_PASSWD, \ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ - SECURITY_CERT_USER, SECURITY_CERT_PASSWD, SECURITY_BAD_CERT + SECURITY_CERT_USER, SECURITY_CERT_PASSWD, SECURITY_BAD_CERT, \ + SECURITY_CREDS, SECURITY_CIPHERS from riak.security import SecurityCreds if sys.version_info < (2, 7): unittest = __import__('unittest2') @@ -31,10 +32,7 @@ class SecurityTests(object): @unittest.skipIf(RUN_SECURITY, 'RUN_SECURITY is set') def test_security_disabled(self): - creds = SecurityCreds(username=SECURITY_USER, - password=SECURITY_PASSWD, - cacert_file=SECURITY_CACERT) - client = self.create_client(credentials=creds) + client = self.create_client(credentials=SECURITY_CREDS) myBucket = client.bucket('test') val1 = "foobar" key1 = myBucket.new('x', data=val1) @@ -51,31 +49,39 @@ def test_security_basic_connection(self): @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_bad_user(self): - creds = SecurityCreds(username='foo', password=SECURITY_PASSWD, - cacert_file=SECURITY_CACERT) + creds = SecurityCreds(username='foo', + password=SECURITY_PASSWD, + cacert_file=SECURITY_CACERT, + ciphers=SECURITY_CIPHERS) client = self.create_client(credentials=creds) with self.assertRaises(Exception): client.get_buckets() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_bad_password(self): - creds = SecurityCreds(username=SECURITY_USER, password='foo', - cacert_file=SECURITY_CACERT) + creds = SecurityCreds(username=SECURITY_USER, + password='foo', + cacert_file=SECURITY_CACERT, + ciphers=SECURITY_CIPHERS) client = self.create_client(credentials=creds) with self.assertRaises(Exception): client.get_buckets() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_invalid_cert(self): - creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, - cacert_file='/tmp/foo') + creds = SecurityCreds(username=SECURITY_USER, + password=SECURITY_PASSWD, + cacert_file='/tmp/foo', + ciphers=SECURITY_CIPHERS) client = self.create_client(credentials=creds) with self.assertRaises(Exception): 
client.get_buckets() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_password_without_cacert(self): - creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD) + creds = SecurityCreds(username=SECURITY_USER, + password=SECURITY_PASSWD, + ciphers=SECURITY_CIPHERS) client = self.create_client(credentials=creds) with self.assertRaises(Exception): myBucket = client.bucket('test') @@ -87,6 +93,7 @@ def test_security_password_without_cacert(self): def test_security_cert_authentication(self): creds = SecurityCreds(username=SECURITY_CERT_USER, password=SECURITY_CERT_PASSWD, + ciphers=SECURITY_CIPHERS, cert_file=SECURITY_CERT, pkey_file=SECURITY_KEY, cacert_file=SECURITY_CACERT) @@ -107,6 +114,7 @@ def test_security_cert_authentication(self): @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_revoked_cert(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, + ciphers=SECURITY_CIPHERS, cacert_file=SECURITY_CACERT, crl_file=SECURITY_REVOKED) # Currently Python >= 2.7.9 and Python 3.x native CRL doesn't seem to @@ -120,6 +128,7 @@ def test_security_revoked_cert(self): @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_bad_ca_cert(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, + ciphers=SECURITY_CIPHERS, cacert_file=SECURITY_BAD_CERT) client = self.create_client(credentials=creds) with self.assertRaises(Exception): @@ -128,8 +137,8 @@ def test_security_bad_ca_cert(self): @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_ciphers(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, - cacert_file=SECURITY_CACERT, - ciphers='DHE-RSA-AES256-SHA') + ciphers=SECURITY_CIPHERS, + cacert_file=SECURITY_CACERT) client = self.create_client(credentials=creds) myBucket = client.bucket('test') val1 = "foobar" diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 293d05c3..0bc58232 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -136,7 +136,7 @@ def _ssl_handshake(self): return True except Exception as e: # fail if *any* exceptions are thrown during SSL handshake - raise SecurityError(e.message) + raise SecurityError(e) else: def _ssl_handshake(self): """ @@ -165,7 +165,7 @@ def _ssl_handshake(self): return True except ssl.SSLError as e: - raise SecurityError(e.library + ": " + e.reason) + raise SecurityError(e) except Exception as e: # fail if *any* exceptions are thrown during SSL handshake raise SecurityError(e) From 9e015caf0482a6d6489e687367749df5f5300789 Mon Sep 17 00:00:00 2001 From: bobby Date: Mon, 19 Oct 2015 21:16:20 -0700 Subject: [PATCH 023/324] Updated README.md with Python3 information and use of pip Updated README.md: * pip * Python3 information * adding syntax highlighting --- README.rst | 69 +++++++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 55 insertions(+), 14 deletions(-) diff --git a/README.rst b/README.rst index b2785ddc..99bfe27b 100644 --- a/README.rst +++ b/README.rst @@ -16,22 +16,41 @@ Documentation for Riak is available at http://docs.basho.com/riak/latest Install ======= -The recommended version of Python for use with this client is Python -2.7. From the Riak Python Client root directory, execute:: +The recommended versions of Python for use with this client are Python +`2.7.x` and `3.3/3.4`. + +From the Riak Python Client root directory, execute + +From Source +----------- + +.. 
code-block:: console python setup.py install There is an additional dependency on the Python package `setuptools`. +From PyPI +--------- + Official packages are signed and published to `PyPI `_. +To install from `PyPI `_ directly you can use +`pip`. + +.. code-block:: console + + pip install riak + Testing ======= To setup the default test configuration build a test Riak node (from -a ``riak`` directory):: +a ``riak`` directory) + +.. code-block:: console make rel @@ -41,13 +60,17 @@ for more details. For all of the simple default values, set the ``RIAK_DIR`` environment variable to the root of your Riak installation. Then from the -``riak-python-client`` directory :: +``riak-python-client`` directory + +.. code-block:: console cd buildbot make preconfigure Start your Riak node with ``riak start`` from the the Riak directory, -then back in ``buildbot`` type:: +then back in ``buildbot`` type + +.. code-block:: console make configure make test @@ -61,7 +84,9 @@ Testing Options If you wish to change the default options you can run the setup by hand. First configure the test node by adjusting the ``riak.conf`` settings, where ``RIAK_DIR`` is the path to the top your -Riak installation:: +Riak installation + +.. code-block:: console python setup.py preconfigure --riak-conf=$RIAK_DIR/etc/riak.conf @@ -73,7 +98,9 @@ arguments: - ``--http-port=`` http port number (default is ``8098``) - ``--https-port=`` https port number (default is ``8099``) -You may alternately add these lines to ``setup.cfg``:: +You may alternately add these lines to ``setup.cfg`` + +.. code-block:: ini [preconfigure] riak-conf=/Users/sean/dev/riak/rel/riak/etc/riak.conf @@ -83,7 +110,9 @@ You may alternately add these lines to ``setup.cfg``:: https-port=8099 Next start the test node. Once it is running, a test configuration is -installed which includes security test users and bucket types:: +installed which includes security test users and bucket types + +.. code-block:: console python setup.py configure --riak-admin=$RIAK_DIR/bin/riak-admin @@ -97,7 +126,9 @@ Optionally these configuration settings can be changed, too: ``certpass``) Similarly ``setup.cfg`` may be modified instead. To run the tests against a -Riak server (with configured TCP port configuration) on localhost, execute:: +Riak server (with configured TCP port configuration) on localhost, execute + +.. code-block:: console python setup.py test @@ -132,11 +163,15 @@ Testing Bucket Types (Riak 2+) To test bucket-types, you must run the ``create_bucket_types`` setup command, which will create the bucket-types used in testing, or create them manually yourself. It can be run like so (substituting ``$RIAK`` -with the root of your Riak install):: +with the root of your Riak install) + +.. code-block:: console ./setup.py create_bucket_types --riak-admin=$RIAK/bin/riak-admin -You may alternately add these lines to `setup.cfg`:: +You may alternately add these lines to `setup.cfg` + +.. code-block:: ini [create_bucket_types] riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin @@ -160,14 +195,20 @@ enabled on Riak. Once ``security = on`` is configured in the ``riak.conf`` file it can be enabled with ``riak-admin``. If you have set up the test environment outlined in the `Testing`_ section -you can go ahead and use this command to enable security:: +you can go ahead and use this command to enable security + +.. 
code-block:: console python setup.py enable_security --riak-admin=$RIAK_DIR/bin/riak-admin -Once you are done testing security you can also:: +Once you are done testing security you can also + +.. code-block:: console python setup.py disable_security --riak-admin=$RIAK_DIR/bin/riak-admin -To run the tests, then simply:: +To run the tests, then simply + +.. code-block:: console RUN_SECURITY=1 RIAK_TEST_HTTP_PORT=18098 python setup.py test From bdbc35fd2f0c6bd8a3efe0e1376031f112a3f5ae Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 20 Oct 2015 11:00:48 -0700 Subject: [PATCH 024/324] Some small things to ignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 34e7a5bb..f9515221 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ *.pyc +.python-version docs/_build @@ -9,6 +10,7 @@ build/ dist/ riak.egg-info/ *.egg +.eggs/ #*# *~ From b40078b3bf65aa395c783e5f2cf432e7f7565cb2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 22 Oct 2015 10:44:32 -0700 Subject: [PATCH 025/324] Add setup code for Timeseries, begin to add integration tests --- README.rst | 22 +++++++++ commands.py | 88 +++++++++++++++++++++-------------- riak/tests/__init__.py | 2 + riak/tests/test_all.py | 4 +- riak/tests/test_timeseries.py | 22 +++++++++ setup.py | 3 +- 6 files changed, 104 insertions(+), 37 deletions(-) create mode 100644 riak/tests/test_timeseries.py diff --git a/README.rst b/README.rst index 99bfe27b..85468584 100644 --- a/README.rst +++ b/README.rst @@ -179,6 +179,28 @@ You may alternately add these lines to `setup.cfg` To skip the bucket-type tests, set the ``SKIP_BTYPES`` environment variable to ``1``. +Testing Timeseries (Riak 2+) +------------------------------ + +To test timeseries data, you must run the ``setup_timeseries`` command, +which will create the bucket-types used in testing, or create them +manually yourself. It can be run like so (substituting ``$RIAK`` with +the root of your Riak install) + +.. code-block:: console + + ./setup.py setup_timeseries --riak-admin=$RIAK/bin/riak-admin + +You may alternately add these lines to `setup.cfg` + +.. code-block:: ini + + [setup_timeseries] + riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin + +To enable the timeseries tests, set the ``SKIP_TIMESERIES`` environment +variable to ``0``. + Testing Secondary Indexes ------------------------- diff --git a/commands.py b/commands.py index 06ee3039..0a736284 100644 --- a/commands.py +++ b/commands.py @@ -11,8 +11,10 @@ import os.path -__all__ = ['create_bucket_types', 'setup_security', 'enable_security', - 'disable_security', 'preconfigure', 'configure'] +__all__ = ['create_bucket_types', + 'setup_security', 'enable_security', 'disable_security', + 'setup_timeseries', + 'preconfigure', 'configure'] # Exception classes used by this module. @@ -72,36 +74,7 @@ def check_output(*popenargs, **kwargs): except ImportError: import json - -class create_bucket_types(Command): - """ - Creates bucket-types appropriate for testing. 
By default this will create: - - * `pytest-maps` with ``{"datatype":"map"}`` - * `pytest-sets` with ``{"datatype":"set"}`` - * `pytest-counters` with ``{"datatype":"counter"}`` - * `pytest-consistent` with ``{"consistent":true}`` - * `pytest-write-once` with ``{"write_once": true}`` - * `pytest-mr` - * `pytest` with ``{"allow_mult":false}`` - """ - - description = "create bucket-types used in integration tests" - - user_options = [ - ('riak-admin=', None, 'path to the riak-admin script') - ] - - _props = { - 'pytest-maps': {'datatype': 'map'}, - 'pytest-sets': {'datatype': 'set'}, - 'pytest-counters': {'datatype': 'counter'}, - 'pytest-consistent': {'consistent': True}, - 'pytest-write-once': {'write_once': True}, - 'pytest-mr': {}, - 'pytest': {'allow_mult': False} - } - +class bucket_type_commands: def initialize_options(self): self.riak_admin = None @@ -170,6 +143,53 @@ def _btype_command(self, *args): cmd.extend(args) return cmd +class create_bucket_types(bucket_type_commands, Command): + """ + Creates bucket-types appropriate for testing. By default this will create: + + * `pytest-maps` with ``{"datatype":"map"}`` + * `pytest-sets` with ``{"datatype":"set"}`` + * `pytest-counters` with ``{"datatype":"counter"}`` + * `pytest-consistent` with ``{"consistent":true}`` + * `pytest-write-once` with ``{"write_once": true}`` + * `pytest-mr` + * `pytest` with ``{"allow_mult":false}`` + """ + + description = "create bucket-types used in integration tests" + + user_options = [ + ('riak-admin=', None, 'path to the riak-admin script') + ] + + _props = { + 'pytest-maps': {'datatype': 'map'}, + 'pytest-sets': {'datatype': 'set'}, + 'pytest-counters': {'datatype': 'counter'}, + 'pytest-consistent': {'consistent': True}, + 'pytest-write-once': {'write_once': True}, + 'pytest-mr': {}, + 'pytest': {'allow_mult': False} + } + + +class setup_timeseries(bucket_type_commands, Command): + """ + Creates bucket-types appropriate for timeseries. 
By default this will create: + + * `GeoCheckin` with ``{"props": {"n_val": 3, "table_def": "CREATE TABLE GeoCheckin (geohash varchar not null, user varchar not null, time timestamp not null, weather varchar not null, temperature float, PRIMARY KEY((quantum(time, 15, m),user), time, user))"}}`` + """ + + description = "create bucket-types used in timeseries tests" + + user_options = [ + ('riak-admin=', None, 'path to the riak-admin script') + ] + + _props = { + 'GeoCheckin': {'n_val': 3, 'table_def': 'CREATE TABLE GeoCheckin (geohash varchar not null, user varchar not null, time timestamp not null, weather varchar not null, temperature float, PRIMARY KEY((quantum(time, 15, m),user), time, user))'}, + } + class security_commands(object): def check_security_command(self, *args): @@ -469,6 +489,4 @@ def run(self): for cmd_name in self.get_sub_commands(): self.run_command(cmd_name) - sub_commands = [('create_bucket_types', None), - ('setup_security', None) - ] + sub_commands = [('create_bucket_types', None), ('setup_security', None)] diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index d85447ff..46f092c9 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -36,6 +36,8 @@ SKIP_INDEXES = int(os.environ.get('SKIP_INDEXES', '1')) +SKIP_TIMESERIES = int(os.environ.get('SKIP_TIMESERIES', '1')) + SKIP_POOL = os.environ.get('SKIP_POOL') SKIP_RESOLVE = int(os.environ.get('SKIP_RESOLVE', '0')) SKIP_BTYPES = int(os.environ.get('SKIP_BTYPES', '0')) diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index 2a6ef8cc..51e256f8 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -36,6 +36,7 @@ from riak.tests.test_btypes import BucketTypeTests from riak.tests.test_security import SecurityTests from riak.tests.test_datatypes import DatatypeIntegrationTests +from riak.tests.test_timeseries import TimeseriesTests from riak.tests import HOST, PB_HOST, PB_PORT, HTTP_HOST, HTTP_PORT, \ HAVE_PROTO, DUMMY_HTTP_PORT, DUMMY_PB_PORT, \ @@ -58,7 +59,6 @@ testrun_yz_index = {'btype': None, 'bucket': None, 'index': None} testrun_yz_mr = {'btype': None, 'bucket': None, 'index': None} - def setUpModule(): global testrun_search_bucket, testrun_props_bucket, \ testrun_sibs_bucket, testrun_yz, testrun_yz_index, testrun_yz_mr @@ -375,6 +375,7 @@ class RiakPbcTransportTestCase(BasicKVTests, BucketTypeTests, SecurityTests, DatatypeIntegrationTests, + TimeseriesTests, BaseTestCase, unittest.TestCase, test_six.Comparison): @@ -393,6 +394,7 @@ def test_uses_client_id_if_given(self): self.assertEqual(zero_client_id, c.client_id) +# NB: no Timeseries support in HTTP class RiakHttpTransportTestCase(BasicKVTests, KVFileTests, BucketPropsTest, diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py new file mode 100644 index 00000000..d082d3df --- /dev/null +++ b/riak/tests/test_timeseries.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +import platform + +from . 
import SKIP_TIMESERIES + +if platform.python_version() < '2.7': + unittest = __import__('unittest2') +else: + import unittest + + +class TimeseriesTests(BaseTestCase, unittest.TestCase): + + @unittest.skipIf(SKIP_TIMESERIES == '1', "skip requested for timeseries tests") + def test_store(self): + table = self.client.table(self.table_name) + measurements = [ + [ ] + ] + obj = table.new(measurements) + result = obj.store() + self.assertTrue(result) diff --git a/setup.py b/setup.py index 549f2799..4f8acfe0 100755 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ from setuptools import setup, find_packages from version import get_version from commands import preconfigure, configure, create_bucket_types, \ - setup_security, enable_security, disable_security + setup_security, enable_security, disable_security, setup_timeseries install_requires = ['six >= 1.8.0'] requires = ['six(>=1.8.0)'] @@ -39,6 +39,7 @@ test_suite='riak.tests.suite', url='https://github.com/basho/riak-python-client', cmdclass={'create_bucket_types': create_bucket_types, + 'setup_timeseries': setup_timeseries, 'setup_security': setup_security, 'preconfigure': preconfigure, 'configure': configure, From 9de23fdb032cf3afcc33bc9d5e51f43507cbe33c Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 24 Oct 2015 09:11:01 -0700 Subject: [PATCH 026/324] Refactor tests to allow running individuall. Still have to implement super().setUp() --- riak/tests/__init__.py | 5 +- riak/tests/base.py | 87 ++++++++++++ riak/tests/{test_six.py => comparison.py} | 18 +-- riak/tests/test_2i.py | 24 +--- riak/tests/test_all.py | 154 +--------------------- riak/tests/test_btypes.py | 25 +--- riak/tests/test_comparison.py | 23 +--- riak/tests/test_datatypes.py | 43 ++---- riak/tests/test_feature_detection.py | 19 +-- riak/tests/test_filters.py | 19 +-- riak/tests/test_kv.py | 30 +---- riak/tests/test_mapreduce.py | 56 ++++---- riak/tests/test_pool.py | 27 +--- riak/tests/test_search.py | 28 +--- riak/tests/test_security.py | 22 +--- riak/tests/test_timeseries.py | 24 +++- riak/tests/test_yokozuna.py | 88 +++++++------ riak/tests/yz_setup.py | 31 +++++ riak/transports/pbc/codec.py | 8 +- riak/transports/pbc/connection.py | 5 +- riak/transports/pbc/transport.py | 4 +- setup.py | 22 +++- 22 files changed, 289 insertions(+), 473 deletions(-) create mode 100644 riak/tests/base.py rename riak/tests/{test_six.py => comparison.py} (88%) create mode 100644 riak/tests/yz_setup.py diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 46f092c9..85599d4b 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -19,6 +19,8 @@ HOST = os.environ.get('RIAK_TEST_HOST', '127.0.0.1') +PROTOCOL = 'pbc' + PB_HOST = os.environ.get('RIAK_TEST_PB_HOST', HOST) PB_PORT = int(os.environ.get('RIAK_TEST_PB_PORT', '8087')) @@ -30,7 +32,6 @@ DUMMY_HTTP_PORT = int(os.environ.get('DUMMY_HTTP_PORT', '1023')) DUMMY_PB_PORT = int(os.environ.get('DUMMY_PB_PORT', '1022')) - SKIP_SEARCH = int(os.environ.get('SKIP_SEARCH', '1')) RUN_YZ = int(os.environ.get('RUN_YZ', '0')) @@ -38,7 +39,7 @@ SKIP_TIMESERIES = int(os.environ.get('SKIP_TIMESERIES', '1')) -SKIP_POOL = os.environ.get('SKIP_POOL') +SKIP_POOL = int(os.environ.get('SKIP_POOL', '1')) SKIP_RESOLVE = int(os.environ.get('SKIP_RESOLVE', '0')) SKIP_BTYPES = int(os.environ.get('SKIP_BTYPES', '0')) diff --git a/riak/tests/base.py b/riak/tests/base.py new file mode 100644 index 00000000..dbd662e0 --- /dev/null +++ b/riak/tests/base.py @@ -0,0 +1,87 @@ +# -*- coding: utf-8 -*- +import random + +from riak.client import 
RiakClient +from riak.tests import HOST, PROTOCOL,PB_PORT, HTTP_PORT, SECURITY_CREDS + +testrun_search_bucket = 'searchbucket' +testrun_props_bucket = 'propsbucket' +testrun_sibs_bucket = 'sibsbucket' + +def setUpModule(): + + c = RiakClient(host=PB_HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + + c.bucket(testrun_sibs_bucket).allow_mult = True + + if (not SKIP_SEARCH and not RUN_YZ): + b = c.bucket(testrun_search_bucket) + b.enable_search() + + +def tearDownModule(): + c = RiakClient(host=HTTP_HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + + c.bucket(testrun_sibs_bucket).clear_properties() + c.bucket(testrun_props_bucket).clear_properties() + + if not SKIP_SEARCH and not RUN_YZ: + b = c.bucket(testrun_search_bucket) + b.clear_properties() + +class BaseTestCase(object): + + host = None + pb_port = None + http_port = None + credentials = None + + @staticmethod + def randint(): + return random.randint(1, 999999) + + @staticmethod + def randname(length=12): + out = '' + for i in range(length): + out += chr(random.randint(ord('a'), ord('z'))) + return out + + def create_client(self, host=None, http_port=None, pb_port=None, + protocol=None, credentials=None, + **client_args): + host = host or self.host or HOST + http_port = http_port or self.http_port or HTTP_PORT + pb_port = pb_port or self.pb_port or PB_PORT + + if protocol is None: + if hasattr(self, 'protocol') and (self.protocol is not None): + protocol = self.protocol + else: + protocol = PROTOCOL + + self.protocol = protocol + + credentials = credentials or SECURITY_CREDS + + return RiakClient(protocol=protocol, + host=host, + http_port=http_port, + credentials=credentials, + pb_port=pb_port, **client_args) + + def setUp(self): + self.table_name = 'GeoCheckin' + self.bucket_name = self.randname() + self.key_name = self.randname() + self.search_bucket = testrun_search_bucket + self.sibs_bucket = testrun_sibs_bucket + self.props_bucket = testrun_props_bucket + # self.yz = testrun_yz + # self.yz_index = testrun_yz_index + # self.yz_mr = testrun_yz_mr + self.credentials = SECURITY_CREDS + self.client = self.create_client() + diff --git a/riak/tests/test_six.py b/riak/tests/comparison.py similarity index 88% rename from riak/tests/test_six.py rename to riak/tests/comparison.py index c68dd150..30cde091 100644 --- a/riak/tests/test_six.py +++ b/riak/tests/comparison.py @@ -1,20 +1,4 @@ -""" -Copyright 2014 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" +# -*- coding: utf-8 -*- from six import PY2, PY3 import collections import warnings diff --git a/riak/tests/test_2i.py b/riak/tests/test_2i.py index 86d14999..68b2810a 100644 --- a/riak/tests/test_2i.py +++ b/riak/tests/test_2i.py @@ -1,32 +1,16 @@ # -*- coding: utf-8 -*- -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. 
You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import platform from riak import RiakError -from . import SKIP_INDEXES +from riak.tests import SKIP_INDEXES +from riak.tests.base import BaseTestCase + if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -class TwoITests(object): +class TwoITests(BaseTestCase, unittest.TestCase): def is_2i_supported(self): # Immediate test to see if 2i is even supported w/ the backend try: diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index 51e256f8..385fef7b 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -1,22 +1,4 @@ # -*- coding: utf-8 -*- -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" -import random import platform from six import PY2 from threading import Thread @@ -40,7 +22,7 @@ from riak.tests import HOST, PB_HOST, PB_PORT, HTTP_HOST, HTTP_PORT, \ HAVE_PROTO, DUMMY_HTTP_PORT, DUMMY_PB_PORT, \ - SKIP_SEARCH, RUN_YZ, SECURITY_CREDS, SKIP_POOL, test_six + SKIP_SEARCH, RUN_YZ, SECURITY_CREDS, SKIP_POOL if PY2: from Queue import Queue @@ -52,131 +34,6 @@ else: import unittest -testrun_search_bucket = None -testrun_props_bucket = None -testrun_sibs_bucket = None -testrun_yz = {'btype': None, 'bucket': None, 'index': None} -testrun_yz_index = {'btype': None, 'bucket': None, 'index': None} -testrun_yz_mr = {'btype': None, 'bucket': None, 'index': None} - -def setUpModule(): - global testrun_search_bucket, testrun_props_bucket, \ - testrun_sibs_bucket, testrun_yz, testrun_yz_index, testrun_yz_mr - - c = RiakClient(host=PB_HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) - - testrun_props_bucket = 'propsbucket' - testrun_sibs_bucket = 'sibsbucket' - c.bucket(testrun_sibs_bucket).allow_mult = True - - if (not SKIP_SEARCH and not RUN_YZ): - testrun_search_bucket = 'searchbucket' - b = c.bucket(testrun_search_bucket) - b.enable_search() - - if RUN_YZ: - # YZ index on bucket of the same name - testrun_yz = {'btype': None, 'bucket': 'yzbucket', - 'index': 'yzbucket'} - # YZ index on bucket of a different name - testrun_yz_index = {'btype': None, 'bucket': 'yzindexbucket', - 'index': 'yzindex'} - # Add bucket and type for Search 2.0 -> MapReduce - testrun_yz_mr = {'btype': 'pytest-mr', 'bucket': 'mrbucket', - 'index': 'mrbucket'} - - for yz in (testrun_yz, testrun_yz_index, testrun_yz_mr): - c.create_search_index(yz['index'], timeout=30000) - if yz['btype'] is not None: - t = c.bucket_type(yz['btype']) - b = t.bucket(yz['bucket']) - else: - b = c.bucket(yz['bucket']) - # Keep trying to set search bucket property until it succeeds - index_set = False - while not 
index_set: - try: - b.set_property('search_index', yz['index']) - index_set = True - except RiakError: - pass - - -def tearDownModule(): - global testrun_search_bucket, testrun_props_bucket, \ - testrun_sibs_bucket, testrun_yz_bucket - - c = RiakClient(host=HTTP_HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) - - c.bucket(testrun_sibs_bucket).clear_properties() - c.bucket(testrun_props_bucket).clear_properties() - - if not SKIP_SEARCH and not RUN_YZ: - b = c.bucket(testrun_search_bucket) - b.clear_properties() - - if RUN_YZ: - for yz in (testrun_yz, testrun_yz_index, testrun_yz_mr): - if yz['btype'] is not None: - t = c.bucket_type(yz['btype']) - b = t.bucket(yz['bucket']) - else: - b = c.bucket(yz['bucket']) - b.set_property('search_index', '_dont_index_') - c.delete_search_index(yz['index']) - for keys in b.stream_keys(): - for key in keys: - b.delete(key) - - -class BaseTestCase(object): - - host = None - pb_port = None - http_port = None - credentials = None - - @staticmethod - def randint(): - return random.randint(1, 999999) - - @staticmethod - def randname(length=12): - out = '' - for i in range(length): - out += chr(random.randint(ord('a'), ord('z'))) - return out - - def create_client(self, host=None, http_port=None, pb_port=None, - protocol=None, credentials=None, - **client_args): - host = host or self.host or HOST - http_port = http_port or self.http_port or HTTP_PORT - pb_port = pb_port or self.pb_port or PB_PORT - protocol = protocol or self.protocol - credentials = credentials or SECURITY_CREDS - return RiakClient(protocol=protocol, - host=host, - http_port=http_port, - credentials=credentials, - pb_port=pb_port, **client_args) - - def setUp(self): - self.bucket_name = self.randname() - self.key_name = self.randname() - self.search_bucket = testrun_search_bucket - self.sibs_bucket = testrun_sibs_bucket - self.props_bucket = testrun_props_bucket - self.yz = testrun_yz - self.yz_index = testrun_yz_index - self.yz_mr = testrun_yz_mr - self.credentials = SECURITY_CREDS - - self.client = self.create_client() - - class ClientTests(object): def test_request_retries(self): # We guess at some ports that will be unused by Riak or @@ -375,10 +232,7 @@ class RiakPbcTransportTestCase(BasicKVTests, BucketTypeTests, SecurityTests, DatatypeIntegrationTests, - TimeseriesTests, - BaseTestCase, - unittest.TestCase, - test_six.Comparison): + unittest.TestCase): def setUp(self): if not HAVE_PROTO: @@ -413,9 +267,7 @@ class RiakHttpTransportTestCase(BasicKVTests, BucketTypeTests, SecurityTests, DatatypeIntegrationTests, - BaseTestCase, - unittest.TestCase, - test_six.Comparison): + unittest.TestCase): def setUp(self): self.host = HTTP_HOST diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index b3ea5db2..e1992e04 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -1,25 +1,8 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import platform -from . 
import SKIP_BTYPES -from riak.bucket import RiakBucket, BucketType from riak import RiakError, RiakObject +from riak.bucket import RiakBucket, BucketType +from riak.tests import SKIP_BTYPES +from riak.tests.base import BaseTestCase if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -27,7 +10,7 @@ import unittest -class BucketTypeTests(object): +class BucketTypeTests(BaseTestCase, unittest.TestCase): def test_btype_init(self): btype = self.client.bucket_type('foo') self.assertIsInstance(btype, BucketType) diff --git a/riak/tests/test_comparison.py b/riak/tests/test_comparison.py index 38a1ef9f..c21bfee5 100644 --- a/riak/tests/test_comparison.py +++ b/riak/tests/test_comparison.py @@ -1,25 +1,8 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - +# -*- coding: utf-8 -*- import platform from riak.riak_object import RiakObject from riak.bucket import RiakBucket, BucketType -from riak.tests.test_all import BaseTestCase +from riak.tests.base import BaseTestCase if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -153,7 +136,7 @@ def test_object_valid_key(self): self.assertIsNone(b, 'empty object key not allowed') -class RiakClientComparisonTest(unittest.TestCase, BaseTestCase): +class RiakClientComparisonTest(BaseTestCase, unittest.TestCase): def test_client_eq(self): self.protocol = 'http' a = self.create_client(host='host1', http_port=11) diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 9c6b3a0b..5e5f0614 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -1,27 +1,10 @@ # -*- coding: utf-8 -*- -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import platform from riak import RiakBucket, BucketType, RiakObject import riak.datatypes as datatypes -from . 
import SKIP_DATATYPES -from riak.tests import test_six +from riak.tests import SKIP_DATATYPES +from riak.tests.base import BaseTestCase +from riak.tests.comparison import Comparison if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -29,7 +12,7 @@ import unittest -class DatatypeUnitTests(object): +class DatatypeUnitTestBase(object): dtype = None bucket = RiakBucket(None, 'test', BucketType(None, 'datatypes')) @@ -67,8 +50,7 @@ def test_op_output(self): self.check_op_output(op) -class FlagUnitTests(DatatypeUnitTests, - unittest.TestCase): +class FlagUnitTests(DatatypeUnitTestBase, unittest.TestCase): dtype = datatypes.Flag def op(self, dtype): @@ -87,8 +69,7 @@ def test_disables_require_context(self): self.assertTrue(dtype.modified) -class RegisterUnitTests(DatatypeUnitTests, - unittest.TestCase): +class RegisterUnitTests(DatatypeUnitTestBase, unittest.TestCase): dtype = datatypes.Register def op(self, dtype): @@ -98,8 +79,7 @@ def check_op_output(self, op): self.assertEqual(('assign', 'foobarbaz'), op) -class CounterUnitTests(DatatypeUnitTests, - unittest.TestCase): +class CounterUnitTests(DatatypeUnitTestBase, unittest.TestCase): dtype = datatypes.Counter def op(self, dtype): @@ -109,9 +89,7 @@ def check_op_output(self, op): self.assertEqual(('increment', 5), op) -class SetUnitTests(DatatypeUnitTests, - unittest.TestCase, - test_six.Comparison): +class SetUnitTests(DatatypeUnitTestBase, unittest.TestCase, Comparison): dtype = datatypes.Set def op(self, dtype): @@ -136,8 +114,7 @@ def test_removes_require_context(self): self.assertTrue(dtype.modified) -class MapUnitTests(DatatypeUnitTests, - unittest.TestCase): +class MapUnitTests(DatatypeUnitTestBase, unittest.TestCase): dtype = datatypes.Map def op(self, dtype): @@ -170,7 +147,7 @@ def test_removes_require_context(self): self.assertTrue(dtype.modified) -class DatatypeIntegrationTests(object): +class DatatypeIntegrationTests(BaseTestCase, unittest.TestCase): @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_counter(self): btype = self.client.bucket_type('pytest-counters') diff --git a/riak/tests/test_feature_detection.py b/riak/tests/test_feature_detection.py index 682c5ac2..d88334aa 100644 --- a/riak/tests/test_feature_detection.py +++ b/riak/tests/test_feature_detection.py @@ -1,21 +1,4 @@ -""" -Copyright 2012-2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - +# -*- coding: utf-8 -*- import platform from riak.transports.feature_detect import FeatureDetection diff --git a/riak/tests/test_filters.py b/riak/tests/test_filters.py index 00e9d0af..c821ce95 100644 --- a/riak/tests/test_filters.py +++ b/riak/tests/test_filters.py @@ -1,21 +1,4 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. 
You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - +# -*- coding: utf-8 -*- import platform from riak.mapreduce import RiakKeyFilter from riak import key_filter diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index d1b28298..b1752e8f 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -1,22 +1,4 @@ # -*- coding: utf-8 -*- -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import os import platform from six import string_types, PY2, PY3 @@ -25,7 +7,9 @@ from time import sleep from riak import ConflictError, RiakBucket, RiakError from riak.resolver import default_resolver, last_written_resolver -from . import SKIP_RESOLVE +from riak.tests import SKIP_RESOLVE +from riak.tests.base import BaseTestCase +from riak.tests.comparison import Comparison try: import simplejson as json @@ -71,7 +55,7 @@ def __eq__(self, other): return True -class BasicKVTests(object): +class BasicKVTests(BaseTestCase, unittest.TestCase, Comparison): def test_is_alive(self): self.assertTrue(self.client.is_alive()) @@ -610,7 +594,7 @@ def generate_siblings(self, original, count=5, delay=None): return vals -class BucketPropsTest(object): +class BucketPropsTest(BaseTestCase, unittest.TestCase): def test_rw_settings(self): bucket = self.client.bucket(self.props_bucket) self.assertEqual(bucket.r, "quorum") @@ -663,7 +647,7 @@ def test_clear_bucket_properties(self): self.assertEqual(bucket.n_val, 3) -class KVFileTests(object): +class KVFileTests(BaseTestCase, unittest.TestCase): def test_store_binary_object_from_file(self): bucket = self.client.bucket(self.bucket_name) filepath = os.path.join(os.path.dirname(__file__), 'test_all.py') @@ -691,7 +675,7 @@ def test_store_binary_object_from_file_should_fail_if_file_not_found(self): self.assertFalse(obj.exists) -class CounterTests(object): +class CounterTests(BaseTestCase, unittest.TestCase): def test_counter_requires_allow_mult(self): bucket = self.client.bucket(self.bucket_name) if bucket.allow_mult: diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index c15ff7b1..99ed8a92 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -1,38 +1,38 @@ # -*- coding: utf-8 -*- -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. 
You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import print_function -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" +import platform -from __future__ import print_function from six import PY2 from riak.mapreduce import RiakMapReduce -from riak import key_filter, RiakError +from riak import key_filter, RiakClient, RiakError +from riak.tests import RUN_YZ, PB_HOST, PB_PORT, HTTP_HOST, HTTP_PORT, SECURITY_CREDS +from riak.tests.base import BaseTestCase from riak.tests.test_yokozuna import wait_for_yz_index from riak.tests import RUN_SECURITY -import platform +from riak.tests.yz_setup import yzSetUpModule, yzTearDownModule -from . import RUN_YZ if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest +# Add bucket and type for Search 2.0 -> MapReduce +testrun_yz_mr = {'btype': 'pytest-mr', 'bucket': 'mrbucket', 'index': 'mrbucket'} + +def setUpModule(): + if RUN_YZ: + c = RiakClient(host=PB_HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + yzSetUpModule(c, testrun_yz_mr) + +def tearDownModule(): + if RUN_YZ: + c = RiakClient(host=HTTP_HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + yzTearDownModule(c, testrun_yz_mr) -class LinkTests(object): +class LinkTests(BaseTestCase, unittest.TestCase): def test_store_and_get_links(self): # Create the object... bucket = self.client.bucket(self.bucket_name) @@ -98,7 +98,7 @@ def test_link_walking(self): self.assertEqual(len(results), 1) -class ErlangMapReduceTests(object): +class ErlangMapReduceTests(BaseTestCase, unittest.TestCase): def test_erlang_map_reduce(self): # Create the object... bucket = self.client.bucket(self.bucket_name) @@ -204,7 +204,7 @@ def test_client_exceptional_paths(self): mr.add_key_filter("tokenize", "-", 1) -class JSMapReduceTests(object): +class JSMapReduceTests(BaseTestCase, unittest.TestCase): def test_javascript_source_map(self): # Create the object... bucket = self.client.bucket(self.bucket_name) @@ -525,8 +525,8 @@ def test_mr_search(self): """ Try a successful map/reduce from search results. 
""" - btype = self.client.bucket_type(self.yz_mr['btype']) - bucket = btype.bucket(self.yz_mr['bucket']) + btype = self.client.bucket_type(testrun_yz_mr['btype']) + bucket = btype.bucket(testrun_yz_mr['bucket']) bucket.new("Pebbles", {"name_s": "Fruity Pebbles", "maker_s": "Post", "sugar_i": 9, @@ -554,7 +554,7 @@ def test_mr_search(self): "fruit_b": False}).store() # Wait for Solr to catch up wait_for_yz_index(bucket, "Crunch") - mr = RiakMapReduce(self.client).search(self.yz_mr['bucket'], + mr = RiakMapReduce(self.client).search(testrun_yz_mr['bucket'], 'fruit_b:false') mr.map("""function(v) { var solr_doc = JSON.parse(v.values[0].data); @@ -564,7 +564,7 @@ def test_mr_search(self): self.assertEqual(result, [100]) -class MapReduceAliasTests(object): +class MapReduceAliasTests(BaseTestCase, unittest.TestCase): """This tests the map reduce aliases""" def test_map_values(self): @@ -759,7 +759,7 @@ def test_filter_not_found(self): self.assertEqual(sorted(result), [1, 2]) -class MapReduceStreamTests(object): +class MapReduceStreamTests(BaseTestCase, unittest.TestCase): def test_stream_results(self): bucket = self.client.bucket(self.bucket_name) bucket.new('one', data=1).store() diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index 6355eee0..1f0e2f19 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -1,21 +1,4 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - +# -*- coding: utf-8 -*- from six import PY2 import platform from threading import Thread, currentThread @@ -23,7 +6,7 @@ from random import SystemRandom from time import sleep from . import SKIP_POOL -from riak.tests import test_six +from riak.tests.comparison import Comparison if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -54,10 +37,8 @@ def create_resource(self): return [] -@unittest.skipIf(SKIP_POOL, - 'Skipping connection pool tests') -class PoolTest(unittest.TestCase, - test_six.Comparison): +@unittest.skipIf(SKIP_POOL, 'Skipping connection pool tests') +class PoolTest(unittest.TestCase, Comparison): def test_yields_new_object_when_empty(self): """ diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index eed22e2c..11f9f4c9 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -1,32 +1,16 @@ # -*- coding: utf-8 -*- -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from __future__ import print_function import platform -from . 
import SKIP_SEARCH +from riak.tests import SKIP_SEARCH +from riak.tests.base import BaseTestCase + if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -class EnableSearchTests(object): +class EnableSearchTests(BaseTestCase, unittest.TestCase): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_bucket_search_enabled(self): bucket = self.client.bucket(self.bucket_name) @@ -57,7 +41,7 @@ def test_disable_search_commit_hook(self): bucket.enable_search() -class SolrSearchTests(object): +class SolrSearchTests(BaseTestCase, unittest.TestCase): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_add_document_to_index(self): self.client.fulltext_add(self.search_bucket, @@ -116,7 +100,7 @@ def test_delete_documents_from_search_by_query_and_id(self): self.assertEqual(0, len(results['docs'])) -class SearchTests(object): +class SearchTests(BaseTestCase, unittest.TestCase): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_solr_search_from_bucket(self): bucket = self.client.bucket(self.search_bucket) diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index f0489039..065e16ee 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -1,35 +1,19 @@ # -*- coding: utf-8 -*- -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import sys from riak.tests import RUN_SECURITY, SECURITY_USER, SECURITY_PASSWD, \ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ SECURITY_CERT_USER, SECURITY_CERT_PASSWD, SECURITY_BAD_CERT, \ SECURITY_CREDS, SECURITY_CIPHERS from riak.security import SecurityCreds +from riak.tests.base import BaseTestCase + if sys.version_info < (2, 7): unittest = __import__('unittest2') else: import unittest -class SecurityTests(object): +class SecurityTests(BaseTestCase, unittest.TestCase): @unittest.skipIf(RUN_SECURITY, 'RUN_SECURITY is set') def test_security_disabled(self): client = self.create_client(credentials=SECURITY_CREDS) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index d082d3df..2bfa4585 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -1,22 +1,36 @@ # -*- coding: utf-8 -*- import platform +import time +import sys -from . 
import SKIP_TIMESERIES +from riak.tests import SKIP_TIMESERIES +from riak.tests.base import BaseTestCase if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest - class TimeseriesTests(BaseTestCase, unittest.TestCase): + def setUp(self): + super(TimeseriesTests, self).setUp() @unittest.skipIf(SKIP_TIMESERIES == '1', "skip requested for timeseries tests") def test_store(self): + now = int(round(time.time() * 1000)) # NB: millis since Jan 1 1970 UTC + fiveMinsInMsec = 5 * 60 * 1000 + fiveMinsAgo = now - fiveMinsInMsec + tenMinsAgo = fiveMinsAgo - fiveMinsInMsec + fifteenMinsAgo = tenMinsAgo - fiveMinsInMsec + twentyMinsAgo = fifteenMinsAgo - fiveMinsInMsec + table = self.client.table(self.table_name) measurements = [ - [ ] + [ 'hash1', 'user2', twentyMinsAgo, 'hurricane', '84.3' ], + [ 'hash1', 'user2', fifteenMinsAgo, 'rain', '79.0' ], + [ 'hash1', 'user2', fiveMinsAgo, 'wind', 50.5 ], + [ 'hash1', 'user2', now, 'snow', 20.1 ] ] - obj = table.new(measurements) - result = obj.store() + ts_obj = table.new(measurements) + result = ts_obj.store() self.assertTrue(result) diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index 4310784a..521be2e4 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -1,29 +1,32 @@ # -*- coding: utf-8 -*- -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import platform -from . 
import RUN_YZ +from riak import RiakClient +from riak.tests import RUN_YZ, PB_HOST, PB_PORT, HTTP_PORT, SECURITY_CREDS +from riak.tests.base import BaseTestCase +from riak.tests.yz_setup import yzSetUpModule, yzTearDownModule + if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest +# YZ index on bucket of the same name +testrun_yz = {'btype': None, 'bucket': 'yzbucket', 'index': 'yzbucket'} +# YZ index on bucket of a different name +testrun_yz_index = {'btype': None, 'bucket': 'yzindexbucket', 'index': 'yzindex'} + +def setUpModule(): + if RUN_YZ: + c = RiakClient(host=PB_HOST, protocol='pbc', + pb_port=PB_PORT, credentials=SECURITY_CREDS) + yzSetUpModule(c, testrun_yz, testrun_yz_index) + +def tearDownModule(): + if RUN_YZ: + c = RiakClient(host=PB_HOST, protocol='pbc', + pb_port=PB_PORT, credentials=SECURITY_CREDS) + yzTearDownModule(c, testrun_yz, testrun_yz_index) + def wait_for_yz_index(bucket, key, index=None): """ @@ -38,10 +41,13 @@ def wait_for_yz_index(bucket, key, index=None): pass -class YZSearchTests(object): +class YZSearchTests(BaseTestCase, unittest.TestCase): + def setUp(self): + super(YZSearchTests, self).setUp() + @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_from_bucket(self): - bucket = self.client.bucket(self.yz['bucket']) + bucket = self.client.bucket(testrun_yz['bucket']) bucket.new("user", {"user_s": "Z"}).store() wait_for_yz_index(bucket, "user") results = bucket.search("user_s:Z") @@ -51,60 +57,60 @@ def test_yz_search_from_bucket(self): self.assertIn('_yz_rk', result) self.assertEqual(u'user', result['_yz_rk']) self.assertIn('_yz_rb', result) - self.assertEqual(self.yz['bucket'], result['_yz_rb']) + self.assertEqual(testrun_yz['bucket'], result['_yz_rb']) self.assertIn('score', result) self.assertIn('user_s', result) self.assertEqual(u'Z', result['user_s']) @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_index_using_bucket(self): - bucket = self.client.bucket(self.yz_index['bucket']) + bucket = self.client.bucket(testrun_yz_index['bucket']) bucket.new("feliz", {"name_s": "Felix", "species_s": "Felis catus"}).store() - wait_for_yz_index(bucket, "feliz", index=self.yz_index['index']) - results = bucket.search('name_s:Felix', index=self.yz_index['index']) + wait_for_yz_index(bucket, "feliz", index=testrun_yz_index['index']) + results = bucket.search('name_s:Felix', index=testrun_yz_index['index']) self.assertEqual(1, len(results['docs'])) @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_index_using_wrong_bucket(self): - bucket = self.client.bucket(self.yz_index['bucket']) + bucket = self.client.bucket(testrun_yz_index['bucket']) bucket.new("feliz", {"name_s": "Felix", "species_s": "Felis catus"}).store() - wait_for_yz_index(bucket, "feliz", index=self.yz_index['index']) + wait_for_yz_index(bucket, "feliz", index=testrun_yz_index['index']) with self.assertRaises(Exception): bucket.search('name_s:Felix') @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_get_search_index(self): - index = self.client.get_search_index(self.yz['bucket']) - self.assertEqual(self.yz['bucket'], index['name']) + index = self.client.get_search_index(testrun_yz['bucket']) + self.assertEqual(testrun_yz['bucket'], index['name']) self.assertEqual('_yz_default', index['schema']) self.assertEqual(3, index['n_val']) with self.assertRaises(Exception): - self.client.get_search_index('NOT' + self.yz['bucket']) + self.client.get_search_index('NOT' + testrun_yz['bucket']) 
@unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_delete_search_index(self): # expected to fail, since there's an attached bucket with self.assertRaises(Exception): - self.client.delete_search_index(self.yz['bucket']) + self.client.delete_search_index(testrun_yz['bucket']) # detatch bucket from index then delete - b = self.client.bucket(self.yz['bucket']) + b = self.client.bucket(testrun_yz['bucket']) b.set_property('search_index', '_dont_index_') - self.assertTrue(self.client.delete_search_index(self.yz['bucket'])) + self.assertTrue(self.client.delete_search_index(testrun_yz['bucket'])) # create it again - self.client.create_search_index(self.yz['bucket'], '_yz_default', 3) - b = self.client.bucket(self.yz['bucket']) - b.set_property('search_index', self.yz['bucket']) + self.client.create_search_index(testrun_yz['bucket'], '_yz_default', 3) + b = self.client.bucket(testrun_yz['bucket']) + b.set_property('search_index', testrun_yz['bucket']) # Wait for index to apply indexes = [] - while self.yz['bucket'] not in indexes: + while testrun_yz['bucket'] not in indexes: indexes = [i['name'] for i in self.client.list_search_indexes()] @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_list_search_indexes(self): indexes = self.client.list_search_indexes() - self.assertIn(self.yz['bucket'], [item['name'] for item in indexes]) + self.assertIn(testrun_yz['bucket'], [item['name'] for item in indexes]) self.assertLessEqual(1, len(indexes)) @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') @@ -153,7 +159,7 @@ def test_yz_create_bad_schema(self): @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_queries(self): - bucket = self.client.bucket(self.yz['bucket']) + bucket = self.client.bucket(testrun_yz['bucket']) bucket.new("Z", {"username_s": "Z", "name_s": "ryan", "age_i": 30}).store() bucket.new("R", {"username_s": "R", "name_s": "eric", @@ -191,7 +197,7 @@ def test_yz_search_queries(self): @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_utf8(self): - bucket = self.client.bucket(self.yz['bucket']) + bucket = self.client.bucket(testrun_yz['bucket']) body = {"text_ja": u"私はハイビスカスを食べるのが 大好き"} bucket.new(self.key_name, body).store() wait_for_yz_index(bucket, self.key_name) @@ -201,7 +207,7 @@ def test_yz_search_utf8(self): @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_multivalued_fields(self): - bucket = self.client.bucket(self.yz['bucket']) + bucket = self.client.bucket(testrun_yz['bucket']) body = {"groups_ss": ['a', 'b', 'c']} bucket.new(self.key_name, body).store() wait_for_yz_index(bucket, self.key_name) diff --git a/riak/tests/yz_setup.py b/riak/tests/yz_setup.py new file mode 100644 index 00000000..d0e83147 --- /dev/null +++ b/riak/tests/yz_setup.py @@ -0,0 +1,31 @@ +from riak import RiakError + +def yzSetUpModule(c, *yzdata): + for yz in yzdata: + c.create_search_index(yz['index'], timeout=30000) + if yz['btype'] is not None: + t = c.bucket_type(yz['btype']) + b = t.bucket(yz['bucket']) + else: + b = c.bucket(yz['bucket']) + # Keep trying to set search bucket property until it succeeds + index_set = False + while not index_set: + try: + b.set_property('search_index', yz['index']) + index_set = True + except RiakError: + pass + +def yzTearDownModule(c, *yzdata): + for yz in yzdata: + if yz['btype'] is not None: + t = c.bucket_type(yz['btype']) + b = t.bucket(yz['bucket']) + else: + b = c.bucket(yz['bucket']) + b.set_property('search_index', '_dont_index_') + c.delete_search_index(yz['index']) + for keys in 
b.stream_keys(): + for key in keys: + b.delete(key) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 19eabf75..e8b47420 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -636,10 +636,10 @@ def _encode_timeseries(self, tsobj, ts_put_req): """ ts_put_req.table = str_to_bytes(tsobj.table) # TODO RTS-367 columns / rows - if tsobj.columns: - if tsobj.rows: - else: - raise RiakError("RiakTsObject requires rows") + # if tsobj.columns: + # if tsobj.rows: + # else: + # raise RiakError("RiakTsObject requires rows") def _decode_timeseries(self, ts_put_resp, tsobj): """ diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 0bc58232..7f0e8b5d 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -175,7 +175,10 @@ def _recv_msg(self, expect=None): msg_code, = struct.unpack("B", self._inbuf[:1]) if msg_code is MSG_CODE_ERROR_RESP: err = self._parse_msg(msg_code, self._inbuf[1:]) - raise RiakError(bytes_to_str(err.errmsg)) + if err is None: + raise RiakError('no error provided!') + else: + raise RiakError(bytes_to_str(err.errmsg)) elif msg_code in MESSAGE_CLASSES: msg = self._parse_msg(msg_code, self._inbuf[1:]) else: diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 15b69f6f..5e3e91c8 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -80,8 +80,8 @@ MSG_CODE_DT_FETCH_RESP, MSG_CODE_DT_UPDATE_REQ, MSG_CODE_DT_UPDATE_RESP, - MSG_CODE_TS_PUT_REQ, - MSG_CODE_TS_PUT_RESP + # MSG_CODE_TS_PUT_REQ, + # MSG_CODE_TS_PUT_RESP ) diff --git a/setup.py b/setup.py index 4f8acfe0..dfcce1f3 100755 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ #!/usr/bin/env python +import os import sys from setuptools import setup, find_packages from version import get_version @@ -10,12 +11,23 @@ if sys.version_info < (2, 7, 9): install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") -if sys.version_info < (3, ): - install_requires.append("riak_pb >=2.0.0") - requires.append("riak_pb(>=2.0.0)") + +riak_pb_in_pythonpath = False +PYTHONPATH = os.environ.get('PYTHONPATH') +if PYTHONPATH is not None and PYTHONPATH.find('riak_pb/python/lib') != -1: + riak_pb_in_pythonpath = True + +if riak_pb_in_pythonpath: + install_requires.append("protobuf ==2.6.1") + requires.append("protobuf(==2.6.1)") else: - install_requires.append("python3_riak_pb >=2.0.0") - requires.append("python3_riak_pb(>=2.0.0)") + if sys.version_info < (3, ): + install_requires.append("riak_pb >=2.0.0") + requires.append("riak_pb(>=2.0.0)") + else: + install_requires.append("python3_riak_pb >=2.0.0") + requires.append("python3_riak_pb(>=2.0.0)") + tests_require = [] if sys.version_info < (2, 7): tests_require.append("unittest2") From a8227e1d837a938b952a7f4297d948279be1f9dd Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sun, 25 Oct 2015 11:36:37 -0700 Subject: [PATCH 027/324] Refactored unit tests to allow running explicit test suites. Moved setUp / tearDown into tests that use them. 
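With the shared setUp / tearDown moved into the modules that use them, each test
module is self-contained and a single suite can be run on its own. A minimal
sketch, assuming the package is importable; test_kv is just one of the
refactored modules listed below and the runner options are illustrative:

    import unittest
    from riak.tests import test_kv

    suite = unittest.defaultTestLoader.loadTestsFromModule(test_kv)
    unittest.TextTestRunner(verbosity=2).run(suite)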
--- .gitignore | 2 + buildbot/Makefile | 6 +- riak/tests/__init__.py | 2 +- riak/tests/base.py | 56 ++++------ riak/tests/test_2i.py | 28 ++--- riak/tests/test_btypes.py | 5 +- riak/tests/{test_all.py => test_client.py} | 114 ++------------------- riak/tests/test_comparison.py | 4 +- riak/tests/test_datatypes.py | 4 +- riak/tests/test_kv.py | 73 +++++++++---- riak/tests/test_mapreduce.py | 32 +++--- riak/tests/test_pool.py | 2 +- riak/tests/test_search.py | 82 +++++++++------ riak/tests/test_security.py | 15 ++- riak/tests/test_timeseries.py | 41 ++++---- riak/tests/test_yokozuna.py | 39 +++---- riak/tests/yz_setup.py | 69 +++++++------ riak/transports/http/__init__.py | 17 ++- riak/transports/http/connection.py | 19 ++-- riak/transports/http/transport.py | 3 +- setup.py | 11 +- 21 files changed, 299 insertions(+), 325 deletions(-) rename riak/tests/{test_all.py => test_client.py} (63%) diff --git a/.gitignore b/.gitignore index f9515221..24c0bded 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ *.pyc .python-version +.tox/ + docs/_build .*.swp diff --git a/buildbot/Makefile b/buildbot/Makefile index 1323a276..1209ac84 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -26,12 +26,14 @@ test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @../setup.py disable_security --riak-admin=${RIAK_ADMIN} - @RUN_YZ=1 SKIP_DATATYPES=0 SKIP_INDEXES=0 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 SKIP_DATATYPES=0 SKIP_INDEXES=0 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 SKIP_DATATYPES=0 SKIP_INDEXES=0 ./tox_runner.sh .. test_security: @echo "Testing Riak Python Client (with security)" @../setup.py enable_security --riak-admin=${RIAK_ADMIN} - @RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. 
# These are required to actually build all the Python versions: # * pip install tox diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 85599d4b..a7bb4742 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -19,7 +19,7 @@ HOST = os.environ.get('RIAK_TEST_HOST', '127.0.0.1') -PROTOCOL = 'pbc' +PROTOCOL = os.environ.get('RIAK_TEST_PROTOCOL', 'pbc') PB_HOST = os.environ.get('RIAK_TEST_PB_HOST', HOST) PB_PORT = int(os.environ.get('RIAK_TEST_PB_PORT', '8087')) diff --git a/riak/tests/base.py b/riak/tests/base.py index dbd662e0..7ac2dde4 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -1,37 +1,13 @@ # -*- coding: utf-8 -*- +import logging +import os import random +import sys from riak.client import RiakClient -from riak.tests import HOST, PROTOCOL,PB_PORT, HTTP_PORT, SECURITY_CREDS +from riak.tests import HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS -testrun_search_bucket = 'searchbucket' -testrun_props_bucket = 'propsbucket' -testrun_sibs_bucket = 'sibsbucket' - -def setUpModule(): - - c = RiakClient(host=PB_HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) - - c.bucket(testrun_sibs_bucket).allow_mult = True - - if (not SKIP_SEARCH and not RUN_YZ): - b = c.bucket(testrun_search_bucket) - b.enable_search() - - -def tearDownModule(): - c = RiakClient(host=HTTP_HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) - - c.bucket(testrun_sibs_bucket).clear_properties() - c.bucket(testrun_props_bucket).clear_properties() - - if not SKIP_SEARCH and not RUN_YZ: - b = c.bucket(testrun_search_bucket) - b.clear_properties() - -class BaseTestCase(object): +class IntegrationTestBase(object): host = None pb_port = None @@ -66,6 +42,9 @@ def create_client(self, host=None, http_port=None, pb_port=None, credentials = credentials or SECURITY_CREDS + if self.logging_enabled: + self.logger.debug("RiakClient(protocol='%s', host='%s', pb_port='%d', http_port='%d', credentials='%s', client_args='%s')", protocol, host, pb_port, http_port, credentials, client_args) + return RiakClient(protocol=protocol, host=host, http_port=http_port, @@ -73,15 +52,22 @@ def create_client(self, host=None, http_port=None, pb_port=None, pb_port=pb_port, **client_args) def setUp(self): + self.logging_enabled = False + distutils_debug = os.environ.get('DISTUTILS_DEBUG', '0') + if distutils_debug == '1': + self.logging_enabled = True + self.logger = logging.getLogger() + self.logger.level = logging.DEBUG + self.logging_stream_handler = logging.StreamHandler(sys.stdout) + self.logger.addHandler(self.logging_stream_handler) + self.table_name = 'GeoCheckin' self.bucket_name = self.randname() self.key_name = self.randname() - self.search_bucket = testrun_search_bucket - self.sibs_bucket = testrun_sibs_bucket - self.props_bucket = testrun_props_bucket - # self.yz = testrun_yz - # self.yz_index = testrun_yz_index - # self.yz_mr = testrun_yz_mr self.credentials = SECURITY_CREDS self.client = self.create_client() + def tearDown(self): + if self.logging_enabled: + self.logger.removeHandler(self.logging_stream_handler) + diff --git a/riak/tests/test_2i.py b/riak/tests/test_2i.py index 68b2810a..419bce60 100644 --- a/riak/tests/test_2i.py +++ b/riak/tests/test_2i.py @@ -2,7 +2,7 @@ import platform from riak import RiakError from riak.tests import SKIP_INDEXES -from riak.tests.base import BaseTestCase +from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -10,7 +10,7 @@ import unittest -class 
TwoITests(BaseTestCase, unittest.TestCase): +class TwoITests(IntegrationTestBase, unittest.TestCase): def is_2i_supported(self): # Immediate test to see if 2i is even supported w/ the backend try: @@ -219,7 +219,7 @@ def test_secondary_index_invalid_name(self): with self.assertRaises(RiakError): bucket.new('k', 'a').add_index('field1', 'value1') - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_set_index(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -237,7 +237,7 @@ def test_set_index(self): obj.set_index('bar2_int', 10) self.assertEqual(set((('bar_int', 3), ('bar2_int', 10))), obj.indexes) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_stream_index(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -250,7 +250,7 @@ def test_stream_index(self): self.assertEqual(sorted([o1.key, o2.key, o3.key]), sorted(keys)) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -274,7 +274,7 @@ def test_index_return_terms(self): self.assertEqual([(1002, o2.key), (1003, o3.key), (1004, o4.key)], sorted(spairs)) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_pagination(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -309,7 +309,7 @@ def test_index_pagination(self): self.assertEqual(3, pagecount) self.assertEqual([o1.key, o2.key, o3.key, o4.key], presults) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_pagination_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -334,7 +334,7 @@ def test_index_pagination_return_terms(self): self.assertLessEqual(2, len(results)) self.assertEqual([('val3', o3.key), ('val4', o4.key)], page2) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_pagination_stream(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -377,7 +377,7 @@ def test_index_pagination_stream(self): self.assertEqual(3, pagecount) self.assertEqual([o1.key, o2.key, o3.key, o4.key], presults) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_pagination_stream_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -409,7 +409,7 @@ def test_index_pagination_stream_return_terms(self): self.assertLessEqual(2, len(results)) self.assertEqual([('val3', o3.key), ('val4', o4.key)], results) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_eq_query_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -419,7 +419,7 @@ def test_index_eq_query_return_terms(self): results = bucket.get_index('field2_int', 1001, return_terms=True) self.assertEqual([(1001, o1.key)], results) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def 
test_index_eq_query_stream_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -432,7 +432,7 @@ def test_index_eq_query_stream_return_terms(self): self.assertEqual([(1001, o1.key)], results) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_timeout(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -451,7 +451,7 @@ def test_index_timeout(self): self.assertEqual([o1.key], bucket.get_index('field1_bin', 'val1', timeout='infinity')) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_regex(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -466,7 +466,7 @@ def test_index_regex(self): self.assertEqual([('val2', o2.key)], results) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEX is defined') + @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') def test_index_falsey_endkey_gh378(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index e1992e04..c1b57f6c 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -2,7 +2,8 @@ from riak import RiakError, RiakObject from riak.bucket import RiakBucket, BucketType from riak.tests import SKIP_BTYPES -from riak.tests.base import BaseTestCase +from riak.tests.base import IntegrationTestBase +from riak.tests.comparison import Comparison if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -10,7 +11,7 @@ import unittest -class BucketTypeTests(BaseTestCase, unittest.TestCase): +class BucketTypeTests(IntegrationTestBase, unittest.TestCase, Comparison): def test_btype_init(self): btype = self.client.bucket_type('foo') self.assertIsInstance(btype, BucketType) diff --git a/riak/tests/test_all.py b/riak/tests/test_client.py similarity index 63% rename from riak/tests/test_all.py rename to riak/tests/test_client.py index 385fef7b..a53e7ba6 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_client.py @@ -1,28 +1,9 @@ -# -*- coding: utf-8 -*- import platform from six import PY2 from threading import Thread - -from riak import RiakError -from riak.client import RiakClient from riak.riak_object import RiakObject - -from riak.tests.test_yokozuna import YZSearchTests -from riak.tests.test_search import SearchTests, \ - EnableSearchTests, SolrSearchTests -from riak.tests.test_mapreduce import MapReduceAliasTests, \ - ErlangMapReduceTests, JSMapReduceTests, LinkTests, MapReduceStreamTests -from riak.tests.test_kv import BasicKVTests, KVFileTests, \ - BucketPropsTest, CounterTests -from riak.tests.test_2i import TwoITests -from riak.tests.test_btypes import BucketTypeTests -from riak.tests.test_security import SecurityTests -from riak.tests.test_datatypes import DatatypeIntegrationTests -from riak.tests.test_timeseries import TimeseriesTests - -from riak.tests import HOST, PB_HOST, PB_PORT, HTTP_HOST, HTTP_PORT, \ - HAVE_PROTO, DUMMY_HTTP_PORT, DUMMY_PB_PORT, \ - SKIP_SEARCH, RUN_YZ, SECURITY_CREDS, SKIP_POOL +from riak.tests import DUMMY_HTTP_PORT, DUMMY_PB_PORT, SKIP_POOL +from riak.tests.base import IntegrationTestBase if PY2: from Queue import Queue @@ -34,7 +15,15 @@ else: import unittest -class ClientTests(object): +class ClientTests(IntegrationTestBase, unittest.TestCase): + def test_uses_client_id_if_given(self): + if self.protocol == 
'pbc': + zero_client_id = "\0\0\0\0" + c = self.create_client(client_id=zero_client_id) + self.assertEqual(zero_client_id, c.client_id) + else: + pass + def test_request_retries(self): # We guess at some ports that will be unused by Riak or # anything else. @@ -213,84 +202,3 @@ def test_pool_close(self): self.client.close() self.assertEqual(len(self.client._http_pool.resources), 0) self.assertEqual(len(self.client._pb_pool.resources), 0) - - -class RiakPbcTransportTestCase(BasicKVTests, - KVFileTests, - BucketPropsTest, - TwoITests, - LinkTests, - ErlangMapReduceTests, - JSMapReduceTests, - MapReduceAliasTests, - MapReduceStreamTests, - EnableSearchTests, - SearchTests, - YZSearchTests, - ClientTests, - CounterTests, - BucketTypeTests, - SecurityTests, - DatatypeIntegrationTests, - unittest.TestCase): - - def setUp(self): - if not HAVE_PROTO: - self.skipTest('protobuf is unavailable') - self.host = PB_HOST - self.pb_port = PB_PORT - self.protocol = 'pbc' - super(RiakPbcTransportTestCase, self).setUp() - - def test_uses_client_id_if_given(self): - zero_client_id = "\0\0\0\0" - c = self.create_client(client_id=zero_client_id) - self.assertEqual(zero_client_id, c.client_id) - - -# NB: no Timeseries support in HTTP -class RiakHttpTransportTestCase(BasicKVTests, - KVFileTests, - BucketPropsTest, - TwoITests, - LinkTests, - ErlangMapReduceTests, - JSMapReduceTests, - MapReduceAliasTests, - MapReduceStreamTests, - EnableSearchTests, - SolrSearchTests, - SearchTests, - YZSearchTests, - ClientTests, - CounterTests, - BucketTypeTests, - SecurityTests, - DatatypeIntegrationTests, - unittest.TestCase): - - def setUp(self): - self.host = HTTP_HOST - self.http_port = HTTP_PORT - self.protocol = 'http' - super(RiakHttpTransportTestCase, self).setUp() - - def test_no_returnbody(self): - bucket = self.client.bucket(self.bucket_name) - o = bucket.new(self.key_name, "bar").store(return_body=False) - self.assertEqual(o.vclock, None) - - def test_too_many_link_headers_shouldnt_break_http(self): - bucket = self.client.bucket(self.bucket_name) - o = bucket.new("lots_of_links", "My god, it's full of links!") - for i in range(0, 300): - link = ("other", "key%d" % i, "next") - o.add_link(link) - - o.store() - stored_object = bucket.get("lots_of_links") - self.assertEqual(len(stored_object.links), 300) - - -if __name__ == '__main__': - unittest.main() diff --git a/riak/tests/test_comparison.py b/riak/tests/test_comparison.py index c21bfee5..6f9e6b9c 100644 --- a/riak/tests/test_comparison.py +++ b/riak/tests/test_comparison.py @@ -2,7 +2,7 @@ import platform from riak.riak_object import RiakObject from riak.bucket import RiakBucket, BucketType -from riak.tests.base import BaseTestCase +from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -136,7 +136,7 @@ def test_object_valid_key(self): self.assertIsNone(b, 'empty object key not allowed') -class RiakClientComparisonTest(BaseTestCase, unittest.TestCase): +class RiakClientComparisonTest(IntegrationTestBase, unittest.TestCase): def test_client_eq(self): self.protocol = 'http' a = self.create_client(host='host1', http_port=11) diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 5e5f0614..b8ca881f 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -3,7 +3,7 @@ from riak import RiakBucket, BucketType, RiakObject import riak.datatypes as datatypes from riak.tests import SKIP_DATATYPES -from riak.tests.base import BaseTestCase +from riak.tests.base 
import IntegrationTestBase from riak.tests.comparison import Comparison if platform.python_version() < '2.7': @@ -147,7 +147,7 @@ def test_removes_require_context(self): self.assertTrue(dtype.modified) -class DatatypeIntegrationTests(BaseTestCase, unittest.TestCase): +class DatatypeIntegrationTests(IntegrationTestBase, unittest.TestCase, Comparison): @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_counter(self): btype = self.client.bucket_type('pytest-counters') diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index b1752e8f..78decef2 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -5,10 +5,10 @@ import copy from time import sleep -from riak import ConflictError, RiakBucket, RiakError +from riak import ConflictError, RiakClient, RiakBucket, RiakError from riak.resolver import default_resolver, last_written_resolver -from riak.tests import SKIP_RESOLVE -from riak.tests.base import BaseTestCase +from riak.tests import SKIP_RESOLVE, HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS +from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison try: @@ -31,6 +31,21 @@ test_pickle_loads = pickle.loads +testrun_sibs_bucket = 'sibsbucket' +testrun_props_bucket = 'propsbucket' + +def setUpModule(): + c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + c.bucket(testrun_sibs_bucket).allow_mult = True + +def tearDownModule(): + c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + c.bucket(testrun_sibs_bucket).clear_properties() + c.bucket(testrun_props_bucket).clear_properties() + + class NotJsonSerializable(object): def __init__(self, *args, **kwargs): @@ -55,7 +70,23 @@ def __eq__(self, other): return True -class BasicKVTests(BaseTestCase, unittest.TestCase, Comparison): +class BasicKVTests(IntegrationTestBase, unittest.TestCase, Comparison): + def test_no_returnbody(self): + bucket = self.client.bucket(self.bucket_name) + o = bucket.new(self.key_name, "bar").store(return_body=False) + self.assertEqual(o.vclock, None) + + def test_many_link_headers_should_work_fine(self): + bucket = self.client.bucket(self.bucket_name) + o = bucket.new("lots_of_links", "My god, it's full of links!") + for i in range(0, 300): + link = ("other", "key%d" % i, "next") + o.add_link(link) + + o.store() + stored_object = bucket.get("lots_of_links") + self.assertEqual(len(stored_object.links), 300) + def test_is_alive(self): self.assertTrue(self.client.is_alive()) @@ -324,19 +355,19 @@ def test_bucket_delete(self): self.assertFalse(obj.exists) def test_set_bucket_properties(self): - bucket = self.client.bucket(self.props_bucket) + bucket = self.client.bucket(testrun_props_bucket) # Test setting allow mult... bucket.allow_mult = True # Test setting nval... bucket.n_val = 1 - bucket2 = self.create_client().bucket(self.props_bucket) + bucket2 = self.create_client().bucket(testrun_props_bucket) self.assertTrue(bucket2.allow_mult) self.assertEqual(bucket2.n_val, 1) # Test setting multiple properties... bucket.set_properties({"allow_mult": False, "n_val": 2}) - bucket3 = self.create_client().bucket(self.props_bucket) + bucket3 = self.create_client().bucket(testrun_props_bucket) self.assertFalse(bucket3.allow_mult) self.assertEqual(bucket3.n_val, 2) @@ -357,7 +388,7 @@ def test_if_none_match(self): def test_siblings(self): # Set up the bucket, clear any existing object... 
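# A minimal sketch of the sibling handling these tests cover, assuming a local
# node; 'sibsbucket' matches the module-level fixture above, the key and value
# are illustrative, and assigning last_written_resolver per bucket is an
# assumption based on the resolver imports in this module.
from riak import RiakClient
from riak.resolver import last_written_resolver

client = RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)
bucket = client.bucket('sibsbucket')
bucket.allow_mult = True                    # keep conflicting writes as siblings
bucket.resolver = last_written_resolver     # resolve reads to the newest sibling
obj = bucket.new('conflicted-key', 'value-1')
obj.store()
fetched = bucket.get('conflicted-key')      # resolver runs here if siblings exist
client.close()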
- bucket = self.client.bucket(self.sibs_bucket) + bucket = self.client.bucket(testrun_sibs_bucket) obj = bucket.get(self.key_name) bucket.allow_mult = True @@ -396,7 +427,7 @@ def test_siblings(self): @unittest.skipIf(SKIP_RESOLVE == '1', "skip requested for resolvers test") def test_resolution(self): - bucket = self.client.bucket(self.sibs_bucket) + bucket = self.client.bucket(testrun_sibs_bucket) obj = bucket.get(self.key_name) bucket.allow_mult = True @@ -454,13 +485,13 @@ def max_value_resolver(obj): "skip requested for resolvers test") def test_resolution_default(self): # If no resolver is setup, be sure to resolve to default_resolver - bucket = self.client.bucket(self.sibs_bucket) + bucket = self.client.bucket(testrun_sibs_bucket) self.assertEqual(self.client.resolver, default_resolver) self.assertEqual(bucket.resolver, default_resolver) def test_tombstone_siblings(self): # Set up the bucket, clear any existing object... - bucket = self.client.bucket(self.sibs_bucket) + bucket = self.client.bucket(testrun_sibs_bucket) obj = bucket.get(self.key_name) bucket.allow_mult = True @@ -594,9 +625,9 @@ def generate_siblings(self, original, count=5, delay=None): return vals -class BucketPropsTest(BaseTestCase, unittest.TestCase): +class BucketPropsTest(IntegrationTestBase, unittest.TestCase): def test_rw_settings(self): - bucket = self.client.bucket(self.props_bucket) + bucket = self.client.bucket(testrun_props_bucket) self.assertEqual(bucket.r, "quorum") self.assertEqual(bucket.w, "quorum") self.assertEqual(bucket.dw, "quorum") @@ -621,7 +652,7 @@ def test_rw_settings(self): bucket.clear_properties() def test_primary_quora(self): - bucket = self.client.bucket(self.props_bucket) + bucket = self.client.bucket(testrun_props_bucket) self.assertEqual(bucket.pr, 0) self.assertEqual(bucket.pw, 0) @@ -635,7 +666,7 @@ def test_primary_quora(self): bucket.clear_properties() def test_clear_bucket_properties(self): - bucket = self.client.bucket(self.props_bucket) + bucket = self.client.bucket(testrun_props_bucket) bucket.allow_mult = True self.assertTrue(bucket.allow_mult) bucket.n_val = 1 @@ -647,15 +678,15 @@ def test_clear_bucket_properties(self): self.assertEqual(bucket.n_val, 3) -class KVFileTests(BaseTestCase, unittest.TestCase): +class KVFileTests(IntegrationTestBase, unittest.TestCase): def test_store_binary_object_from_file(self): bucket = self.client.bucket(self.bucket_name) - filepath = os.path.join(os.path.dirname(__file__), 'test_all.py') - obj = bucket.new_from_file(self.key_name, filepath) + obj = bucket.new_from_file(self.key_name, __file__) obj.store() obj = bucket.get(self.key_name) self.assertNotEqual(obj.encoded_data, None) - self.assertEqual(obj.content_type, "text/x-python") + self.assertTrue(obj.content_type == 'text/x-python' or + obj.content_type == 'application/x-python-code') def test_store_binary_object_from_file_should_use_default_mimetype(self): bucket = self.client.bucket(self.bucket_name) @@ -675,7 +706,7 @@ def test_store_binary_object_from_file_should_fail_if_file_not_found(self): self.assertFalse(obj.exists) -class CounterTests(BaseTestCase, unittest.TestCase): +class CounterTests(IntegrationTestBase, unittest.TestCase): def test_counter_requires_allow_mult(self): bucket = self.client.bucket(self.bucket_name) if bucket.allow_mult: @@ -686,7 +717,7 @@ def test_counter_requires_allow_mult(self): bucket.update_counter(self.key_name, 10) def test_counter_ops(self): - bucket = self.client.bucket(self.sibs_bucket) + bucket = self.client.bucket(testrun_sibs_bucket) 
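# A minimal sketch of the legacy counter API this test drives, assuming a
# bucket with allow_mult enabled; the key name is illustrative and get_counter
# is assumed to be the read-side counterpart of update_counter.
from riak import RiakClient

client = RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)
counters = client.bucket('sibsbucket')
counters.allow_mult = True                    # counters require allow_mult
counters.update_counter('page_hits', 10)      # create/increment the counter by 10
counters.update_counter('page_hits', 5)       # increment again
print(counters.get_counter('page_hits'))      # expected value: 15
client.close()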
self.assertTrue(bucket.allow_mult) # Non-existent counter has no value diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index 99ed8a92..e0897f6a 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -5,34 +5,29 @@ from six import PY2 from riak.mapreduce import RiakMapReduce -from riak import key_filter, RiakClient, RiakError -from riak.tests import RUN_YZ, PB_HOST, PB_PORT, HTTP_HOST, HTTP_PORT, SECURITY_CREDS -from riak.tests.base import BaseTestCase +from riak import key_filter, RiakError +from riak.tests import RUN_YZ +from riak.tests.base import IntegrationTestBase from riak.tests.test_yokozuna import wait_for_yz_index from riak.tests import RUN_SECURITY -from riak.tests.yz_setup import yzSetUpModule, yzTearDownModule +from riak.tests.yz_setup import yzSetUp, yzTearDown if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -# Add bucket and type for Search 2.0 -> MapReduce + testrun_yz_mr = {'btype': 'pytest-mr', 'bucket': 'mrbucket', 'index': 'mrbucket'} def setUpModule(): - if RUN_YZ: - c = RiakClient(host=PB_HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) - yzSetUpModule(c, testrun_yz_mr) + yzSetUp(testrun_yz_mr) def tearDownModule(): - if RUN_YZ: - c = RiakClient(host=HTTP_HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) - yzTearDownModule(c, testrun_yz_mr) + yzTearDown(testrun_yz_mr) + -class LinkTests(BaseTestCase, unittest.TestCase): +class LinkTests(IntegrationTestBase, unittest.TestCase): def test_store_and_get_links(self): # Create the object... bucket = self.client.bucket(self.bucket_name) @@ -98,7 +93,7 @@ def test_link_walking(self): self.assertEqual(len(results), 1) -class ErlangMapReduceTests(BaseTestCase, unittest.TestCase): +class ErlangMapReduceTests(IntegrationTestBase, unittest.TestCase): def test_erlang_map_reduce(self): # Create the object... bucket = self.client.bucket(self.bucket_name) @@ -204,7 +199,8 @@ def test_client_exceptional_paths(self): mr.add_key_filter("tokenize", "-", 1) -class JSMapReduceTests(BaseTestCase, unittest.TestCase): +class JSMapReduceTests(IntegrationTestBase, unittest.TestCase): + def test_javascript_source_map(self): # Create the object... bucket = self.client.bucket(self.bucket_name) @@ -564,7 +560,7 @@ def test_mr_search(self): self.assertEqual(result, [100]) -class MapReduceAliasTests(BaseTestCase, unittest.TestCase): +class MapReduceAliasTests(IntegrationTestBase, unittest.TestCase): """This tests the map reduce aliases""" def test_map_values(self): @@ -759,7 +755,7 @@ def test_filter_not_found(self): self.assertEqual(sorted(result), [1, 2]) -class MapReduceStreamTests(BaseTestCase, unittest.TestCase): +class MapReduceStreamTests(IntegrationTestBase, unittest.TestCase): def test_stream_results(self): bucket = self.client.bucket(self.bucket_name) bucket.new('one', data=1).store() diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index 1f0e2f19..edfba3f5 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -5,7 +5,7 @@ from riak.transports.pool import Pool, BadResource from random import SystemRandom from time import sleep -from . 
import SKIP_POOL +from riak.tests import SKIP_POOL from riak.tests.comparison import Comparison if platform.python_version() < '2.7': diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index 11f9f4c9..799ce8e3 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -1,16 +1,32 @@ # -*- coding: utf-8 -*- from __future__ import print_function import platform -from riak.tests import SKIP_SEARCH -from riak.tests.base import BaseTestCase +from riak import RiakClient +from riak.tests import SKIP_SEARCH, HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS +from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest +testrun_search_bucket = 'searchbucket' -class EnableSearchTests(BaseTestCase, unittest.TestCase): +def setUpModule(): + if not SKIP_SEARCH and not RUN_YZ: + c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + b = c.bucket(testrun_search_bucket) + b.enable_search() + +def tearDownModule(): + if not SKIP_SEARCH and not RUN_YZ: + c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + b = c.bucket(testrun_search_bucket) + b.clear_properties() + +class EnableSearchTests(IntegrationTestBase, unittest.TestCase): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_bucket_search_enabled(self): bucket = self.client.bucket(self.bucket_name) @@ -18,124 +34,124 @@ def test_bucket_search_enabled(self): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_enable_search_commit_hook(self): - bucket = self.client.bucket(self.search_bucket) + bucket = self.client.bucket(testrun_search_bucket) bucket.clear_properties() self.assertFalse(self.create_client(). - bucket(self.search_bucket). + bucket(testrun_search_bucket). search_enabled()) bucket.enable_search() self.assertTrue(self.create_client(). - bucket(self.search_bucket). + bucket(testrun_search_bucket). 
search_enabled()) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_disable_search_commit_hook(self): - bucket = self.client.bucket(self.search_bucket) + bucket = self.client.bucket(testrun_search_bucket) bucket.clear_properties() bucket.enable_search() - self.assertTrue(self.create_client().bucket(self.search_bucket) + self.assertTrue(self.create_client().bucket(testrun_search_bucket) .search_enabled()) bucket.disable_search() - self.assertFalse(self.create_client().bucket(self.search_bucket) + self.assertFalse(self.create_client().bucket(testrun_search_bucket) .search_enabled()) bucket.enable_search() -class SolrSearchTests(BaseTestCase, unittest.TestCase): +class SolrSearchTests(IntegrationTestBase, unittest.TestCase): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_add_document_to_index(self): - self.client.fulltext_add(self.search_bucket, + self.client.fulltext_add(testrun_search_bucket, [{"id": "doc", "username": "tony"}]) - results = self.client.fulltext_search(self.search_bucket, + results = self.client.fulltext_search(testrun_search_bucket, "username:tony") self.assertEqual("tony", results['docs'][0]['username']) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_add_multiple_documents_to_index(self): self.client.fulltext_add( - self.search_bucket, + testrun_search_bucket, [{"id": "dizzy", "username": "dizzy"}, {"id": "russell", "username": "russell"}]) results = self.client.fulltext_search( - self.search_bucket, "username:russell OR username:dizzy") + testrun_search_bucket, "username:russell OR username:dizzy") self.assertEqual(2, len(results['docs'])) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_delete_documents_from_search_by_id(self): self.client.fulltext_add( - self.search_bucket, + testrun_search_bucket, [{"id": "dizzy", "username": "dizzy"}, {"id": "russell", "username": "russell"}]) - self.client.fulltext_delete(self.search_bucket, docs=["dizzy"]) + self.client.fulltext_delete(testrun_search_bucket, docs=["dizzy"]) results = self.client.fulltext_search( - self.search_bucket, "username:russell OR username:dizzy") + testrun_search_bucket, "username:russell OR username:dizzy") self.assertEqual(1, len(results['docs'])) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_delete_documents_from_search_by_query(self): self.client.fulltext_add( - self.search_bucket, + testrun_search_bucket, [{"id": "dizzy", "username": "dizzy"}, {"id": "russell", "username": "russell"}]) self.client.fulltext_delete( - self.search_bucket, + testrun_search_bucket, queries=["username:dizzy", "username:russell"]) results = self.client.fulltext_search( - self.search_bucket, "username:russell OR username:dizzy") + testrun_search_bucket, "username:russell OR username:dizzy") self.assertEqual(0, len(results['docs'])) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_delete_documents_from_search_by_query_and_id(self): self.client.fulltext_add( - self.search_bucket, + testrun_search_bucket, [{"id": "dizzy", "username": "dizzy"}, {"id": "russell", "username": "russell"}]) self.client.fulltext_delete( - self.search_bucket, + testrun_search_bucket, docs=["dizzy"], queries=["username:russell"]) results = self.client.fulltext_search( - self.search_bucket, + testrun_search_bucket, "username:russell OR username:dizzy") self.assertEqual(0, len(results['docs'])) -class SearchTests(BaseTestCase, unittest.TestCase): +class SearchTests(IntegrationTestBase, unittest.TestCase): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is 
defined') def test_solr_search_from_bucket(self): - bucket = self.client.bucket(self.search_bucket) + bucket = self.client.bucket(testrun_search_bucket) bucket.new("user", {"username": "roidrage"}).store() results = bucket.search("username:roidrage") self.assertEqual(1, len(results['docs'])) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_solr_search_with_params_from_bucket(self): - bucket = self.client.bucket(self.search_bucket) + bucket = self.client.bucket(testrun_search_bucket) bucket.new("user", {"username": "roidrage"}).store() results = bucket.search("username:roidrage", wt="xml") self.assertEqual(1, len(results['docs'])) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_solr_search_with_params(self): - bucket = self.client.bucket(self.search_bucket) + bucket = self.client.bucket(testrun_search_bucket) bucket.new("user", {"username": "roidrage"}).store() results = self.client.fulltext_search( - self.search_bucket, + testrun_search_bucket, "username:roidrage", wt="xml") self.assertEqual(1, len(results['docs'])) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_solr_search(self): - bucket = self.client.bucket(self.search_bucket) + bucket = self.client.bucket(testrun_search_bucket) bucket.new("user", {"username": "roidrage"}).store() - results = self.client.fulltext_search(self.search_bucket, + results = self.client.fulltext_search(testrun_search_bucket, "username:roidrage") self.assertEqual(1, len(results["docs"])) @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_search_integration(self): # Create some objects to search across... - bucket = self.client.bucket(self.search_bucket) + bucket = self.client.bucket(testrun_search_bucket) bucket.new("one", {"foo": "one", "bar": "red"}).store() bucket.new("two", {"foo": "two", "bar": "green"}).store() bucket.new("three", {"foo": "three", "bar": "blue"}).store() @@ -143,7 +159,7 @@ def test_search_integration(self): bucket.new("five", {"foo": "five", "bar": "yellow"}).store() # Run some operations... 
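# A minimal sketch of the legacy full-text search flow exercised below,
# assuming a node with riak_search enabled; bucket and field names follow the
# fixtures in this module.
from riak import RiakClient

client = RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)
bucket = client.bucket('searchbucket')
bucket.enable_search()                         # install the search precommit hook
bucket.new('one', {'foo': 'one', 'bar': 'red'}).store()
results = client.fulltext_search('searchbucket', 'foo:one OR foo:two')
print(len(results['docs']))                    # count of matching documents
client.close()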
- results = self.client.fulltext_search(self.search_bucket, + results = self.client.fulltext_search(testrun_search_bucket, "foo:one OR foo:two") if (len(results) == 0): print("\n\nNot running test \"testSearchIntegration()\".\n") @@ -154,6 +170,6 @@ def test_search_integration(self): self.assertEqual(len(results['docs']), 2) query = "(foo:one OR foo:two OR foo:three OR foo:four) AND\ (NOT bar:green)" - results = self.client.fulltext_search(self.search_bucket, query) + results = self.client.fulltext_search(testrun_search_bucket, query) self.assertEqual(len(results['docs']), 3) diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index 065e16ee..f3339e28 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -1,22 +1,29 @@ # -*- coding: utf-8 -*- +import platform import sys + from riak.tests import RUN_SECURITY, SECURITY_USER, SECURITY_PASSWD, \ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ SECURITY_CERT_USER, SECURITY_CERT_PASSWD, SECURITY_BAD_CERT, \ SECURITY_CREDS, SECURITY_CIPHERS from riak.security import SecurityCreds -from riak.tests.base import BaseTestCase +from riak.tests.base import IntegrationTestBase -if sys.version_info < (2, 7): +if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -class SecurityTests(BaseTestCase, unittest.TestCase): +class SecurityTests(IntegrationTestBase, unittest.TestCase): @unittest.skipIf(RUN_SECURITY, 'RUN_SECURITY is set') def test_security_disabled(self): - client = self.create_client(credentials=SECURITY_CREDS) + topts = { 'timeout' : 1 } + creds = SecurityCreds(username='foo', + password='bar', + cacert_file=SECURITY_CACERT, + ciphers=SECURITY_CIPHERS) + client = self.create_client(credentials=creds, transport_options=topts) myBucket = client.bucket('test') val1 = "foobar" key1 = myBucket.new('x', data=val1) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 2bfa4585..9e710d7e 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -4,33 +4,32 @@ import sys from riak.tests import SKIP_TIMESERIES -from riak.tests.base import BaseTestCase +from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -class TimeseriesTests(BaseTestCase, unittest.TestCase): - def setUp(self): - super(TimeseriesTests, self).setUp() - +class TimeseriesTests(IntegrationTestBase, unittest.TestCase): @unittest.skipIf(SKIP_TIMESERIES == '1', "skip requested for timeseries tests") def test_store(self): - now = int(round(time.time() * 1000)) # NB: millis since Jan 1 1970 UTC - fiveMinsInMsec = 5 * 60 * 1000 - fiveMinsAgo = now - fiveMinsInMsec - tenMinsAgo = fiveMinsAgo - fiveMinsInMsec - fifteenMinsAgo = tenMinsAgo - fiveMinsInMsec - twentyMinsAgo = fifteenMinsAgo - fiveMinsInMsec + pass + # TODO RTS-367 + # now = int(round(time.time() * 1000)) # NB: millis since Jan 1 1970 UTC + # fiveMinsInMsec = 5 * 60 * 1000 + # fiveMinsAgo = now - fiveMinsInMsec + # tenMinsAgo = fiveMinsAgo - fiveMinsInMsec + # fifteenMinsAgo = tenMinsAgo - fiveMinsInMsec + # twentyMinsAgo = fifteenMinsAgo - fiveMinsInMsec - table = self.client.table(self.table_name) - measurements = [ - [ 'hash1', 'user2', twentyMinsAgo, 'hurricane', '84.3' ], - [ 'hash1', 'user2', fifteenMinsAgo, 'rain', '79.0' ], - [ 'hash1', 'user2', fiveMinsAgo, 'wind', 50.5 ], - [ 'hash1', 'user2', now, 'snow', 20.1 ] - ] - ts_obj = table.new(measurements) - result = ts_obj.store() - 
self.assertTrue(result) + # table = self.client.table(self.table_name) + # measurements = [ + # [ 'hash1', 'user2', twentyMinsAgo, 'hurricane', '84.3' ], + # [ 'hash1', 'user2', fifteenMinsAgo, 'rain', '79.0' ], + # [ 'hash1', 'user2', fiveMinsAgo, 'wind', 50.5 ], + # [ 'hash1', 'user2', now, 'snow', 20.1 ] + # ] + # ts_obj = table.new(measurements) + # result = ts_obj.store() + # self.assertTrue(result) diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index 521be2e4..55399aac 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -1,33 +1,15 @@ # -*- coding: utf-8 -*- import platform -from riak import RiakClient -from riak.tests import RUN_YZ, PB_HOST, PB_PORT, HTTP_PORT, SECURITY_CREDS -from riak.tests.base import BaseTestCase -from riak.tests.yz_setup import yzSetUpModule, yzTearDownModule +from riak.tests import RUN_YZ +from riak.tests.base import IntegrationTestBase +from riak.tests.comparison import Comparison +from riak.tests.yz_setup import yzSetUp, yzTearDown if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest -# YZ index on bucket of the same name -testrun_yz = {'btype': None, 'bucket': 'yzbucket', 'index': 'yzbucket'} -# YZ index on bucket of a different name -testrun_yz_index = {'btype': None, 'bucket': 'yzindexbucket', 'index': 'yzindex'} - -def setUpModule(): - if RUN_YZ: - c = RiakClient(host=PB_HOST, protocol='pbc', - pb_port=PB_PORT, credentials=SECURITY_CREDS) - yzSetUpModule(c, testrun_yz, testrun_yz_index) - -def tearDownModule(): - if RUN_YZ: - c = RiakClient(host=PB_HOST, protocol='pbc', - pb_port=PB_PORT, credentials=SECURITY_CREDS) - yzTearDownModule(c, testrun_yz, testrun_yz_index) - - def wait_for_yz_index(bucket, key, index=None): """ Wait until Solr index has been updated and a value returns from a query. 
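# A condensed sketch of the index bootstrap that yzSetUp performs for these
# fixtures, assuming a local node with search enabled; retrying set_property
# until it succeeds mirrors how the helper copes with index-creation lag.
from riak import RiakClient, RiakError

client = RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)
client.create_search_index('yzbucket', timeout=30000)
bucket = client.bucket('yzbucket')
while True:
    try:
        bucket.set_property('search_index', 'yzbucket')
        break
    except RiakError:
        pass                                   # index not ready yet; try again
client.close()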
@@ -40,11 +22,18 @@ def wait_for_yz_index(bucket, key, index=None): while len(bucket.search('_yz_rk:' + key, index=index)['docs']) == 0: pass +# YZ index on bucket of the same name +testrun_yz = {'btype': None, 'bucket': 'yzbucket', 'index': 'yzbucket'} +# YZ index on bucket of a different name +testrun_yz_index = {'btype': None, 'bucket': 'yzindexbucket', 'index': 'yzindex'} + +def setUpModule(): + yzSetUp(testrun_yz, testrun_yz_index) -class YZSearchTests(BaseTestCase, unittest.TestCase): - def setUp(self): - super(YZSearchTests, self).setUp() +def tearDownModule(): + yzTearDown(testrun_yz, testrun_yz_index) +class YZSearchTests(IntegrationTestBase, unittest.TestCase, Comparison): @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_from_bucket(self): bucket = self.client.bucket(testrun_yz['bucket']) diff --git a/riak/tests/yz_setup.py b/riak/tests/yz_setup.py index d0e83147..3d4a720e 100644 --- a/riak/tests/yz_setup.py +++ b/riak/tests/yz_setup.py @@ -1,31 +1,42 @@ -from riak import RiakError +import logging -def yzSetUpModule(c, *yzdata): - for yz in yzdata: - c.create_search_index(yz['index'], timeout=30000) - if yz['btype'] is not None: - t = c.bucket_type(yz['btype']) - b = t.bucket(yz['bucket']) - else: - b = c.bucket(yz['bucket']) - # Keep trying to set search bucket property until it succeeds - index_set = False - while not index_set: - try: - b.set_property('search_index', yz['index']) - index_set = True - except RiakError: - pass +from riak import RiakClient, RiakError +from riak.tests import RUN_YZ, PROTOCOL, HOST, PB_PORT, HTTP_PORT, SECURITY_CREDS -def yzTearDownModule(c, *yzdata): - for yz in yzdata: - if yz['btype'] is not None: - t = c.bucket_type(yz['btype']) - b = t.bucket(yz['bucket']) - else: - b = c.bucket(yz['bucket']) - b.set_property('search_index', '_dont_index_') - c.delete_search_index(yz['index']) - for keys in b.stream_keys(): - for key in keys: - b.delete(key) +def yzSetUp(*yzdata): + if RUN_YZ: + c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + for yz in yzdata: + logging.debug("yzSetUp: %s", yz) + c.create_search_index(yz['index'], timeout=30000) + if yz['btype'] is not None: + t = c.bucket_type(yz['btype']) + b = t.bucket(yz['bucket']) + else: + b = c.bucket(yz['bucket']) + # Keep trying to set search bucket property until it succeeds + index_set = False + while not index_set: + try: + b.set_property('search_index', yz['index']) + index_set = True + except RiakError: + pass + +def yzTearDown(c, *yzdata): + if RUN_YZ: + c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + for yz in yzdata: + logging.debug("yzTearDown: %s", yz) + if yz['btype'] is not None: + t = c.bucket_type(yz['btype']) + b = t.bucket(yz['bucket']) + else: + b = c.bucket(yz['bucket']) + b.set_property('search_index', '_dont_index_') + c.delete_search_index(yz['index']) + for keys in b.stream_keys(): + for key in keys: + b.delete(key) diff --git a/riak/transports/http/__init__.py b/riak/transports/http/__init__.py index c28a0034..c4a19a96 100644 --- a/riak/transports/http/__init__.py +++ b/riak/transports/http/__init__.py @@ -16,6 +16,7 @@ under the License. 
""" +import logging import socket import select from six import PY2 @@ -86,11 +87,21 @@ def __init__(self, :type timeout: int """ if PY2: + # TODO LRB RTS-367 it appears that pkey_file / cert_file are never set + # in riak/transports/http/connection.py#_connect() method + pkf = pkey_file + if pkf is None and credentials is not None: + pkf = credentials._pkey_file + + cf = cert_file + if cf is None and credentials is not None: + cf = credentials._cert_file + HTTPSConnection.__init__(self, host, port, - key_file=pkey_file, - cert_file=cert_file) + key_file=pkf, + cert_file=cf) else: super(RiakHTTPSConnection, self). \ __init__(host=host, @@ -128,6 +139,8 @@ def connect(self): else: ssl_ctx = configure_ssl_context(self.credentials) host = "riak@" + self.host + if self.timeout is not None: + sock.settimeout(self.timeout) self.sock = ssl.SSLSocket(sock=sock, keyfile=self.credentials.pkey_file, certfile=self.credentials.cert_file, diff --git a/riak/transports/http/connection.py b/riak/transports/http/connection.py index 2912f9b1..e1f570e0 100644 --- a/riak/transports/http/connection.py +++ b/riak/transports/http/connection.py @@ -65,14 +65,21 @@ def _connect(self): """ Use the appropriate connection class; optionally with security. """ + timeout = None + if self._options is not None and 'timeout' in self._options: + timeout = self._options['timeout'] + if self._client._credentials: - self._connection = \ - self._connection_class(self._node.host, - self._node.http_port, - self._client._credentials) + self._connection = self._connection_class( + host=self._node.host, + port=self._node.http_port, + credentials=self._client._credentials, + timeout=timeout) else: - self._connection = self._connection_class(self._node.host, - self._node.http_port) + self._connection = self._connection_class( + host=self._node.host, + port=self._node.http_port, + timeout=timeout) # Forces the population of stats and resources before any # other requests are made. self.server_version diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index 2b197966..c139b3ea 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -55,7 +55,7 @@ def __init__(self, node=None, client=None, connection_class=HTTPConnection, client_id=None, - **unused_options): + **options): """ Construct a new HTTP connection to Riak. 
""" @@ -65,6 +65,7 @@ def __init__(self, node=None, self._node = node self._connection_class = connection_class self._client_id = client_id + self._options = options if not self._client_id: self._client_id = self.make_random_client_id() self._connect() diff --git a/setup.py b/setup.py index dfcce1f3..3e2b84d1 100755 --- a/setup.py +++ b/setup.py @@ -13,9 +13,14 @@ requires.append("pyOpenSSL(>=0.14)") riak_pb_in_pythonpath = False -PYTHONPATH = os.environ.get('PYTHONPATH') -if PYTHONPATH is not None and PYTHONPATH.find('riak_pb/python/lib') != -1: - riak_pb_in_pythonpath = True +os_env_pythonpath = os.environ.get('PYTHONPATH') +if os_env_pythonpath is not None: + for ppath in os_env_pythonpath.split(os.pathsep): + if ppath.find('riak_pb/python/lib') != -1: + riak_pb_messages = os.path.join(ppath, 'riak_pb', 'messages.py') + if os.path.exists(riak_pb_messages): + riak_pb_in_pythonpath = True + break if riak_pb_in_pythonpath: install_requires.append("protobuf ==2.6.1") From 454ba8696d5b7f9052bcb55bb5e615354b449843 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 26 Oct 2015 17:49:54 -0700 Subject: [PATCH 028/324] Add code to query TS data and encode / decode from PB messages. Tests as well --- README.rst | 8 ++ riak/__init__.py | 26 ---- riak/client/__init__.py | 47 +++---- riak/client/operations.py | 41 +++--- riak/riak_object.py | 20 --- riak/table.py | 44 +++--- riak/tests/base.py | 2 +- riak/tests/test_kv.py | 2 + riak/tests/test_search.py | 2 + riak/tests/test_timeseries.py | 227 +++++++++++++++++++++++++++---- riak/tests/yz_setup.py | 2 + riak/transports/pbc/codec.py | 153 +++++++++++++++++---- riak/transports/pbc/transport.py | 39 +++--- riak/transports/transport.py | 2 +- riak/ts_object.py | 50 +++---- riak/util.py | 18 --- 16 files changed, 449 insertions(+), 234 deletions(-) diff --git a/README.rst b/README.rst index 85468584..94b429b6 100644 --- a/README.rst +++ b/README.rst @@ -234,3 +234,11 @@ To run the tests, then simply .. code-block:: console RUN_SECURITY=1 RIAK_TEST_HTTP_PORT=18098 python setup.py test + +Contributors +-------------------------- + - Rusty Klophaus + - Justin Sheehy + - Jay Baird + - Andy Gross + - Jon Meredith diff --git a/riak/__init__.py b/riak/__init__.py index 415f6660..9e761a91 100644 --- a/riak/__init__.py +++ b/riak/__init__.py @@ -1,34 +1,8 @@ """ -Copyright 2015 Basho Technologies -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. ---- The Riak API for Python allows you to connect to a Riak instance, create, modify, and delete Riak objects, add and remove links from Riak objects, run Javascript (and Erlang) based Map/Reduce operations, and run Linkwalking operations. - -See the unit_tests.py file for example usage. 
- -@author Rusty Klophaus (@rklophaus) (rusty@basho.com) -@author Andy Gross (@argv0) (andy@basho.com) -@author Jon Meredith (@jmeredith) (jmeredith@basho.com) -@author Jay Baird (@skatterbean) (jay@mochimedia.com) """ from riak.riak_error import RiakError, ConflictError diff --git a/riak/client/__init__.py b/riak/client/__init__.py index 002991d8..d623f970 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -1,24 +1,3 @@ -""" -Copyright 2011 Basho Technologies, Inc. -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - try: import simplejson as json except ImportError: @@ -31,6 +10,7 @@ from riak.bucket import RiakBucket, BucketType from riak.mapreduce import RiakMapReduceChain from riak.resolver import default_resolver +from riak.table import Table from riak.transports.http import RiakHttpPool from riak.transports.pbc import RiakPbcPool from riak.security import SecurityCreds @@ -140,6 +120,7 @@ def __init__(self, protocol='pbc', transport_options={}, nodes=None, 'binary/octet-stream': binary_encoder_decoder} self._buckets = WeakValueDictionary() self._bucket_types = WeakValueDictionary() + self._tables = WeakValueDictionary() def _get_protocol(self): return self._protocol @@ -277,12 +258,12 @@ def bucket_type(self, name): not always exist (unlike buckets), but this will always return a :class:`BucketType ` object. - :param name: the bucket name + :param name: the bucket-type name :type name: str :rtype: :class:`BucketType ` """ if not isinstance(name, string_types): - raise TypeError('Bucket name must be a string') + raise TypeError('BucketType name must be a string') if name in self._bucket_types: return self._bucket_types[name] @@ -291,6 +272,26 @@ def bucket_type(self, name): self._bucket_types[name] = btype return btype + def table(self, name): + """ + Gets the table by the specified name. Tables do + not always exist (unlike buckets), but this will always return + a :class:`Table ` object. + + :param name: the table name + :type name: str + :rtype: :class:`Table ` + """ + if not isinstance(name, string_types): + raise TypeError('Table name must be a string') + + if name in self._tables: + return self._tables[name] + else: + table = Table(self, name) + self._tables[name] = table + return table + def close(self): """ Iterate through all of the connections and close each one. diff --git a/riak/client/operations.py b/riak/client/operations.py index 36f6ed1f..f1881fcf 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -1,26 +1,9 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. 
You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from riak.client.transport import RiakClientTransport, \ retryable, retryableHttpOnly from riak.client.multiget import multiget from riak.client.index_page import IndexPage from riak.datatypes import TYPES +from riak.table import Table from riak.util import bytes_to_str from six import string_types, PY2 @@ -565,9 +548,31 @@ def ts_put(self, transport, tsobj): :param tsobj: the time series object to store :type tsobj: RiakTsObject + :rtype: boolean """ return transport.ts_put(tsobj) + @retryable + def ts_query(self, transport, table, query, interpolations=None): + """ + ts_query(table, query, interpolations=None) + + Queries time series data in the Riak cluster. + + .. note:: This request is automatically retried :attr:`retries` + times if it fails due to network error. + + :param table: The timeseries table. + :type table: string or :class:`Table ` + :param query: The timeseries query. + :type query: string + :rtype: :class:`TsObject ` + """ + t = table + if isinstance(t, str): + t = Table(self, table) + return transport.ts_query(t, query, interpolations) + @retryable def get(self, transport, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None): diff --git a/riak/riak_object.py b/riak/riak_object.py index 2db8b5e8..ab7dd375 100644 --- a/riak/riak_object.py +++ b/riak/riak_object.py @@ -1,23 +1,3 @@ -""" -Copyright 2012-2013 Basho Technologies -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" from riak import ConflictError from riak.content import RiakContent import base64 diff --git a/riak/table.py b/riak/table.py index a739e9bb..10da507c 100644 --- a/riak/table.py +++ b/riak/table.py @@ -1,27 +1,9 @@ -""" -Copyright 2015 Basho Technologies - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" from six import string_types, PY2 - class Table(object): """ - The ``Table`` object allows you to access properties on a Riak table - (bucket type) and query timeseries data. + The ``Table`` object allows you to access properties on a Riak + timeseries table and query timeseries data. 
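For reference, a minimal usage sketch of the table and query API introduced in this patch; the host, port, table name and SQL text below are illustrative only and assume a reachable Riak TS node with a matching table:

.. code-block:: python

    from riak import RiakClient

    # illustrative connection settings; adjust for your cluster
    client = RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)

    # client.table() caches Table objects by name and returns riak.table.Table
    table = client.table('GeoCheckin')

    # ts_query() accepts a Table or a table name and returns a TsObject
    # whose .columns and .rows are filled in from the protobuf response
    ts_obj = client.ts_query(table,
                             "select * from GeoCheckin "
                             "where time > 0 and time < 10 and user = 'user1'")
    print(len(ts_obj.rows))
    client.close()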
""" def __init__(self, client, name): """ @@ -46,6 +28,26 @@ def __init__(self, client, name): self._client = client self.name = name + def __str__(self): + return self.name + + def __repr__(self): + return self.name + + def new(self, rows, columns=None): + """ + A shortcut for manually instantiating a new :class:`~riak.ts_object.TsObject` + + :param rows: An list of lists with timeseries data + :type rows: list + :param columns: An list of Column names and types. Optional. + :type columns: list + :rtype: :class:`~riak.ts_object.TsObject` + """ + from riak.ts_object import TsObject + + return TsObject(self._client, self, rows, columns) + def query(self, query, interpolations=None): """ Queries a timeseries table. @@ -54,4 +56,4 @@ def query(self, query, interpolations=None): :type query: string :rtype: :class:`TsObject ` """ - return self.client.ts_query(query, interpolations) + return self.client.ts_query(self, query, interpolations) diff --git a/riak/tests/base.py b/riak/tests/base.py index 7ac2dde4..0fb4317f 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -61,13 +61,13 @@ def setUp(self): self.logging_stream_handler = logging.StreamHandler(sys.stdout) self.logger.addHandler(self.logging_stream_handler) - self.table_name = 'GeoCheckin' self.bucket_name = self.randname() self.key_name = self.randname() self.credentials = SECURITY_CREDS self.client = self.create_client() def tearDown(self): + self.client.close() if self.logging_enabled: self.logger.removeHandler(self.logging_stream_handler) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 78decef2..3ba4eedc 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -38,12 +38,14 @@ def setUpModule(): c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, pb_port=PB_PORT, credentials=SECURITY_CREDS) c.bucket(testrun_sibs_bucket).allow_mult = True + c.close() def tearDownModule(): c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, pb_port=PB_PORT, credentials=SECURITY_CREDS) c.bucket(testrun_sibs_bucket).clear_properties() c.bucket(testrun_props_bucket).clear_properties() + c.close() class NotJsonSerializable(object): diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index 799ce8e3..ebf13730 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -18,6 +18,7 @@ def setUpModule(): pb_port=PB_PORT, credentials=SECURITY_CREDS) b = c.bucket(testrun_search_bucket) b.enable_search() + c.close() def tearDownModule(): if not SKIP_SEARCH and not RUN_YZ: @@ -25,6 +26,7 @@ def tearDownModule(): pb_port=PB_PORT, credentials=SECURITY_CREDS) b = c.bucket(testrun_search_bucket) b.clear_properties() + c.close() class EnableSearchTests(IntegrationTestBase, unittest.TestCase): @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 9e710d7e..34aa23b9 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -1,9 +1,17 @@ # -*- coding: utf-8 -*- +import datetime +import os import platform -import time +import riak_pb import sys +import time -from riak.tests import SKIP_TIMESERIES +from riak.table import Table +from riak.ts_object import TsObject +from riak.transports.pbc.codec import RiakPbcCodec +from riak import RiakClient +from riak.util import str_to_bytes +from riak.tests import SKIP_TIMESERIES, HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': @@ -11,25 +19,198 @@ else: 
import unittest +table_name = 'GeoCheckin' + +bd0 = os.urandom(16) +bd1 = os.urandom(16) + +fiveMins = datetime.timedelta(0, 300) +ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) +ts1 = ts0 + fiveMins + +s = [ 'foo', 'bar', 'baz' ] +m = { + 'foo': 'foo', + 'bar': 'bar', + 'baz': 'baz', + 'set': s +} +sj = ['"foo"', '"bar"', '"baz"'] +mj = '{"baz": "baz", "set": ["foo", "bar", "baz"], "foo": "foo", "bar": "bar"}' + +class TimeseriesUnitTests(unittest.TestCase): + def setUp(self): + self.c = RiakPbcCodec() + self.ts0ms = self.c._unix_time_millis(ts0) + self.ts1ms = self.c._unix_time_millis(ts1) + self.rows = [ + [ bd0, 0, 1.2, ts0, True, s, m ], + [ bd1, 3, 4.5, ts1, False, s, m ] + ] + self.table = Table(None, 'test-table') + + def test_encode_data(self): + tsobj = TsObject(None, self.table, self.rows, None) + ts_put_req = riak_pb.TsPutReq() + self.c._encode_timeseries(tsobj, ts_put_req) + + # NB: expected, actual + self.assertEqual(len(self.rows), len(ts_put_req.rows)) + + r0 = ts_put_req.rows[0] + self.assertEqual(r0.cells[0].binary_value, self.rows[0][0]) + self.assertEqual(r0.cells[1].integer_value, self.rows[0][1]) + self.assertEqual(r0.cells[2].double_value, self.rows[0][2]) + self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) + self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) + self.assertEqual(r0.cells[5].set_value, sj) + self.assertEqual(r0.cells[6].map_value, mj) + + r1 = ts_put_req.rows[1] + self.assertEqual(r1.cells[0].binary_value, self.rows[1][0]) + self.assertEqual(r1.cells[1].integer_value, self.rows[1][1]) + self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) + self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) + self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) + self.assertEqual(r1.cells[5].set_value, sj) + self.assertEqual(r1.cells[6].map_value, mj) + + def test_decode_data(self): + tqr = riak_pb.TsQueryResp() + + c0 = tqr.columns.add() + c0.name = str_to_bytes('col_binary') + c0.type = riak_pb.TsColumnType.Value('BINARY') + c1 = tqr.columns.add() + c1.name = str_to_bytes('col_integer') + c1.type = riak_pb.TsColumnType.Value('INTEGER') + c2 = tqr.columns.add() + c2.name = str_to_bytes('col_double') + c2.type = riak_pb.TsColumnType.Value('FLOAT') + c3 = tqr.columns.add() + c3.name = str_to_bytes('col_timestamp') + c3.type = riak_pb.TsColumnType.Value('TIMESTAMP') + c4 = tqr.columns.add() + c4.name = str_to_bytes('col_boolean') + c4.type = riak_pb.TsColumnType.Value('BOOLEAN') + c5 = tqr.columns.add() + c5.name = str_to_bytes('col_set') + c5.type = riak_pb.TsColumnType.Value('SET') + c6 = tqr.columns.add() + c6.name = str_to_bytes('col_map') + c6.type = riak_pb.TsColumnType.Value('MAP') + + r0 = tqr.rows.add() + r0c0 = r0.cells.add() + r0c0.binary_value = self.rows[0][0] + r0c1 = r0.cells.add() + r0c1.integer_value = self.rows[0][1] + r0c2 = r0.cells.add() + r0c2.double_value = self.rows[0][2] + r0c3 = r0.cells.add() + r0c3.timestamp_value = self.ts0ms + r0c4 = r0.cells.add() + r0c4.boolean_value = self.rows[0][4] + r0c5 = r0.cells.add() + for j in sj: + r0c5.set_value.append(j) + r0c6 = r0.cells.add() + r0c6.map_value = str_to_bytes(mj) + + r1 = tqr.rows.add() + r1c0 = r1.cells.add() + r1c0.binary_value = self.rows[1][0] + r1c1 = r1.cells.add() + r1c1.integer_value = self.rows[1][1] + r1c2 = r1.cells.add() + r1c2.double_value = self.rows[1][2] + r1c3 = r1.cells.add() + r1c3.timestamp_value = self.ts1ms + r1c4 = r1.cells.add() + r1c4.boolean_value = self.rows[1][4] + r1c5 = r1.cells.add() + for j in sj: + r1c5.set_value.append(j) 
+ r1c6 = r1.cells.add() + r1c6.map_value = str_to_bytes(mj) + + tsobj = TsObject(None, self.table, [], []) + c = RiakPbcCodec() + c._decode_timeseries(tqr, tsobj) + + self.assertEqual(len(self.rows), len(tsobj.rows)) + self.assertEqual(len(tqr.columns), len(tsobj.columns)) + + c = tsobj.columns + self.assertEqual(c[0][0], 'col_binary') + self.assertEqual(c[0][1], riak_pb.TsColumnType.Value('BINARY')) + self.assertEqual(c[1][0], 'col_integer') + self.assertEqual(c[1][1], riak_pb.TsColumnType.Value('INTEGER')) + self.assertEqual(c[2][0], 'col_double') + self.assertEqual(c[2][1], riak_pb.TsColumnType.Value('FLOAT')) + self.assertEqual(c[3][0], 'col_timestamp') + self.assertEqual(c[3][1], riak_pb.TsColumnType.Value('TIMESTAMP')) + self.assertEqual(c[4][0], 'col_boolean') + self.assertEqual(c[4][1], riak_pb.TsColumnType.Value('BOOLEAN')) + self.assertEqual(c[5][0], 'col_set') + self.assertEqual(c[5][1], riak_pb.TsColumnType.Value('SET')) + self.assertEqual(c[6][0], 'col_map') + self.assertEqual(c[6][1], riak_pb.TsColumnType.Value('MAP')) + + r0 = tsobj.rows[0] + self.assertEqual(r0[0], self.rows[0][0]) + self.assertEqual(r0[1], self.rows[0][1]) + self.assertEqual(r0[2], self.rows[0][2]) + self.assertEqual(r0[3], ts0) + self.assertEqual(r0[4], self.rows[0][4]) + self.assertEqual(r0[5], s) + self.assertEqual(r0[6], m) + + r1 = tsobj.rows[1] + self.assertEqual(r1[0], self.rows[1][0]) + self.assertEqual(r1[1], self.rows[1][1]) + self.assertEqual(r1[2], self.rows[1][2]) + self.assertEqual(r1[3], ts1) + self.assertEqual(r1[4], self.rows[1][4]) + self.assertEqual(r1[5], s) + self.assertEqual(r1[6], m) + +@unittest.skipIf(SKIP_TIMESERIES == 1, "skip requested for timeseries tests") class TimeseriesTests(IntegrationTestBase, unittest.TestCase): - @unittest.skipIf(SKIP_TIMESERIES == '1', "skip requested for timeseries tests") - def test_store(self): - pass - # TODO RTS-367 - # now = int(round(time.time() * 1000)) # NB: millis since Jan 1 1970 UTC - # fiveMinsInMsec = 5 * 60 * 1000 - # fiveMinsAgo = now - fiveMinsInMsec - # tenMinsAgo = fiveMinsAgo - fiveMinsInMsec - # fifteenMinsAgo = tenMinsAgo - fiveMinsInMsec - # twentyMinsAgo = fifteenMinsAgo - fiveMinsInMsec - - # table = self.client.table(self.table_name) - # measurements = [ - # [ 'hash1', 'user2', twentyMinsAgo, 'hurricane', '84.3' ], - # [ 'hash1', 'user2', fifteenMinsAgo, 'rain', '79.0' ], - # [ 'hash1', 'user2', fiveMinsAgo, 'wind', 50.5 ], - # [ 'hash1', 'user2', now, 'snow', 20.1 ] - # ] - # ts_obj = table.new(measurements) - # result = ts_obj.store() - # self.assertTrue(result) + @classmethod + def setUpClass(cls): + cls.now = datetime.datetime.utcfromtimestamp(144379690) + fiveMinsAgo = cls.now - fiveMins + tenMinsAgo = fiveMinsAgo - fiveMins + fifteenMinsAgo = tenMinsAgo - fiveMins + twentyMinsAgo = fifteenMinsAgo - fiveMins + + client = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, + pb_port=PB_PORT, credentials=SECURITY_CREDS) + table = client.table(table_name) + rows = [ + [ 'hash1', 'user2', twentyMinsAgo, 'hurricane', None ], + [ 'hash1', 'user2', fifteenMinsAgo, 'rain', 79.0 ], + [ 'hash1', 'user2', fiveMinsAgo, 'wind', 50.5 ], + [ 'hash1', 'user2', cls.now, 'snow', 20.1 ] + ] + ts_obj = table.new(rows) + result = ts_obj.store() + + codec = RiakPbcCodec() + cls.nowMsec = codec._unix_time_millis(cls.now) + cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) + client.close() + + # TODO RTS-367 ts_query test. 
Ensure that 'None' comes back, somehow + def test_query_that_returns_no_data(self): + query = "select * from {} where time > 0 and time < 10 and user = 'user1'".format(table_name) + ts_obj = self.client.ts_query('GeoCheckin', query) + self.assertEqual(len(ts_obj.columns), 0) + self.assertEqual(len(ts_obj.rows), 0) + + def test_query_that_matches_some_data(self): + query = "select * from {} where time > {} and time < {} and user = 'user2'".format(table_name, self.tenMinsAgoMsec, self.nowMsec); + ts_obj = self.client.ts_query('GeoCheckin', query) + self.assertEqual(len(ts_obj.columns), 5) + self.assertEqual(len(ts_obj.rows), 1) diff --git a/riak/tests/yz_setup.py b/riak/tests/yz_setup.py index 3d4a720e..f7a88efa 100644 --- a/riak/tests/yz_setup.py +++ b/riak/tests/yz_setup.py @@ -23,6 +23,7 @@ def yzSetUp(*yzdata): index_set = True except RiakError: pass + c.close() def yzTearDown(c, *yzdata): if RUN_YZ: @@ -40,3 +41,4 @@ def yzTearDown(c, *yzdata): for keys in b.stream_keys(): for key in keys: b.delete(key) + c.close() diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index e8b47420..d1d7a3af 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -1,27 +1,15 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" +import datetime +import json +import logging import riak_pb + from riak import RiakError from riak.content import RiakContent from riak.util import decode_index_value, str_to_bytes, bytes_to_str from riak.multidict import MultiDict from six import string_types, PY2 +epoch = datetime.datetime.utcfromtimestamp(0) def _invert(d): out = {} @@ -87,6 +75,12 @@ def __init__(self, **unused_args): raise NotImplementedError("this transport is not available") super(RiakPbcCodec, self).__init__(**unused_args) + def _unix_time_millis(self, dt): + return int((dt - epoch).total_seconds() * 1000.0) + + def _datetime_from_unix_time_millis(self, ut): + return datetime.datetime.utcfromtimestamp(ut / 1000.0) + def _encode_quorum(self, rw): """ Converts a symbolic quorum value into its on-the-wire @@ -627,25 +621,126 @@ def _encode_map_update(self, dtype, msg, op): def _encode_timeseries(self, tsobj, ts_put_req): """ Fills an TsPutReq message with the appropriate data and - metadata from a RiakTsObject. + metadata from a TsObject. 
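As a standalone sketch of the timestamp arithmetic used by the _unix_time_millis and _datetime_from_unix_time_millis helpers above (the epoch constant and the 2015-01-01 example value come from this patch's tests; the function names here are local stand-ins, not the codec methods themselves):

.. code-block:: python

    import datetime

    epoch = datetime.datetime.utcfromtimestamp(0)

    def unix_time_millis(dt):
        # naive UTC datetime -> milliseconds since the Unix epoch
        return int((dt - epoch).total_seconds() * 1000.0)

    def datetime_from_unix_time_millis(ut):
        # milliseconds since the Unix epoch -> naive UTC datetime
        return datetime.datetime.utcfromtimestamp(ut / 1000.0)

    ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0)
    ms = unix_time_millis(ts0)
    assert ms == 1420113600000
    assert datetime_from_unix_time_millis(ms) == ts0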
- :param tsobj: a RiakTsObject - :type tsobj: RiakTsObject + :param tsobj: a TsObject + :type tsobj: TsObject :param ts_put_req: the protobuf message to fill :type ts_put_req: riak_pb.TsPutReq """ - ts_put_req.table = str_to_bytes(tsobj.table) - # TODO RTS-367 columns / rows - # if tsobj.columns: - # if tsobj.rows: - # else: - # raise RiakError("RiakTsObject requires rows") + ts_put_req.table = str_to_bytes(tsobj.table.name) + if tsobj.columns: + raise NotImplementedError("columns are not implemented yet") + if tsobj.rows and isinstance(tsobj.rows, list): + for row in tsobj.rows: + tsr = ts_put_req.rows.add() # NB: type riak_pb.TsRow + if not isinstance(row, list): + raise RiakError("TsObject row must be a list of values") + for cell in row: + tsc = tsr.cells.add() # NB: type riak_pb.TsCell + if cell is not None: + if isinstance(cell, bytes) or isinstance(cell, bytearray): + logging.debug("cell -> binary_value: '%s'", cell) + tsc.binary_value = cell + elif isinstance(cell, datetime.datetime): + tsc.timestamp_value = self._unix_time_millis(cell) + logging.debug("cell -> timestamp: '%s', timestamp_value '%d'", + cell, tsc.timestamp_value) + elif isinstance(cell, list): + for c in cell: + j = json.dumps(c) + logging.debug("cell -> set_value: '%s'", j) + tsc.set_value.append(str_to_bytes(j)) + elif isinstance(cell, bool): + logging.debug("cell -> boolean: '%s'", cell) + tsc.boolean_value = cell + elif isinstance(cell, str): + logging.debug("cell -> str: '%s'", cell) + tsc.binary_value = str_to_bytes(cell) + elif isinstance(cell, int) or isinstance(cell, long): + logging.debug("cell -> int/long: '%s'", cell) + tsc.integer_value = cell + elif isinstance(cell, float): + logging.debug("cell -> float: '%s'", cell) + tsc.double_value = cell + elif isinstance(cell, dict): + logging.debug("cell -> dict: '%s'", cell) + j = json.dumps(cell) + tsc.map_value = str_to_bytes(j) + else: + t = type(cell) + raise RiakError("can't serialize type '{}', value '{}'".format(t, cell)) + else: + raise RiakError("TsObject requires a list of rows") - def _decode_timeseries(self, ts_put_resp, tsobj): + def _decode_timeseries(self, ts_query_rsp, tsobj): """ - TODO RTS-367 + Fills an TsObject with the appropriate data and + metadata from a TsQueryResp. + + :param ts_query_rsp: the protobuf message from which to process data + :type ts_query_rsp: riak_pb.TsQueryRsp + :param tsobj: a TsObject + :type tsobj: TsObject """ - raise NotImplementedError + if not isinstance(ts_query_rsp, riak_pb.TsQueryResp): + raise RiakError("expected riak_pb.TsQueryResp") + + if tsobj.columns is not None: + for ts_col in ts_query_rsp.columns: + col_name = bytes_to_str(ts_col.name) + col_type = ts_col.type + col = (col_name, col_type) + logging.debug("column: '%s'", col) + tsobj.columns.append(col) + + for ts_row in ts_query_rsp.rows: + tsobj.rows.append(self._decode_timeseries_row(ts_row, ts_query_rsp.columns)) + + def _decode_timeseries_row(self, ts_row, ts_columns): + """ + Decodes a TsRow into a list + + :param ts_row: the protobuf TsRow to decode. + :type ts_row: riak_pb.TsRow + :param ts_columns: the protobuf TsColumn data to help decode. 
+ :type ts_columns: list + :rtype list + """ + row = [] + for i, ts_cell in enumerate(ts_row.cells): + ts_col = ts_columns[i] + if ts_col.type == riak_pb.TsColumnType.Value('BINARY'): + logging.debug("ts_cell.binary_value: '%s'", ts_cell.binary_value) + row.append(ts_cell.binary_value) + elif ts_col.type == riak_pb.TsColumnType.Value('INTEGER'): + logging.debug("ts_cell.integer_value: '%s'", ts_cell.integer_value) + row.append(ts_cell.integer_value) + elif ts_col.type == riak_pb.TsColumnType.Value('FLOAT'): + logging.debug("ts_cell.double_value: '%s'", ts_cell.double_value) + row.append(ts_cell.double_value) + elif ts_col.type == riak_pb.TsColumnType.Value('TIMESTAMP'): + dt = self._datetime_from_unix_time_millis(ts_cell.timestamp_value) + logging.debug("ts_cell.timestamp_value: '%s', datetime: '%s'", + ts_cell.timestamp_value, dt) + row.append(dt) + elif ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN'): + logging.debug("ts_cell.boolean_value: '%s'", ts_cell.boolean_value) + row.append(ts_cell.boolean_value) + elif ts_col.type == riak_pb.TsColumnType.Value('SET'): + logging.debug("ts_cell.set_value: '%s'", ts_cell.set_value) + s = [] + for sv in ts_cell.set_value: + sj = bytes_to_str(sv) + s.append(json.loads(sj)) + row.append(s) + elif ts_col.type == riak_pb.TsColumnType.Value('MAP'): + logging.debug("ts_cell.map_value: '%s'", ts_cell.map_value) + mj = bytes_to_str(ts_cell.map_value) + row.append(json.loads(mj)) + else: + row.append(None) + return row def _decode_preflist(self, item): """ diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 5e3e91c8..8a81b26c 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -1,28 +1,8 @@ -""" -Copyright 2015 Basho Technologies, Inc. -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - import riak_pb from riak import RiakError from riak.transports.transport import RiakTransport from riak.riak_object import VClock +from riak.ts_object import TsObject from riak.util import decode_index_value, str_to_bytes, bytes_to_str from riak.transports.pbc.connection import RiakPbcConnection from riak.transports.pbc.stream import (RiakPbcKeyStream, @@ -80,8 +60,10 @@ MSG_CODE_DT_FETCH_RESP, MSG_CODE_DT_UPDATE_REQ, MSG_CODE_DT_UPDATE_RESP, - # MSG_CODE_TS_PUT_REQ, - # MSG_CODE_TS_PUT_RESP + MSG_CODE_TS_PUT_REQ, + MSG_CODE_TS_PUT_RESP, + MSG_CODE_TS_QUERY_REQ, + MSG_CODE_TS_QUERY_RESP ) @@ -248,6 +230,17 @@ def ts_put(self, tsobj): elif not robj.key: raise RiakError("missing response object") + def ts_query(self, table, query, interpolations=None): + req = riak_pb.TsQueryReq() + req.query.base = bytes_to_str(query) + + msg_code, ts_query_resp = self._request(MSG_CODE_TS_QUERY_REQ, req, + MSG_CODE_TS_QUERY_RESP) + + tsobj = TsObject(self._client, table, [], []) + self._decode_timeseries(ts_query_resp, tsobj) + return tsobj + def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): req = riak_pb.RpbDelReq() diff --git a/riak/transports/transport.py b/riak/transports/transport.py index d742d9dd..5e2e5f3e 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -98,7 +98,7 @@ def ts_put(self, tsobj): """ raise NotImplementedError - def ts_query(self, query, interpolations=None): + def ts_query(self, table, query, interpolations=None): """ Query timeseries data. """ diff --git a/riak/ts_object.py b/riak/ts_object.py index 2b855f2e..f1e4f028 100644 --- a/riak/ts_object.py +++ b/riak/ts_object.py @@ -1,30 +1,12 @@ -""" -Copyright 2015 Basho Technologies +from riak import RiakError +from riak.table import Table -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - -# TODO RTS-367 -# Should the table parameter be its own object that has a query method on it? -# Like Bucket? class TsObject(object): """ - The TsObject holds meta information about Timeseries data, - plus the data itself. + The TsObject holds information about Timeseries data, plus the data + itself. """ - def __init__(self, client, table, rows, columns=None): + def __init__(self, client, table, rows=[], columns=[]): """ Construct a new TsObject. @@ -32,23 +14,29 @@ def __init__(self, client, table, rows, columns=None): :type client: :class:`RiakClient ` :param table: The table for the timeseries data as a Table object. :type table: :class:`Table` - :param rows: An array of arrays with timeseries data - :type rows: array - :param columns: An array Column names and types. Optional. - :type columns: array + :param rows: An list of lists with timeseries data + :type rows: list + :param columns: An list of Column names and types. Optional. 
+ :type columns: list """ - if table is None or len(table) == 0: - raise ValueError('Table must either be a non-empty string.') + if not isinstance(table, Table): + raise ValueError('table must be an instance of Table.') self.client = client self.table = table - # TODO RTS-367 rows, columns + + self.rows = rows + if not isinstance(self.rows, list): + raise RiakError("TsObject requires a list of rows") + + self.columns = columns + if self.columns is not None and not isinstance(self.columns, list): + raise RiakError("TsObject columns must be a list") def store(self): """ Store the timeseries data in Riak. :rtype: boolean """ - return self.client.ts_put(self) diff --git a/riak/util.py b/riak/util.py index 9a5b5a14..5dc3e61a 100644 --- a/riak/util.py +++ b/riak/util.py @@ -1,21 +1,3 @@ -""" -Copyright 2014 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from __future__ import print_function import warnings from collections import Mapping From 4551e14f57c7877b6f2cba74b654ca7e9d894d65 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 26 Oct 2015 18:02:25 -0700 Subject: [PATCH 029/324] Ensure that RiakClient objects created during tests are closed --- riak/tests/test_client.py | 5 +++++ riak/tests/test_comparison.py | 8 ++++++++ riak/tests/test_kv.py | 10 ++++++++-- riak/tests/test_search.py | 29 +++++++++++++++++++---------- riak/tests/test_security.py | 10 ++++++++++ 5 files changed, 50 insertions(+), 12 deletions(-) diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index a53e7ba6..be105502 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -21,6 +21,7 @@ def test_uses_client_id_if_given(self): zero_client_id = "\0\0\0\0" c = self.create_client(client_id=zero_client_id) self.assertEqual(zero_client_id, c.client_id) + c.close() else: pass @@ -33,6 +34,7 @@ def test_request_retries(self): # If retries are exhausted, the final result should also be an # error. 
self.assertRaises(IOError, client.ping) + client.close() def test_request_retries_configurable(self): # We guess at some ports that will be unused by Riak or @@ -61,6 +63,7 @@ def _target(): with client.retry_count(5): self.assertEqual(5, client.retries) self.assertRaises(IOError, client.ping) + client.close() def test_timeout_validation(self): bucket = self.client.bucket(self.bucket_name) @@ -147,6 +150,7 @@ def test_multiget_errors(self): self.assertIsInstance(failure[3], StandardError) # noqa else: self.assertIsInstance(failure[3], Exception) + client.close() def test_multiget_notfounds(self): """ @@ -186,6 +190,7 @@ def test_multiget_pool_size(self): self.assertEqual(obj.key, obj.encoded_data) else: self.assertEqual(obj.key, obj.data) + client.close() @unittest.skipIf(SKIP_POOL, 'SKIP_POOL is set') def test_pool_close(self): diff --git a/riak/tests/test_comparison.py b/riak/tests/test_comparison.py index 6f9e6b9c..446bc031 100644 --- a/riak/tests/test_comparison.py +++ b/riak/tests/test_comparison.py @@ -142,6 +142,8 @@ def test_client_eq(self): a = self.create_client(host='host1', http_port=11) b = self.create_client(host='host1', http_port=11) self.assertEqual(a, b) + a.close() + b.close() def test_client_nq(self): self.protocol = 'http' @@ -150,6 +152,9 @@ def test_client_nq(self): c = self.create_client(host='host1', http_port=12) self.assertNotEqual(a, b, 'matched with different hosts') self.assertNotEqual(a, c, 'matched with different ports') + a.close() + b.close() + c.close() def test_client_hash(self): self.protocol = 'http' @@ -158,6 +163,9 @@ def test_client_hash(self): c = self.create_client(host='host2', http_port=11) self.assertEqual(hash(a), hash(b), 'same object has different hashes') self.assertNotEqual(hash(a), hash(c), 'different object has same hash') + a.close() + b.close() + c.close() if __name__ == '__main__': unittest.main() diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 3ba4eedc..75bb69f2 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -363,16 +363,22 @@ def test_set_bucket_properties(self): # Test setting nval... bucket.n_val = 1 - bucket2 = self.create_client().bucket(testrun_props_bucket) + c2 = self.create_client() + bucket2 = c2.bucket(testrun_props_bucket) self.assertTrue(bucket2.allow_mult) self.assertEqual(bucket2.n_val, 1) # Test setting multiple properties... bucket.set_properties({"allow_mult": False, "n_val": 2}) - bucket3 = self.create_client().bucket(testrun_props_bucket) + c3 = self.create_client() + bucket3 = c3.bucket(testrun_props_bucket) self.assertFalse(bucket3.allow_mult) self.assertEqual(bucket3.n_val, 2) + # clean up! + c2.close() + c3.close() + def test_if_none_match(self): bucket = self.client.bucket(self.bucket_name) obj = bucket.get(self.key_name) diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index ebf13730..3050de34 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -38,24 +38,33 @@ def test_bucket_search_enabled(self): def test_enable_search_commit_hook(self): bucket = self.client.bucket(testrun_search_bucket) bucket.clear_properties() - self.assertFalse(self.create_client(). - bucket(testrun_search_bucket). - search_enabled()) + + c = self.create_client() + self.assertFalse(c.bucket(testrun_search_bucket).search_enabled()) + c.close() + bucket.enable_search() - self.assertTrue(self.create_client(). - bucket(testrun_search_bucket). 
- search_enabled()) + + c = self.create_client() + self.assertTrue(c.bucket(testrun_search_bucket).search_enabled()) + c.close() @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_disable_search_commit_hook(self): bucket = self.client.bucket(testrun_search_bucket) bucket.clear_properties() bucket.enable_search() - self.assertTrue(self.create_client().bucket(testrun_search_bucket) - .search_enabled()) + + c = self.create_client() + self.assertTrue(c.bucket(testrun_search_bucket).search_enabled()) + c.close() + bucket.disable_search() - self.assertFalse(self.create_client().bucket(testrun_search_bucket) - .search_enabled()) + + c = self.create_client() + self.assertFalse(c.bucket(testrun_search_bucket).search_enabled()) + c.close() + bucket.enable_search() diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index f3339e28..0c8a5a49 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -29,6 +29,7 @@ def test_security_disabled(self): key1 = myBucket.new('x', data=val1) with self.assertRaises(Exception): key1.store() + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_basic_connection(self): @@ -47,6 +48,7 @@ def test_security_bad_user(self): client = self.create_client(credentials=creds) with self.assertRaises(Exception): client.get_buckets() + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_bad_password(self): @@ -57,6 +59,7 @@ def test_security_bad_password(self): client = self.create_client(credentials=creds) with self.assertRaises(Exception): client.get_buckets() + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_invalid_cert(self): @@ -67,6 +70,7 @@ def test_security_invalid_cert(self): client = self.create_client(credentials=creds) with self.assertRaises(Exception): client.get_buckets() + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_password_without_cacert(self): @@ -79,6 +83,7 @@ def test_security_password_without_cacert(self): val1 = "foobar" key1 = myBucket.new('x', data=val1) key1.store() + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_cert_authentication(self): @@ -101,6 +106,7 @@ def test_security_cert_authentication(self): with self.assertRaises(Exception): key1.store() myBucket.get('x') + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_revoked_cert(self): @@ -115,6 +121,7 @@ def test_security_revoked_cert(self): client = self.create_client(credentials=creds) with self.assertRaises(Exception): client.get_buckets() + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_bad_ca_cert(self): @@ -124,6 +131,7 @@ def test_security_bad_ca_cert(self): client = self.create_client(credentials=creds) with self.assertRaises(Exception): client.get_buckets() + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_ciphers(self): @@ -136,6 +144,7 @@ def test_security_ciphers(self): key1 = myBucket.new('x', data=val1) key1.store() myBucket.get('x') + client.close() @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') def test_security_bad_ciphers(self): @@ -145,3 +154,4 @@ def test_security_bad_ciphers(self): client = self.create_client(credentials=creds) with self.assertRaises(Exception): client.get_buckets() + client.close() From a4ab50c6c1a0531c0fb4cd52e729a10393177380 
Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 27 Oct 2015 09:43:46 -0700 Subject: [PATCH 030/324] Test fixes, decoding TS data fixes, ensure that resources are cleaned up --- commands.py | 11 ++++---- riak/bucket.py | 4 ++- riak/client/multiget.py | 4 ++- riak/tests/base.py | 48 +++++++++++++++++++---------------- riak/tests/test_kv.py | 10 +++----- riak/tests/test_search.py | 9 +++---- riak/tests/test_timeseries.py | 33 ++++++++++++++++++------ riak/tests/yz_setup.py | 11 ++++---- riak/transports/pbc/codec.py | 36 ++++++++++++++++++-------- setup.py | 1 + 10 files changed, 100 insertions(+), 67 deletions(-) diff --git a/commands.py b/commands.py index 0a736284..0f678c02 100644 --- a/commands.py +++ b/commands.py @@ -416,9 +416,9 @@ def _update_riak_conf(self): https_host = self.host + ':' + self.https_port pb_host = self.host + ':' + self.pb_port self._backup_file(self.riak_conf) - f = open(self.riak_conf, 'r', buffering=1) - conf = f.read() - f.close() + conf = None + with open(self.riak_conf, 'r', buffering=1) as f: + conf = f.read() conf = re.sub(r'search\s+=\s+off', r'search = on', conf) conf = re.sub(r'##[ ]+ssl\.', r'ssl.', conf) conf = re.sub(r'ssl.certfile\s+=\s+\S+', @@ -447,9 +447,8 @@ def _update_riak_conf(self): # Older versions of OpenSSL client library need to match on the server conf += 'tls_protocols.tlsv1 = on\n' conf += 'tls_protocols.tlsv1.1 = on\n' - f = open(self.riak_conf, 'w', buffering=1) - f.write(conf) - f.close() + with open(self.riak_conf, 'w', buffering=1) as f: + f.write(conf) def _backup_file(self, name): backup = name + ".bak" diff --git a/riak/bucket.py b/riak/bucket.py index 1d56926f..f6dd3863 100644 --- a/riak/bucket.py +++ b/riak/bucket.py @@ -410,7 +410,9 @@ def new_from_file(self, key, filename): :type filename: string :rtype: :class:`RiakObject ` """ - binary_data = open(filename, "rb").read() + binary_data = None + with open(filename, 'rb') as f: + binary_data = f.read() mimetype, encoding = mimetypes.guess_type(filename) if encoding: binary_data = bytearray(binary_data, encoding) diff --git a/riak/client/multiget.py b/riak/client/multiget.py index a8573cc8..20d02801 100644 --- a/riak/client/multiget.py +++ b/riak/client/multiget.py @@ -209,7 +209,9 @@ def multiget(client, keys, **options): client = RiakClient(protocol='pbc') bkeys = [('default', 'multiget', str(key)) for key in range(10000)] - data = open(__file__).read() + data = None + with open(__file__) as f: + data = f.read() print("Benchmarking multiget:") print(" CPUs: {0}".format(cpu_count())) diff --git a/riak/tests/base.py b/riak/tests/base.py index 0fb4317f..8856f923 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -25,25 +25,25 @@ def randname(length=12): out += chr(random.randint(ord('a'), ord('z'))) return out - def create_client(self, host=None, http_port=None, pb_port=None, - protocol=None, credentials=None, - **client_args): - host = host or self.host or HOST - http_port = http_port or self.http_port or HTTP_PORT - pb_port = pb_port or self.pb_port or PB_PORT + @classmethod + def create_client(cls, host=None, http_port=None, pb_port=None, + protocol=None, credentials=None, **client_args): + host = host or HOST + http_port = http_port or HTTP_PORT + pb_port = pb_port or PB_PORT if protocol is None: - if hasattr(self, 'protocol') and (self.protocol is not None): - protocol = self.protocol + if hasattr(cls, 'protocol') and (cls.protocol is not None): + protocol = cls.protocol else: protocol = PROTOCOL - self.protocol = protocol + cls.protocol = protocol credentials 
= credentials or SECURITY_CREDS - if self.logging_enabled: - self.logger.debug("RiakClient(protocol='%s', host='%s', pb_port='%d', http_port='%d', credentials='%s', client_args='%s')", protocol, host, pb_port, http_port, credentials, client_args) + if hasattr(cls, 'logging_enabled') and cls.logging_enabled: + cls.logger.debug("RiakClient(protocol='%s', host='%s', pb_port='%d', http_port='%d', credentials='%s', client_args='%s')", protocol, host, pb_port, http_port, credentials, client_args) return RiakClient(protocol=protocol, host=host, @@ -51,23 +51,27 @@ def create_client(self, host=None, http_port=None, pb_port=None, credentials=credentials, pb_port=pb_port, **client_args) - def setUp(self): - self.logging_enabled = False + @classmethod + def setUpClass(cls): + cls.logging_enabled = False distutils_debug = os.environ.get('DISTUTILS_DEBUG', '0') if distutils_debug == '1': - self.logging_enabled = True - self.logger = logging.getLogger() - self.logger.level = logging.DEBUG - self.logging_stream_handler = logging.StreamHandler(sys.stdout) - self.logger.addHandler(self.logging_stream_handler) + cls.logging_enabled = True + cls.logger = logging.getLogger() + cls.logger.level = logging.DEBUG + cls.logging_stream_handler = logging.StreamHandler(sys.stdout) + cls.logger.addHandler(cls.logging_stream_handler) + + @classmethod + def tearDownClass(cls): + if hasattr(cls, 'logging_enabled') and cls.logging_enabled: + cls.logger.removeHandler(cls.logging_stream_handler) + cls.logging_enabled = False + def setUp(self): self.bucket_name = self.randname() self.key_name = self.randname() - self.credentials = SECURITY_CREDS self.client = self.create_client() def tearDown(self): self.client.close() - if self.logging_enabled: - self.logger.removeHandler(self.logging_stream_handler) - diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 75bb69f2..592031c5 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -5,9 +5,9 @@ import copy from time import sleep -from riak import ConflictError, RiakClient, RiakBucket, RiakError +from riak import ConflictError, RiakBucket, RiakError from riak.resolver import default_resolver, last_written_resolver -from riak.tests import SKIP_RESOLVE, HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS +from riak.tests import SKIP_RESOLVE from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -35,14 +35,12 @@ testrun_props_bucket = 'propsbucket' def setUpModule(): - c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) + c = IntegrationTestBase.create_client() c.bucket(testrun_sibs_bucket).allow_mult = True c.close() def tearDownModule(): - c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) + c = IntegrationTestBase.create_client() c.bucket(testrun_sibs_bucket).clear_properties() c.bucket(testrun_props_bucket).clear_properties() c.close() diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index 3050de34..73c6cd47 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -1,8 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import print_function import platform -from riak import RiakClient -from riak.tests import SKIP_SEARCH, HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS +from riak.tests import SKIP_SEARCH from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': @@ -14,16 +13,14 @@ def setUpModule(): if not SKIP_SEARCH and not RUN_YZ: - 
c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) + c = IntegrationTestBase.create_client() b = c.bucket(testrun_search_bucket) b.enable_search() c.close() def tearDownModule(): if not SKIP_SEARCH and not RUN_YZ: - c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) + c = IntegrationTestBase.create_client() b = c.bucket(testrun_search_bucket) b.clear_properties() c.close() diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 34aa23b9..b39eb5bd 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -9,9 +9,8 @@ from riak.table import Table from riak.ts_object import TsObject from riak.transports.pbc.codec import RiakPbcCodec -from riak import RiakClient from riak.util import str_to_bytes -from riak.tests import SKIP_TIMESERIES, HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS +from riak.tests import SKIP_TIMESERIES from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': @@ -179,28 +178,39 @@ def test_decode_data(self): class TimeseriesTests(IntegrationTestBase, unittest.TestCase): @classmethod def setUpClass(cls): + super(TimeseriesTests, cls).setUpClass() cls.now = datetime.datetime.utcfromtimestamp(144379690) fiveMinsAgo = cls.now - fiveMins tenMinsAgo = fiveMinsAgo - fiveMins fifteenMinsAgo = tenMinsAgo - fiveMins twentyMinsAgo = fifteenMinsAgo - fiveMins - client = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) + client = cls.create_client() table = client.table(table_name) + # CREATE TABLE GeoCheckin ( + # geohash varchar not null, + # user varchar not null, + # time timestamp not null, + # weather varchar not null, + # temperature float, + # PRIMARY KEY((quantum(time, 15, m), user), time, user) + # ) rows = [ - [ 'hash1', 'user2', twentyMinsAgo, 'hurricane', None ], + [ 'hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3 ], [ 'hash1', 'user2', fifteenMinsAgo, 'rain', 79.0 ], - [ 'hash1', 'user2', fiveMinsAgo, 'wind', 50.5 ], + [ 'hash1', 'user2', fiveMinsAgo, 'wind', None ], [ 'hash1', 'user2', cls.now, 'snow', 20.1 ] ] ts_obj = table.new(rows) result = ts_obj.store() + if result != True: + raise AssertionError("expected success") + client.close() codec = RiakPbcCodec() cls.nowMsec = codec._unix_time_millis(cls.now) + cls.fiveMinsAgo = fiveMinsAgo cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) - client.close() # TODO RTS-367 ts_query test. 
Ensure that 'None' comes back, somehow def test_query_that_returns_no_data(self): @@ -210,7 +220,14 @@ def test_query_that_returns_no_data(self): self.assertEqual(len(ts_obj.rows), 0) def test_query_that_matches_some_data(self): - query = "select * from {} where time > {} and time < {} and user = 'user2'".format(table_name, self.tenMinsAgoMsec, self.nowMsec); + query = "select * from {} where time > {} and time < {} and user = 'user2'".format(table_name, self.tenMinsAgoMsec, self.nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) self.assertEqual(len(ts_obj.columns), 5) self.assertEqual(len(ts_obj.rows), 1) + + r0 = ts_obj.rows[0] + self.assertEqual(r0[0], 'hash1') + self.assertEqual(r0[1], 'user2') + self.assertEqual(r0[2], self.fiveMinsAgo) + self.assertEqual(r0[3], 'wind') + self.assertIsNone(r0[4]) diff --git a/riak/tests/yz_setup.py b/riak/tests/yz_setup.py index f7a88efa..80462988 100644 --- a/riak/tests/yz_setup.py +++ b/riak/tests/yz_setup.py @@ -1,12 +1,12 @@ import logging -from riak import RiakClient, RiakError -from riak.tests import RUN_YZ, PROTOCOL, HOST, PB_PORT, HTTP_PORT, SECURITY_CREDS +from riak import RiakError +from riak.tests import RUN_YZ +from riak.tests.base import IntegrationTestBase def yzSetUp(*yzdata): if RUN_YZ: - c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) + c = IntegrationTestBase.create_client() for yz in yzdata: logging.debug("yzSetUp: %s", yz) c.create_search_index(yz['index'], timeout=30000) @@ -27,8 +27,7 @@ def yzSetUp(*yzdata): def yzTearDown(c, *yzdata): if RUN_YZ: - c = RiakClient(protocol=PROTOCOL, host=HOST, http_port=HTTP_PORT, - pb_port=PB_PORT, credentials=SECURITY_CREDS) + c = IntegrationTestBase.create_client() for yz in yzdata: logging.debug("yzTearDown: %s", yz) if yz['btype'] is not None: diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index d1d7a3af..89fd69de 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -76,7 +76,12 @@ def __init__(self, **unused_args): super(RiakPbcCodec, self).__init__(**unused_args) def _unix_time_millis(self, dt): - return int((dt - epoch).total_seconds() * 1000.0) + td = dt - epoch + try: + return int(dt.total_seconds() * 1000.0) + except AttributeError: + # NB: python 2.6 must use this method + return int(((td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10**6) * 1000.0) def _datetime_from_unix_time_millis(self, ut): return datetime.datetime.utcfromtimestamp(ut / 1000.0) @@ -710,21 +715,30 @@ def _decode_timeseries_row(self, ts_row, ts_columns): row = [] for i, ts_cell in enumerate(ts_row.cells): ts_col = ts_columns[i] - if ts_col.type == riak_pb.TsColumnType.Value('BINARY'): + logging.debug("ts_cell: '%s', ts_col: '%d'", ts_cell, ts_col.type) + if ts_col.type == riak_pb.TsColumnType.Value('BINARY') and ts_cell.HasField('binary_value'): logging.debug("ts_cell.binary_value: '%s'", ts_cell.binary_value) row.append(ts_cell.binary_value) - elif ts_col.type == riak_pb.TsColumnType.Value('INTEGER'): + elif ts_col.type == riak_pb.TsColumnType.Value('INTEGER') and ts_cell.HasField('integer_value'): logging.debug("ts_cell.integer_value: '%s'", ts_cell.integer_value) row.append(ts_cell.integer_value) - elif ts_col.type == riak_pb.TsColumnType.Value('FLOAT'): - logging.debug("ts_cell.double_value: '%s'", ts_cell.double_value) - row.append(ts_cell.double_value) + elif ts_col.type == riak_pb.TsColumnType.Value('FLOAT') and ts_cell.HasField('double_value'): + value = None + if 
ts_cell.HasField('double_value'): + value = ts_cell.double_value + elif ts_cell.HasField('float_value'): + value = ts_cell.float_value + logging.debug("ts_cell double/float value: '%d'", value) + row.append(value) elif ts_col.type == riak_pb.TsColumnType.Value('TIMESTAMP'): - dt = self._datetime_from_unix_time_millis(ts_cell.timestamp_value) - logging.debug("ts_cell.timestamp_value: '%s', datetime: '%s'", - ts_cell.timestamp_value, dt) + dt = None + if ts_cell.HasField('timestamp_value'): + dt = self._datetime_from_unix_time_millis(ts_cell.timestamp_value) + elif ts_cell.HasField('integer_value'): + dt = self._datetime_from_unix_time_millis(ts_cell.integer_value) + logging.debug("ts_cell datetime: '%s'", dt) row.append(dt) - elif ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN'): + elif ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN') and ts_cell.HasField('boolean_value'): logging.debug("ts_cell.boolean_value: '%s'", ts_cell.boolean_value) row.append(ts_cell.boolean_value) elif ts_col.type == riak_pb.TsColumnType.Value('SET'): @@ -734,7 +748,7 @@ def _decode_timeseries_row(self, ts_row, ts_columns): sj = bytes_to_str(sv) s.append(json.loads(sj)) row.append(s) - elif ts_col.type == riak_pb.TsColumnType.Value('MAP'): + elif ts_col.type == riak_pb.TsColumnType.Value('MAP') and ts_cell.HasField('map_value'): logging.debug("ts_cell.map_value: '%s'", ts_cell.map_value) mj = bytes_to_str(ts_cell.map_value) row.append(json.loads(mj)) diff --git a/setup.py b/setup.py index 3e2b84d1..d5ba1bd9 100755 --- a/setup.py +++ b/setup.py @@ -1,6 +1,7 @@ #!/usr/bin/env python import os import sys +from multiprocessing import util from setuptools import setup, find_packages from version import get_version from commands import preconfigure, configure, create_bucket_types, \ From 867ff6310e6422082ae8cd7460c143840e823f94 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 27 Oct 2015 09:47:21 -0700 Subject: [PATCH 031/324] Remove two TODOs --- riak/tests/test_timeseries.py | 1 - riak/transports/http/__init__.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index b39eb5bd..995eeafa 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -212,7 +212,6 @@ def setUpClass(cls): cls.fiveMinsAgo = fiveMinsAgo cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) - # TODO RTS-367 ts_query test. 
Ensure that 'None' comes back, somehow def test_query_that_returns_no_data(self): query = "select * from {} where time > 0 and time < 10 and user = 'user1'".format(table_name) ts_obj = self.client.ts_query('GeoCheckin', query) diff --git a/riak/transports/http/__init__.py b/riak/transports/http/__init__.py index c4a19a96..4acb06fd 100644 --- a/riak/transports/http/__init__.py +++ b/riak/transports/http/__init__.py @@ -87,7 +87,7 @@ def __init__(self, :type timeout: int """ if PY2: - # TODO LRB RTS-367 it appears that pkey_file / cert_file are never set + # NB: it appears that pkey_file / cert_file are never set # in riak/transports/http/connection.py#_connect() method pkf = pkey_file if pkf is None and credentials is not None: From c268e6aa41e489ba6085c94b3cca4b36b2387838 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 27 Oct 2015 15:51:33 -0700 Subject: [PATCH 032/324] Add TsGetReq / TsGetResp support and tests --- docs/client.rst | 1 + riak/client/operations.py | 21 +++++++ riak/table.py | 12 ++++ riak/tests/test_timeseries.py | 53 ++++++++++++---- riak/transports/pbc/codec.py | 102 ++++++++++++++++++------------- riak/transports/pbc/transport.py | 18 +++++- 6 files changed, 149 insertions(+), 58 deletions(-) diff --git a/docs/client.rst b/docs/client.rst index a2a3b135..e932d206 100644 --- a/docs/client.rst +++ b/docs/client.rst @@ -127,6 +127,7 @@ Key-level Operations Timeseries Operations -------------------- +.. automethod:: RiakClient.ts_get .. automethod:: RiakClient.ts_put .. automethod:: RiakClient.ts_query diff --git a/riak/client/operations.py b/riak/client/operations.py index f1881fcf..573e41c8 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -536,6 +536,27 @@ def put(self, transport, robj, w=None, dw=None, pw=None, return_body=None, if_none_match=if_none_match, timeout=timeout) + @retryable + def ts_get(self, transport, table, key): + """ + ts_get(table, key) + + Retrieve timeseries value by key + + .. note:: This request is automatically retried :attr:`retries` + times if it fails due to network error. + + :param table: The timeseries table. + :type table: string or :class:`Table ` + :param key: The timeseries value's key. + :type key: list or dict + :rtype: :class:`TsObject ` + """ + t = table + if isinstance(t, str): + t = Table(self, table) + return transport.ts_get(t, key) + @retryable def ts_put(self, transport, tsobj): """ diff --git a/riak/table.py b/riak/table.py index 10da507c..c6f47f12 100644 --- a/riak/table.py +++ b/riak/table.py @@ -48,6 +48,18 @@ def new(self, rows, columns=None): return TsObject(self._client, self, rows, columns) + def get(self, table, key): + """ + Gets a value from a timeseries table. + + :param table: The timeseries table. + :type table: string or :class:`Table ` + :param key: The timeseries value's key. + :type key: list or dict + :rtype: :class:`TsObject ` + """ + return self.client.ts_get(self, table, key) + def query(self, query, interpolations=None): """ Queries a timeseries table. 
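A hedged sketch of the new get path added in this commit, mirroring the dict and list key forms exercised by the tests below; the table name, user and timestamp values are illustrative and assume the GeoCheckin table exists:

.. code-block:: python

    import datetime
    from riak import RiakClient

    client = RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)
    point_in_time = datetime.datetime(2015, 1, 1, 12, 0, 0)

    # dict form: the codec encodes key.values() as the row key cells
    ts_obj = client.ts_get('GeoCheckin', {'user': 'user2',
                                          'time': point_in_time})

    # list form: cells are encoded in the order given
    ts_obj = client.ts_get('GeoCheckin', [point_in_time, 'user2'])

    print(ts_obj.columns)  # list of (name, type) tuples
    print(ts_obj.rows)     # one row if the key exists, otherwise empty
    client.close()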
diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 995eeafa..a2e6fc87 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -9,7 +9,7 @@ from riak.table import Table from riak.ts_object import TsObject from riak.transports.pbc.codec import RiakPbcCodec -from riak.util import str_to_bytes +from riak.util import str_to_bytes, bytes_to_str from riak.tests import SKIP_TIMESERIES from riak.tests.base import IntegrationTestBase @@ -48,12 +48,26 @@ def setUp(self): ] self.table = Table(None, 'test-table') - def test_encode_data(self): + def test_encode_data_for_get(self): + key = { + 'user' : 'user2', + 'time' : ts0 + } + ts_get_req = riak_pb.TsGetReq() + self.c._encode_timeseries_get(self.table, key, ts_get_req) + + self.assertEqual(self.table.name, bytes_to_str(ts_get_req.table)) + self.assertEqual(len(key.values()), len(ts_get_req.key)) + self.assertEqual('user2', bytes_to_str(ts_get_req.key[0].binary_value)) + self.assertEqual(self.ts0ms, ts_get_req.key[1].timestamp_value) + + def test_encode_data_for_put(self): tsobj = TsObject(None, self.table, self.rows, None) ts_put_req = riak_pb.TsPutReq() - self.c._encode_timeseries(tsobj, ts_put_req) + self.c._encode_timeseries_put(tsobj, ts_put_req) # NB: expected, actual + self.assertEqual(self.table.name, bytes_to_str(ts_put_req.table)) self.assertEqual(len(self.rows), len(ts_put_req.rows)) r0 = ts_put_req.rows[0] @@ -74,7 +88,7 @@ def test_encode_data(self): self.assertEqual(r1.cells[5].set_value, sj) self.assertEqual(r1.cells[6].map_value, mj) - def test_decode_data(self): + def test_decode_data_from_query(self): tqr = riak_pb.TsQueryResp() c0 = tqr.columns.add() @@ -212,6 +226,16 @@ def setUpClass(cls): cls.fiveMinsAgo = fiveMinsAgo cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) + def validate_data(self, ts_obj): + self.assertEqual(len(ts_obj.columns), 5) + self.assertEqual(len(ts_obj.rows), 1) + row = ts_obj.rows[0] + self.assertEqual(row[0], 'hash1') + self.assertEqual(row[1], 'user2') + self.assertEqual(row[2], self.fiveMinsAgo) + self.assertEqual(row[3], 'wind') + self.assertIsNone(row[4]) + def test_query_that_returns_no_data(self): query = "select * from {} where time > 0 and time < 10 and user = 'user1'".format(table_name) ts_obj = self.client.ts_query('GeoCheckin', query) @@ -221,12 +245,17 @@ def test_query_that_returns_no_data(self): def test_query_that_matches_some_data(self): query = "select * from {} where time > {} and time < {} and user = 'user2'".format(table_name, self.tenMinsAgoMsec, self.nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) - self.assertEqual(len(ts_obj.columns), 5) - self.assertEqual(len(ts_obj.rows), 1) + self.validate_data(ts_obj) + + def test_get_single_value_using_dict(self): + key = { + 'user' : 'user2', + 'time' : self.fiveMinsAgo + } + ts_obj = self.client.ts_get('GeoCheckin', key) + self.validate_data(ts_obj) - r0 = ts_obj.rows[0] - self.assertEqual(r0[0], 'hash1') - self.assertEqual(r0[1], 'user2') - self.assertEqual(r0[2], self.fiveMinsAgo) - self.assertEqual(r0[3], 'wind') - self.assertIsNone(r0[4]) + def test_get_single_value_using_array(self): + key = [ self.fiveMinsAgo, 'user2' ] + ts_obj = self.client.ts_get('GeoCheckin', key) + self.validate_data(ts_obj) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 89fd69de..41f03cbb 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -623,7 +623,55 @@ def _encode_map_update(self, dtype, msg, op): else: msg.flag_op = 
riak_pb.MapUpdate.DISABLE - def _encode_timeseries(self, tsobj, ts_put_req): + def _encode_to_ts_cell(self, cell, ts_cell): + if cell is not None: + if isinstance(cell, bytes) or isinstance(cell, bytearray): + logging.debug("cell -> binary_value: '%s'", cell) + ts_cell.binary_value = cell + elif isinstance(cell, datetime.datetime): + ts_cell.timestamp_value = self._unix_time_millis(cell) + logging.debug("cell -> timestamp: '%s', timestamp_value '%d'", + cell, ts_cell.timestamp_value) + elif isinstance(cell, list): + for c in cell: + j = json.dumps(c) + logging.debug("cell -> set_value: '%s'", j) + ts_cell.set_value.append(str_to_bytes(j)) + elif isinstance(cell, bool): + logging.debug("cell -> boolean: '%s'", cell) + ts_cell.boolean_value = cell + elif isinstance(cell, str): + logging.debug("cell -> str: '%s'", cell) + ts_cell.binary_value = str_to_bytes(cell) + elif isinstance(cell, int) or isinstance(cell, long): + logging.debug("cell -> int/long: '%s'", cell) + ts_cell.integer_value = cell + elif isinstance(cell, float): + logging.debug("cell -> float: '%s'", cell) + ts_cell.double_value = cell + elif isinstance(cell, dict): + logging.debug("cell -> dict: '%s'", cell) + j = json.dumps(cell) + ts_cell.map_value = str_to_bytes(j) + else: + t = type(cell) + raise RiakError("can't serialize type '{}', value '{}'".format(t, cell)) + + def _encode_timeseries_get(self, table, key, req): + key_vals = None + if isinstance(key, list): + key_vals = key + elif isinstance(key, dict): + key_vals = key.values() + else: + raise ValueError("key must be a list or dict") + + req.table = str_to_bytes(table.name) + for cell in key_vals: + ts_cell = req.key.add() + self._encode_to_ts_cell(cell, ts_cell) + + def _encode_timeseries_put(self, tsobj, ts_put_req): """ Fills an TsPutReq message with the appropriate data and metadata from a TsObject. 
@@ -634,73 +682,41 @@ def _encode_timeseries(self, tsobj, ts_put_req): :type ts_put_req: riak_pb.TsPutReq """ ts_put_req.table = str_to_bytes(tsobj.table.name) + if tsobj.columns: raise NotImplementedError("columns are not implemented yet") + if tsobj.rows and isinstance(tsobj.rows, list): for row in tsobj.rows: tsr = ts_put_req.rows.add() # NB: type riak_pb.TsRow if not isinstance(row, list): - raise RiakError("TsObject row must be a list of values") + raise ValueError("TsObject row must be a list of values") for cell in row: tsc = tsr.cells.add() # NB: type riak_pb.TsCell - if cell is not None: - if isinstance(cell, bytes) or isinstance(cell, bytearray): - logging.debug("cell -> binary_value: '%s'", cell) - tsc.binary_value = cell - elif isinstance(cell, datetime.datetime): - tsc.timestamp_value = self._unix_time_millis(cell) - logging.debug("cell -> timestamp: '%s', timestamp_value '%d'", - cell, tsc.timestamp_value) - elif isinstance(cell, list): - for c in cell: - j = json.dumps(c) - logging.debug("cell -> set_value: '%s'", j) - tsc.set_value.append(str_to_bytes(j)) - elif isinstance(cell, bool): - logging.debug("cell -> boolean: '%s'", cell) - tsc.boolean_value = cell - elif isinstance(cell, str): - logging.debug("cell -> str: '%s'", cell) - tsc.binary_value = str_to_bytes(cell) - elif isinstance(cell, int) or isinstance(cell, long): - logging.debug("cell -> int/long: '%s'", cell) - tsc.integer_value = cell - elif isinstance(cell, float): - logging.debug("cell -> float: '%s'", cell) - tsc.double_value = cell - elif isinstance(cell, dict): - logging.debug("cell -> dict: '%s'", cell) - j = json.dumps(cell) - tsc.map_value = str_to_bytes(j) - else: - t = type(cell) - raise RiakError("can't serialize type '{}', value '{}'".format(t, cell)) + self._encode_to_ts_cell(cell, tsc) else: raise RiakError("TsObject requires a list of rows") - def _decode_timeseries(self, ts_query_rsp, tsobj): + def _decode_timeseries(self, ts_rsp, tsobj): """ Fills an TsObject with the appropriate data and metadata from a TsQueryResp. 
- :param ts_query_rsp: the protobuf message from which to process data - :type ts_query_rsp: riak_pb.TsQueryRsp + :param ts_rsp: the protobuf message from which to process data + :type ts_rsp: riak_pb.TsQueryRsp or riak_pb.TsGetResp :param tsobj: a TsObject :type tsobj: TsObject """ - if not isinstance(ts_query_rsp, riak_pb.TsQueryResp): - raise RiakError("expected riak_pb.TsQueryResp") - if tsobj.columns is not None: - for ts_col in ts_query_rsp.columns: + for ts_col in ts_rsp.columns: col_name = bytes_to_str(ts_col.name) col_type = ts_col.type col = (col_name, col_type) logging.debug("column: '%s'", col) tsobj.columns.append(col) - for ts_row in ts_query_rsp.rows: - tsobj.rows.append(self._decode_timeseries_row(ts_row, ts_query_rsp.columns)) + for ts_row in ts_rsp.rows: + tsobj.rows.append(self._decode_timeseries_row(ts_row, ts_rsp.columns)) def _decode_timeseries_row(self, ts_row, ts_columns): """ diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 8a81b26c..6bbc0105 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -63,7 +63,9 @@ MSG_CODE_TS_PUT_REQ, MSG_CODE_TS_PUT_RESP, MSG_CODE_TS_QUERY_REQ, - MSG_CODE_TS_QUERY_RESP + MSG_CODE_TS_QUERY_RESP, + MSG_CODE_TS_GET_REQ, + MSG_CODE_TS_GET_RESP ) @@ -217,10 +219,20 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, return robj + def ts_get(self, table, key): + req = riak_pb.TsGetReq() + self._encode_timeseries_get(table, key, req) + + msg_code, ts_get_resp = self._request(MSG_CODE_TS_GET_REQ, req, + MSG_CODE_TS_GET_RESP) + + tsobj = TsObject(self._client, table, [], None) + self._decode_timeseries(ts_get_resp, tsobj) + return tsobj + def ts_put(self, tsobj): req = riak_pb.TsPutReq() - - self._encode_timeseries(tsobj, req) + self._encode_timeseries_put(tsobj, req) msg_code, resp = self._request(MSG_CODE_TS_PUT_REQ, req, MSG_CODE_TS_PUT_RESP) From aece9f32f796878c233200cdff0efa2e541276ef Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 27 Oct 2015 16:09:27 -0700 Subject: [PATCH 033/324] Add TsDelReq / TsDelResp support and tests --- docs/client.rst | 1 + riak/client/operations.py | 21 ++++++++++++++++++ riak/table.py | 12 ++++++++++ riak/tests/test_timeseries.py | 38 +++++++++++++++++++++++--------- riak/transports/pbc/codec.py | 2 +- riak/transports/pbc/transport.py | 22 ++++++++++++++---- 6 files changed, 81 insertions(+), 15 deletions(-) diff --git a/docs/client.rst b/docs/client.rst index e932d206..ae2f8e54 100644 --- a/docs/client.rst +++ b/docs/client.rst @@ -129,6 +129,7 @@ Timeseries Operations .. automethod:: RiakClient.ts_get .. automethod:: RiakClient.ts_put +.. automethod:: RiakClient.ts_delete .. automethod:: RiakClient.ts_query ---------------- diff --git a/riak/client/operations.py b/riak/client/operations.py index 573e41c8..4f2d20a7 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -573,6 +573,27 @@ def ts_put(self, transport, tsobj): """ return transport.ts_put(tsobj) + @retryable + def ts_delete(self, transport, table, key): + """ + ts_delete(table, key) + + Delete timeseries value by key + + .. note:: This request is automatically retried :attr:`retries` + times if it fails due to network error. + + :param table: The timeseries table. + :type table: string or :class:`Table ` + :param key: The timeseries value's key. 
+ :type key: list or dict + :rtype: boolean + """ + t = table + if isinstance(t, str): + t = Table(self, table) + return transport.ts_delete(t, key) + @retryable def ts_query(self, transport, table, query, interpolations=None): """ diff --git a/riak/table.py b/riak/table.py index c6f47f12..6635cd0a 100644 --- a/riak/table.py +++ b/riak/table.py @@ -60,6 +60,18 @@ def get(self, table, key): """ return self.client.ts_get(self, table, key) + def delete(self, table, key): + """ + Deletes a value from a timeseries table. + + :param table: The timeseries table. + :type table: string or :class:`Table ` + :param key: The timeseries value's key. + :type key: list or dict + :rtype: boolean + """ + return self.client.ts_delete(self, table, key) + def query(self, query, interpolations=None): """ Queries a timeseries table. diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index a2e6fc87..971d2355 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -46,20 +46,27 @@ def setUp(self): [ bd0, 0, 1.2, ts0, True, s, m ], [ bd1, 3, 4.5, ts1, False, s, m ] ] - self.table = Table(None, 'test-table') - - def test_encode_data_for_get(self): - key = { + self.test_key = { 'user' : 'user2', 'time' : ts0 } - ts_get_req = riak_pb.TsGetReq() - self.c._encode_timeseries_get(self.table, key, ts_get_req) + self.table = Table(None, 'test-table') + + def validate_keyreq(self, req): + self.assertEqual(self.table.name, bytes_to_str(req.table)) + self.assertEqual(len(self.test_key.values()), len(req.key)) + self.assertEqual('user2', bytes_to_str(req.key[0].binary_value)) + self.assertEqual(self.ts0ms, req.key[1].timestamp_value) - self.assertEqual(self.table.name, bytes_to_str(ts_get_req.table)) - self.assertEqual(len(key.values()), len(ts_get_req.key)) - self.assertEqual('user2', bytes_to_str(ts_get_req.key[0].binary_value)) - self.assertEqual(self.ts0ms, ts_get_req.key[1].timestamp_value) + def test_encode_data_for_get(self): + req = riak_pb.TsGetReq() + self.c._encode_timeseries_keyreq(self.table, self.test_key, req) + self.validate_keyreq(req) + + def test_encode_data_for_delete(self): + req = riak_pb.TsDelReq() + self.c._encode_timeseries_keyreq(self.table, self.test_key, req) + self.validate_keyreq(req) def test_encode_data_for_put(self): tsobj = TsObject(None, self.table, self.rows, None) @@ -224,6 +231,7 @@ def setUpClass(cls): codec = RiakPbcCodec() cls.nowMsec = codec._unix_time_millis(cls.now) cls.fiveMinsAgo = fiveMinsAgo + cls.twentyMinsAgo = twentyMinsAgo cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) def validate_data(self, ts_obj): @@ -259,3 +267,13 @@ def test_get_single_value_using_array(self): key = [ self.fiveMinsAgo, 'user2' ] ts_obj = self.client.ts_get('GeoCheckin', key) self.validate_data(ts_obj) + + def test_delete_single_value_using_dict(self): + key = { + 'user' : 'user2', + 'time' : self.twentyMinsAgo + } + rslt = self.client.ts_delete('GeoCheckin', key) + self.assertTrue(rslt) + ts_obj = self.client.ts_get('GeoCheckin', key) + self.assertIsNone(ts_obj) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 41f03cbb..be85719e 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -657,7 +657,7 @@ def _encode_to_ts_cell(self, cell, ts_cell): t = type(cell) raise RiakError("can't serialize type '{}', value '{}'".format(t, cell)) - def _encode_timeseries_get(self, table, key, req): + def _encode_timeseries_keyreq(self, table, key, req): key_vals = None if isinstance(key, list): key_vals = 
key diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 6bbc0105..4d642fce 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -65,7 +65,9 @@ MSG_CODE_TS_QUERY_REQ, MSG_CODE_TS_QUERY_RESP, MSG_CODE_TS_GET_REQ, - MSG_CODE_TS_GET_RESP + MSG_CODE_TS_GET_RESP, + MSG_CODE_TS_DEL_REQ, + MSG_CODE_TS_DEL_RESP ) @@ -221,10 +223,10 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, def ts_get(self, table, key): req = riak_pb.TsGetReq() - self._encode_timeseries_get(table, key, req) + self._encode_timeseries_keyreq(table, key, req) msg_code, ts_get_resp = self._request(MSG_CODE_TS_GET_REQ, req, - MSG_CODE_TS_GET_RESP) + MSG_CODE_TS_GET_RESP) tsobj = TsObject(self._client, table, [], None) self._decode_timeseries(ts_get_resp, tsobj) @@ -239,7 +241,19 @@ def ts_put(self, tsobj): if resp is not None: return True - elif not robj.key: + else: + raise RiakError("missing response object") + + def ts_delete(self, table, key): + req = riak_pb.TsDelReq() + self._encode_timeseries_keyreq(table, key, req) + + msg_code, ts_del_resp = self._request(MSG_CODE_TS_DEL_REQ, req, + MSG_CODE_TS_DEL_RESP) + + if ts_del_resp is not None: + return True + else: raise RiakError("missing response object") def ts_query(self, table, query, interpolations=None): From 429b5fdfb756d65253dce5ca20faa65a63440eb6 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 27 Oct 2015 16:38:53 -0700 Subject: [PATCH 034/324] Remove support for sets and maps in TS data --- riak/tests/test_timeseries.py | 42 ++--------------------------------- riak/transports/pbc/codec.py | 21 ------------------ 2 files changed, 2 insertions(+), 61 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 971d2355..dbc1674d 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -27,24 +27,14 @@ ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) ts1 = ts0 + fiveMins -s = [ 'foo', 'bar', 'baz' ] -m = { - 'foo': 'foo', - 'bar': 'bar', - 'baz': 'baz', - 'set': s -} -sj = ['"foo"', '"bar"', '"baz"'] -mj = '{"baz": "baz", "set": ["foo", "bar", "baz"], "foo": "foo", "bar": "bar"}' - class TimeseriesUnitTests(unittest.TestCase): def setUp(self): self.c = RiakPbcCodec() self.ts0ms = self.c._unix_time_millis(ts0) self.ts1ms = self.c._unix_time_millis(ts1) self.rows = [ - [ bd0, 0, 1.2, ts0, True, s, m ], - [ bd1, 3, 4.5, ts1, False, s, m ] + [ bd0, 0, 1.2, ts0, True ], + [ bd1, 3, 4.5, ts1, False ] ] self.test_key = { 'user' : 'user2', @@ -83,8 +73,6 @@ def test_encode_data_for_put(self): self.assertEqual(r0.cells[2].double_value, self.rows[0][2]) self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) - self.assertEqual(r0.cells[5].set_value, sj) - self.assertEqual(r0.cells[6].map_value, mj) r1 = ts_put_req.rows[1] self.assertEqual(r1.cells[0].binary_value, self.rows[1][0]) @@ -92,8 +80,6 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) - self.assertEqual(r1.cells[5].set_value, sj) - self.assertEqual(r1.cells[6].map_value, mj) def test_decode_data_from_query(self): tqr = riak_pb.TsQueryResp() @@ -113,12 +99,6 @@ def test_decode_data_from_query(self): c4 = tqr.columns.add() c4.name = str_to_bytes('col_boolean') c4.type = riak_pb.TsColumnType.Value('BOOLEAN') - c5 = tqr.columns.add() - c5.name = 
str_to_bytes('col_set') - c5.type = riak_pb.TsColumnType.Value('SET') - c6 = tqr.columns.add() - c6.name = str_to_bytes('col_map') - c6.type = riak_pb.TsColumnType.Value('MAP') r0 = tqr.rows.add() r0c0 = r0.cells.add() @@ -131,11 +111,6 @@ def test_decode_data_from_query(self): r0c3.timestamp_value = self.ts0ms r0c4 = r0.cells.add() r0c4.boolean_value = self.rows[0][4] - r0c5 = r0.cells.add() - for j in sj: - r0c5.set_value.append(j) - r0c6 = r0.cells.add() - r0c6.map_value = str_to_bytes(mj) r1 = tqr.rows.add() r1c0 = r1.cells.add() @@ -148,11 +123,6 @@ def test_decode_data_from_query(self): r1c3.timestamp_value = self.ts1ms r1c4 = r1.cells.add() r1c4.boolean_value = self.rows[1][4] - r1c5 = r1.cells.add() - for j in sj: - r1c5.set_value.append(j) - r1c6 = r1.cells.add() - r1c6.map_value = str_to_bytes(mj) tsobj = TsObject(None, self.table, [], []) c = RiakPbcCodec() @@ -172,10 +142,6 @@ def test_decode_data_from_query(self): self.assertEqual(c[3][1], riak_pb.TsColumnType.Value('TIMESTAMP')) self.assertEqual(c[4][0], 'col_boolean') self.assertEqual(c[4][1], riak_pb.TsColumnType.Value('BOOLEAN')) - self.assertEqual(c[5][0], 'col_set') - self.assertEqual(c[5][1], riak_pb.TsColumnType.Value('SET')) - self.assertEqual(c[6][0], 'col_map') - self.assertEqual(c[6][1], riak_pb.TsColumnType.Value('MAP')) r0 = tsobj.rows[0] self.assertEqual(r0[0], self.rows[0][0]) @@ -183,8 +149,6 @@ def test_decode_data_from_query(self): self.assertEqual(r0[2], self.rows[0][2]) self.assertEqual(r0[3], ts0) self.assertEqual(r0[4], self.rows[0][4]) - self.assertEqual(r0[5], s) - self.assertEqual(r0[6], m) r1 = tsobj.rows[1] self.assertEqual(r1[0], self.rows[1][0]) @@ -192,8 +156,6 @@ def test_decode_data_from_query(self): self.assertEqual(r1[2], self.rows[1][2]) self.assertEqual(r1[3], ts1) self.assertEqual(r1[4], self.rows[1][4]) - self.assertEqual(r1[5], s) - self.assertEqual(r1[6], m) @unittest.skipIf(SKIP_TIMESERIES == 1, "skip requested for timeseries tests") class TimeseriesTests(IntegrationTestBase, unittest.TestCase): diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index be85719e..f2331f31 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -1,5 +1,4 @@ import datetime -import json import logging import riak_pb @@ -632,11 +631,6 @@ def _encode_to_ts_cell(self, cell, ts_cell): ts_cell.timestamp_value = self._unix_time_millis(cell) logging.debug("cell -> timestamp: '%s', timestamp_value '%d'", cell, ts_cell.timestamp_value) - elif isinstance(cell, list): - for c in cell: - j = json.dumps(c) - logging.debug("cell -> set_value: '%s'", j) - ts_cell.set_value.append(str_to_bytes(j)) elif isinstance(cell, bool): logging.debug("cell -> boolean: '%s'", cell) ts_cell.boolean_value = cell @@ -649,10 +643,6 @@ def _encode_to_ts_cell(self, cell, ts_cell): elif isinstance(cell, float): logging.debug("cell -> float: '%s'", cell) ts_cell.double_value = cell - elif isinstance(cell, dict): - logging.debug("cell -> dict: '%s'", cell) - j = json.dumps(cell) - ts_cell.map_value = str_to_bytes(j) else: t = type(cell) raise RiakError("can't serialize type '{}', value '{}'".format(t, cell)) @@ -757,17 +747,6 @@ def _decode_timeseries_row(self, ts_row, ts_columns): elif ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN') and ts_cell.HasField('boolean_value'): logging.debug("ts_cell.boolean_value: '%s'", ts_cell.boolean_value) row.append(ts_cell.boolean_value) - elif ts_col.type == riak_pb.TsColumnType.Value('SET'): - logging.debug("ts_cell.set_value: '%s'", ts_cell.set_value) - s 
= [] - for sv in ts_cell.set_value: - sj = bytes_to_str(sv) - s.append(json.loads(sj)) - row.append(s) - elif ts_col.type == riak_pb.TsColumnType.Value('MAP') and ts_cell.HasField('map_value'): - logging.debug("ts_cell.map_value: '%s'", ts_cell.map_value) - mj = bytes_to_str(ts_cell.map_value) - row.append(json.loads(mj)) else: row.append(None) return row From 9cba4512c76d962d4fdbf2983e52a2daa569e9ef Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Wed, 28 Oct 2015 19:15:23 +0000 Subject: [PATCH 035/324] - Rename all SKIP_ variables to RUN_ to be consistent - Clean up all PEP8 and pyflakes warnings --- README.rst | 24 ++++++++++++----- buildbot/Makefile | 8 +++--- commands.py | 21 ++++++++++++--- riak/table.py | 4 ++- riak/tests/__init__.py | 18 +++++++------ riak/tests/base.py | 11 +++++++- riak/tests/test_2i.py | 36 ++++++++++++------------- riak/tests/test_btypes.py | 12 ++------- riak/tests/test_client.py | 5 ++-- riak/tests/test_datatypes.py | 23 ++++------------ riak/tests/test_kv.py | 10 +++---- riak/tests/test_mapreduce.py | 8 ++++-- riak/tests/test_pool.py | 4 +-- riak/tests/test_search.py | 25 +++++++---------- riak/tests/test_security.py | 37 ++++++++++++------------- riak/tests/test_timeseries.py | 44 ++++++++++++++++-------------- riak/tests/test_yokozuna.py | 23 +++++++--------- riak/tests/yz_setup.py | 2 ++ riak/transports/http/__init__.py | 1 - riak/transports/pbc/codec.py | 46 +++++++++++++++++++++----------- riak/ts_object.py | 1 + setup.py | 1 - 22 files changed, 197 insertions(+), 167 deletions(-) diff --git a/README.rst b/README.rst index 94b429b6..b2cb0b65 100644 --- a/README.rst +++ b/README.rst @@ -139,6 +139,8 @@ If your Riak server isn't running on localhost or you have built a Riak devrel from source, use the environment variables ``RIAK_TEST_HOST``, ``RIAK_TEST_HTTP_PORT`` and ``RIAK_TEST_PB_PORT`` to specify where to find the Riak server. +``RIAK_TEST_PROTOCOL`` to specify which protocol to test. Can be +either ``pbc`` or ``http``. Some of the connection tests need port numbers that are NOT in use. If ports 1023 and 1022 are in use on your test system, set the @@ -150,7 +152,7 @@ Testing Search If you don't have `Riak Search `_ enabled, you -can set the ``SKIP_SEARCH`` environment variable to 1 skip those +can set the ``RUN_SEARCH`` environment variable to 0 skip those tests. If you don't have `Search 2.0 `_ @@ -176,10 +178,18 @@ You may alternately add these lines to `setup.cfg` [create_bucket_types] riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin -To skip the bucket-type tests, set the ``SKIP_BTYPES`` environment -variable to ``1``. +To skip the bucket-type tests, set the ``RUN_BTYPES`` environment +variable to ``0``. + +Testing Data Types (Riak 2+) +---------------------------- + +To test data types, you must set up bucket types (see above.) -Testing Timeseries (Riak 2+) +To skip the data type tests, set the ``RUN_DATATYPES`` environment +variable to ``0``. + +Testing Timeseries (Riak 2.1+) ------------------------------ To test timeseries data, you must run the ``setup_timeseries`` command, @@ -198,15 +208,15 @@ You may alternately add these lines to `setup.cfg` [setup_timeseries] riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin -To enable the timeseries tests, set the ``SKIP_TIMESERIES`` environment -variable to ``0``. +To enable the timeseries tests, set the ``RUN_TIMESERIES`` environment +variable to ``1``. 
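Internally every ``RUN_`` flag is wired the same way: the test package reads the environment variable with a default, and the affected test classes are decorated with ``skipUnless``. A minimal sketch of that convention (the class body here is illustrative filler, not code from this repository)::

    import os
    import unittest

    # Off by default; export RUN_TIMESERIES=1 to opt in.
    RUN_TIMESERIES = int(os.environ.get('RUN_TIMESERIES', '0'))

    @unittest.skipUnless(RUN_TIMESERIES, 'RUN_TIMESERIES is 0')
    class TimeseriesTests(unittest.TestCase):
        def test_roundtrip(self):
            self.assertTrue(True)  # placeholder body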
Testing Secondary Indexes ------------------------- To test `Secondary Indexes `_, -the ``SKIP_INDEX`` environment variable must be set to 0 (or 1 to skip them.) +the ``RUN_INDEXES`` environment variable must be set to 1 (or 0 to skip them.) Testing Security (Riak 2+) -------------------------- diff --git a/buildbot/Makefile b/buildbot/Makefile index 1209ac84..890e3984 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -26,14 +26,14 @@ test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @../setup.py disable_security --riak-admin=${RIAK_ADMIN} - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 SKIP_DATATYPES=0 SKIP_INDEXES=0 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 SKIP_DATATYPES=0 SKIP_INDEXES=0 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. test_security: @echo "Testing Riak Python Client (with security)" @../setup.py enable_security --riak-admin=${RIAK_ADMIN} - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. # These are required to actually build all the Python versions: # * pip install tox diff --git a/commands.py b/commands.py index 0f678c02..cd178e8b 100644 --- a/commands.py +++ b/commands.py @@ -74,6 +74,7 @@ def check_output(*popenargs, **kwargs): except ImportError: import json + class bucket_type_commands: def initialize_options(self): self.riak_admin = None @@ -143,6 +144,7 @@ def _btype_command(self, *args): cmd.extend(args) return cmd + class create_bucket_types(bucket_type_commands, Command): """ Creates bucket-types appropriate for testing. By default this will create: @@ -175,9 +177,14 @@ class create_bucket_types(bucket_type_commands, Command): class setup_timeseries(bucket_type_commands, Command): """ - Creates bucket-types appropriate for timeseries. By default this will create: - - * `GeoCheckin` with ``{"props": {"n_val": 3, "table_def": "CREATE TABLE GeoCheckin (geohash varchar not null, user varchar not null, time timestamp not null, weather varchar not null, temperature float, PRIMARY KEY((quantum(time, 15, m),user), time, user))"}}`` + Creates bucket-types appropriate for timeseries. 
+ By default this will create: + + * `GeoCheckin` with ``{"props": {"n_val": 3, + "table_def": "CREATE TABLE GeoCheckin (geohash varchar not null, + user varchar not null, time timestamp not null, + weather varchar not null, temperature float, + PRIMARY KEY((quantum(time, 15, m),user), time, user))"}}`` """ description = "create bucket-types used in timeseries tests" @@ -187,7 +194,13 @@ class setup_timeseries(bucket_type_commands, Command): ] _props = { - 'GeoCheckin': {'n_val': 3, 'table_def': 'CREATE TABLE GeoCheckin (geohash varchar not null, user varchar not null, time timestamp not null, weather varchar not null, temperature float, PRIMARY KEY((quantum(time, 15, m),user), time, user))'}, + 'GeoCheckin': { + 'n_val': 3, + 'table_def': + 'CREATE TABLE GeoCheckin (geohash varchar not null, ' + + 'user varchar not null, time timestamp not null, ' + + 'weather varchar not null, temperature float, ' + + 'PRIMARY KEY((quantum(time, 15, m),user), time, user))'} } diff --git a/riak/table.py b/riak/table.py index 6635cd0a..ea30752b 100644 --- a/riak/table.py +++ b/riak/table.py @@ -1,5 +1,6 @@ from six import string_types, PY2 + class Table(object): """ The ``Table`` object allows you to access properties on a Riak @@ -36,7 +37,8 @@ def __repr__(self): def new(self, rows, columns=None): """ - A shortcut for manually instantiating a new :class:`~riak.ts_object.TsObject` + A shortcut for manually instantiating a new + :class:`~riak.ts_object.TsObject` :param rows: An list of lists with timeseries data :type rows: list diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index a7bb4742..f5aa6866 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -32,16 +32,17 @@ DUMMY_HTTP_PORT = int(os.environ.get('DUMMY_HTTP_PORT', '1023')) DUMMY_PB_PORT = int(os.environ.get('DUMMY_PB_PORT', '1022')) -SKIP_SEARCH = int(os.environ.get('SKIP_SEARCH', '1')) +RUN_SEARCH = int(os.environ.get('RUN_SEARCH', '0')) RUN_YZ = int(os.environ.get('RUN_YZ', '0')) -SKIP_INDEXES = int(os.environ.get('SKIP_INDEXES', '1')) +RUN_INDEXES = int(os.environ.get('RUN_INDEXES', '0')) -SKIP_TIMESERIES = int(os.environ.get('SKIP_TIMESERIES', '1')) +RUN_TIMESERIES = int(os.environ.get('RUN_TIMESERIES', '0')) -SKIP_POOL = int(os.environ.get('SKIP_POOL', '1')) -SKIP_RESOLVE = int(os.environ.get('SKIP_RESOLVE', '0')) -SKIP_BTYPES = int(os.environ.get('SKIP_BTYPES', '0')) +RUN_POOL = int(os.environ.get('RUN_POOL', '0')) +RUN_RESOLVE = int(os.environ.get('RUN_RESOLVE', '1')) +RUN_BTYPES = int(os.environ.get('RUN_BTYPES', '1')) +RUN_DATATYPES = int(os.environ.get('RUN_DATATYPES', '1')) RUN_SECURITY = int(os.environ.get('RUN_SECURITY', '0')) SECURITY_USER = os.environ.get('RIAK_TEST_SECURITY_USER', 'testuser') @@ -63,7 +64,9 @@ SECURITY_CERT_PASSWD = os.environ.get('RIAK_TEST_SECURITY_CERT_PASSWD', 'certpass') -SECURITY_CIPHERS = 'DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:AES128-SHA256:AES128-SHA:AES256-SHA256:AES256-SHA:RC4-SHA' +SECURITY_CIPHERS = 'DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:' + \ + 'DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:' + \ + 'AES128-SHA256:AES128-SHA:AES256-SHA256:AES256-SHA:RC4-SHA' SECURITY_CREDS = None if RUN_SECURITY: @@ -71,4 +74,3 @@ password=SECURITY_PASSWD, cacert_file=SECURITY_CACERT, ciphers=SECURITY_CIPHERS) -SKIP_DATATYPES = int(os.environ.get('SKIP_DATATYPES', '0')) diff --git a/riak/tests/base.py b/riak/tests/base.py index 8856f923..ec0f397c 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -7,6 +7,7 @@ from riak.client import 
RiakClient from riak.tests import HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS + class IntegrationTestBase(object): host = None @@ -43,7 +44,15 @@ def create_client(cls, host=None, http_port=None, pb_port=None, credentials = credentials or SECURITY_CREDS if hasattr(cls, 'logging_enabled') and cls.logging_enabled: - cls.logger.debug("RiakClient(protocol='%s', host='%s', pb_port='%d', http_port='%d', credentials='%s', client_args='%s')", protocol, host, pb_port, http_port, credentials, client_args) + cls.logger.debug("RiakClient(protocol='%s', host='%s', " + + "pb_port='%d', http_port='%d', " + + "credentials='%s', client_args='%s')", + protocol, + host, + pb_port, + http_port, + credentials, + client_args) return RiakClient(protocol=protocol, host=host, diff --git a/riak/tests/test_2i.py b/riak/tests/test_2i.py index 419bce60..d7b254e3 100644 --- a/riak/tests/test_2i.py +++ b/riak/tests/test_2i.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import platform from riak import RiakError -from riak.tests import SKIP_INDEXES +from riak.tests import RUN_INDEXES from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': @@ -21,7 +21,7 @@ def is_2i_supported(self): return False return True # it failed, but is supported! - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_secondary_index_store(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -102,7 +102,7 @@ def test_secondary_index_store(self): # Clean up... bucket.get('mykey1').delete() - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_set_indexes(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -120,7 +120,7 @@ def test_set_indexes(self): self.assertEqual(1, len(result)) self.assertEqual('foo', str(result[0])) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_remove_indexes(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -180,7 +180,7 @@ def test_remove_indexes(self): self.assertEqual(1, len([x for x in bar.indexes if x[0] == 'baz_bin'])) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_secondary_index_query(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -209,7 +209,7 @@ def test_secondary_index_query(self): self.assertEqual(3, len(results)) self.assertEqual(set([o2.key, o3.key, o4.key]), vals) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_secondary_index_invalid_name(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -219,7 +219,7 @@ def test_secondary_index_invalid_name(self): with self.assertRaises(RiakError): bucket.new('k', 'a').add_index('field1', 'value1') - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_set_index(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -237,7 +237,7 @@ def test_set_index(self): obj.set_index('bar2_int', 10) self.assertEqual(set((('bar_int', 3), ('bar2_int', 10))), obj.indexes) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_stream_index(self): if not 
self.is_2i_supported(): raise unittest.SkipTest("2I not supported") @@ -250,7 +250,7 @@ def test_stream_index(self): self.assertEqual(sorted([o1.key, o2.key, o3.key]), sorted(keys)) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -274,7 +274,7 @@ def test_index_return_terms(self): self.assertEqual([(1002, o2.key), (1003, o3.key), (1004, o4.key)], sorted(spairs)) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_pagination(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -309,7 +309,7 @@ def test_index_pagination(self): self.assertEqual(3, pagecount) self.assertEqual([o1.key, o2.key, o3.key, o4.key], presults) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_pagination_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -334,7 +334,7 @@ def test_index_pagination_return_terms(self): self.assertLessEqual(2, len(results)) self.assertEqual([('val3', o3.key), ('val4', o4.key)], page2) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_pagination_stream(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -377,7 +377,7 @@ def test_index_pagination_stream(self): self.assertEqual(3, pagecount) self.assertEqual([o1.key, o2.key, o3.key, o4.key], presults) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_pagination_stream_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -409,7 +409,7 @@ def test_index_pagination_stream_return_terms(self): self.assertLessEqual(2, len(results)) self.assertEqual([('val3', o3.key), ('val4', o4.key)], results) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_eq_query_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -419,7 +419,7 @@ def test_index_eq_query_return_terms(self): results = bucket.get_index('field2_int', 1001, return_terms=True) self.assertEqual([(1001, o1.key)], results) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_eq_query_stream_return_terms(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -432,7 +432,7 @@ def test_index_eq_query_stream_return_terms(self): self.assertEqual([(1001, o1.key)], results) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_timeout(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -451,7 +451,7 @@ def test_index_timeout(self): self.assertEqual([o1.key], bucket.get_index('field1_bin', 'val1', timeout='infinity')) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_regex(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") @@ -466,7 +466,7 @@ def test_index_regex(self): 
self.assertEqual([('val2', o2.key)], results) - @unittest.skipIf(SKIP_INDEXES, 'SKIP_INDEXES is defined') + @unittest.skipUnless(RUN_INDEXES, 'RUN_INDEXES is 0') def test_index_falsey_endkey_gh378(self): if not self.is_2i_supported(): raise unittest.SkipTest("2I is not supported") diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index c1b57f6c..3c8b6c1e 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -1,7 +1,7 @@ import platform from riak import RiakError, RiakObject from riak.bucket import RiakBucket, BucketType -from riak.tests import SKIP_BTYPES +from riak.tests import RUN_BTYPES from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -11,6 +11,7 @@ import unittest +@unittest.skipUnless(RUN_BTYPES, "RUN_BTYPES is 0") class BucketTypeTests(IntegrationTestBase, unittest.TestCase, Comparison): def test_btype_init(self): btype = self.client.bucket_type('foo') @@ -41,7 +42,6 @@ def test_btype_repr(self): self.assertEqual("", repr(defbtype)) self.assertEqual("", repr(othertype)) - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_btype_get_props(self): defbtype = self.client.bucket_type("default") btype = self.client.bucket_type("pytest") @@ -53,7 +53,6 @@ def test_btype_get_props(self): self.assertIn('n_val', props) self.assertEqual(3, props['n_val']) - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_btype_set_props(self): defbtype = self.client.bucket_type("default") btype = self.client.bucket_type("pytest") @@ -72,13 +71,11 @@ def test_btype_set_props(self): finally: btype.set_properties(oldprops) - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_btype_set_props_immutable(self): btype = self.client.bucket_type("pytest-maps") with self.assertRaises(RiakError): btype.set_property('datatype', 'counter') - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_btype_list_buckets(self): btype = self.client.bucket_type("pytest") bucket = btype.bucket(self.bucket_name) @@ -93,7 +90,6 @@ def test_btype_list_buckets(self): self.assertIn(bucket, buckets) - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_btype_list_keys(self): btype = self.client.bucket_type("pytest") bucket = btype.bucket(self.bucket_name) @@ -109,7 +105,6 @@ def test_btype_list_keys(self): self.assertIn(self.key_name, keys) - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_default_btype_list_buckets(self): default_btype = self.client.bucket_type("default") bucket = default_btype.bucket(self.bucket_name) @@ -126,7 +121,6 @@ def test_default_btype_list_buckets(self): self.assertItemsEqual(buckets, self.client.get_buckets()) - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_default_btype_list_keys(self): btype = self.client.bucket_type("default") bucket = btype.bucket(self.bucket_name) @@ -145,7 +139,6 @@ def test_default_btype_list_keys(self): oldapikeys = self.client.get_keys(self.client.bucket(self.bucket_name)) self.assertItemsEqual(keys, oldapikeys) - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_multiget_bucket_types(self): btype = self.client.bucket_type('pytest') bucket = btype.bucket(self.bucket_name) @@ -161,7 +154,6 @@ def test_multiget_bucket_types(self): self.assertEqual(bucket, mobj.bucket) self.assertEqual(btype, mobj.bucket.bucket_type) - @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") def test_write_once_bucket_type(self): btype = self.client.bucket_type('pytest-write-once') 
btype.set_property('write_once', True) diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index be105502..46700a61 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -2,7 +2,7 @@ from six import PY2 from threading import Thread from riak.riak_object import RiakObject -from riak.tests import DUMMY_HTTP_PORT, DUMMY_PB_PORT, SKIP_POOL +from riak.tests import DUMMY_HTTP_PORT, DUMMY_PB_PORT, RUN_POOL from riak.tests.base import IntegrationTestBase if PY2: @@ -15,6 +15,7 @@ else: import unittest + class ClientTests(IntegrationTestBase, unittest.TestCase): def test_uses_client_id_if_given(self): if self.protocol == 'pbc': @@ -192,7 +193,7 @@ def test_multiget_pool_size(self): self.assertEqual(obj.key, obj.data) client.close() - @unittest.skipIf(SKIP_POOL, 'SKIP_POOL is set') + @unittest.skipUnless(RUN_POOL, 'RUN_POOL is 0') def test_pool_close(self): """ Iterate over the connection pool and close all connections. diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index b8ca881f..747de515 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -2,7 +2,7 @@ import platform from riak import RiakBucket, BucketType, RiakObject import riak.datatypes as datatypes -from riak.tests import SKIP_DATATYPES +from riak.tests import RUN_DATATYPES from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -147,8 +147,10 @@ def test_removes_require_context(self): self.assertTrue(dtype.modified) -class DatatypeIntegrationTests(IntegrationTestBase, unittest.TestCase, Comparison): - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') +@unittest.skipUnless(RUN_DATATYPES, 'RUN_DATATYPES is 0') +class DatatypeIntegrationTests(IntegrationTestBase, + unittest.TestCase, + Comparison): def test_dt_counter(self): btype = self.client.bucket_type('pytest-counters') bucket = btype.bucket(self.bucket_name) @@ -165,7 +167,6 @@ def test_dt_counter(self): mycount.reload() self.assertEqual(2, mycount.value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -188,7 +189,6 @@ def test_dt_set(self): self.assertIn('Brett', myset) self.assertNotIn('Sean', myset) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_map(self): btype = self.client.bucket_type('pytest-maps') bucket = btype.bucket(self.bucket_name) @@ -224,7 +224,6 @@ def test_dt_map(self): self.assertIn('f', mymap.sets) self.assertItemsEqual(['thing1', 'thing2'], mymap.sets['f'].value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set_remove_without_context(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -236,7 +235,6 @@ def test_dt_set_remove_without_context(self): with self.assertRaises(datatypes.ContextRequired): set.discard("Y") - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set_remove_fetching_context(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -253,7 +251,6 @@ def test_dt_set_remove_fetching_context(self): set2 = bucket.get(self.key_name) self.assertItemsEqual(['X', 'Y'], set2.value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set_add_twice(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -270,7 +267,6 @@ def test_dt_set_add_twice(self): set2 = bucket.get(self.key_name) 
self.assertItemsEqual(['X', 'Y'], set2.value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set_add_wins_in_same_op(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -288,7 +284,6 @@ def test_dt_set_add_wins_in_same_op(self): set2 = bucket.get(self.key_name) self.assertItemsEqual(['X', 'Y'], set2.value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set_add_wins_in_same_op_reversed(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -306,7 +301,6 @@ def test_dt_set_add_wins_in_same_op_reversed(self): set2 = bucket.get(self.key_name) self.assertItemsEqual(['X', 'Y'], set2.value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set_remove_old_context(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -328,7 +322,6 @@ def test_dt_set_remove_old_context(self): set2 = bucket.get(self.key_name) self.assertItemsEqual(['X', 'Y', 'Z'], set2.value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set_remove_updated_context(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -349,7 +342,6 @@ def test_dt_set_remove_updated_context(self): set2 = bucket.get(self.key_name) self.assertItemsEqual(['X', 'Y'], set2.value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_map_remove_set_update_same_op(self): btype = self.client.bucket_type('pytest-maps') bucket = btype.bucket(self.bucket_name) @@ -367,7 +359,6 @@ def test_dt_map_remove_set_update_same_op(self): map2 = bucket.get(self.key_name) self.assertItemsEqual(["Z"], map2.sets['set']) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_map_remove_counter_increment_same_op(self): btype = self.client.bucket_type('pytest-maps') bucket = btype.bucket(self.bucket_name) @@ -385,7 +376,6 @@ def test_dt_map_remove_counter_increment_same_op(self): map2 = bucket.get(self.key_name) self.assertEqual(2, map2.counters['counter'].value) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_map_remove_map_update_same_op(self): btype = self.client.bucket_type('pytest-maps') bucket = btype.bucket(self.bucket_name) @@ -403,7 +393,6 @@ def test_dt_map_remove_map_update_same_op(self): map2 = bucket.get(self.key_name) self.assertItemsEqual(["Z"], map2.maps['map'].sets['set']) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_set_return_body_true_default(self): btype = self.client.bucket_type('pytest-sets') bucket = btype.bucket(self.bucket_name) @@ -421,7 +410,6 @@ def test_dt_set_return_body_true_default(self): myset.store() self.assertItemsEqual(myset.value, ['Y']) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_dt_map_return_body_true_default(self): btype = self.client.bucket_type('pytest-maps') bucket = btype.bucket(self.bucket_name) @@ -446,7 +434,6 @@ def test_dt_map_return_body_true_default(self): self.assertEqual(mymap.value, {}) - @unittest.skipIf(SKIP_DATATYPES, 'SKIP_DATATYPES is set') def test_delete_datatype(self): ctype = self.client.bucket_type('pytest-counters') cbucket = ctype.bucket(self.bucket_name) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 592031c5..cc7b20c9 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -7,7 +7,7 @@ from time import sleep from riak import ConflictError, RiakBucket, RiakError from riak.resolver import default_resolver, 
last_written_resolver -from riak.tests import SKIP_RESOLVE +from riak.tests import RUN_RESOLVE from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -34,11 +34,13 @@ testrun_sibs_bucket = 'sibsbucket' testrun_props_bucket = 'propsbucket' + def setUpModule(): c = IntegrationTestBase.create_client() c.bucket(testrun_sibs_bucket).allow_mult = True c.close() + def tearDownModule(): c = IntegrationTestBase.create_client() c.bucket(testrun_sibs_bucket).clear_properties() @@ -430,8 +432,7 @@ def test_siblings(self): self.assertEqual(len(obj.siblings), 1) self.assertEqual(obj.data, resolved_sibling.data) - @unittest.skipIf(SKIP_RESOLVE == '1', - "skip requested for resolvers test") + @unittest.skipUnless(RUN_RESOLVE, "RUN_RESOLVE is 0") def test_resolution(self): bucket = self.client.bucket(testrun_sibs_bucket) obj = bucket.get(self.key_name) @@ -487,8 +488,7 @@ def max_value_resolver(obj): self.assertEqual(bucket.resolver, default_resolver) # reset self.assertEqual(self.client.resolver, default_resolver) # reset - @unittest.skipIf(SKIP_RESOLVE == '1', - "skip requested for resolvers test") + @unittest.skipUnless(RUN_RESOLVE, "RUN_RESOLVE is 0") def test_resolution_default(self): # If no resolver is setup, be sure to resolve to default_resolver bucket = self.client.bucket(testrun_sibs_bucket) diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index e0897f6a..b6cd068f 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -18,11 +18,15 @@ import unittest -testrun_yz_mr = {'btype': 'pytest-mr', 'bucket': 'mrbucket', 'index': 'mrbucket'} +testrun_yz_mr = {'btype': 'pytest-mr', + 'bucket': 'mrbucket', + 'index': 'mrbucket'} + def setUpModule(): yzSetUp(testrun_yz_mr) + def tearDownModule(): yzTearDown(testrun_yz_mr) @@ -516,7 +520,7 @@ def test_mr_list_add_mix(self): u'"fooval2"', u'"fooval3"']) - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') + @unittest.skipUnless(RUN_YZ, 'RUN_YZ is 0') def test_mr_search(self): """ Try a successful map/reduce from search results. 
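Test modules that need shared fixtures keep the same shape throughout this change: ``setUpModule``/``tearDownModule`` do the one-off bucket work against a short-lived client, and a ``RUN_`` flag gates the tests themselves. A condensed sketch, with the class name and test body invented for illustration::

    import unittest
    from riak.tests import RUN_RESOLVE
    from riak.tests.base import IntegrationTestBase

    testrun_sibs_bucket = 'sibsbucket'

    def setUpModule():
        # One-off fixture work shared by the whole module.
        c = IntegrationTestBase.create_client()
        c.bucket(testrun_sibs_bucket).allow_mult = True
        c.close()

    def tearDownModule():
        c = IntegrationTestBase.create_client()
        c.bucket(testrun_sibs_bucket).clear_properties()
        c.close()

    @unittest.skipUnless(RUN_RESOLVE, 'RUN_RESOLVE is 0')
    class ExampleResolverTests(IntegrationTestBase, unittest.TestCase):
        def test_placeholder(self):
            self.assertTrue(True)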
diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index edfba3f5..f1088244 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -5,7 +5,7 @@ from riak.transports.pool import Pool, BadResource from random import SystemRandom from time import sleep -from riak.tests import SKIP_POOL +from riak.tests import RUN_POOL from riak.tests.comparison import Comparison if platform.python_version() < '2.7': @@ -37,7 +37,7 @@ def create_resource(self): return [] -@unittest.skipIf(SKIP_POOL, 'Skipping connection pool tests') +@unittest.skipUnless(RUN_POOL, 'RUN_POOL is 0') class PoolTest(unittest.TestCase, Comparison): def test_yields_new_object_when_empty(self): diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index 73c6cd47..7cc369b6 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import print_function import platform -from riak.tests import SKIP_SEARCH +from riak.tests import RUN_SEARCH, RUN_YZ from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': @@ -11,27 +11,29 @@ testrun_search_bucket = 'searchbucket' + def setUpModule(): - if not SKIP_SEARCH and not RUN_YZ: + if RUN_SEARCH and not RUN_YZ: c = IntegrationTestBase.create_client() b = c.bucket(testrun_search_bucket) b.enable_search() c.close() + def tearDownModule(): - if not SKIP_SEARCH and not RUN_YZ: + if RUN_SEARCH and not RUN_YZ: c = IntegrationTestBase.create_client() b = c.bucket(testrun_search_bucket) b.clear_properties() c.close() + +@unittest.skipUnless(RUN_SEARCH, 'RUN_SEARCH is 0') class EnableSearchTests(IntegrationTestBase, unittest.TestCase): - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_bucket_search_enabled(self): bucket = self.client.bucket(self.bucket_name) self.assertFalse(bucket.search_enabled()) - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_enable_search_commit_hook(self): bucket = self.client.bucket(testrun_search_bucket) bucket.clear_properties() @@ -46,7 +48,6 @@ def test_enable_search_commit_hook(self): self.assertTrue(c.bucket(testrun_search_bucket).search_enabled()) c.close() - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_disable_search_commit_hook(self): bucket = self.client.bucket(testrun_search_bucket) bucket.clear_properties() @@ -65,8 +66,8 @@ def test_disable_search_commit_hook(self): bucket.enable_search() +@unittest.skipUnless(RUN_SEARCH, 'RUN_SEARCH is 0') class SolrSearchTests(IntegrationTestBase, unittest.TestCase): - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_add_document_to_index(self): self.client.fulltext_add(testrun_search_bucket, [{"id": "doc", "username": "tony"}]) @@ -74,7 +75,6 @@ def test_add_document_to_index(self): "username:tony") self.assertEqual("tony", results['docs'][0]['username']) - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_add_multiple_documents_to_index(self): self.client.fulltext_add( testrun_search_bucket, @@ -84,7 +84,6 @@ def test_add_multiple_documents_to_index(self): testrun_search_bucket, "username:russell OR username:dizzy") self.assertEqual(2, len(results['docs'])) - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_delete_documents_from_search_by_id(self): self.client.fulltext_add( testrun_search_bucket, @@ -95,7 +94,6 @@ def test_delete_documents_from_search_by_id(self): testrun_search_bucket, "username:russell OR username:dizzy") self.assertEqual(1, len(results['docs'])) - @unittest.skipIf(SKIP_SEARCH, 
'SKIP_SEARCH is defined') def test_delete_documents_from_search_by_query(self): self.client.fulltext_add( testrun_search_bucket, @@ -108,7 +106,6 @@ def test_delete_documents_from_search_by_query(self): testrun_search_bucket, "username:russell OR username:dizzy") self.assertEqual(0, len(results['docs'])) - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_delete_documents_from_search_by_query_and_id(self): self.client.fulltext_add( testrun_search_bucket, @@ -124,22 +121,20 @@ def test_delete_documents_from_search_by_query_and_id(self): self.assertEqual(0, len(results['docs'])) +@unittest.skipUnless(RUN_SEARCH, 'RUN_SEARCH is 0') class SearchTests(IntegrationTestBase, unittest.TestCase): - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_solr_search_from_bucket(self): bucket = self.client.bucket(testrun_search_bucket) bucket.new("user", {"username": "roidrage"}).store() results = bucket.search("username:roidrage") self.assertEqual(1, len(results['docs'])) - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_solr_search_with_params_from_bucket(self): bucket = self.client.bucket(testrun_search_bucket) bucket.new("user", {"username": "roidrage"}).store() results = bucket.search("username:roidrage", wt="xml") self.assertEqual(1, len(results['docs'])) - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_solr_search_with_params(self): bucket = self.client.bucket(testrun_search_bucket) bucket.new("user", {"username": "roidrage"}).store() @@ -148,7 +143,6 @@ def test_solr_search_with_params(self): "username:roidrage", wt="xml") self.assertEqual(1, len(results['docs'])) - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_solr_search(self): bucket = self.client.bucket(testrun_search_bucket) bucket.new("user", {"username": "roidrage"}).store() @@ -156,7 +150,6 @@ def test_solr_search(self): "username:roidrage") self.assertEqual(1, len(results["docs"])) - @unittest.skipIf(SKIP_SEARCH, 'SKIP_SEARCH is defined') def test_search_integration(self): # Create some objects to search across... 
bucket = self.client.bucket(testrun_search_bucket) diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index 0c8a5a49..135bb112 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -16,14 +16,14 @@ class SecurityTests(IntegrationTestBase, unittest.TestCase): - @unittest.skipIf(RUN_SECURITY, 'RUN_SECURITY is set') + @unittest.skipIf(RUN_SECURITY, 'RUN_SECURITY is 1') def test_security_disabled(self): - topts = { 'timeout' : 1 } - creds = SecurityCreds(username='foo', - password='bar', - cacert_file=SECURITY_CACERT, - ciphers=SECURITY_CIPHERS) - client = self.create_client(credentials=creds, transport_options=topts) + """ + Test valid security settings without security enabled + """ + topts = {'timeout': 1} + client = self.create_client(credentials=SECURITY_CREDS, + transport_options=topts) myBucket = client.bucket('test') val1 = "foobar" key1 = myBucket.new('x', data=val1) @@ -31,7 +31,7 @@ def test_security_disabled(self): key1.store() client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_basic_connection(self): myBucket = self.client.bucket('test') val1 = "foobar" @@ -39,7 +39,7 @@ def test_security_basic_connection(self): key1.store() myBucket.get('x') - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_bad_user(self): creds = SecurityCreds(username='foo', password=SECURITY_PASSWD, @@ -50,7 +50,7 @@ def test_security_bad_user(self): client.get_buckets() client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_bad_password(self): creds = SecurityCreds(username=SECURITY_USER, password='foo', @@ -61,7 +61,7 @@ def test_security_bad_password(self): client.get_buckets() client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_invalid_cert(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, @@ -72,7 +72,7 @@ def test_security_invalid_cert(self): client.get_buckets() client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_password_without_cacert(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, @@ -85,7 +85,7 @@ def test_security_password_without_cacert(self): key1.store() client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_cert_authentication(self): creds = SecurityCreds(username=SECURITY_CERT_USER, password=SECURITY_CERT_PASSWD, @@ -108,9 +108,10 @@ def test_security_cert_authentication(self): myBucket.get('x') client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_revoked_cert(self): - creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, + creds = SecurityCreds(username=SECURITY_USER, + password=SECURITY_PASSWD, ciphers=SECURITY_CIPHERS, cacert_file=SECURITY_CACERT, crl_file=SECURITY_REVOKED) @@ -123,7 +124,7 @@ def test_security_revoked_cert(self): client.get_buckets() client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 
0') def test_security_bad_ca_cert(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, ciphers=SECURITY_CIPHERS, @@ -133,7 +134,7 @@ def test_security_bad_ca_cert(self): client.get_buckets() client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_ciphers(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, ciphers=SECURITY_CIPHERS, @@ -146,7 +147,7 @@ def test_security_ciphers(self): myBucket.get('x') client.close() - @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is not set') + @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_bad_ciphers(self): creds = SecurityCreds(username=SECURITY_USER, password=SECURITY_PASSWD, cacert_file=SECURITY_CACERT, diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index dbc1674d..d6f63c07 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -3,14 +3,12 @@ import os import platform import riak_pb -import sys -import time from riak.table import Table from riak.ts_object import TsObject from riak.transports.pbc.codec import RiakPbcCodec from riak.util import str_to_bytes, bytes_to_str -from riak.tests import SKIP_TIMESERIES +from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': @@ -27,18 +25,20 @@ ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) ts1 = ts0 + fiveMins + +@unittest.skipUnless(RUN_TIMESERIES, 'RUN_TIMESERIES is 0') class TimeseriesUnitTests(unittest.TestCase): def setUp(self): self.c = RiakPbcCodec() self.ts0ms = self.c._unix_time_millis(ts0) self.ts1ms = self.c._unix_time_millis(ts1) self.rows = [ - [ bd0, 0, 1.2, ts0, True ], - [ bd1, 3, 4.5, ts1, False ] + [bd0, 0, 1.2, ts0, True], + [bd1, 3, 4.5, ts1, False] ] self.test_key = { - 'user' : 'user2', - 'time' : ts0 + 'user': 'user2', + 'time': ts0 } self.table = Table(None, 'test-table') @@ -157,7 +157,8 @@ def test_decode_data_from_query(self): self.assertEqual(r1[3], ts1) self.assertEqual(r1[4], self.rows[1][4]) -@unittest.skipIf(SKIP_TIMESERIES == 1, "skip requested for timeseries tests") + +@unittest.skipUnless(RUN_TIMESERIES, 'RUN_TIMESERIES is 0') class TimeseriesTests(IntegrationTestBase, unittest.TestCase): @classmethod def setUpClass(cls): @@ -179,14 +180,14 @@ def setUpClass(cls): # PRIMARY KEY((quantum(time, 15, m), user), time, user) # ) rows = [ - [ 'hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3 ], - [ 'hash1', 'user2', fifteenMinsAgo, 'rain', 79.0 ], - [ 'hash1', 'user2', fiveMinsAgo, 'wind', None ], - [ 'hash1', 'user2', cls.now, 'snow', 20.1 ] + ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], + ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0], + ['hash1', 'user2', fiveMinsAgo, 'wind', None], + ['hash1', 'user2', cls.now, 'snow', 20.1] ] ts_obj = table.new(rows) result = ts_obj.store() - if result != True: + if not result: raise AssertionError("expected success") client.close() @@ -207,33 +208,36 @@ def validate_data(self, ts_obj): self.assertIsNone(row[4]) def test_query_that_returns_no_data(self): - query = "select * from {} where time > 0 and time < 10 and user = 'user1'".format(table_name) + query = "select * from {} where time > 0 and " + \ + "time < 10 and user = 'user1'".format(table_name) ts_obj = self.client.ts_query('GeoCheckin', query) self.assertEqual(len(ts_obj.columns), 0) self.assertEqual(len(ts_obj.rows), 0) def test_query_that_matches_some_data(self): - query = 
"select * from {} where time > {} and time < {} and user = 'user2'".format(table_name, self.tenMinsAgoMsec, self.nowMsec) + query = "select * from {} where time > {} and " + \ + " time < {} and user = 'user2'" \ + .format(table_name, self.tenMinsAgoMsec, self.nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) self.validate_data(ts_obj) def test_get_single_value_using_dict(self): key = { - 'user' : 'user2', - 'time' : self.fiveMinsAgo + 'user': 'user2', + 'time': self.fiveMinsAgo } ts_obj = self.client.ts_get('GeoCheckin', key) self.validate_data(ts_obj) def test_get_single_value_using_array(self): - key = [ self.fiveMinsAgo, 'user2' ] + key = [self.fiveMinsAgo, 'user2'] ts_obj = self.client.ts_get('GeoCheckin', key) self.validate_data(ts_obj) def test_delete_single_value_using_dict(self): key = { - 'user' : 'user2', - 'time' : self.twentyMinsAgo + 'user': 'user2', + 'time': self.twentyMinsAgo } rslt = self.client.ts_delete('GeoCheckin', key) self.assertTrue(rslt) diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index 55399aac..52f9af88 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -10,6 +10,7 @@ else: import unittest + def wait_for_yz_index(bucket, key, index=None): """ Wait until Solr index has been updated and a value returns from a query. @@ -25,16 +26,21 @@ def wait_for_yz_index(bucket, key, index=None): # YZ index on bucket of the same name testrun_yz = {'btype': None, 'bucket': 'yzbucket', 'index': 'yzbucket'} # YZ index on bucket of a different name -testrun_yz_index = {'btype': None, 'bucket': 'yzindexbucket', 'index': 'yzindex'} +testrun_yz_index = {'btype': None, + 'bucket': 'yzindexbucket', + 'index': 'yzindex'} + def setUpModule(): yzSetUp(testrun_yz, testrun_yz_index) + def tearDownModule(): yzTearDown(testrun_yz, testrun_yz_index) + +@unittest.skipUnless(RUN_YZ, 'RUN_YZ is 0') class YZSearchTests(IntegrationTestBase, unittest.TestCase, Comparison): - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_from_bucket(self): bucket = self.client.bucket(testrun_yz['bucket']) bucket.new("user", {"user_s": "Z"}).store() @@ -51,16 +57,15 @@ def test_yz_search_from_bucket(self): self.assertIn('user_s', result) self.assertEqual(u'Z', result['user_s']) - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_index_using_bucket(self): bucket = self.client.bucket(testrun_yz_index['bucket']) bucket.new("feliz", {"name_s": "Felix", "species_s": "Felis catus"}).store() wait_for_yz_index(bucket, "feliz", index=testrun_yz_index['index']) - results = bucket.search('name_s:Felix', index=testrun_yz_index['index']) + results = bucket.search('name_s:Felix', + index=testrun_yz_index['index']) self.assertEqual(1, len(results['docs'])) - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_search_index_using_wrong_bucket(self): bucket = self.client.bucket(testrun_yz_index['bucket']) bucket.new("feliz", @@ -69,7 +74,6 @@ def test_yz_search_index_using_wrong_bucket(self): with self.assertRaises(Exception): bucket.search('name_s:Felix') - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_get_search_index(self): index = self.client.get_search_index(testrun_yz['bucket']) self.assertEqual(testrun_yz['bucket'], index['name']) @@ -78,7 +82,6 @@ def test_yz_get_search_index(self): with self.assertRaises(Exception): self.client.get_search_index('NOT' + testrun_yz['bucket']) - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_delete_search_index(self): # expected to fail, since 
there's an attached bucket with self.assertRaises(Exception): @@ -96,13 +99,11 @@ def test_yz_delete_search_index(self): while testrun_yz['bucket'] not in indexes: indexes = [i['name'] for i in self.client.list_search_indexes()] - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_list_search_indexes(self): indexes = self.client.list_search_indexes() self.assertIn(testrun_yz['bucket'], [item['name'] for item in indexes]) self.assertLessEqual(1, len(indexes)) - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_create_schema(self): content = """ @@ -138,7 +139,6 @@ def test_yz_create_schema(self): self.assertEqual(schema_name, schema['name']) self.assertEqual(content, schema['content']) - @unittest.skipUnless(RUN_YZ, 'RUN_YZ is undefined') def test_yz_create_bad_schema(self): bad_content = """ timestamp: '%s', timestamp_value '%d'", - cell, ts_cell.timestamp_value) + cell, ts_cell.timestamp_value) elif isinstance(cell, bool): logging.debug("cell -> boolean: '%s'", cell) ts_cell.boolean_value = cell elif isinstance(cell, str): logging.debug("cell -> str: '%s'", cell) ts_cell.binary_value = str_to_bytes(cell) - elif isinstance(cell, int) or isinstance(cell, long): + elif isinstance(cell, int) or isinstance(cell, long): # noqa logging.debug("cell -> int/long: '%s'", cell) ts_cell.integer_value = cell elif isinstance(cell, float): @@ -645,7 +648,8 @@ def _encode_to_ts_cell(self, cell, ts_cell): ts_cell.double_value = cell else: t = type(cell) - raise RiakError("can't serialize type '{}', value '{}'".format(t, cell)) + raise RiakError("can't serialize type '{}', value '{}'" + .format(t, cell)) def _encode_timeseries_keyreq(self, table, key, req): key_vals = None @@ -678,11 +682,11 @@ def _encode_timeseries_put(self, tsobj, ts_put_req): if tsobj.rows and isinstance(tsobj.rows, list): for row in tsobj.rows: - tsr = ts_put_req.rows.add() # NB: type riak_pb.TsRow + tsr = ts_put_req.rows.add() # NB: type riak_pb.TsRow if not isinstance(row, list): raise ValueError("TsObject row must be a list of values") for cell in row: - tsc = tsr.cells.add() # NB: type riak_pb.TsCell + tsc = tsr.cells.add() # NB: type riak_pb.TsCell self._encode_to_ts_cell(cell, tsc) else: raise RiakError("TsObject requires a list of rows") @@ -706,7 +710,8 @@ def _decode_timeseries(self, ts_rsp, tsobj): tsobj.columns.append(col) for ts_row in ts_rsp.rows: - tsobj.rows.append(self._decode_timeseries_row(ts_row, ts_rsp.columns)) + tsobj.rows.append(self._decode_timeseries_row(ts_row, + ts_rsp.columns)) def _decode_timeseries_row(self, ts_row, ts_columns): """ @@ -722,13 +727,18 @@ def _decode_timeseries_row(self, ts_row, ts_columns): for i, ts_cell in enumerate(ts_row.cells): ts_col = ts_columns[i] logging.debug("ts_cell: '%s', ts_col: '%d'", ts_cell, ts_col.type) - if ts_col.type == riak_pb.TsColumnType.Value('BINARY') and ts_cell.HasField('binary_value'): - logging.debug("ts_cell.binary_value: '%s'", ts_cell.binary_value) + if ts_col.type == riak_pb.TsColumnType.Value('BINARY')\ + and ts_cell.HasField('binary_value'): + logging.debug("ts_cell.binary_value: '%s'", + ts_cell.binary_value) row.append(ts_cell.binary_value) - elif ts_col.type == riak_pb.TsColumnType.Value('INTEGER') and ts_cell.HasField('integer_value'): - logging.debug("ts_cell.integer_value: '%s'", ts_cell.integer_value) + elif ts_col.type == riak_pb.TsColumnType.Value('INTEGER')\ + and ts_cell.HasField('integer_value'): + logging.debug("ts_cell.integer_value: '%s'", + ts_cell.integer_value) row.append(ts_cell.integer_value) - elif 
ts_col.type == riak_pb.TsColumnType.Value('FLOAT') and ts_cell.HasField('double_value'): + elif ts_col.type == riak_pb.TsColumnType.Value('FLOAT')\ + and ts_cell.HasField('double_value'): value = None if ts_cell.HasField('double_value'): value = ts_cell.double_value @@ -739,13 +749,17 @@ def _decode_timeseries_row(self, ts_row, ts_columns): elif ts_col.type == riak_pb.TsColumnType.Value('TIMESTAMP'): dt = None if ts_cell.HasField('timestamp_value'): - dt = self._datetime_from_unix_time_millis(ts_cell.timestamp_value) + dt = self._datetime_from_unix_time_millis( + ts_cell.timestamp_value) elif ts_cell.HasField('integer_value'): - dt = self._datetime_from_unix_time_millis(ts_cell.integer_value) + dt = self._datetime_from_unix_time_millis( + ts_cell.integer_value) logging.debug("ts_cell datetime: '%s'", dt) row.append(dt) - elif ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN') and ts_cell.HasField('boolean_value'): - logging.debug("ts_cell.boolean_value: '%s'", ts_cell.boolean_value) + elif ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN')\ + and ts_cell.HasField('boolean_value'): + logging.debug("ts_cell.boolean_value: '%s'", + ts_cell.boolean_value) row.append(ts_cell.boolean_value) else: row.append(None) diff --git a/riak/ts_object.py b/riak/ts_object.py index f1e4f028..ef01baff 100644 --- a/riak/ts_object.py +++ b/riak/ts_object.py @@ -1,6 +1,7 @@ from riak import RiakError from riak.table import Table + class TsObject(object): """ The TsObject holds information about Timeseries data, plus the data diff --git a/setup.py b/setup.py index d5ba1bd9..3e2b84d1 100755 --- a/setup.py +++ b/setup.py @@ -1,7 +1,6 @@ #!/usr/bin/env python import os import sys -from multiprocessing import util from setuptools import setup, find_packages from version import get_version from commands import preconfigure, configure, create_bucket_types, \ From 2c59f20487d2b4412db92d9ce447803625ab6ce7 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 27 Oct 2015 17:45:56 -0700 Subject: [PATCH 036/324] Move contributions to README like other clients. 
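This change also points the default-mimetype KV test at README.rst instead of the removed THANKS file. The pattern that test exercises is storing a file without an explicit content type and letting the client fall back to its default; a minimal sketch, assuming a reachable local node, a hypothetical bucket name 'test-bucket', and a hypothetical key 'readme-key':

    import os
    from riak import RiakClient

    client = RiakClient()                    # assumes a local Riak node
    bucket = client.bucket('test-bucket')    # hypothetical bucket name
    filepath = os.path.join(os.getcwd(), 'README.rst')  # any readable file
    obj = bucket.new_from_file('readme-key', filepath)
    obj.store()
    fetched = bucket.get('readme-key')
    # content_type is whatever default the client assigned; its exact value
    # is not asserted here
    print(fetched.content_type)
    client.close()
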
--- README.rst | 50 ++++++++++++++++++++++++++++++++++++++----- THANKS | 45 -------------------------------------- riak/tests/test_kv.py | 2 +- 3 files changed, 46 insertions(+), 51 deletions(-) delete mode 100644 THANKS diff --git a/README.rst b/README.rst index b2cb0b65..a04e86ad 100644 --- a/README.rst +++ b/README.rst @@ -247,8 +247,48 @@ To run the tests, then simply Contributors -------------------------- - - Rusty Klophaus - - Justin Sheehy - - Jay Baird - - Andy Gross - - Jon Meredith + - Andrew Thompson + - Andy Gross + - Armon Dadgar + - Brett Hazen + - Brett Hoerner + - Brian Roach + - Bryan Fink + - Daniel Lindsley + - Daniel Néri + - Daniel Reverri + - David Koblas + - Dmitry Rozhkov + - Eric Florenzano + - Eric Moritz + - Filip de Waard + - Gilles Devaux + - Greg Nelson + - Greg Stein + - Gregory Burd + - Ian Plosker + - Jayson Baird + - Jeffrey Massung + - Jon Meredith + - Josip Lisec + - Justin Sheehy + - Kevin Smith + - `Luke Bakken `_ + - Mark Erdmann + - Mark Phillips + - Mathias Meyer + - Matt Heitzenroder + - Mikhail Sobolev + - Reid Draper + - Russell Brown + - Rusty Klophaus + - Rusty Klophaus + - Scott Lystig Fritchie + - Sean Cribbs + - Shuhao Wu + - Silas Sewell + - Socrates Lee + - Soren Hansen + - Sreejith Kesavan + - Timothée Peignier + - William Kral diff --git a/THANKS b/THANKS deleted file mode 100644 index 4fccfe7f..00000000 --- a/THANKS +++ /dev/null @@ -1,45 +0,0 @@ -The following people have contributed to the Riak Python client: - -Andrew Thompson -Andy Gross -Armon Dadgar -Brett Hazen -Brett Hoerner -Brian Roach -Bryan Fink -Daniel Lindsley -Daniel Néri -Daniel Reverri -David Koblas -Dmitry Rozhkov -Eric Florenzano -Eric Moritz -Filip de Waard -Gilles Devaux -Greg Nelson -Greg Stein -Gregory Burd -Ian Plosker -Jayson Baird -Jeffrey Massung -Jon Meredith -Josip Lisec -Justin Sheehy -Kevin Smith -Mark Erdmann -Mark Phillips -Mathias Meyer -Matt Heitzenroder -Mikhail Sobolev -Reid Draper -Russell Brown -Rusty Klophaus -Scott Lystig Fritchie -Sean Cribbs -Shuhao Wu -Silas Sewell -Socrates Lee -Soren Hansen -Sreejith Kesavan -Timothée Peignier -William Kral diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index cc7b20c9..c9eb94dd 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -697,7 +697,7 @@ def test_store_binary_object_from_file(self): def test_store_binary_object_from_file_should_use_default_mimetype(self): bucket = self.client.bucket(self.bucket_name) filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), - os.pardir, os.pardir, 'THANKS') + os.pardir, os.pardir, 'README.rst') obj = bucket.new_from_file(self.key_name, filepath) obj.store() obj = bucket.get(self.key_name) From b2b15e75742ac2ad27ade007e76c853471ece9fb Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 27 Oct 2015 19:29:01 -0700 Subject: [PATCH 037/324] Small fix for test when security is disabled --- riak/tests/test_security.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index 135bb112..9fc64e2c 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -22,7 +22,10 @@ def test_security_disabled(self): Test valid security settings without security enabled """ topts = {'timeout': 1} - client = self.create_client(credentials=SECURITY_CREDS, + # NB: can't use SECURITY_CREDS here since they won't be set + # if RUN_SECURITY is UN-set + creds = SecurityCreds(username='foo', password='bar') + client = self.create_client(credentials=creds, 
transport_options=topts) myBucket = client.bucket('test') val1 = "foobar" From bac106c431d2e03179e3fce568e5ee65ee28f64e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 29 Oct 2015 10:41:45 -0700 Subject: [PATCH 038/324] Ensure format strings work correctly and make the linter happy --- riak/tests/test_timeseries.py | 18 +++++++++++++----- 1 file changed, 13 insertions(+), 5 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index d6f63c07..46218af6 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -208,16 +208,24 @@ def validate_data(self, ts_obj): self.assertIsNone(row[4]) def test_query_that_returns_no_data(self): - query = "select * from {} where time > 0 and " + \ - "time < 10 and user = 'user1'".format(table_name) + fmt = """ + select * from {table} where + time > 0 and time < 10 and user = 'user1' + """ + query = fmt.format(table=table_name) ts_obj = self.client.ts_query('GeoCheckin', query) self.assertEqual(len(ts_obj.columns), 0) self.assertEqual(len(ts_obj.rows), 0) def test_query_that_matches_some_data(self): - query = "select * from {} where time > {} and " + \ - " time < {} and user = 'user2'" \ - .format(table_name, self.tenMinsAgoMsec, self.nowMsec) + fmt = """ + select * from {table} where + time > {t1} and time < {t2} and user = 'user2' + """ + query = fmt.format( + table=table_name, + t1=self.tenMinsAgoMsec, + t2=self.nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) self.validate_data(ts_obj) From 254994412500a45a4787d4f9ad9bc58b3b5e2ec7 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 30 Oct 2015 11:20:09 -0700 Subject: [PATCH 039/324] Changes after renaming data types and fields in the PB messages --- commands.py | 25 +++++++++++++------------ riak/tests/test_security.py | 2 +- riak/tests/test_timeseries.py | 24 ++++++++++++++---------- riak/transports/pbc/codec.py | 30 +++++++++++++----------------- 4 files changed, 41 insertions(+), 40 deletions(-) diff --git a/commands.py b/commands.py index cd178e8b..d7b7923d 100644 --- a/commands.py +++ b/commands.py @@ -178,13 +178,6 @@ class create_bucket_types(bucket_type_commands, Command): class setup_timeseries(bucket_type_commands, Command): """ Creates bucket-types appropriate for timeseries. 
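Once the GeoCheckin bucket-type/table defined below exists, the integration tests write to it through the client's table API. A minimal sketch of that flow, assuming a local node with Riak TS support; the row values are illustrative only:

    import datetime
    from riak import RiakClient

    client = RiakClient()               # assumes a local Riak TS-enabled node
    table = client.table('GeoCheckin')
    now = datetime.datetime.utcnow()
    # columns: geohash, user, time, weather, temperature
    rows = [['hash1', 'user2', now, 'snow', 20.1]]
    ts_obj = table.new(rows)
    if not ts_obj.store():              # store() returns True on success
        raise AssertionError('expected success')
    client.close()
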
- By default this will create: - - * `GeoCheckin` with ``{"props": {"n_val": 3, - "table_def": "CREATE TABLE GeoCheckin (geohash varchar not null, - user varchar not null, time timestamp not null, - weather varchar not null, temperature float, - PRIMARY KEY((quantum(time, 15, m),user), time, user))"}}`` """ description = "create bucket-types used in timeseries tests" @@ -196,11 +189,19 @@ class setup_timeseries(bucket_type_commands, Command): _props = { 'GeoCheckin': { 'n_val': 3, - 'table_def': - 'CREATE TABLE GeoCheckin (geohash varchar not null, ' + - 'user varchar not null, time timestamp not null, ' + - 'weather varchar not null, temperature float, ' + - 'PRIMARY KEY((quantum(time, 15, m),user), time, user))'} + 'table_def': ''' + CREATE TABLE GeoCheckin ( + geohash varchar not null, + user varchar not null, + time timestamp not null, + weather varchar not null, + temperature float, + PRIMARY KEY( + (geohash, user, quantum(time, 15, m)), + geohash, user, time + ) + )''' + } } diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index 9fc64e2c..85588ee0 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -5,7 +5,7 @@ from riak.tests import RUN_SECURITY, SECURITY_USER, SECURITY_PASSWD, \ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ SECURITY_CERT_USER, SECURITY_CERT_PASSWD, SECURITY_BAD_CERT, \ - SECURITY_CREDS, SECURITY_CIPHERS + SECURITY_CIPHERS from riak.security import SecurityCreds from riak.tests.base import IntegrationTestBase diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 46218af6..37e9c931 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -69,14 +69,14 @@ def test_encode_data_for_put(self): r0 = ts_put_req.rows[0] self.assertEqual(r0.cells[0].binary_value, self.rows[0][0]) - self.assertEqual(r0.cells[1].integer_value, self.rows[0][1]) + self.assertEqual(r0.cells[1].sint64_value, self.rows[0][1]) self.assertEqual(r0.cells[2].double_value, self.rows[0][2]) self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) r1 = ts_put_req.rows[1] self.assertEqual(r1.cells[0].binary_value, self.rows[1][0]) - self.assertEqual(r1.cells[1].integer_value, self.rows[1][1]) + self.assertEqual(r1.cells[1].sint64_value, self.rows[1][1]) self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) @@ -89,10 +89,10 @@ def test_decode_data_from_query(self): c0.type = riak_pb.TsColumnType.Value('BINARY') c1 = tqr.columns.add() c1.name = str_to_bytes('col_integer') - c1.type = riak_pb.TsColumnType.Value('INTEGER') + c1.type = riak_pb.TsColumnType.Value('SINT64') c2 = tqr.columns.add() c2.name = str_to_bytes('col_double') - c2.type = riak_pb.TsColumnType.Value('FLOAT') + c2.type = riak_pb.TsColumnType.Value('DOUBLE') c3 = tqr.columns.add() c3.name = str_to_bytes('col_timestamp') c3.type = riak_pb.TsColumnType.Value('TIMESTAMP') @@ -104,7 +104,7 @@ def test_decode_data_from_query(self): r0c0 = r0.cells.add() r0c0.binary_value = self.rows[0][0] r0c1 = r0.cells.add() - r0c1.integer_value = self.rows[0][1] + r0c1.sint64_value = self.rows[0][1] r0c2 = r0.cells.add() r0c2.double_value = self.rows[0][2] r0c3 = r0.cells.add() @@ -116,7 +116,7 @@ def test_decode_data_from_query(self): r1c0 = r1.cells.add() r1c0.binary_value = self.rows[1][0] r1c1 = r1.cells.add() - r1c1.integer_value = self.rows[1][1] + 
r1c1.sint64_value = self.rows[1][1] r1c2 = r1.cells.add() r1c2.double_value = self.rows[1][2] r1c3 = r1.cells.add() @@ -135,9 +135,9 @@ def test_decode_data_from_query(self): self.assertEqual(c[0][0], 'col_binary') self.assertEqual(c[0][1], riak_pb.TsColumnType.Value('BINARY')) self.assertEqual(c[1][0], 'col_integer') - self.assertEqual(c[1][1], riak_pb.TsColumnType.Value('INTEGER')) + self.assertEqual(c[1][1], riak_pb.TsColumnType.Value('SINT64')) self.assertEqual(c[2][0], 'col_double') - self.assertEqual(c[2][1], riak_pb.TsColumnType.Value('FLOAT')) + self.assertEqual(c[2][1], riak_pb.TsColumnType.Value('DOUBLE')) self.assertEqual(c[3][0], 'col_timestamp') self.assertEqual(c[3][1], riak_pb.TsColumnType.Value('TIMESTAMP')) self.assertEqual(c[4][0], 'col_boolean') @@ -210,7 +210,9 @@ def validate_data(self, ts_obj): def test_query_that_returns_no_data(self): fmt = """ select * from {table} where - time > 0 and time < 10 and user = 'user1' + time > 0 and time < 10 and + geohash = 'hash1' and + user = 'user1' """ query = fmt.format(table=table_name) ts_obj = self.client.ts_query('GeoCheckin', query) @@ -220,7 +222,9 @@ def test_query_that_returns_no_data(self): def test_query_that_matches_some_data(self): fmt = """ select * from {table} where - time > {t1} and time < {t2} and user = 'user2' + time > {t1} and time < {t2} and + geohash = 'hash1' and + user = 'user2' """ query = fmt.format( table=table_name, diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index ac7d9122..bb5bf5f0 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -642,9 +642,9 @@ def _encode_to_ts_cell(self, cell, ts_cell): ts_cell.binary_value = str_to_bytes(cell) elif isinstance(cell, int) or isinstance(cell, long): # noqa logging.debug("cell -> int/long: '%s'", cell) - ts_cell.integer_value = cell + ts_cell.sint64_value = cell elif isinstance(cell, float): - logging.debug("cell -> float: '%s'", cell) + logging.debug("cell -> double: '%s'", cell) ts_cell.double_value = cell else: t = type(cell) @@ -732,28 +732,24 @@ def _decode_timeseries_row(self, ts_row, ts_columns): logging.debug("ts_cell.binary_value: '%s'", ts_cell.binary_value) row.append(ts_cell.binary_value) - elif ts_col.type == riak_pb.TsColumnType.Value('INTEGER')\ - and ts_cell.HasField('integer_value'): - logging.debug("ts_cell.integer_value: '%s'", - ts_cell.integer_value) - row.append(ts_cell.integer_value) - elif ts_col.type == riak_pb.TsColumnType.Value('FLOAT')\ + elif ts_col.type == riak_pb.TsColumnType.Value('SINT64')\ + and ts_cell.HasField('sint64_value'): + logging.debug("ts_cell.sint64_value: '%s'", + ts_cell.sint64_value) + row.append(ts_cell.sint64_value) + elif ts_col.type == riak_pb.TsColumnType.Value('DOUBLE')\ and ts_cell.HasField('double_value'): - value = None - if ts_cell.HasField('double_value'): - value = ts_cell.double_value - elif ts_cell.HasField('float_value'): - value = ts_cell.float_value - logging.debug("ts_cell double/float value: '%d'", value) - row.append(value) + logging.debug("ts_cell.double_value: '%d'", + ts_cell.double_value) + row.append(ts_cell.double_value) elif ts_col.type == riak_pb.TsColumnType.Value('TIMESTAMP'): dt = None if ts_cell.HasField('timestamp_value'): dt = self._datetime_from_unix_time_millis( ts_cell.timestamp_value) - elif ts_cell.HasField('integer_value'): + elif ts_cell.HasField('sint64_value'): dt = self._datetime_from_unix_time_millis( - ts_cell.integer_value) + ts_cell.sint64_value) logging.debug("ts_cell datetime: '%s'", dt) row.append(dt) elif 
ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN')\ From d46c9404537d9c76e869ded89fdf51c56f229dfd Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 5 Nov 2015 08:45:19 -0800 Subject: [PATCH 040/324] TS integration tests are complete --- commands.py | 2 +- riak/client/operations.py | 2 +- riak/security.py | 16 ++++++------ riak/table.py | 2 +- riak/tests/test_timeseries.py | 46 ++++++++++------------------------- riak/transports/pbc/codec.py | 4 +-- 6 files changed, 25 insertions(+), 47 deletions(-) diff --git a/commands.py b/commands.py index d7b7923d..b3d41ea7 100644 --- a/commands.py +++ b/commands.py @@ -195,7 +195,7 @@ class setup_timeseries(bucket_type_commands, Command): user varchar not null, time timestamp not null, weather varchar not null, - temperature float, + temperature double, PRIMARY KEY( (geohash, user, quantum(time, 15, m)), geohash, user, time diff --git a/riak/client/operations.py b/riak/client/operations.py index 4f2d20a7..0d0c7848 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -549,7 +549,7 @@ def ts_get(self, transport, table, key): :param table: The timeseries table. :type table: string or :class:`Table ` :param key: The timeseries value's key. - :type key: list or dict + :type key: list :rtype: :class:`TsObject ` """ t = table diff --git a/riak/security.py b/riak/security.py index 542ff225..c1b36123 100644 --- a/riak/security.py +++ b/riak/security.py @@ -37,11 +37,11 @@ # For Python 2.7 and Python 3.x sslver = ssl.OPENSSL_VERSION_NUMBER # Be sure to use at least OpenSSL 1.0.1g - if sslver < OPENSSL_VERSION_101G or \ - not hasattr(ssl, 'PROTOCOL_TLSv1_2'): + tls_12 = hasattr(ssl, 'PROTOCOL_TLSv1_2') + if sslver < OPENSSL_VERSION_101G or not tls_12: verstring = ssl.OPENSSL_VERSION - msg = "Found {0} version, but expected at least OpenSSL 1.0.1g. " \ - "Security may not support TLS 1.2.".format(verstring) + msg = "{0} (>= 1.0.1g required), TLS 1.2 support: {1}" \ + .format(verstring, tls_12) warnings.warn(msg, UserWarning) if hasattr(ssl, 'PROTOCOL_TLSv1_2'): DEFAULT_TLS_VERSION = ssl.PROTOCOL_TLSv1_2 @@ -56,11 +56,11 @@ # For Python 2.6 sslver = OpenSSL.SSL.OPENSSL_VERSION_NUMBER # Be sure to use at least OpenSSL 1.0.1g - if (sslver < OPENSSL_VERSION_101G) or \ - not hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): + tls_12 = hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD') + if (sslver < OPENSSL_VERSION_101G) or tls_12: verstring = OpenSSL.SSL.SSLeay_version(OpenSSL.SSL.SSLEAY_VERSION) - msg = "Found {0} version, but expected at least OpenSSL 1.0.1g. " \ - "Security may not support TLS 1.2.".format(verstring) + msg = "{0} (>= 1.0.1g required), TLS 1.2 support: {1}" \ + .format(verstring, tls_12) warnings.warn(msg, UserWarning) if hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'): DEFAULT_TLS_VERSION = OpenSSL.SSL.TLSv1_2_METHOD diff --git a/riak/table.py b/riak/table.py index ea30752b..27312d66 100644 --- a/riak/table.py +++ b/riak/table.py @@ -57,7 +57,7 @@ def get(self, table, key): :param table: The timeseries table. :type table: string or :class:`Table ` :param key: The timeseries value's key. 
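With this patch the key must be the full primary key passed as a list rather than a dict. A minimal sketch against the GeoCheckin table used by the tests, assuming a local node; key values are illustrative and follow the local key order geohash, user, time:

    import datetime
    from riak import RiakClient

    client = RiakClient()                  # assumes a local Riak TS-enabled node
    key = ['hash1', 'user2', datetime.datetime(2015, 1, 1, 12, 0, 0)]
    ts_obj = client.ts_get('GeoCheckin', key)   # returns a TsObject
    print(ts_obj.rows)                          # the matching row, if any
    client.close()
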
- :type key: list or dict + :type key: list :rtype: :class:`TsObject ` """ return self.client.ts_get(self, table, key) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 37e9c931..6ac818bc 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -4,6 +4,7 @@ import platform import riak_pb +from riak import RiakError from riak.table import Table from riak.ts_object import TsObject from riak.transports.pbc.codec import RiakPbcCodec @@ -36,17 +37,15 @@ def setUp(self): [bd0, 0, 1.2, ts0, True], [bd1, 3, 4.5, ts1, False] ] - self.test_key = { - 'user': 'user2', - 'time': ts0 - } + self.test_key = [ 'hash1', 'user2', ts0 ] self.table = Table(None, 'test-table') def validate_keyreq(self, req): self.assertEqual(self.table.name, bytes_to_str(req.table)) - self.assertEqual(len(self.test_key.values()), len(req.key)) - self.assertEqual('user2', bytes_to_str(req.key[0].binary_value)) - self.assertEqual(self.ts0ms, req.key[1].timestamp_value) + self.assertEqual(len(self.test_key), len(req.key)) + self.assertEqual('hash1', bytes_to_str(req.key[0].binary_value)) + self.assertEqual('user2', bytes_to_str(req.key[1].binary_value)) + self.assertEqual(self.ts0ms, req.key[2].timestamp_value) def test_encode_data_for_get(self): req = riak_pb.TsGetReq() @@ -171,14 +170,6 @@ def setUpClass(cls): client = cls.create_client() table = client.table(table_name) - # CREATE TABLE GeoCheckin ( - # geohash varchar not null, - # user varchar not null, - # time timestamp not null, - # weather varchar not null, - # temperature float, - # PRIMARY KEY((quantum(time, 15, m), user), time, user) - # ) rows = [ ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0], @@ -198,7 +189,7 @@ def setUpClass(cls): cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) def validate_data(self, ts_obj): - self.assertEqual(len(ts_obj.columns), 5) + # TODO self.assertEqual(len(ts_obj.columns), 5) self.assertEqual(len(ts_obj.rows), 1) row = ts_obj.rows[0] self.assertEqual(row[0], 'hash1') @@ -233,25 +224,14 @@ def test_query_that_matches_some_data(self): ts_obj = self.client.ts_query('GeoCheckin', query) self.validate_data(ts_obj) - def test_get_single_value_using_dict(self): - key = { - 'user': 'user2', - 'time': self.fiveMinsAgo - } + def test_get_single_value(self): + key = [ 'hash1', 'user2', self.fiveMinsAgo] ts_obj = self.client.ts_get('GeoCheckin', key) self.validate_data(ts_obj) - def test_get_single_value_using_array(self): - key = [self.fiveMinsAgo, 'user2'] - ts_obj = self.client.ts_get('GeoCheckin', key) - self.validate_data(ts_obj) - - def test_delete_single_value_using_dict(self): - key = { - 'user': 'user2', - 'time': self.twentyMinsAgo - } + def test_delete_single_value(self): + key = [ 'hash1', 'user2', self.twentyMinsAgo] rslt = self.client.ts_delete('GeoCheckin', key) self.assertTrue(rslt) - ts_obj = self.client.ts_get('GeoCheckin', key) - self.assertIsNone(ts_obj) + with self.assertRaises(RiakError): + self.client.ts_get('GeoCheckin', key) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index bb5bf5f0..e343e87d 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -655,10 +655,8 @@ def _encode_timeseries_keyreq(self, table, key, req): key_vals = None if isinstance(key, list): key_vals = key - elif isinstance(key, dict): - key_vals = key.values() else: - raise ValueError("key must be a list or dict") + raise ValueError("key must be a list") req.table = 
str_to_bytes(table.name) for cell in key_vals: From 5f738c5b248b9a906c6d686902457b9fea32f5c1 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 10 Nov 2015 09:36:37 -0800 Subject: [PATCH 041/324] TS rpb renaming binary -> varchar, fix up tests and add some --- riak/tests/test_timeseries.py | 55 ++++++++++++++++++++++++++--------- riak/transports/pbc/codec.py | 16 +++++----- 2 files changed, 50 insertions(+), 21 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 6ac818bc..76dab7ed 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- import datetime +import logging import os import platform import riak_pb @@ -43,8 +44,8 @@ def setUp(self): def validate_keyreq(self, req): self.assertEqual(self.table.name, bytes_to_str(req.table)) self.assertEqual(len(self.test_key), len(req.key)) - self.assertEqual('hash1', bytes_to_str(req.key[0].binary_value)) - self.assertEqual('user2', bytes_to_str(req.key[1].binary_value)) + self.assertEqual('hash1', bytes_to_str(req.key[0].varchar_value)) + self.assertEqual('user2', bytes_to_str(req.key[1].varchar_value)) self.assertEqual(self.ts0ms, req.key[2].timestamp_value) def test_encode_data_for_get(self): @@ -67,14 +68,14 @@ def test_encode_data_for_put(self): self.assertEqual(len(self.rows), len(ts_put_req.rows)) r0 = ts_put_req.rows[0] - self.assertEqual(r0.cells[0].binary_value, self.rows[0][0]) + self.assertEqual(r0.cells[0].varchar_value, self.rows[0][0]) self.assertEqual(r0.cells[1].sint64_value, self.rows[0][1]) self.assertEqual(r0.cells[2].double_value, self.rows[0][2]) self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) r1 = ts_put_req.rows[1] - self.assertEqual(r1.cells[0].binary_value, self.rows[1][0]) + self.assertEqual(r1.cells[0].varchar_value, self.rows[1][0]) self.assertEqual(r1.cells[1].sint64_value, self.rows[1][1]) self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) @@ -84,8 +85,8 @@ def test_decode_data_from_query(self): tqr = riak_pb.TsQueryResp() c0 = tqr.columns.add() - c0.name = str_to_bytes('col_binary') - c0.type = riak_pb.TsColumnType.Value('BINARY') + c0.name = str_to_bytes('col_varchar') + c0.type = riak_pb.TsColumnType.Value('VARCHAR') c1 = tqr.columns.add() c1.name = str_to_bytes('col_integer') c1.type = riak_pb.TsColumnType.Value('SINT64') @@ -101,7 +102,7 @@ def test_decode_data_from_query(self): r0 = tqr.rows.add() r0c0 = r0.cells.add() - r0c0.binary_value = self.rows[0][0] + r0c0.varchar_value = self.rows[0][0] r0c1 = r0.cells.add() r0c1.sint64_value = self.rows[0][1] r0c2 = r0.cells.add() @@ -113,7 +114,7 @@ def test_decode_data_from_query(self): r1 = tqr.rows.add() r1c0 = r1.cells.add() - r1c0.binary_value = self.rows[1][0] + r1c0.varchar_value = self.rows[1][0] r1c1 = r1.cells.add() r1c1.sint64_value = self.rows[1][1] r1c2 = r1.cells.add() @@ -131,8 +132,8 @@ def test_decode_data_from_query(self): self.assertEqual(len(tqr.columns), len(tsobj.columns)) c = tsobj.columns - self.assertEqual(c[0][0], 'col_binary') - self.assertEqual(c[0][1], riak_pb.TsColumnType.Value('BINARY')) + self.assertEqual(c[0][0], 'col_varchar') + self.assertEqual(c[0][1], riak_pb.TsColumnType.Value('VARCHAR')) self.assertEqual(c[1][0], 'col_integer') self.assertEqual(c[1][1], riak_pb.TsColumnType.Value('SINT64')) self.assertEqual(c[2][0], 'col_double') @@ -187,9 +188,13 @@ def setUpClass(cls): cls.fiveMinsAgo = 
fiveMinsAgo cls.twentyMinsAgo = twentyMinsAgo cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) + cls.twentyMinsAgoMsec = codec._unix_time_millis(twentyMinsAgo) + cls.numCols = len(rows[0]) + cls.rows = rows def validate_data(self, ts_obj): - # TODO self.assertEqual(len(ts_obj.columns), 5) + if ts_obj.columns is not None: + self.assertEqual(len(ts_obj.columns), self.numCols) self.assertEqual(len(ts_obj.rows), 1) row = ts_obj.rows[0] self.assertEqual(row[0], 'hash1') @@ -224,14 +229,38 @@ def test_query_that_matches_some_data(self): ts_obj = self.client.ts_query('GeoCheckin', query) self.validate_data(ts_obj) + def test_query_that_matches_all_data(self): + fmt = """ + select * from {table} where + time >= {t1} and time <= {t2} and + geohash = 'hash1' and + user = 'user2' + """ + query = fmt.format( + table=table_name, + t1=self.twentyMinsAgoMsec, + t2=self.nowMsec) + logging.debug("all data query: %s", query) + ts_obj = self.client.ts_query('GeoCheckin', query) + for i, want in enumerate(self.rows): + got = ts_obj.rows[i] + logging.debug("got: %s want: %s", got, want) + self.assertListEqual(got, want) + + def test_get_with_invalid_key(self): + key = [ 'hash1', 'user2' ] + with self.assertRaises(RiakError): + self.client.ts_get('GeoCheckin', key) + def test_get_single_value(self): key = [ 'hash1', 'user2', self.fiveMinsAgo] ts_obj = self.client.ts_get('GeoCheckin', key) + self.assertIsNotNone(ts_obj) self.validate_data(ts_obj) def test_delete_single_value(self): key = [ 'hash1', 'user2', self.twentyMinsAgo] rslt = self.client.ts_delete('GeoCheckin', key) self.assertTrue(rslt) - with self.assertRaises(RiakError): - self.client.ts_get('GeoCheckin', key) + ts_obj = self.client.ts_get('GeoCheckin', key) + self.assertEqual(len(ts_obj.rows), 0) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index e343e87d..47e4abb9 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -628,8 +628,8 @@ def _encode_map_update(self, dtype, msg, op): def _encode_to_ts_cell(self, cell, ts_cell): if cell is not None: if isinstance(cell, bytes) or isinstance(cell, bytearray): - logging.debug("cell -> binary_value: '%s'", cell) - ts_cell.binary_value = cell + logging.debug("cell -> varchar_value: '%s'", cell) + ts_cell.varchar_value = cell elif isinstance(cell, datetime.datetime): ts_cell.timestamp_value = self._unix_time_millis(cell) logging.debug("cell -> timestamp: '%s', timestamp_value '%d'", @@ -639,7 +639,7 @@ def _encode_to_ts_cell(self, cell, ts_cell): ts_cell.boolean_value = cell elif isinstance(cell, str): logging.debug("cell -> str: '%s'", cell) - ts_cell.binary_value = str_to_bytes(cell) + ts_cell.varchar_value = str_to_bytes(cell) elif isinstance(cell, int) or isinstance(cell, long): # noqa logging.debug("cell -> int/long: '%s'", cell) ts_cell.sint64_value = cell @@ -725,11 +725,11 @@ def _decode_timeseries_row(self, ts_row, ts_columns): for i, ts_cell in enumerate(ts_row.cells): ts_col = ts_columns[i] logging.debug("ts_cell: '%s', ts_col: '%d'", ts_cell, ts_col.type) - if ts_col.type == riak_pb.TsColumnType.Value('BINARY')\ - and ts_cell.HasField('binary_value'): - logging.debug("ts_cell.binary_value: '%s'", - ts_cell.binary_value) - row.append(ts_cell.binary_value) + if ts_col.type == riak_pb.TsColumnType.Value('VARCHAR')\ + and ts_cell.HasField('varchar_value'): + logging.debug("ts_cell.varchar_value: '%s'", + ts_cell.varchar_value) + row.append(ts_cell.varchar_value) elif ts_col.type == riak_pb.TsColumnType.Value('SINT64')\ and 
ts_cell.HasField('sint64_value'): logging.debug("ts_cell.sint64_value: '%s'", From 0bcebd711e1890d46bb47b6be531b7c6df806aa5 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 23 Nov 2015 11:01:23 -0800 Subject: [PATCH 042/324] Fix tests. Timestamp is now returned in timestamp_value field --- riak/tests/test_timeseries.py | 13 ++++++++++--- riak/transports/pbc/codec.py | 12 ++++-------- 2 files changed, 14 insertions(+), 11 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 76dab7ed..ccf3b5a2 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -168,10 +168,12 @@ def setUpClass(cls): tenMinsAgo = fiveMinsAgo - fiveMins fifteenMinsAgo = tenMinsAgo - fiveMins twentyMinsAgo = fifteenMinsAgo - fiveMins + twentyFiveMinsAgo = twentyMinsAgo - fiveMins client = cls.create_client() table = client.table(table_name) rows = [ + ['hash1', 'user2', twentyFiveMinsAgo, 'typhoon', 90.3], ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0], ['hash1', 'user2', fiveMinsAgo, 'wind', None], @@ -187,6 +189,7 @@ def setUpClass(cls): cls.nowMsec = codec._unix_time_millis(cls.now) cls.fiveMinsAgo = fiveMinsAgo cls.twentyMinsAgo = twentyMinsAgo + cls.twentyFiveMinsAgo = twentyFiveMinsAgo cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) cls.twentyMinsAgoMsec = codec._unix_time_millis(twentyMinsAgo) cls.numCols = len(rows[0]) @@ -229,7 +232,7 @@ def test_query_that_matches_some_data(self): ts_obj = self.client.ts_query('GeoCheckin', query) self.validate_data(ts_obj) - def test_query_that_matches_all_data(self): + def test_query_that_matches_more_data(self): fmt = """ select * from {table} where time >= {t1} and time <= {t2} and @@ -242,8 +245,12 @@ def test_query_that_matches_all_data(self): t2=self.nowMsec) logging.debug("all data query: %s", query) ts_obj = self.client.ts_query('GeoCheckin', query) + j = 0 for i, want in enumerate(self.rows): - got = ts_obj.rows[i] + if want[2] == self.twentyFiveMinsAgo: + continue + got = ts_obj.rows[j] + j += 1 logging.debug("got: %s want: %s", got, want) self.assertListEqual(got, want) @@ -259,7 +266,7 @@ def test_get_single_value(self): self.validate_data(ts_obj) def test_delete_single_value(self): - key = [ 'hash1', 'user2', self.twentyMinsAgo] + key = [ 'hash1', 'user2', self.twentyFiveMinsAgo] rslt = self.client.ts_delete('GeoCheckin', key) self.assertTrue(rslt) ts_obj = self.client.ts_get('GeoCheckin', key) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 47e4abb9..98668345 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -740,14 +740,10 @@ def _decode_timeseries_row(self, ts_row, ts_columns): logging.debug("ts_cell.double_value: '%d'", ts_cell.double_value) row.append(ts_cell.double_value) - elif ts_col.type == riak_pb.TsColumnType.Value('TIMESTAMP'): - dt = None - if ts_cell.HasField('timestamp_value'): - dt = self._datetime_from_unix_time_millis( - ts_cell.timestamp_value) - elif ts_cell.HasField('sint64_value'): - dt = self._datetime_from_unix_time_millis( - ts_cell.sint64_value) + elif ts_col.type == riak_pb.TsColumnType.Value('TIMESTAMP')\ + and ts_cell.HasField('timestamp_value'): + dt = self._datetime_from_unix_time_millis( + ts_cell.timestamp_value) logging.debug("ts_cell datetime: '%s'", dt) row.append(dt) elif ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN')\ From 7330d75556efa40a70c67c0fd835f4a1fb605a6c Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Wed, 2 Dec 2015 
23:14:07 +0000 Subject: [PATCH 043/324] PEP8 --- riak/tests/test_timeseries.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index ccf3b5a2..7baf310f 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -38,7 +38,7 @@ def setUp(self): [bd0, 0, 1.2, ts0, True], [bd1, 3, 4.5, ts1, False] ] - self.test_key = [ 'hash1', 'user2', ts0 ] + self.test_key = ['hash1', 'user2', ts0] self.table = Table(None, 'test-table') def validate_keyreq(self, req): @@ -255,18 +255,18 @@ def test_query_that_matches_more_data(self): self.assertListEqual(got, want) def test_get_with_invalid_key(self): - key = [ 'hash1', 'user2' ] + key = ['hash1', 'user2'] with self.assertRaises(RiakError): self.client.ts_get('GeoCheckin', key) def test_get_single_value(self): - key = [ 'hash1', 'user2', self.fiveMinsAgo] + key = ['hash1', 'user2', self.fiveMinsAgo] ts_obj = self.client.ts_get('GeoCheckin', key) self.assertIsNotNone(ts_obj) self.validate_data(ts_obj) def test_delete_single_value(self): - key = [ 'hash1', 'user2', self.twentyFiveMinsAgo] + key = ['hash1', 'user2', self.twentyFiveMinsAgo] rslt = self.client.ts_delete('GeoCheckin', key) self.assertTrue(rslt) ts_obj = self.client.ts_get('GeoCheckin', key) From 9aaf03c5f73b2a47821ba35b76f375876c52cf85 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 24 Nov 2015 12:20:52 -0800 Subject: [PATCH 044/324] Move riak_pb Python code into this repo as a submodule. Still working on import errors Getting rid of splat imports Continuing work to fix riak_pb imports Finish up changes to bring in riak_pb as a submodule Small fix for test when security is disabled tweak test lint fixes modify test to fix error Use all https clone urls, updated submodule commit Ensure that tox uses the correct executable for Python 2.7.9 Revert deletion of these files bash script cleanup All tests pass --- .gitignore | 2 + .gitmodules | 3 + MANIFEST.in | 2 +- Makefile | 79 ++ buildbot/Makefile | 4 +- buildbot/tox_setup.sh | 83 +- commands.py | 261 ++++- riak/riak_pb/__init__.py | 0 riak/riak_pb/messages.py | 152 +++ riak/riak_pb/riak_dt_pb2.py | 863 ++++++++++++++ riak/riak_pb/riak_kv_pb2.py | 1747 +++++++++++++++++++++++++++++ riak/riak_pb/riak_pb2.py | 786 +++++++++++++ riak/riak_pb/riak_search_pb2.py | 210 ++++ riak/riak_pb/riak_yokozuna_pb2.py | 372 ++++++ riak/tests/__init__.py | 6 +- riak/tests/test_all.py | 4 +- riak/tests/test_kv.py | 6 +- riak/tests/test_security.py | 12 +- riak/transports/pbc/codec.py | 106 +- riak/transports/pbc/connection.py | 51 +- riak/transports/pbc/stream.py | 34 +- riak/transports/pbc/transport.py | 282 ++--- riak_pb | 1 + setup.py | 28 +- tox.ini | 6 + 25 files changed, 4757 insertions(+), 343 deletions(-) create mode 100644 .gitmodules create mode 100644 Makefile create mode 100644 riak/riak_pb/__init__.py create mode 100644 riak/riak_pb/messages.py create mode 100644 riak/riak_pb/riak_dt_pb2.py create mode 100644 riak/riak_pb/riak_kv_pb2.py create mode 100644 riak/riak_pb/riak_pb2.py create mode 100644 riak/riak_pb/riak_search_pb2.py create mode 100644 riak/riak_pb/riak_yokozuna_pb2.py create mode 160000 riak_pb diff --git a/.gitignore b/.gitignore index f9515221..24c0bded 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,8 @@ *.pyc .python-version +.tox/ + docs/_build .*.swp diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..e0cba09c --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "riak_pb"] + path = 
riak_pb + url = git://github.com/basho/riak_pb.git diff --git a/MANIFEST.in b/MANIFEST.in index e7c87e33..e573808c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -5,4 +5,4 @@ include README.rst include LICENSE include RELEASE_NOTES.md include version.py -include commands.py \ No newline at end of file +include commands.py diff --git a/Makefile b/Makefile new file mode 100644 index 00000000..0cd2c971 --- /dev/null +++ b/Makefile @@ -0,0 +1,79 @@ +.PHONY: all compile clean release +.PHONY: python_compile python_clean python_release python_install +.PHONY: python3_compile python3_clean python3_release python3_install + +all: python_compile python3_compile + +clean: python_clean python3_clean + +release: python_release python3_release + +# Python 2.x specific build steps +python_compile: + @echo "==> Python (compile)" + @protoc -I ./riak_pb/src --python_out=./riak/riak_pb ./riak_pb/src/*.proto + @python2 setup.py build_messages + +python_clean: + @echo "==> Python (clean)" + @python2 setup.py clean_messages + @rm -rf ./riak/riak_pb/*.pyc ./riak/riak_pb/*_pb2.py ./riak/riak_pb/*.pyc + +python_release: python_clean +ifeq ($(RELEASE_GPG_KEYNAME),) + @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" +else + @echo "==> Python (release)" + @protoc -Isrc --python_out=riak_pb src/*.proto + @python2.7 setup.py build_messages build --build-base=riak + @python2.7 setup.py build --build-base=python bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + @python2.7 setup.py clean --build-base=python clean_messages + @rm -rf *.pyc riak_pb/*_pb2.py riak_pb/*.pyc riak_pb.egg-info python + @protoc -Isrc --python_out=riak_pb src/*.proto + @python2.7 setup.py build_messages build --build-base=riak + @python2.7 setup.py build --build-base=python sdist upload -s -i $(RELEASE_GPG_KEYNAME) + @python2.6 setup.py clean --build-base=python clean_messages + @rm -rf riak_pb/*_pb2.pyc *.pyc python_riak_pb.egg-info python + @protoc -Isrc --python_out=riak_pb src/*.proto + @python2.6 setup.py build_messages build --build-base=riak + @python2.6 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) +endif + +python_install: python_compile + @echo "==> Python (install)" + @./setup.py build_messages build --build-base=riak install + +# Python 3.x specific build steps +python3_compile: + @echo "==> Python 3 (compile)" + @protoc -Isrc --python_out=riak_pb src/*.proto + @python3 setup.py build_messages build --build-base=riak + +python3_clean: + @echo "==> Python 3 (clean)" + @python3 setup.py clean --build-base=riak clean_messages + @rm -rf riak_pb/*_pb2.py riak_pb/__pycache__ __pycache__ python3_riak_pb.egg-info python3 + +python3_release: python3_clean +ifeq ($(RELEASE_GPG_KEYNAME),) + @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" +else + @echo "==> Python 3 (release)" + @protoc -Isrc --python_out=riak_pb src/*.proto + @python3.4 setup.py build_messages build --build-base=riak + @python3.4 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + @python3.4 setup.py clean --build-base=riak clean_messages + @rm -rf riak_pb/*_pb2.py riak_pb/__pycache__ __pycache__ python3_riak_pb.egg-info python3 + @protoc -Isrc --python_out=riak_pb src/*.proto + @python3.4 setup.py build_messages build --build-base=riak + @python3.4 setup.py build --build-base=riak sdist upload -s -i $(RELEASE_GPG_KEYNAME) + @python3.4 setup.py clean --build-base=riak clean_messages + @rm -rf riak_pb/*_pb2.py riak_pb/__pycache__ __pycache__ python3_riak_pb.egg-info python3 + @protoc -Isrc 
--python_out=riak_pb src/*.proto + @python3.3 setup.py build_messages build --build-base=riak + @python3.3 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) +endif + +python3_install: python3_compile + @echo "==> Python 3 (install)" + @python3 setup.py build_messages build --build-base=riak install diff --git a/buildbot/Makefile b/buildbot/Makefile index 1323a276..341da322 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -16,8 +16,8 @@ compile: lint: @pip install --upgrade pep8 flake8 - @cd ..; pep8 riak *.py - @cd ..; flake8 riak *.py + @cd ..; pep8 --exclude=riak_pb riak *.py + @cd ..; flake8 --exclude=riak_pb riak *.py @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/client.crt @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/server.crt diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index 1dc3f72c..94246d0d 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -1,73 +1,90 @@ #!/usr/bin/env bash -# pyenv root -export PYENV_ROOT="$HOME/.pyenv" + +if [[ ! -d $PYENV_ROOT ]] +then + export PYENV_ROOT="$HOME/.pyenv" +fi + TEST_ROOT=$PWD/.. # Install pyenv if it's missing -if [[ ! -d $PYENV_ROOT ]]; then - git clone git://github.com/yyuu/pyenv.git ${PYENV_ROOT} - cd ${PYENV_ROOT} - # Get the latest tagged version - git checkout `git tag | tail -1` +if [[ ! -d $PYENV_ROOT ]] +then + git clone https://github.com/yyuu/pyenv.git $PYENV_ROOT + (cd $PYENV_ROOT && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) fi # Upgrade it, if it's too old -if [[ -z $(pyenv install --list | grep 3.4.3) ]]; then - cd ${PYENV_ROOT} - git pull origin master - git pull -u origin master - # Get the latest tagged version - git checkout `git tag | tail -1` +if [[ -z $(pyenv install --list | grep 3.4.3) ]] +then + (cd $PYENV_ROOT && git pull -u origin master && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) fi -if [[ ! -d ${PYENV_ROOT}/plugins/pyenv-virtualenv ]]; then - git clone https://github.com/yyuu/pyenv-virtualenv.git ${PYENV_ROOT}/plugins/pyenv-virtualenv - cd ${PYENV_ROOT}/plugins/pyenv-virtualenv - git checkout `git tag | tail -1` +if [[ ! -d $PYENV_ROOT/plugins/pyenv-virtualenv ]] +then + git clone https://github.com/yyuu/pyenv-virtualenv.git $PYENV_ROOT/plugins/pyenv-virtualenv + (cd $PYENV_ROOT/plugins/pyenv-virtualenv && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) fi -if [[ ! -d ${PYENV_ROOT}/plugins/pyenv-alias ]]; then - git clone https://github.com/s1341/pyenv-alias.git ${PYENV_ROOT}/plugins/pyenv-alias +if [[ ! 
-d $PYENV_ROOT/plugins/pyenv-alias ]] +then + git clone https://github.com/s1341/pyenv-alias.git $PYENV_ROOT/plugins/pyenv-alias fi # Add pyenv root to PATH # and initialize pyenv -PATH="$PYENV_ROOT/bin:$PATH" -# initialize pyenv -eval "$(pyenv init -)" -# initialize pyenv virtualenv -eval "$(pyenv virtualenv-init -)" +if [[ $PATH != */.pyenv* ]] +then + echo "[INFO] adding $PYENV_ROOT/bin to PATH" + export PATH="$PYENV_ROOT/bin:$PATH" +fi + +if [[ $(type -t pyenv) != 'function' ]] +then + echo "[INFO] init pyenv" + eval "$(pyenv init -)" + eval "$(pyenv virtualenv-init -)" +fi # Now install (allthethings) versions for testing -if [[ -z $(pyenv versions | grep riak_3.4.3) ]]; then +if [[ -z $(pyenv versions | grep riak_3.4.3) ]] +then VERSION_ALIAS="riak_3.4.3" pyenv install 3.4.3 pyenv virtualenv riak_3.4.3 riak-py34 fi -if [[ -z $(pyenv versions | grep riak_3.3.6) ]]; then +if [[ -z $(pyenv versions | grep riak_3.3.6) ]] +then VERSION_ALIAS="riak_3.3.6" pyenv install 3.3.6 pyenv virtualenv riak_3.3.6 riak-py33 fi -if [[ -z $(pyenv versions | grep riak_2.7.10) ]]; then +if [[ -z $(pyenv versions | grep riak_2.7.10) ]] +then VERSION_ALIAS="riak_2.7.10" pyenv install 2.7.10 pyenv virtualenv riak_2.7.10 riak-py27 fi -if [[ -z $(pyenv versions | grep riak_2.7.9) ]]; then +if [[ -z $(pyenv versions | grep riak_2.7.9) ]] +then VERSION_ALIAS="riak_2.7.9" pyenv install 2.7.9 pyenv virtualenv riak_2.7.9 riak-py279 fi -if [[ -z $(pyenv versions | grep riak_2.6.9) ]]; then +if [[ -z $(pyenv versions | grep riak_2.6.9) ]] +then VERSION_ALIAS="riak_2.6.9" pyenv install 2.6.9 pyenv virtualenv riak_2.6.9 riak-py26 fi -pyenv global riak-py34 riak-py33 riak-py27 riak-py279 riak-py26 + +(cd $TEST_ROOT && pyenv local riak-py34 riak-py33 riak-py27 riak-py279 riak-py26) + pyenv versions # Now install tox pip install --upgrade pip -if [ -z "`pip show tox`" ]; then +if [[ -z $(pip show tox) ]] +then pip install -Iv tox - if [ -z "`pip show tox`" ]; then - echo "ERROR: Install of tox failed" + if [[ -z $(pip show tox) ]] + then + echo "[ERROR] install of tox failed" 1>&2 exit 1 fi pyenv rehash diff --git a/commands.py b/commands.py index 06ee3039..2ca53b7b 100644 --- a/commands.py +++ b/commands.py @@ -1,18 +1,39 @@ -""" -distutils commands for riak-python-client -""" -from distutils import log +import csv +import os +import os.path +import re +import shutil + +from datetime import date from distutils.core import Command from distutils.errors import DistutilsOptionError -from subprocess import Popen, PIPE +from distutils.file_util import write_file +from distutils import log from string import Template -import shutil -import re -import os.path +from subprocess import Popen, PIPE + +__all__ = ['create_bucket_types', 'build_messages', + 'setup_security', 'enable_security', 'disable_security', + 'preconfigure', 'configure'] -__all__ = ['create_bucket_types', 'setup_security', 'enable_security', - 'disable_security', 'preconfigure', 'configure'] + +LICENSE = """# Copyright {0} Basho Technologies, Inc. +# +# This file is provided to you under the Apache License, +# Version 2.0 (the "License"); you may not use this file +# except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +""".format(date.today().year) # Exception classes used by this module. @@ -472,3 +493,223 @@ def run(self): sub_commands = [('create_bucket_types', None), ('setup_security', None) ] + + +class ComparableMixin(object): + def _compare(self, other, method): + try: + return method(self._cmpkey(), other._cmpkey()) + except (AttributeError, TypeError): + # _cmpkey not implemented, or return different type, + # so I can't compare with "other". + return NotImplemented + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + +class MessageCodeMapping(ComparableMixin): + def __init__(self, code, message, proto): + self.code = int(code) + self.message = message + self.proto = proto + self.message_code_name = self._message_code_name() + self.module_name = 'riak.riak_pb.{0}_pb2'.format(self.proto) + self.message_class = self._message_class() + + def _cmpkey(self): + return self.code + + def __hash__(self): + return self.code + + def _message_code_name(self): + strip_rpb = re.sub(r"^Rpb", "", self.message) + word = re.sub(r"([A-Z]+)([A-Z][a-z])", r'\1_\2', strip_rpb) + word = re.sub(r"([a-z\d])([A-Z])", r'\1_\2', word) + word = word.replace("-", "_") + return "MSG_CODE_" + word.upper() + + def _message_class(self): + try: + pbmod = __import__(self.module_name, globals(), locals(), + [self.message]) + klass = pbmod.__dict__[self.message] + return klass + except KeyError: + log.warn("Did not find '%s' message class in module '%s'", + self.message, self.module_name) + except ImportError as e: + log.error("Could not import module '%s', exception: %s", + self.module_name, e) + raise + return None + + +# NOTE: TO RUN THIS SUCCESSFULLY, YOU NEED TO HAVE THESE +# PACKAGES INSTALLED: +# protobuf or python3_protobuf +# six +# +# Run the following command to install them: +# python setup.py install +# +# TO DEBUG: Set DISTUTILS_DEBUG=1 in the environment or run as +# 'python setup.py -vv build_messages' +class build_messages(Command): + """ + Generates message code mappings. Add to the build process using:: + + setup(cmd_class={'build_messages': build_messages}) + """ + + description = "generate protocol message code mappings" + + user_options = [ + ('source=', None, 'source CSV file containing message code mappings'), + ('destination=', None, 'destination Python source file') + ] + + # Used in loading and generating + _pb_imports = set() + _messages = set() + _linesep = os.linesep + _indented_item_sep = ',{0} '.format(_linesep) + + _docstring = [ + '' + '# This is a generated file. 
DO NOT EDIT.', + '', + '"""', + 'Constants and mappings between Riak protocol codes and messages.', + '"""', + '' + ] + + def initialize_options(self): + self.source = None + self.destination = None + self.update_import = None + + def finalize_options(self): + if self.source is None: + self.source = 'riak_pb/src/riak_pb_messages.csv' + if self.destination is None: + self.destination = 'riak/riak_pb/messages.py' + + def run(self): + self.force = True + self.make_file(self.source, self.destination, + self._load_and_generate, []) + + def _load_and_generate(self): + self._format_python2_or_3() + self._load() + self._generate() + + def _load(self): + with open(self.source, 'r', buffering=1) as csvfile: + reader = csv.reader(csvfile) + for row in reader: + message = MessageCodeMapping(*row) + self._messages.add(message) + self._pb_imports.add(message.module_name) + + def _generate(self): + self._contents = [] + self._generate_doc() + self._generate_imports() + self._generate_codes() + self._generate_classes() + write_file(self.destination, self._contents) + + def _generate_doc(self): + # Write the license and docstring header + self._contents.append(LICENSE) + self._contents.extend(self._docstring) + + def _generate_imports(self): + # Write imports + for im in sorted(self._pb_imports): + self._contents.append("import {0}".format(im)) + + def _generate_codes(self): + # Write protocol code constants + self._contents.extend(['', "# Protocol codes"]) + for message in sorted(self._messages): + self._contents.append("{0} = {1}".format(message.message_code_name, + message.code)) + + def _generate_classes(self): + # Write message classes + classes = [self._generate_mapping(message) + for message in sorted(self._messages)] + + classes = self._indented_item_sep.join(classes) + self._contents.extend(['', + "# Mapping from code to protobuf class", + 'MESSAGE_CLASSES = {', + ' ' + classes, + '}']) + + def _generate_mapping(self, m): + if m.message_class is not None: + klass = "{0}.{1}".format(m.module_name, + m.message_class.__name__) + else: + klass = "None" + pair = "{0}: {1}".format(m.message_code_name, klass) + if len(pair) > 76: + # Try to satisfy PEP8, lulz + pair = (self._linesep + ' ').join(pair.split(' ')) + return pair + + def _format_python2_or_3(self): + """ + Change the PB files to use full pathnames for Python 3.x + and modify the metaclasses to be version agnostic + """ + pb_files = set() + with open(self.source, 'r', buffering=1) as csvfile: + reader = csv.reader(csvfile) + for row in reader: + _, _, proto = row + pb_files.add('riak/riak_pb/{0}_pb2.py'.format(proto)) + + for im in sorted(pb_files): + with open(im, 'r', buffering=1) as pbfile: + contents = 'from six import *\n' + pbfile.read() + contents = re.sub(r'riak_pb2', + r'riak.riak_pb.riak_pb2', + contents) + # Look for this pattern in the protoc-generated file: + # + # class RpbCounterGetResp(_message.Message): + # __metaclass__ = _reflection.GeneratedProtocolMessageType + # + # and convert it to: + # + # @add_metaclass(_reflection.GeneratedProtocolMessageType) + # class RpbCounterGetResp(_message.Message): + contents = re.sub( + r'class\s+(\S+)\((\S+)\):\s*\n' + '\s+__metaclass__\s+=\s+(\S+)\s*\n', + r'@add_metaclass(\3)\nclass \1(\2):\n', contents) + + with open(im, 'w', buffering=1) as pbfile: + pbfile.write(contents) diff --git a/riak/riak_pb/__init__.py b/riak/riak_pb/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/riak/riak_pb/messages.py b/riak/riak_pb/messages.py new file mode 100644 index 
00000000..7d7f8b91 --- /dev/null +++ b/riak/riak_pb/messages.py @@ -0,0 +1,152 @@ +# Copyright 2015 Basho Technologies, Inc. +# +# This file is provided to you under the Apache License, +# Version 2.0 (the "License"); you may not use this file +# except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is a generated file. DO NOT EDIT. + +""" +Constants and mappings between Riak protocol codes and messages. +""" + +import riak.riak_pb.riak_dt_pb2 +import riak.riak_pb.riak_kv_pb2 +import riak.riak_pb.riak_pb2 +import riak.riak_pb.riak_search_pb2 +import riak.riak_pb.riak_yokozuna_pb2 + +# Protocol codes +MSG_CODE_ERROR_RESP = 0 +MSG_CODE_PING_REQ = 1 +MSG_CODE_PING_RESP = 2 +MSG_CODE_GET_CLIENT_ID_REQ = 3 +MSG_CODE_GET_CLIENT_ID_RESP = 4 +MSG_CODE_SET_CLIENT_ID_REQ = 5 +MSG_CODE_SET_CLIENT_ID_RESP = 6 +MSG_CODE_GET_SERVER_INFO_REQ = 7 +MSG_CODE_GET_SERVER_INFO_RESP = 8 +MSG_CODE_GET_REQ = 9 +MSG_CODE_GET_RESP = 10 +MSG_CODE_PUT_REQ = 11 +MSG_CODE_PUT_RESP = 12 +MSG_CODE_DEL_REQ = 13 +MSG_CODE_DEL_RESP = 14 +MSG_CODE_LIST_BUCKETS_REQ = 15 +MSG_CODE_LIST_BUCKETS_RESP = 16 +MSG_CODE_LIST_KEYS_REQ = 17 +MSG_CODE_LIST_KEYS_RESP = 18 +MSG_CODE_GET_BUCKET_REQ = 19 +MSG_CODE_GET_BUCKET_RESP = 20 +MSG_CODE_SET_BUCKET_REQ = 21 +MSG_CODE_SET_BUCKET_RESP = 22 +MSG_CODE_MAP_RED_REQ = 23 +MSG_CODE_MAP_RED_RESP = 24 +MSG_CODE_INDEX_REQ = 25 +MSG_CODE_INDEX_RESP = 26 +MSG_CODE_SEARCH_QUERY_REQ = 27 +MSG_CODE_SEARCH_QUERY_RESP = 28 +MSG_CODE_RESET_BUCKET_REQ = 29 +MSG_CODE_RESET_BUCKET_RESP = 30 +MSG_CODE_GET_BUCKET_TYPE_REQ = 31 +MSG_CODE_SET_BUCKET_TYPE_REQ = 32 +MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ = 33 +MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP = 34 +MSG_CODE_CS_BUCKET_REQ = 40 +MSG_CODE_CS_BUCKET_RESP = 41 +MSG_CODE_COUNTER_UPDATE_REQ = 50 +MSG_CODE_COUNTER_UPDATE_RESP = 51 +MSG_CODE_COUNTER_GET_REQ = 52 +MSG_CODE_COUNTER_GET_RESP = 53 +MSG_CODE_YOKOZUNA_INDEX_GET_REQ = 54 +MSG_CODE_YOKOZUNA_INDEX_GET_RESP = 55 +MSG_CODE_YOKOZUNA_INDEX_PUT_REQ = 56 +MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ = 57 +MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ = 58 +MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP = 59 +MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ = 60 +MSG_CODE_DT_FETCH_REQ = 80 +MSG_CODE_DT_FETCH_RESP = 81 +MSG_CODE_DT_UPDATE_REQ = 82 +MSG_CODE_DT_UPDATE_RESP = 83 +MSG_CODE_AUTH_REQ = 253 +MSG_CODE_AUTH_RESP = 254 +MSG_CODE_START_TLS = 255 + +# Mapping from code to protobuf class +MESSAGE_CLASSES = { + MSG_CODE_ERROR_RESP: riak.riak_pb.riak_pb2.RpbErrorResp, + MSG_CODE_PING_REQ: None, + MSG_CODE_PING_RESP: None, + MSG_CODE_GET_CLIENT_ID_REQ: None, + MSG_CODE_GET_CLIENT_ID_RESP: riak.riak_pb.riak_kv_pb2.RpbGetClientIdResp, + MSG_CODE_SET_CLIENT_ID_REQ: riak.riak_pb.riak_kv_pb2.RpbSetClientIdReq, + MSG_CODE_SET_CLIENT_ID_RESP: None, + MSG_CODE_GET_SERVER_INFO_REQ: None, + MSG_CODE_GET_SERVER_INFO_RESP: riak.riak_pb.riak_pb2.RpbGetServerInfoResp, + MSG_CODE_GET_REQ: riak.riak_pb.riak_kv_pb2.RpbGetReq, + MSG_CODE_GET_RESP: riak.riak_pb.riak_kv_pb2.RpbGetResp, + MSG_CODE_PUT_REQ: riak.riak_pb.riak_kv_pb2.RpbPutReq, + MSG_CODE_PUT_RESP: riak.riak_pb.riak_kv_pb2.RpbPutResp, + MSG_CODE_DEL_REQ: riak.riak_pb.riak_kv_pb2.RpbDelReq, + MSG_CODE_DEL_RESP: None, 
+ MSG_CODE_LIST_BUCKETS_REQ: riak.riak_pb.riak_kv_pb2.RpbListBucketsReq, + MSG_CODE_LIST_BUCKETS_RESP: riak.riak_pb.riak_kv_pb2.RpbListBucketsResp, + MSG_CODE_LIST_KEYS_REQ: riak.riak_pb.riak_kv_pb2.RpbListKeysReq, + MSG_CODE_LIST_KEYS_RESP: riak.riak_pb.riak_kv_pb2.RpbListKeysResp, + MSG_CODE_GET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbGetBucketReq, + MSG_CODE_GET_BUCKET_RESP: riak.riak_pb.riak_pb2.RpbGetBucketResp, + MSG_CODE_SET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbSetBucketReq, + MSG_CODE_SET_BUCKET_RESP: None, + MSG_CODE_MAP_RED_REQ: riak.riak_pb.riak_kv_pb2.RpbMapRedReq, + MSG_CODE_MAP_RED_RESP: riak.riak_pb.riak_kv_pb2.RpbMapRedResp, + MSG_CODE_INDEX_REQ: riak.riak_pb.riak_kv_pb2.RpbIndexReq, + MSG_CODE_INDEX_RESP: riak.riak_pb.riak_kv_pb2.RpbIndexResp, + MSG_CODE_SEARCH_QUERY_REQ: riak.riak_pb.riak_search_pb2.RpbSearchQueryReq, + MSG_CODE_SEARCH_QUERY_RESP: riak.riak_pb.riak_search_pb2.RpbSearchQueryResp, + MSG_CODE_RESET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbResetBucketReq, + MSG_CODE_RESET_BUCKET_RESP: None, + MSG_CODE_GET_BUCKET_TYPE_REQ: riak.riak_pb.riak_pb2.RpbGetBucketTypeReq, + MSG_CODE_SET_BUCKET_TYPE_REQ: riak.riak_pb.riak_pb2.RpbSetBucketTypeReq, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ: + riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP: + riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistResp, + MSG_CODE_CS_BUCKET_REQ: riak.riak_pb.riak_kv_pb2.RpbCSBucketReq, + MSG_CODE_CS_BUCKET_RESP: riak.riak_pb.riak_kv_pb2.RpbCSBucketResp, + MSG_CODE_COUNTER_UPDATE_REQ: riak.riak_pb.riak_kv_pb2.RpbCounterUpdateReq, + MSG_CODE_COUNTER_UPDATE_RESP: riak.riak_pb.riak_kv_pb2.RpbCounterUpdateResp, + MSG_CODE_COUNTER_GET_REQ: riak.riak_pb.riak_kv_pb2.RpbCounterGetReq, + MSG_CODE_COUNTER_GET_RESP: riak.riak_pb.riak_kv_pb2.RpbCounterGetResp, + MSG_CODE_YOKOZUNA_INDEX_GET_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq, + MSG_CODE_YOKOZUNA_INDEX_GET_RESP: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetResp, + MSG_CODE_YOKOZUNA_INDEX_PUT_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq, + MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq, + MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq, + MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetResp, + MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq, + MSG_CODE_DT_FETCH_REQ: riak.riak_pb.riak_dt_pb2.DtFetchReq, + MSG_CODE_DT_FETCH_RESP: riak.riak_pb.riak_dt_pb2.DtFetchResp, + MSG_CODE_DT_UPDATE_REQ: riak.riak_pb.riak_dt_pb2.DtUpdateReq, + MSG_CODE_DT_UPDATE_RESP: riak.riak_pb.riak_dt_pb2.DtUpdateResp, + MSG_CODE_AUTH_REQ: riak.riak_pb.riak_pb2.RpbAuthReq, + MSG_CODE_AUTH_RESP: None, + MSG_CODE_START_TLS: None +} diff --git a/riak/riak_pb/riak_dt_pb2.py b/riak/riak_pb/riak_dt_pb2.py new file mode 100644 index 00000000..58a2f54b --- /dev/null +++ b/riak/riak_pb/riak_dt_pb2.py @@ -0,0 +1,863 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: riak_dt.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_dt.proto', + package='', + serialized_pb='\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"Q\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\"\x87\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\")\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"V\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"t\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 \x03(\x0c\x12\x1c\n\tmap_value\x18\x05 
\x03(\x0b\x32\t.MapEntryB#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') + + + +_MAPFIELD_MAPFIELDTYPE = _descriptor.EnumDescriptor( + name='MapFieldType', + full_name='MapField.MapFieldType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='COUNTER', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SET', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REGISTER', index=2, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FLAG', index=3, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAP', index=4, number=5, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=82, + serialized_end=151, +) + +_DTFETCHRESP_DATATYPE = _descriptor.EnumDescriptor( + name='DataType', + full_name='DtFetchResp.DataType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='COUNTER', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SET', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAP', index=2, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=696, + serialized_end=737, +) + +_MAPUPDATE_FLAGOP = _descriptor.EnumDescriptor( + name='FlagOp', + full_name='MapUpdate.FlagOp', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ENABLE', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISABLE', index=1, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=988, + serialized_end=1021, +) + + +_MAPFIELD = _descriptor.Descriptor( + name='MapField', + full_name='MapField', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='MapField.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='MapField.type', index=1, + number=2, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _MAPFIELD_MAPFIELDTYPE, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=18, + serialized_end=151, +) + + +_MAPENTRY = _descriptor.Descriptor( + name='MapEntry', + full_name='MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field', full_name='MapEntry.field', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='counter_value', full_name='MapEntry.counter_value', index=1, + number=2, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + 
name='set_value', full_name='MapEntry.set_value', index=2, + number=3, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='register_value', full_name='MapEntry.register_value', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='flag_value', full_name='MapEntry.flag_value', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_value', full_name='MapEntry.map_value', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=154, + serialized_end=306, +) + + +_DTFETCHREQ = _descriptor.Descriptor( + name='DtFetchReq', + full_name='DtFetchReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='DtFetchReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='DtFetchReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='DtFetchReq.type', index=2, + number=3, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='DtFetchReq.r', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='DtFetchReq.pr', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='basic_quorum', full_name='DtFetchReq.basic_quorum', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='notfound_ok', full_name='DtFetchReq.notfound_ok', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, 
extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='DtFetchReq.timeout', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='DtFetchReq.sloppy_quorum', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='DtFetchReq.n_val', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='include_context', full_name='DtFetchReq.include_context', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=309, + serialized_end=516, +) + + +_DTVALUE = _descriptor.Descriptor( + name='DtValue', + full_name='DtValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='counter_value', full_name='DtValue.counter_value', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_value', full_name='DtValue.set_value', index=1, + number=2, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_value', full_name='DtValue.map_value', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=518, + serialized_end=599, +) + + +_DTFETCHRESP = _descriptor.Descriptor( + name='DtFetchResp', + full_name='DtFetchResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='context', full_name='DtFetchResp.context', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='DtFetchResp.type', index=1, + number=2, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='DtFetchResp.value', index=2, + number=3, type=11, cpp_type=10, label=1, + 
has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DTFETCHRESP_DATATYPE, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=602, + serialized_end=737, +) + + +_COUNTEROP = _descriptor.Descriptor( + name='CounterOp', + full_name='CounterOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='increment', full_name='CounterOp.increment', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=739, + serialized_end=769, +) + + +_SETOP = _descriptor.Descriptor( + name='SetOp', + full_name='SetOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='adds', full_name='SetOp.adds', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='removes', full_name='SetOp.removes', index=1, + number=2, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=771, + serialized_end=809, +) + + +_MAPUPDATE = _descriptor.Descriptor( + name='MapUpdate', + full_name='MapUpdate', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field', full_name='MapUpdate.field', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='counter_op', full_name='MapUpdate.counter_op', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_op', full_name='MapUpdate.set_op', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='register_op', full_name='MapUpdate.register_op', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='flag_op', full_name='MapUpdate.flag_op', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='map_op', full_name='MapUpdate.map_op', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _MAPUPDATE_FLAGOP, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=812, + serialized_end=1021, +) + + +_MAPOP = _descriptor.Descriptor( + name='MapOp', + full_name='MapOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='removes', full_name='MapOp.removes', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='updates', full_name='MapOp.updates', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1023, + serialized_end=1087, +) + + +_DTOP = _descriptor.Descriptor( + name='DtOp', + full_name='DtOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='counter_op', full_name='DtOp.counter_op', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_op', full_name='DtOp.set_op', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_op', full_name='DtOp.map_op', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1089, + serialized_end=1175, +) + + +_DTUPDATEREQ = _descriptor.Descriptor( + name='DtUpdateReq', + full_name='DtUpdateReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='DtUpdateReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='DtUpdateReq.key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='DtUpdateReq.type', index=2, + number=3, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='context', full_name='DtUpdateReq.context', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='op', full_name='DtUpdateReq.op', index=4, + number=5, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='DtUpdateReq.w', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='DtUpdateReq.dw', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='DtUpdateReq.pw', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_body', full_name='DtUpdateReq.return_body', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='DtUpdateReq.timeout', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='DtUpdateReq.sloppy_quorum', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='DtUpdateReq.n_val', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='include_context', full_name='DtUpdateReq.include_context', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1178, + serialized_end=1419, +) + + +_DTUPDATERESP = _descriptor.Descriptor( + name='DtUpdateResp', + full_name='DtUpdateResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='DtUpdateResp.key', index=0, + number=1, type=12, cpp_type=9, label=1, + 
has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='context', full_name='DtUpdateResp.context', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='counter_value', full_name='DtUpdateResp.counter_value', index=2, + number=3, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_value', full_name='DtUpdateResp.set_value', index=3, + number=4, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_value', full_name='DtUpdateResp.map_value', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1421, + serialized_end=1537, +) + +_MAPFIELD.fields_by_name['type'].enum_type = _MAPFIELD_MAPFIELDTYPE +_MAPFIELD_MAPFIELDTYPE.containing_type = _MAPFIELD; +_MAPENTRY.fields_by_name['field'].message_type = _MAPFIELD +_MAPENTRY.fields_by_name['map_value'].message_type = _MAPENTRY +_DTVALUE.fields_by_name['map_value'].message_type = _MAPENTRY +_DTFETCHRESP.fields_by_name['type'].enum_type = _DTFETCHRESP_DATATYPE +_DTFETCHRESP.fields_by_name['value'].message_type = _DTVALUE +_DTFETCHRESP_DATATYPE.containing_type = _DTFETCHRESP; +_MAPUPDATE.fields_by_name['field'].message_type = _MAPFIELD +_MAPUPDATE.fields_by_name['counter_op'].message_type = _COUNTEROP +_MAPUPDATE.fields_by_name['set_op'].message_type = _SETOP +_MAPUPDATE.fields_by_name['flag_op'].enum_type = _MAPUPDATE_FLAGOP +_MAPUPDATE.fields_by_name['map_op'].message_type = _MAPOP +_MAPUPDATE_FLAGOP.containing_type = _MAPUPDATE; +_MAPOP.fields_by_name['removes'].message_type = _MAPFIELD +_MAPOP.fields_by_name['updates'].message_type = _MAPUPDATE +_DTOP.fields_by_name['counter_op'].message_type = _COUNTEROP +_DTOP.fields_by_name['set_op'].message_type = _SETOP +_DTOP.fields_by_name['map_op'].message_type = _MAPOP +_DTUPDATEREQ.fields_by_name['op'].message_type = _DTOP +_DTUPDATERESP.fields_by_name['map_value'].message_type = _MAPENTRY +DESCRIPTOR.message_types_by_name['MapField'] = _MAPFIELD +DESCRIPTOR.message_types_by_name['MapEntry'] = _MAPENTRY +DESCRIPTOR.message_types_by_name['DtFetchReq'] = _DTFETCHREQ +DESCRIPTOR.message_types_by_name['DtValue'] = _DTVALUE +DESCRIPTOR.message_types_by_name['DtFetchResp'] = _DTFETCHRESP +DESCRIPTOR.message_types_by_name['CounterOp'] = _COUNTEROP +DESCRIPTOR.message_types_by_name['SetOp'] = _SETOP +DESCRIPTOR.message_types_by_name['MapUpdate'] = _MAPUPDATE +DESCRIPTOR.message_types_by_name['MapOp'] = _MAPOP +DESCRIPTOR.message_types_by_name['DtOp'] = _DTOP +DESCRIPTOR.message_types_by_name['DtUpdateReq'] = _DTUPDATEREQ +DESCRIPTOR.message_types_by_name['DtUpdateResp'] = _DTUPDATERESP + 
+@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapField(_message.Message): + DESCRIPTOR = _MAPFIELD + + # @@protoc_insertion_point(class_scope:MapField) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapEntry(_message.Message): + DESCRIPTOR = _MAPENTRY + + # @@protoc_insertion_point(class_scope:MapEntry) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtFetchReq(_message.Message): + DESCRIPTOR = _DTFETCHREQ + + # @@protoc_insertion_point(class_scope:DtFetchReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtValue(_message.Message): + DESCRIPTOR = _DTVALUE + + # @@protoc_insertion_point(class_scope:DtValue) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtFetchResp(_message.Message): + DESCRIPTOR = _DTFETCHRESP + + # @@protoc_insertion_point(class_scope:DtFetchResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class CounterOp(_message.Message): + DESCRIPTOR = _COUNTEROP + + # @@protoc_insertion_point(class_scope:CounterOp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class SetOp(_message.Message): + DESCRIPTOR = _SETOP + + # @@protoc_insertion_point(class_scope:SetOp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapUpdate(_message.Message): + DESCRIPTOR = _MAPUPDATE + + # @@protoc_insertion_point(class_scope:MapUpdate) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapOp(_message.Message): + DESCRIPTOR = _MAPOP + + # @@protoc_insertion_point(class_scope:MapOp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtOp(_message.Message): + DESCRIPTOR = _DTOP + + # @@protoc_insertion_point(class_scope:DtOp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtUpdateReq(_message.Message): + DESCRIPTOR = _DTUPDATEREQ + + # @@protoc_insertion_point(class_scope:DtUpdateReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtUpdateResp(_message.Message): + DESCRIPTOR = _DTUPDATERESP + + # @@protoc_insertion_point(class_scope:DtUpdateResp) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakDtPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_kv_pb2.py b/riak/riak_pb/riak_kv_pb2.py new file mode 100644 index 00000000..c8411e06 --- /dev/null +++ b/riak/riak_pb/riak_kv_pb2.py @@ -0,0 +1,1747 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: riak_kv.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + +import riak.riak_pb.riak_pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_kv.proto', + package='', + serialized_pb='\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xcd\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 \x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 \x01(\x0c\x12\x11\n\trange_min\x18\x05 \x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 
\x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"\xc1\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') + + + +_RPBINDEXREQ_INDEXQUERYTYPE = _descriptor.EnumDescriptor( + name='IndexQueryType', + full_name='RpbIndexReq.IndexQueryType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='eq', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='range', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1644, + serialized_end=1679, +) + + +_RPBGETCLIENTIDRESP = _descriptor.Descriptor( + 
name='RpbGetClientIdResp', + full_name='RpbGetClientIdResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='client_id', full_name='RpbGetClientIdResp.client_id', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=29, + serialized_end=68, +) + + +_RPBSETCLIENTIDREQ = _descriptor.Descriptor( + name='RpbSetClientIdReq', + full_name='RpbSetClientIdReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='client_id', full_name='RpbSetClientIdReq.client_id', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=70, + serialized_end=108, +) + + +_RPBGETREQ = _descriptor.Descriptor( + name='RpbGetReq', + full_name='RpbGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbGetReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbGetReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='RpbGetReq.r', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='RpbGetReq.pr', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='basic_quorum', full_name='RpbGetReq.basic_quorum', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='notfound_ok', full_name='RpbGetReq.notfound_ok', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='if_modified', full_name='RpbGetReq.if_modified', index=6, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='head', 
full_name='RpbGetReq.head', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deletedvclock', full_name='RpbGetReq.deletedvclock', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbGetReq.timeout', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='RpbGetReq.sloppy_quorum', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbGetReq.n_val', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbGetReq.type', index=12, + number=13, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=111, + serialized_end=344, +) + + +_RPBGETRESP = _descriptor.Descriptor( + name='RpbGetResp', + full_name='RpbGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='content', full_name='RpbGetResp.content', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='RpbGetResp.vclock', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unchanged', full_name='RpbGetResp.unchanged', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=346, + serialized_end=423, +) + + +_RPBPUTREQ = _descriptor.Descriptor( + name='RpbPutReq', + full_name='RpbPutReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbPutReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbPutReq.key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='RpbPutReq.vclock', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content', full_name='RpbPutReq.content', index=3, + number=4, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='RpbPutReq.w', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='RpbPutReq.dw', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_body', full_name='RpbPutReq.return_body', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='RpbPutReq.pw', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='if_not_modified', full_name='RpbPutReq.if_not_modified', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='if_none_match', full_name='RpbPutReq.if_none_match', index=9, + number=10, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_head', full_name='RpbPutReq.return_head', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbPutReq.timeout', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='asis', full_name='RpbPutReq.asis', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='RpbPutReq.sloppy_quorum', index=13, + number=14, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbPutReq.n_val', index=14, + number=15, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbPutReq.type', index=15, + number=16, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=426, + serialized_end=720, +) + + +_RPBPUTRESP = _descriptor.Descriptor( + name='RpbPutResp', + full_name='RpbPutResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='content', full_name='RpbPutResp.content', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='RpbPutResp.vclock', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbPutResp.key', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=722, + serialized_end=793, +) + + +_RPBDELREQ = _descriptor.Descriptor( + name='RpbDelReq', + full_name='RpbDelReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbDelReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbDelReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rw', full_name='RpbDelReq.rw', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='RpbDelReq.vclock', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, 
default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='RpbDelReq.r', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='RpbDelReq.w', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='RpbDelReq.pr', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='RpbDelReq.pw', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='RpbDelReq.dw', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbDelReq.timeout', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='RpbDelReq.sloppy_quorum', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbDelReq.n_val', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbDelReq.type', index=12, + number=13, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=796, + serialized_end=991, +) + + +_RPBLISTBUCKETSREQ = _descriptor.Descriptor( + name='RpbListBucketsReq', + full_name='RpbListBucketsReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbListBucketsReq.timeout', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream', full_name='RpbListBucketsReq.stream', index=1, + number=2, type=8, cpp_type=7, label=1, + 
has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbListBucketsReq.type', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=993, + serialized_end=1059, +) + + +_RPBLISTBUCKETSRESP = _descriptor.Descriptor( + name='RpbListBucketsResp', + full_name='RpbListBucketsResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='buckets', full_name='RpbListBucketsResp.buckets', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbListBucketsResp.done', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1061, + serialized_end=1112, +) + + +_RPBLISTKEYSREQ = _descriptor.Descriptor( + name='RpbListKeysReq', + full_name='RpbListKeysReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbListKeysReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbListKeysReq.timeout', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbListKeysReq.type', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1114, + serialized_end=1177, +) + + +_RPBLISTKEYSRESP = _descriptor.Descriptor( + name='RpbListKeysResp', + full_name='RpbListKeysResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='keys', full_name='RpbListKeysResp.keys', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbListKeysResp.done', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1179, + serialized_end=1224, +) + + +_RPBMAPREDREQ = _descriptor.Descriptor( + name='RpbMapRedReq', + full_name='RpbMapRedReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='request', full_name='RpbMapRedReq.request', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content_type', full_name='RpbMapRedReq.content_type', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1226, + serialized_end=1279, +) + + +_RPBMAPREDRESP = _descriptor.Descriptor( + name='RpbMapRedResp', + full_name='RpbMapRedResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='phase', full_name='RpbMapRedResp.phase', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='response', full_name='RpbMapRedResp.response', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbMapRedResp.done', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1281, + serialized_end=1343, +) + + +_RPBINDEXREQ = _descriptor.Descriptor( + name='RpbIndexReq', + full_name='RpbIndexReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbIndexReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='index', full_name='RpbIndexReq.index', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='qtype', full_name='RpbIndexReq.qtype', index=2, + number=3, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', 
full_name='RpbIndexReq.key', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='range_min', full_name='RpbIndexReq.range_min', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='range_max', full_name='RpbIndexReq.range_max', index=5, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_terms', full_name='RpbIndexReq.return_terms', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream', full_name='RpbIndexReq.stream', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_results', full_name='RpbIndexReq.max_results', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbIndexReq.continuation', index=9, + number=10, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbIndexReq.timeout', index=10, + number=11, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbIndexReq.type', index=11, + number=12, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='term_regex', full_name='RpbIndexReq.term_regex', index=12, + number=13, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pagination_sort', full_name='RpbIndexReq.pagination_sort', index=13, + number=14, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _RPBINDEXREQ_INDEXQUERYTYPE, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1346, + serialized_end=1679, +) + + +_RPBINDEXRESP = _descriptor.Descriptor( + 
name='RpbIndexResp', + full_name='RpbIndexResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='keys', full_name='RpbIndexResp.keys', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='results', full_name='RpbIndexResp.results', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbIndexResp.continuation', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbIndexResp.done', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1681, + serialized_end=1772, +) + + +_RPBCSBUCKETREQ = _descriptor.Descriptor( + name='RpbCSBucketReq', + full_name='RpbCSBucketReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbCSBucketReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_key', full_name='RpbCSBucketReq.start_key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_key', full_name='RpbCSBucketReq.end_key', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_incl', full_name='RpbCSBucketReq.start_incl', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_incl', full_name='RpbCSBucketReq.end_incl', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbCSBucketReq.continuation', index=5, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_results', 
full_name='RpbCSBucketReq.max_results', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbCSBucketReq.timeout', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbCSBucketReq.type', index=8, + number=9, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1775, + serialized_end=1968, +) + + +_RPBCSBUCKETRESP = _descriptor.Descriptor( + name='RpbCSBucketResp', + full_name='RpbCSBucketResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='objects', full_name='RpbCSBucketResp.objects', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbCSBucketResp.continuation', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbCSBucketResp.done', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1970, + serialized_end=2057, +) + + +_RPBINDEXOBJECT = _descriptor.Descriptor( + name='RpbIndexObject', + full_name='RpbIndexObject', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='RpbIndexObject.key', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='object', full_name='RpbIndexObject.object', index=1, + number=2, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2059, + serialized_end=2117, +) + + +_RPBCONTENT = _descriptor.Descriptor( + name='RpbContent', + full_name='RpbContent', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='RpbContent.value', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, 
default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content_type', full_name='RpbContent.content_type', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='charset', full_name='RpbContent.charset', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content_encoding', full_name='RpbContent.content_encoding', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vtag', full_name='RpbContent.vtag', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='links', full_name='RpbContent.links', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last_mod', full_name='RpbContent.last_mod', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last_mod_usecs', full_name='RpbContent.last_mod_usecs', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='usermeta', full_name='RpbContent.usermeta', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='indexes', full_name='RpbContent.indexes', index=9, + number=10, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deleted', full_name='RpbContent.deleted', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2120, + serialized_end=2365, +) + + +_RPBLINK = _descriptor.Descriptor( + name='RpbLink', + full_name='RpbLink', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbLink.bucket', 
index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbLink.key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tag', full_name='RpbLink.tag', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2367, + serialized_end=2418, +) + + +_RPBCOUNTERUPDATEREQ = _descriptor.Descriptor( + name='RpbCounterUpdateReq', + full_name='RpbCounterUpdateReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbCounterUpdateReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbCounterUpdateReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='amount', full_name='RpbCounterUpdateReq.amount', index=2, + number=3, type=18, cpp_type=2, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='RpbCounterUpdateReq.w', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='RpbCounterUpdateReq.dw', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='RpbCounterUpdateReq.pw', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='returnvalue', full_name='RpbCounterUpdateReq.returnvalue', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2420, + serialized_end=2542, +) + + +_RPBCOUNTERUPDATERESP = _descriptor.Descriptor( + name='RpbCounterUpdateResp', + full_name='RpbCounterUpdateResp', 
+ filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='RpbCounterUpdateResp.value', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2544, + serialized_end=2581, +) + + +_RPBCOUNTERGETREQ = _descriptor.Descriptor( + name='RpbCounterGetReq', + full_name='RpbCounterGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbCounterGetReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbCounterGetReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='RpbCounterGetReq.r', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='RpbCounterGetReq.pr', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='basic_quorum', full_name='RpbCounterGetReq.basic_quorum', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='notfound_ok', full_name='RpbCounterGetReq.notfound_ok', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2583, + serialized_end=2696, +) + + +_RPBCOUNTERGETRESP = _descriptor.Descriptor( + name='RpbCounterGetResp', + full_name='RpbCounterGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='RpbCounterGetResp.value', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2698, + serialized_end=2732, +) + + +_RPBGETBUCKETKEYPREFLISTREQ = _descriptor.Descriptor( + name='RpbGetBucketKeyPreflistReq', + full_name='RpbGetBucketKeyPreflistReq', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbGetBucketKeyPreflistReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbGetBucketKeyPreflistReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbGetBucketKeyPreflistReq.type', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2734, + serialized_end=2805, +) + + +_RPBGETBUCKETKEYPREFLISTRESP = _descriptor.Descriptor( + name='RpbGetBucketKeyPreflistResp', + full_name='RpbGetBucketKeyPreflistResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='preflist', full_name='RpbGetBucketKeyPreflistResp.preflist', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2807, + serialized_end=2881, +) + + +_RPBBUCKETKEYPREFLISTITEM = _descriptor.Descriptor( + name='RpbBucketKeyPreflistItem', + full_name='RpbBucketKeyPreflistItem', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='partition', full_name='RpbBucketKeyPreflistItem.partition', index=0, + number=1, type=3, cpp_type=2, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='node', full_name='RpbBucketKeyPreflistItem.node', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='primary', full_name='RpbBucketKeyPreflistItem.primary', index=2, + number=3, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2883, + serialized_end=2959, +) + +_RPBGETRESP.fields_by_name['content'].message_type = _RPBCONTENT +_RPBPUTREQ.fields_by_name['content'].message_type = _RPBCONTENT +_RPBPUTRESP.fields_by_name['content'].message_type = _RPBCONTENT +_RPBINDEXREQ.fields_by_name['qtype'].enum_type = _RPBINDEXREQ_INDEXQUERYTYPE +_RPBINDEXREQ_INDEXQUERYTYPE.containing_type = _RPBINDEXREQ; +_RPBINDEXRESP.fields_by_name['results'].message_type = 
riak.riak_pb.riak_pb2._RPBPAIR +_RPBCSBUCKETRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT +_RPBINDEXOBJECT.fields_by_name['object'].message_type = _RPBGETRESP +_RPBCONTENT.fields_by_name['links'].message_type = _RPBLINK +_RPBCONTENT.fields_by_name['usermeta'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBCONTENT.fields_by_name['indexes'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBGETBUCKETKEYPREFLISTRESP.fields_by_name['preflist'].message_type = _RPBBUCKETKEYPREFLISTITEM +DESCRIPTOR.message_types_by_name['RpbGetClientIdResp'] = _RPBGETCLIENTIDRESP +DESCRIPTOR.message_types_by_name['RpbSetClientIdReq'] = _RPBSETCLIENTIDREQ +DESCRIPTOR.message_types_by_name['RpbGetReq'] = _RPBGETREQ +DESCRIPTOR.message_types_by_name['RpbGetResp'] = _RPBGETRESP +DESCRIPTOR.message_types_by_name['RpbPutReq'] = _RPBPUTREQ +DESCRIPTOR.message_types_by_name['RpbPutResp'] = _RPBPUTRESP +DESCRIPTOR.message_types_by_name['RpbDelReq'] = _RPBDELREQ +DESCRIPTOR.message_types_by_name['RpbListBucketsReq'] = _RPBLISTBUCKETSREQ +DESCRIPTOR.message_types_by_name['RpbListBucketsResp'] = _RPBLISTBUCKETSRESP +DESCRIPTOR.message_types_by_name['RpbListKeysReq'] = _RPBLISTKEYSREQ +DESCRIPTOR.message_types_by_name['RpbListKeysResp'] = _RPBLISTKEYSRESP +DESCRIPTOR.message_types_by_name['RpbMapRedReq'] = _RPBMAPREDREQ +DESCRIPTOR.message_types_by_name['RpbMapRedResp'] = _RPBMAPREDRESP +DESCRIPTOR.message_types_by_name['RpbIndexReq'] = _RPBINDEXREQ +DESCRIPTOR.message_types_by_name['RpbIndexResp'] = _RPBINDEXRESP +DESCRIPTOR.message_types_by_name['RpbCSBucketReq'] = _RPBCSBUCKETREQ +DESCRIPTOR.message_types_by_name['RpbCSBucketResp'] = _RPBCSBUCKETRESP +DESCRIPTOR.message_types_by_name['RpbIndexObject'] = _RPBINDEXOBJECT +DESCRIPTOR.message_types_by_name['RpbContent'] = _RPBCONTENT +DESCRIPTOR.message_types_by_name['RpbLink'] = _RPBLINK +DESCRIPTOR.message_types_by_name['RpbCounterUpdateReq'] = _RPBCOUNTERUPDATEREQ +DESCRIPTOR.message_types_by_name['RpbCounterUpdateResp'] = _RPBCOUNTERUPDATERESP +DESCRIPTOR.message_types_by_name['RpbCounterGetReq'] = _RPBCOUNTERGETREQ +DESCRIPTOR.message_types_by_name['RpbCounterGetResp'] = _RPBCOUNTERGETRESP +DESCRIPTOR.message_types_by_name['RpbGetBucketKeyPreflistReq'] = _RPBGETBUCKETKEYPREFLISTREQ +DESCRIPTOR.message_types_by_name['RpbGetBucketKeyPreflistResp'] = _RPBGETBUCKETKEYPREFLISTRESP +DESCRIPTOR.message_types_by_name['RpbBucketKeyPreflistItem'] = _RPBBUCKETKEYPREFLISTITEM + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetClientIdResp(_message.Message): + DESCRIPTOR = _RPBGETCLIENTIDRESP + + # @@protoc_insertion_point(class_scope:RpbGetClientIdResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetClientIdReq(_message.Message): + DESCRIPTOR = _RPBSETCLIENTIDREQ + + # @@protoc_insertion_point(class_scope:RpbSetClientIdReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetReq(_message.Message): + DESCRIPTOR = _RPBGETREQ + + # @@protoc_insertion_point(class_scope:RpbGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetResp(_message.Message): + DESCRIPTOR = _RPBGETRESP + + # @@protoc_insertion_point(class_scope:RpbGetResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPutReq(_message.Message): + DESCRIPTOR = _RPBPUTREQ + + # @@protoc_insertion_point(class_scope:RpbPutReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPutResp(_message.Message): + DESCRIPTOR = _RPBPUTRESP + + # 
@@protoc_insertion_point(class_scope:RpbPutResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbDelReq(_message.Message): + DESCRIPTOR = _RPBDELREQ + + # @@protoc_insertion_point(class_scope:RpbDelReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListBucketsReq(_message.Message): + DESCRIPTOR = _RPBLISTBUCKETSREQ + + # @@protoc_insertion_point(class_scope:RpbListBucketsReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListBucketsResp(_message.Message): + DESCRIPTOR = _RPBLISTBUCKETSRESP + + # @@protoc_insertion_point(class_scope:RpbListBucketsResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListKeysReq(_message.Message): + DESCRIPTOR = _RPBLISTKEYSREQ + + # @@protoc_insertion_point(class_scope:RpbListKeysReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListKeysResp(_message.Message): + DESCRIPTOR = _RPBLISTKEYSRESP + + # @@protoc_insertion_point(class_scope:RpbListKeysResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbMapRedReq(_message.Message): + DESCRIPTOR = _RPBMAPREDREQ + + # @@protoc_insertion_point(class_scope:RpbMapRedReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbMapRedResp(_message.Message): + DESCRIPTOR = _RPBMAPREDRESP + + # @@protoc_insertion_point(class_scope:RpbMapRedResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexReq(_message.Message): + DESCRIPTOR = _RPBINDEXREQ + + # @@protoc_insertion_point(class_scope:RpbIndexReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexResp(_message.Message): + DESCRIPTOR = _RPBINDEXRESP + + # @@protoc_insertion_point(class_scope:RpbIndexResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCSBucketReq(_message.Message): + DESCRIPTOR = _RPBCSBUCKETREQ + + # @@protoc_insertion_point(class_scope:RpbCSBucketReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCSBucketResp(_message.Message): + DESCRIPTOR = _RPBCSBUCKETRESP + + # @@protoc_insertion_point(class_scope:RpbCSBucketResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexObject(_message.Message): + DESCRIPTOR = _RPBINDEXOBJECT + + # @@protoc_insertion_point(class_scope:RpbIndexObject) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbContent(_message.Message): + DESCRIPTOR = _RPBCONTENT + + # @@protoc_insertion_point(class_scope:RpbContent) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbLink(_message.Message): + DESCRIPTOR = _RPBLINK + + # @@protoc_insertion_point(class_scope:RpbLink) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterUpdateReq(_message.Message): + DESCRIPTOR = _RPBCOUNTERUPDATEREQ + + # @@protoc_insertion_point(class_scope:RpbCounterUpdateReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterUpdateResp(_message.Message): + DESCRIPTOR = _RPBCOUNTERUPDATERESP + + # @@protoc_insertion_point(class_scope:RpbCounterUpdateResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterGetReq(_message.Message): + DESCRIPTOR = _RPBCOUNTERGETREQ + + # @@protoc_insertion_point(class_scope:RpbCounterGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterGetResp(_message.Message): + DESCRIPTOR = _RPBCOUNTERGETRESP + + # @@protoc_insertion_point(class_scope:RpbCounterGetResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) 
+class RpbGetBucketKeyPreflistReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTREQ + + # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketKeyPreflistResp(_message.Message): + DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTRESP + + # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbBucketKeyPreflistItem(_message.Message): + DESCRIPTOR = _RPBBUCKETKEYPREFLISTITEM + + # @@protoc_insertion_point(class_scope:RpbBucketKeyPreflistItem) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakKvPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_pb2.py b/riak/riak_pb/riak_pb2.py new file mode 100644 index 00000000..a757940a --- /dev/null +++ b/riak/riak_pb/riak_pb2.py @@ -0,0 +1,786 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: riak.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak.proto', + package='', + serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 
\x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') + + + +_RPBBUCKETPROPS_RPBREPLMODE = _descriptor.EnumDescriptor( + name='RpbReplMode', + full_name='RpbBucketProps.RpbReplMode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FALSE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REALTIME', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FULLSYNC', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TRUE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1236, + serialized_end=1298, +) + + +_RPBERRORRESP = _descriptor.Descriptor( + name='RpbErrorResp', + full_name='RpbErrorResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='errmsg', full_name='RpbErrorResp.errmsg', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='errcode', full_name='RpbErrorResp.errcode', index=1, + number=2, type=13, cpp_type=3, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=14, + serialized_end=61, +) + + +_RPBGETSERVERINFORESP = _descriptor.Descriptor( + name='RpbGetServerInfoResp', + full_name='RpbGetServerInfoResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='node', full_name='RpbGetServerInfoResp.node', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='server_version', full_name='RpbGetServerInfoResp.server_version', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=63, + serialized_end=123, +) + + +_RPBPAIR = _descriptor.Descriptor( + name='RpbPair', + full_name='RpbPair', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='RpbPair.key', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='RpbPair.value', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=125, + serialized_end=162, +) + + +_RPBGETBUCKETREQ = _descriptor.Descriptor( + name='RpbGetBucketReq', + full_name='RpbGetBucketReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbGetBucketReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbGetBucketReq.type', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=164, + serialized_end=211, +) + + +_RPBGETBUCKETRESP = _descriptor.Descriptor( + name='RpbGetBucketResp', + full_name='RpbGetBucketResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='props', full_name='RpbGetBucketResp.props', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=213, + serialized_end=263, +) + + +_RPBSETBUCKETREQ = _descriptor.Descriptor( + name='RpbSetBucketReq', + full_name='RpbSetBucketReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbSetBucketReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='props', full_name='RpbSetBucketReq.props', index=1, + number=2, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbSetBucketReq.type', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=265, + serialized_end=344, +) + + +_RPBRESETBUCKETREQ = _descriptor.Descriptor( + name='RpbResetBucketReq', + full_name='RpbResetBucketReq', + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbResetBucketReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbResetBucketReq.type', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=346, + serialized_end=395, +) + + +_RPBGETBUCKETTYPEREQ = _descriptor.Descriptor( + name='RpbGetBucketTypeReq', + full_name='RpbGetBucketTypeReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='RpbGetBucketTypeReq.type', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=397, + serialized_end=432, +) + + +_RPBSETBUCKETTYPEREQ = _descriptor.Descriptor( + name='RpbSetBucketTypeReq', + full_name='RpbSetBucketTypeReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='RpbSetBucketTypeReq.type', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='props', full_name='RpbSetBucketTypeReq.props', index=1, + number=2, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=434, + serialized_end=501, +) + + +_RPBMODFUN = _descriptor.Descriptor( + name='RpbModFun', + full_name='RpbModFun', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='module', full_name='RpbModFun.module', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='function', full_name='RpbModFun.function', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=503, + serialized_end=548, +) + + +_RPBCOMMITHOOK = _descriptor.Descriptor( + name='RpbCommitHook', + full_name='RpbCommitHook', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='modfun', full_name='RpbCommitHook.modfun', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='RpbCommitHook.name', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=550, + serialized_end=607, +) + + +_RPBBUCKETPROPS = _descriptor.Descriptor( + name='RpbBucketProps', + full_name='RpbBucketProps', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbBucketProps.n_val', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='allow_mult', full_name='RpbBucketProps.allow_mult', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last_write_wins', full_name='RpbBucketProps.last_write_wins', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='precommit', full_name='RpbBucketProps.precommit', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='has_precommit', full_name='RpbBucketProps.has_precommit', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='postcommit', full_name='RpbBucketProps.postcommit', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='has_postcommit', full_name='RpbBucketProps.has_postcommit', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='chash_keyfun', full_name='RpbBucketProps.chash_keyfun', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='linkfun', full_name='RpbBucketProps.linkfun', index=8, + number=9, type=11, cpp_type=10, 
label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='old_vclock', full_name='RpbBucketProps.old_vclock', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='young_vclock', full_name='RpbBucketProps.young_vclock', index=10, + number=11, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='big_vclock', full_name='RpbBucketProps.big_vclock', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='small_vclock', full_name='RpbBucketProps.small_vclock', index=12, + number=13, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='RpbBucketProps.pr', index=13, + number=14, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='RpbBucketProps.r', index=14, + number=15, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='RpbBucketProps.w', index=15, + number=16, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='RpbBucketProps.pw', index=16, + number=17, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='RpbBucketProps.dw', index=17, + number=18, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rw', full_name='RpbBucketProps.rw', index=18, + number=19, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='basic_quorum', full_name='RpbBucketProps.basic_quorum', index=19, + number=20, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='notfound_ok', 
full_name='RpbBucketProps.notfound_ok', index=20, + number=21, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='backend', full_name='RpbBucketProps.backend', index=21, + number=22, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='search', full_name='RpbBucketProps.search', index=22, + number=23, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repl', full_name='RpbBucketProps.repl', index=23, + number=24, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='search_index', full_name='RpbBucketProps.search_index', index=24, + number=25, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='datatype', full_name='RpbBucketProps.datatype', index=25, + number=26, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='consistent', full_name='RpbBucketProps.consistent', index=26, + number=27, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='write_once', full_name='RpbBucketProps.write_once', index=27, + number=28, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _RPBBUCKETPROPS_RPBREPLMODE, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=610, + serialized_end=1298, +) + + +_RPBAUTHREQ = _descriptor.Descriptor( + name='RpbAuthReq', + full_name='RpbAuthReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='user', full_name='RpbAuthReq.user', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='password', full_name='RpbAuthReq.password', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1300, + serialized_end=1344, +) + 
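The descriptors above are only wiring; the usable classes are the _message.Message subclasses registered below (RpbErrorResp through RpbAuthReq). As a minimal sketch of how one of these generated messages is exercised, assuming the vendored riak.riak_pb.riak_pb2 module and the google.protobuf runtime are importable, and using placeholder credentials:

    from riak.riak_pb import riak_pb2

    # RpbAuthReq is defined above with two required bytes fields: user and password.
    req = riak_pb2.RpbAuthReq()
    req.user = b'riakuser'      # placeholder value, not a real credential
    req.password = b'riakpass'  # placeholder value

    # Standard protobuf Message API: serialize to the bytes the PB transport
    # sends after the one-byte message code, then round-trip them back.
    payload = req.SerializeToString()
    parsed = riak_pb2.RpbAuthReq()
    parsed.ParseFromString(payload)
    assert parsed.user == b'riakuser'
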
+_RPBGETBUCKETRESP.fields_by_name['props'].message_type = _RPBBUCKETPROPS +_RPBSETBUCKETREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS +_RPBSETBUCKETTYPEREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS +_RPBCOMMITHOOK.fields_by_name['modfun'].message_type = _RPBMODFUN +_RPBBUCKETPROPS.fields_by_name['precommit'].message_type = _RPBCOMMITHOOK +_RPBBUCKETPROPS.fields_by_name['postcommit'].message_type = _RPBCOMMITHOOK +_RPBBUCKETPROPS.fields_by_name['chash_keyfun'].message_type = _RPBMODFUN +_RPBBUCKETPROPS.fields_by_name['linkfun'].message_type = _RPBMODFUN +_RPBBUCKETPROPS.fields_by_name['repl'].enum_type = _RPBBUCKETPROPS_RPBREPLMODE +_RPBBUCKETPROPS_RPBREPLMODE.containing_type = _RPBBUCKETPROPS; +DESCRIPTOR.message_types_by_name['RpbErrorResp'] = _RPBERRORRESP +DESCRIPTOR.message_types_by_name['RpbGetServerInfoResp'] = _RPBGETSERVERINFORESP +DESCRIPTOR.message_types_by_name['RpbPair'] = _RPBPAIR +DESCRIPTOR.message_types_by_name['RpbGetBucketReq'] = _RPBGETBUCKETREQ +DESCRIPTOR.message_types_by_name['RpbGetBucketResp'] = _RPBGETBUCKETRESP +DESCRIPTOR.message_types_by_name['RpbSetBucketReq'] = _RPBSETBUCKETREQ +DESCRIPTOR.message_types_by_name['RpbResetBucketReq'] = _RPBRESETBUCKETREQ +DESCRIPTOR.message_types_by_name['RpbGetBucketTypeReq'] = _RPBGETBUCKETTYPEREQ +DESCRIPTOR.message_types_by_name['RpbSetBucketTypeReq'] = _RPBSETBUCKETTYPEREQ +DESCRIPTOR.message_types_by_name['RpbModFun'] = _RPBMODFUN +DESCRIPTOR.message_types_by_name['RpbCommitHook'] = _RPBCOMMITHOOK +DESCRIPTOR.message_types_by_name['RpbBucketProps'] = _RPBBUCKETPROPS +DESCRIPTOR.message_types_by_name['RpbAuthReq'] = _RPBAUTHREQ + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbErrorResp(_message.Message): + DESCRIPTOR = _RPBERRORRESP + + # @@protoc_insertion_point(class_scope:RpbErrorResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetServerInfoResp(_message.Message): + DESCRIPTOR = _RPBGETSERVERINFORESP + + # @@protoc_insertion_point(class_scope:RpbGetServerInfoResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPair(_message.Message): + DESCRIPTOR = _RPBPAIR + + # @@protoc_insertion_point(class_scope:RpbPair) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETREQ + + # @@protoc_insertion_point(class_scope:RpbGetBucketReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketResp(_message.Message): + DESCRIPTOR = _RPBGETBUCKETRESP + + # @@protoc_insertion_point(class_scope:RpbGetBucketResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetBucketReq(_message.Message): + DESCRIPTOR = _RPBSETBUCKETREQ + + # @@protoc_insertion_point(class_scope:RpbSetBucketReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbResetBucketReq(_message.Message): + DESCRIPTOR = _RPBRESETBUCKETREQ + + # @@protoc_insertion_point(class_scope:RpbResetBucketReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketTypeReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETTYPEREQ + + # @@protoc_insertion_point(class_scope:RpbGetBucketTypeReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetBucketTypeReq(_message.Message): + DESCRIPTOR = _RPBSETBUCKETTYPEREQ + + # @@protoc_insertion_point(class_scope:RpbSetBucketTypeReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbModFun(_message.Message): + DESCRIPTOR = _RPBMODFUN + + # 
@@protoc_insertion_point(class_scope:RpbModFun) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCommitHook(_message.Message): + DESCRIPTOR = _RPBCOMMITHOOK + + # @@protoc_insertion_point(class_scope:RpbCommitHook) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbBucketProps(_message.Message): + DESCRIPTOR = _RPBBUCKETPROPS + + # @@protoc_insertion_point(class_scope:RpbBucketProps) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbAuthReq(_message.Message): + DESCRIPTOR = _RPBAUTHREQ + + # @@protoc_insertion_point(class_scope:RpbAuthReq) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\006RiakPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_search_pb2.py b/riak/riak_pb/riak_search_pb2.py new file mode 100644 index 00000000..1608f575 --- /dev/null +++ b/riak/riak_pb/riak_search_pb2.py @@ -0,0 +1,210 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: riak_search.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + +import riak.riak_pb.riak_pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_search.proto', + package='', + serialized_pb='\n\x11riak_search.proto\x1a\nriak.proto\"(\n\x0cRpbSearchDoc\x12\x18\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x08.RpbPair\"\x9d\x01\n\x11RpbSearchQueryReq\x12\t\n\x01q\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12\x0c\n\x04rows\x18\x03 \x01(\r\x12\r\n\x05start\x18\x04 \x01(\r\x12\x0c\n\x04sort\x18\x05 \x01(\x0c\x12\x0e\n\x06\x66ilter\x18\x06 \x01(\x0c\x12\n\n\x02\x64\x66\x18\x07 \x01(\x0c\x12\n\n\x02op\x18\x08 \x01(\x0c\x12\n\n\x02\x66l\x18\t \x03(\x0c\x12\x0f\n\x07presort\x18\n \x01(\x0c\"W\n\x12RpbSearchQueryResp\x12\x1b\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\r.RpbSearchDoc\x12\x11\n\tmax_score\x18\x02 \x01(\x02\x12\x11\n\tnum_found\x18\x03 \x01(\rB\'\n\x17\x63om.basho.riak.protobufB\x0cRiakSearchPB') + + + + +_RPBSEARCHDOC = _descriptor.Descriptor( + name='RpbSearchDoc', + full_name='RpbSearchDoc', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fields', full_name='RpbSearchDoc.fields', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=33, + serialized_end=73, +) + + +_RPBSEARCHQUERYREQ = _descriptor.Descriptor( + name='RpbSearchQueryReq', + full_name='RpbSearchQueryReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='q', full_name='RpbSearchQueryReq.q', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='index', full_name='RpbSearchQueryReq.index', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rows', full_name='RpbSearchQueryReq.rows', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start', full_name='RpbSearchQueryReq.start', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sort', full_name='RpbSearchQueryReq.sort', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='RpbSearchQueryReq.filter', index=5, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='df', full_name='RpbSearchQueryReq.df', index=6, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='op', full_name='RpbSearchQueryReq.op', index=7, + number=8, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='fl', full_name='RpbSearchQueryReq.fl', index=8, + number=9, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='presort', full_name='RpbSearchQueryReq.presort', index=9, + number=10, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=76, + serialized_end=233, +) + + +_RPBSEARCHQUERYRESP = _descriptor.Descriptor( + name='RpbSearchQueryResp', + full_name='RpbSearchQueryResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='docs', full_name='RpbSearchQueryResp.docs', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_score', full_name='RpbSearchQueryResp.max_score', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='num_found', full_name='RpbSearchQueryResp.num_found', index=2, + number=3, type=13, 
cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=235, + serialized_end=322, +) + +_RPBSEARCHDOC.fields_by_name['fields'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBSEARCHQUERYRESP.fields_by_name['docs'].message_type = _RPBSEARCHDOC +DESCRIPTOR.message_types_by_name['RpbSearchDoc'] = _RPBSEARCHDOC +DESCRIPTOR.message_types_by_name['RpbSearchQueryReq'] = _RPBSEARCHQUERYREQ +DESCRIPTOR.message_types_by_name['RpbSearchQueryResp'] = _RPBSEARCHQUERYRESP + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchDoc(_message.Message): + DESCRIPTOR = _RPBSEARCHDOC + + # @@protoc_insertion_point(class_scope:RpbSearchDoc) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchQueryReq(_message.Message): + DESCRIPTOR = _RPBSEARCHQUERYREQ + + # @@protoc_insertion_point(class_scope:RpbSearchQueryReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchQueryResp(_message.Message): + DESCRIPTOR = _RPBSEARCHQUERYRESP + + # @@protoc_insertion_point(class_scope:RpbSearchQueryResp) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\014RiakSearchPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_yokozuna_pb2.py b/riak/riak_pb/riak_yokozuna_pb2.py new file mode 100644 index 00000000..1673f538 --- /dev/null +++ b/riak/riak_pb/riak_yokozuna_pb2.py @@ -0,0 +1,372 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: riak_yokozuna.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_yokozuna.proto', + package='', + serialized_pb='\n\x13riak_yokozuna.proto\"?\n\x10RpbYokozunaIndex\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0e\n\x06schema\x18\x02 \x01(\x0c\x12\r\n\x05n_val\x18\x03 \x01(\r\"&\n\x16RpbYokozunaIndexGetReq\x12\x0c\n\x04name\x18\x01 \x01(\x0c\";\n\x17RpbYokozunaIndexGetResp\x12 \n\x05index\x18\x01 \x03(\x0b\x32\x11.RpbYokozunaIndex\"K\n\x16RpbYokozunaIndexPutReq\x12 \n\x05index\x18\x01 \x02(\x0b\x32\x11.RpbYokozunaIndex\x12\x0f\n\x07timeout\x18\x02 \x01(\r\")\n\x19RpbYokozunaIndexDeleteReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\"2\n\x11RpbYokozunaSchema\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"=\n\x17RpbYokozunaSchemaPutReq\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchema\"\'\n\x17RpbYokozunaSchemaGetReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\">\n\x18RpbYokozunaSchemaGetResp\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchemaB)\n\x17\x63om.basho.riak.protobufB\x0eRiakYokozunaPB') + + + + +_RPBYOKOZUNAINDEX = _descriptor.Descriptor( + name='RpbYokozunaIndex', + full_name='RpbYokozunaIndex', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaIndex.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='schema', full_name='RpbYokozunaIndex.schema', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbYokozunaIndex.n_val', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=23, + serialized_end=86, +) + + +_RPBYOKOZUNAINDEXGETREQ = _descriptor.Descriptor( + name='RpbYokozunaIndexGetReq', + full_name='RpbYokozunaIndexGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaIndexGetReq.name', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=88, + serialized_end=126, +) + + +_RPBYOKOZUNAINDEXGETRESP = _descriptor.Descriptor( + name='RpbYokozunaIndexGetResp', + full_name='RpbYokozunaIndexGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='index', full_name='RpbYokozunaIndexGetResp.index', index=0, + number=1, 
type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=128, + serialized_end=187, +) + + +_RPBYOKOZUNAINDEXPUTREQ = _descriptor.Descriptor( + name='RpbYokozunaIndexPutReq', + full_name='RpbYokozunaIndexPutReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='index', full_name='RpbYokozunaIndexPutReq.index', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbYokozunaIndexPutReq.timeout', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=189, + serialized_end=264, +) + + +_RPBYOKOZUNAINDEXDELETEREQ = _descriptor.Descriptor( + name='RpbYokozunaIndexDeleteReq', + full_name='RpbYokozunaIndexDeleteReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaIndexDeleteReq.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=266, + serialized_end=307, +) + + +_RPBYOKOZUNASCHEMA = _descriptor.Descriptor( + name='RpbYokozunaSchema', + full_name='RpbYokozunaSchema', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaSchema.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content', full_name='RpbYokozunaSchema.content', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=309, + serialized_end=359, +) + + +_RPBYOKOZUNASCHEMAPUTREQ = _descriptor.Descriptor( + name='RpbYokozunaSchemaPutReq', + full_name='RpbYokozunaSchemaPutReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='schema', full_name='RpbYokozunaSchemaPutReq.schema', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=361, + serialized_end=422, +) + + +_RPBYOKOZUNASCHEMAGETREQ = _descriptor.Descriptor( + name='RpbYokozunaSchemaGetReq', + full_name='RpbYokozunaSchemaGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaSchemaGetReq.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=424, + serialized_end=463, +) + + +_RPBYOKOZUNASCHEMAGETRESP = _descriptor.Descriptor( + name='RpbYokozunaSchemaGetResp', + full_name='RpbYokozunaSchemaGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='schema', full_name='RpbYokozunaSchemaGetResp.schema', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=465, + serialized_end=527, +) + +_RPBYOKOZUNAINDEXGETRESP.fields_by_name['index'].message_type = _RPBYOKOZUNAINDEX +_RPBYOKOZUNAINDEXPUTREQ.fields_by_name['index'].message_type = _RPBYOKOZUNAINDEX +_RPBYOKOZUNASCHEMAPUTREQ.fields_by_name['schema'].message_type = _RPBYOKOZUNASCHEMA +_RPBYOKOZUNASCHEMAGETRESP.fields_by_name['schema'].message_type = _RPBYOKOZUNASCHEMA +DESCRIPTOR.message_types_by_name['RpbYokozunaIndex'] = _RPBYOKOZUNAINDEX +DESCRIPTOR.message_types_by_name['RpbYokozunaIndexGetReq'] = _RPBYOKOZUNAINDEXGETREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaIndexGetResp'] = _RPBYOKOZUNAINDEXGETRESP +DESCRIPTOR.message_types_by_name['RpbYokozunaIndexPutReq'] = _RPBYOKOZUNAINDEXPUTREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaIndexDeleteReq'] = _RPBYOKOZUNAINDEXDELETEREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaSchema'] = _RPBYOKOZUNASCHEMA +DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaPutReq'] = _RPBYOKOZUNASCHEMAPUTREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetReq'] = _RPBYOKOZUNASCHEMAGETREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetResp'] = _RPBYOKOZUNASCHEMAGETRESP + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndex(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEX + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndex) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexGetReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXGETREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexGetResp(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXGETRESP + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexPutReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXPUTREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexPutReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class 
RpbYokozunaIndexDeleteReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXDELETEREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexDeleteReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchema(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMA + + # @@protoc_insertion_point(class_scope:RpbYokozunaSchema) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaPutReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAPUTREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaPutReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaGetReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAGETREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaGetResp(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAGETRESP + + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetResp) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\016RiakYokozunaPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index d85447ff..698a64ba 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -12,7 +12,7 @@ test_server.start() try: - __import__('riak_pb') + __import__('riak.riak_pb') HAVE_PROTO = True except ImportError: HAVE_PROTO = False @@ -60,7 +60,9 @@ SECURITY_CERT_PASSWD = os.environ.get('RIAK_TEST_SECURITY_CERT_PASSWD', 'certpass') -SECURITY_CIPHERS = 'DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:AES128-SHA256:AES128-SHA:AES256-SHA256:AES256-SHA:RC4-SHA' +SECURITY_CIPHERS = 'DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:' + \ + 'DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:AES128-SHA256:' + \ + 'AES128-SHA:AES256-SHA256:AES256-SHA:RC4-SHA' SECURITY_CREDS = None if RUN_SECURITY: diff --git a/riak/tests/test_all.py b/riak/tests/test_all.py index 2a6ef8cc..04d26a24 100644 --- a/riak/tests/test_all.py +++ b/riak/tests/test_all.py @@ -349,9 +349,9 @@ def test_pool_close(self): # Do something to add to the connection pool self.test_multiget_bucket() if self.client.protocol == 'pbc': - self.assertGreater(len(self.client._pb_pool.resources), 1) + self.assertGreaterEqual(len(self.client._pb_pool.resources), 1) else: - self.assertGreater(len(self.client._http_pool.resources), 1) + self.assertGreaterEqual(len(self.client._http_pool.resources), 1) # Now close them all up self.client.close() self.assertEqual(len(self.client._http_pool.resources), 0) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index d1b28298..a8befb04 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -489,7 +489,11 @@ def test_tombstone_siblings(self): vals = set(self.generate_siblings(obj, count=4)) obj = bucket.get(self.key_name) - self.assertEqual(len(obj.siblings), 5) + + # TODO this used to be 5, only + siblen = len(obj.siblings) + self.assertTrue(siblen == 4 or siblen == 5) + non_tombstones = 0 for sib in obj.siblings: if sib.exists: diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index f0489039..00f9b019 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -21,7 +21,7 @@ from riak.tests import RUN_SECURITY, SECURITY_USER, SECURITY_PASSWD, \ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ SECURITY_CERT_USER, SECURITY_CERT_PASSWD, 
SECURITY_BAD_CERT, \ - SECURITY_CREDS, SECURITY_CIPHERS + SECURITY_CIPHERS from riak.security import SecurityCreds if sys.version_info < (2, 7): unittest = __import__('unittest2') @@ -32,7 +32,15 @@ class SecurityTests(object): @unittest.skipIf(RUN_SECURITY, 'RUN_SECURITY is set') def test_security_disabled(self): - client = self.create_client(credentials=SECURITY_CREDS) + """ + Test valid security settings without security enabled + """ + topts = {'timeout': 1} + # NB: can't use SECURITY_CREDS here since they won't be set + # if RUN_SECURITY is UN-set + creds = SecurityCreds(username='foo', password='bar') + client = self.create_client(credentials=creds, + transport_options=topts) myBucket = client.bucket('test') val1 = "foobar" key1 = myBucket.new('x', data=val1) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 02c53ca5..ec5f9a58 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -1,25 +1,13 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" -import riak_pb +import riak.riak_pb +import riak.riak_pb.riak_pb2 +import riak.riak_pb.riak_dt_pb2 +import riak.riak_pb.riak_kv_pb2 + from riak import RiakError from riak.content import RiakContent from riak.util import decode_index_value, str_to_bytes, bytes_to_str from riak.multidict import MultiDict + from six import string_types, PY2 @@ -30,10 +18,12 @@ def _invert(d): out[value] = key return out -REPL_TO_PY = {riak_pb.RpbBucketProps.FALSE: False, - riak_pb.RpbBucketProps.TRUE: True, - riak_pb.RpbBucketProps.REALTIME: 'realtime', - riak_pb.RpbBucketProps.FULLSYNC: 'fullsync'} +REPL_TO_PY = { + riak.riak_pb.riak_pb2.RpbBucketProps.FALSE: False, + riak.riak_pb.riak_pb2.RpbBucketProps.TRUE: True, + riak.riak_pb.riak_pb2.RpbBucketProps.REALTIME: 'realtime', + riak.riak_pb.riak_pb2.RpbBucketProps.FULLSYNC: 'fullsync' +} REPL_TO_PB = _invert(REPL_TO_PY) @@ -58,22 +48,22 @@ def _invert(d): QUORUM_PROPS = ['r', 'pr', 'w', 'pw', 'dw', 'rw'] MAP_FIELD_TYPES = { - riak_pb.MapField.COUNTER: 'counter', - riak_pb.MapField.SET: 'set', - riak_pb.MapField.REGISTER: 'register', - riak_pb.MapField.FLAG: 'flag', - riak_pb.MapField.MAP: 'map', - 'counter': riak_pb.MapField.COUNTER, - 'set': riak_pb.MapField.SET, - 'register': riak_pb.MapField.REGISTER, - 'flag': riak_pb.MapField.FLAG, - 'map': riak_pb.MapField.MAP + riak.riak_pb.riak_dt_pb2.MapField.COUNTER: 'counter', + riak.riak_pb.riak_dt_pb2.MapField.SET: 'set', + riak.riak_pb.riak_dt_pb2.MapField.REGISTER: 'register', + riak.riak_pb.riak_dt_pb2.MapField.FLAG: 'flag', + riak.riak_pb.riak_dt_pb2.MapField.MAP: 'map', + 'counter': riak.riak_pb.riak_dt_pb2.MapField.COUNTER, + 'set': riak.riak_pb.riak_dt_pb2.MapField.SET, + 'register': riak.riak_pb.riak_dt_pb2.MapField.REGISTER, + 'flag': riak.riak_pb.riak_dt_pb2.MapField.FLAG, + 'map': riak.riak_pb.riak_dt_pb2.MapField.MAP } DT_FETCH_TYPES = { - riak_pb.DtFetchResp.COUNTER: 'counter', - riak_pb.DtFetchResp.SET: 'set', - riak_pb.DtFetchResp.MAP: 'map' + 
riak.riak_pb.riak_dt_pb2.DtFetchResp.COUNTER: 'counter', + riak.riak_pb.riak_dt_pb2.DtFetchResp.SET: 'set', + riak.riak_pb.riak_dt_pb2.DtFetchResp.MAP: 'map' } @@ -83,7 +73,7 @@ class RiakPbcCodec(object): """ def __init__(self, **unused_args): - if riak_pb is None: + if riak.riak_pb is None: raise NotImplementedError("this transport is not available") super(RiakPbcCodec, self).__init__(**unused_args) @@ -141,7 +131,7 @@ def _decode_content(self, rpb_content, sibling): a RiakObject. :param rpb_content: a single RpbContent message - :type rpb_content: riak_pb.RpbContent + :type rpb_content: riak.riak_pb.riak_pb2.RpbContent :param sibling: a RiakContent sibling container :type sibling: RiakContent :rtype: RiakContent @@ -186,7 +176,7 @@ def _encode_content(self, robj, rpb_content): :param robj: a RiakObject :type robj: RiakObject :param rpb_content: the protobuf message to fill - :type rpb_content: riak_pb.RpbContent + :type rpb_content: riak.riak_pb.riak_pb2.RpbContent """ if robj.content_type: rpb_content.content_type = str_to_bytes(robj.content_type) @@ -228,7 +218,7 @@ def _decode_link(self, link): Decodes an RpbLink message into a tuple :param link: an RpbLink message - :type link: riak_pb.RpbLink + :type link: riak.riak_pb.riak_pb2.RpbLink :rtype tuple """ @@ -268,7 +258,7 @@ def _encode_bucket_props(self, props, msg): :param props: bucket properties :type props: dict :param msg: the protobuf message to fill - :type msg: riak_pb.RpbSetBucketReq + :type msg: riak.riak_pb.riak_pb2.RpbSetBucketReq """ for prop in NORMAL_PROPS: if prop in props and props[prop] is not None: @@ -301,7 +291,7 @@ def _decode_bucket_props(self, msg): Decodes the protobuf bucket properties message into a dict. :param msg: the protobuf message to decode - :type msg: riak_pb.RpbBucketProps + :type msg: riak.riak_pb.riak_pb2.RpbBucketProps :rtype dict """ props = {} @@ -331,7 +321,7 @@ def _decode_modfun(self, modfun): 'fun' keys. Used in bucket properties. :param modfun: the protobuf message to decode - :type modfun: riak_pb.RpbModFun + :type modfun: riak.riak_pb.riak_pb2.RpbModFun :rtype dict """ return {'mod': bytes_to_str(modfun.module), @@ -345,11 +335,11 @@ def _encode_modfun(self, props, msg=None): :param props: the module/function pair :type props: dict :param msg: the protobuf message to fill - :type msg: riak_pb.RpbModFun - :rtype riak_pb.RpbModFun + :type msg: riak.riak_pb.riak_pb2.RpbModFun + :rtype riak.riak_pb.riak_pb2.RpbModFun """ if msg is None: - msg = riak_pb.RpbModFun() + msg = riak.riak_pb.riak_pb2.RpbModFun() msg.module = str_to_bytes(props['mod']) msg.function = str_to_bytes(props['fun']) return msg @@ -384,7 +374,7 @@ def _decode_hook(self, hook): bucket properties. 
:param hook: the hook to decode - :type hook: riak_pb.RpbCommitHook + :type hook: riak.riak_pb.riak_pb2.RpbCommitHook :rtype dict """ if hook.HasField('modfun'): @@ -400,8 +390,8 @@ def _encode_hook(self, hook, msg): :param hook: the hook to encode :type hook: dict :param msg: the protobuf message to fill - :type msg: riak_pb.RpbCommitHook - :rtype riak_pb.RpbCommitHook + :type msg: riak.riak_pb.riak_pb2.RpbCommitHook + :rtype riak.riak_pb.riak_pb2.RpbCommitHook """ if 'name' in hook: msg.name = str_to_bytes(hook['name']) @@ -434,17 +424,18 @@ def _encode_index_req(self, bucket, index, startkey, endkey=None, :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string - :rtype riak_pb.RpbIndexReq + :rtype riak.riak_pb.riak_kv_pb2.RpbIndexReq """ - req = riak_pb.RpbIndexReq(bucket=str_to_bytes(bucket.name), - index=str_to_bytes(index)) + req = riak.riak_pb.riak_kv_pb2.RpbIndexReq( + bucket=str_to_bytes(bucket.name), + index=str_to_bytes(index)) self._add_bucket_type(req, bucket.bucket_type) if endkey is not None: - req.qtype = riak_pb.RpbIndexReq.range + req.qtype = riak.riak_pb.riak_kv_pb2.RpbIndexReq.range req.range_min = str_to_bytes(str(startkey)) req.range_max = str_to_bytes(str(endkey)) else: - req.qtype = riak_pb.RpbIndexReq.eq + req.qtype = riak.riak_pb.riak_kv_pb2.RpbIndexReq.eq req.key = str_to_bytes(str(startkey)) if return_terms is not None: req.return_terms = return_terms @@ -466,7 +457,7 @@ def _decode_search_index(self, index): Fills an RpbYokozunaIndex message with the appropriate data. :param index: a yz index message - :type index: riak_pb.RpbYokozunaIndex + :type index: riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndex :rtype dict """ result = {} @@ -620,16 +611,17 @@ def _encode_map_update(self, dtype, msg, op): msg.register_op = str_to_bytes(op[1]) elif dtype == 'flag': if op == 'enable': - msg.flag_op = riak_pb.MapUpdate.ENABLE + msg.flag_op = riak.riak_pb.riak_dt_pb2.MapUpdate.ENABLE else: - msg.flag_op = riak_pb.MapUpdate.DISABLE + msg.flag_op = riak.riak_pb.riak_dt_pb2.MapUpdate.DISABLE def _decode_preflist(self, item): """ Decodes a preflist response :param preflist: a bucket/key preflist - :type preflist: list of riak_pb.RpbBucketKeyPreflistItem + :type preflist: list of + riak.riak_pb.riak_kv_pb2.RpbBucketKeyPreflistItem :rtype dict """ result = {'partition': item.partition, diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 0bc58232..f864dc1c 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -1,34 +1,12 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - import socket import struct -import riak_pb +import riak.riak_pb.riak_pb2 +import riak.riak_pb.messages + from riak.security import SecurityError, USE_STDLIB_SSL from riak import RiakError -from riak_pb.messages import ( - MESSAGE_CLASSES, - MSG_CODE_ERROR_RESP, - MSG_CODE_START_TLS, - MSG_CODE_AUTH_REQ, - MSG_CODE_AUTH_RESP -) from riak.util import bytes_to_str, str_to_bytes + from six import PY2 if not USE_STDLIB_SSL: from OpenSSL.SSL import Connection @@ -90,8 +68,9 @@ def _starttls(self): Exchange a STARTTLS message with Riak to initiate secure communications return True is Riak responds with a STARTTLS response, False otherwise """ - msg_code, _ = self._non_connect_request(MSG_CODE_START_TLS) - if msg_code == MSG_CODE_START_TLS: + msg_code, _ = self._non_connect_request( + riak.riak_pb.messages.MSG_CODE_START_TLS) + if msg_code == riak.riak_pb.messages.MSG_CODE_START_TLS: return True else: return False @@ -103,12 +82,14 @@ def _auth(self): Note: Riak will sleep for a short period of time upon a failed auth request/response to prevent denial of service attacks """ - req = riak_pb.RpbAuthReq() + req = riak.riak_pb.riak_pb2.RpbAuthReq() req.user = str_to_bytes(self._client._credentials.username) req.password = str_to_bytes(self._client._credentials.password) - msg_code, _ = self._non_connect_request(MSG_CODE_AUTH_REQ, req, - MSG_CODE_AUTH_RESP) - if msg_code == MSG_CODE_AUTH_RESP: + msg_code, _ = self._non_connect_request( + riak.riak_pb.messages.MSG_CODE_AUTH_REQ, + req, + riak.riak_pb.messages.MSG_CODE_AUTH_RESP) + if msg_code == riak.riak_pb.messages.MSG_CODE_AUTH_RESP: return True else: return False @@ -173,10 +154,10 @@ def _ssl_handshake(self): def _recv_msg(self, expect=None): self._recv_pkt() msg_code, = struct.unpack("B", self._inbuf[:1]) - if msg_code is MSG_CODE_ERROR_RESP: + if msg_code is riak.riak_pb.messages.MSG_CODE_ERROR_RESP: err = self._parse_msg(msg_code, self._inbuf[1:]) raise RiakError(bytes_to_str(err.errmsg)) - elif msg_code in MESSAGE_CLASSES: + elif msg_code in riak.riak_pb.messages.MESSAGE_CLASSES: msg = self._parse_msg(msg_code, self._inbuf[1:]) else: raise Exception("unknown msg code %s" % msg_code) @@ -233,7 +214,7 @@ def close(self): def _parse_msg(self, code, packet): try: - pbclass = MESSAGE_CLASSES[code] + pbclass = riak.riak_pb.messages.MESSAGE_CLASSES[code] except KeyError: pbclass = None diff --git a/riak/transports/pbc/stream.py b/riak/transports/pbc/stream.py index 88e7abac..5cb71cbb 100644 --- a/riak/transports/pbc/stream.py +++ b/riak/transports/pbc/stream.py @@ -1,29 +1,5 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - - import json -from riak_pb.messages import ( - MSG_CODE_LIST_KEYS_RESP, - MSG_CODE_MAP_RED_RESP, - MSG_CODE_LIST_BUCKETS_RESP, - MSG_CODE_INDEX_RESP -) +import riak.riak_pb.messages from riak.util import decode_index_value, bytes_to_str from riak.client.index_page import CONTINUATION from six import PY2 @@ -89,7 +65,7 @@ class RiakPbcKeyStream(RiakPbcStream): Used internally by RiakPbcTransport to implement key-list streams. """ - _expect = MSG_CODE_LIST_KEYS_RESP + _expect = riak.riak_pb.messages.MSG_CODE_LIST_KEYS_RESP def next(self): response = super(RiakPbcKeyStream, self).next() @@ -110,7 +86,7 @@ class RiakPbcMapredStream(RiakPbcStream): streams. """ - _expect = MSG_CODE_MAP_RED_RESP + _expect = riak.riak_pb.messages.MSG_CODE_MAP_RED_RESP def next(self): response = super(RiakPbcMapredStream, self).next() @@ -130,7 +106,7 @@ class RiakPbcBucketStream(RiakPbcStream): Used internally by RiakPbcTransport to implement key-list streams. """ - _expect = MSG_CODE_LIST_BUCKETS_RESP + _expect = riak.riak_pb.messages.MSG_CODE_LIST_BUCKETS_RESP def next(self): response = super(RiakPbcBucketStream, self).next() @@ -151,7 +127,7 @@ class RiakPbcIndexStream(RiakPbcStream): streams. """ - _expect = MSG_CODE_INDEX_RESP + _expect = riak.riak_pb.messages.MSG_CODE_INDEX_RESP def __init__(self, transport, index, return_terms=False): super(RiakPbcIndexStream, self).__init__(transport) diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index e385c698..a158a284 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -1,25 +1,7 @@ -""" -Copyright 2015 Basho Technologies, Inc. -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - -import riak_pb +import riak.riak_pb.messages +import riak.riak_pb.riak_pb2 +import riak.riak_pb.riak_kv_pb2 + from riak import RiakError from riak.transports.transport import RiakTransport from riak.riak_object import VClock @@ -32,56 +14,6 @@ from riak.transports.pbc.codec import RiakPbcCodec from six import PY2, PY3 -from riak_pb.messages import ( - MSG_CODE_PING_REQ, - MSG_CODE_PING_RESP, - MSG_CODE_GET_CLIENT_ID_REQ, - MSG_CODE_GET_CLIENT_ID_RESP, - MSG_CODE_SET_CLIENT_ID_REQ, - MSG_CODE_SET_CLIENT_ID_RESP, - MSG_CODE_GET_SERVER_INFO_REQ, - MSG_CODE_GET_SERVER_INFO_RESP, - MSG_CODE_GET_REQ, - MSG_CODE_GET_RESP, - MSG_CODE_PUT_REQ, - MSG_CODE_PUT_RESP, - MSG_CODE_DEL_REQ, - MSG_CODE_DEL_RESP, - MSG_CODE_LIST_BUCKETS_REQ, - MSG_CODE_LIST_BUCKETS_RESP, - MSG_CODE_LIST_KEYS_REQ, - MSG_CODE_GET_BUCKET_REQ, - MSG_CODE_GET_BUCKET_RESP, - MSG_CODE_SET_BUCKET_REQ, - MSG_CODE_SET_BUCKET_RESP, - MSG_CODE_GET_BUCKET_TYPE_REQ, - MSG_CODE_SET_BUCKET_TYPE_REQ, - MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, - MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP, - MSG_CODE_MAP_RED_REQ, - MSG_CODE_INDEX_REQ, - MSG_CODE_INDEX_RESP, - MSG_CODE_SEARCH_QUERY_REQ, - MSG_CODE_SEARCH_QUERY_RESP, - MSG_CODE_RESET_BUCKET_REQ, - MSG_CODE_RESET_BUCKET_RESP, - MSG_CODE_COUNTER_UPDATE_REQ, - MSG_CODE_COUNTER_UPDATE_RESP, - MSG_CODE_COUNTER_GET_REQ, - MSG_CODE_COUNTER_GET_RESP, - MSG_CODE_YOKOZUNA_INDEX_GET_REQ, - MSG_CODE_YOKOZUNA_INDEX_GET_RESP, - MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, - MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, - MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, - MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP, - MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, - MSG_CODE_DT_FETCH_REQ, - MSG_CODE_DT_FETCH_RESP, - MSG_CODE_DT_UPDATE_REQ, - MSG_CODE_DT_UPDATE_RESP -) - class RiakPbcTransport(RiakTransport, RiakPbcConnection, RiakPbcCodec): """ @@ -114,8 +46,8 @@ def ping(self): Ping the remote server """ - msg_code, msg = self._request(MSG_CODE_PING_REQ) - if msg_code == MSG_CODE_PING_RESP: + msg_code, msg = self._request(riak.riak_pb.messages.MSG_CODE_PING_REQ) + if msg_code == riak.riak_pb.messages.MSG_CODE_PING_RESP: return True else: return False @@ -124,22 +56,25 @@ def get_server_info(self): """ Get information about the server """ - msg_code, resp = self._request(MSG_CODE_GET_SERVER_INFO_REQ, - expect=MSG_CODE_GET_SERVER_INFO_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, + expect=riak.riak_pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) return {'node': bytes_to_str(resp.node), 'server_version': bytes_to_str(resp.server_version)} def _get_client_id(self): - msg_code, resp = self._request(MSG_CODE_GET_CLIENT_ID_REQ, - expect=MSG_CODE_GET_CLIENT_ID_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_GET_CLIENT_ID_REQ, + expect=riak.riak_pb.messages.MSG_CODE_GET_CLIENT_ID_RESP) return bytes_to_str(resp.client_id) def _set_client_id(self, client_id): - req = riak_pb.RpbSetClientIdReq() + req = riak.riak_pb.riak_kv_pb2.RpbSetClientIdReq() req.client_id = str_to_bytes(client_id) - msg_code, resp = self._request(MSG_CODE_SET_CLIENT_ID_REQ, req, - MSG_CODE_SET_CLIENT_ID_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_SET_CLIENT_ID_REQ, req, + riak.riak_pb.messages.MSG_CODE_SET_CLIENT_ID_RESP) self._client_id = client_id @@ -153,7 +88,7 @@ def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, """ bucket = robj.bucket - req = riak_pb.RpbGetReq() + req = riak.riak_pb.riak_kv_pb2.RpbGetReq() if r: req.r = self._encode_quorum(r) if self.quorum_controls(): @@ -173,8 
+108,9 @@ def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, req.key = str_to_bytes(robj.key) - msg_code, resp = self._request(MSG_CODE_GET_REQ, req, - MSG_CODE_GET_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_GET_REQ, req, + riak.riak_pb.messages.MSG_CODE_GET_RESP) if resp is not None: if resp.HasField('vclock'): @@ -193,7 +129,7 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, if_none_match=False, timeout=None): bucket = robj.bucket - req = riak_pb.RpbPutReq() + req = riak.riak_pb.riak_kv_pb2.RpbPutReq() if w: req.w = self._encode_quorum(w) if dw: @@ -218,8 +154,9 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, self._encode_content(robj, req.content) - msg_code, resp = self._request(MSG_CODE_PUT_REQ, req, - MSG_CODE_PUT_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_PUT_REQ, req, + riak.riak_pb.messages.MSG_CODE_PUT_RESP) if resp is not None: if resp.HasField('key'): @@ -235,7 +172,7 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): - req = riak_pb.RpbDelReq() + req = riak.riak_pb.riak_kv_pb2.RpbDelReq() if rw: req.rw = self._encode_quorum(rw) if r: @@ -264,8 +201,9 @@ def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, self._add_bucket_type(req, bucket.bucket_type) req.key = str_to_bytes(robj.key) - msg_code, resp = self._request(MSG_CODE_DEL_REQ, req, - MSG_CODE_DEL_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_DEL_REQ, req, + riak.riak_pb.messages.MSG_CODE_DEL_RESP) return self def get_keys(self, bucket, timeout=None): @@ -284,13 +222,13 @@ def stream_keys(self, bucket, timeout=None): Streams keys from a bucket, returning an iterator that yields lists of keys. 
""" - req = riak_pb.RpbListKeysReq() + req = riak.riak_pb.riak_kv_pb2.RpbListKeysReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) if self.client_timeouts() and timeout: req.timeout = timeout - self._send_msg(MSG_CODE_LIST_KEYS_REQ, req) + self._send_msg(riak.riak_pb.messages.MSG_CODE_LIST_KEYS_REQ, req) return RiakPbcKeyStream(self) @@ -298,14 +236,15 @@ def get_buckets(self, bucket_type=None, timeout=None): """ Serialize bucket listing request and deserialize response """ - req = riak_pb.RpbListBucketsReq() + req = riak.riak_pb.riak_kv_pb2.RpbListBucketsReq() self._add_bucket_type(req, bucket_type) if self.client_timeouts() and timeout: req.timeout = timeout - msg_code, resp = self._request(MSG_CODE_LIST_BUCKETS_REQ, req, - MSG_CODE_LIST_BUCKETS_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req, + riak.riak_pb.messages.MSG_CODE_LIST_BUCKETS_RESP) return resp.buckets def stream_buckets(self, bucket_type=None, timeout=None): @@ -317,7 +256,7 @@ def stream_buckets(self, bucket_type=None, timeout=None): raise NotImplementedError('Streaming list-buckets is not ' 'supported') - req = riak_pb.RpbListBucketsReq() + req = riak.riak_pb.riak_kv_pb2.RpbListBucketsReq() req.stream = True self._add_bucket_type(req, bucket_type) # Bucket streaming landed in the same release as timeouts, so @@ -325,7 +264,7 @@ def stream_buckets(self, bucket_type=None, timeout=None): if timeout: req.timeout = timeout - self._send_msg(MSG_CODE_LIST_BUCKETS_REQ, req) + self._send_msg(riak.riak_pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req) return RiakPbcBucketStream(self) @@ -333,12 +272,13 @@ def get_bucket_props(self, bucket): """ Serialize bucket property request and deserialize response """ - req = riak_pb.RpbGetBucketReq() + req = riak.riak_pb.riak_pb2.RpbGetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) - msg_code, resp = self._request(MSG_CODE_GET_BUCKET_REQ, req, - MSG_CODE_GET_BUCKET_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_GET_BUCKET_REQ, req, + riak.riak_pb.messages.MSG_CODE_GET_BUCKET_RESP) return self._decode_bucket_props(resp.props) @@ -346,7 +286,7 @@ def set_bucket_props(self, bucket, props): """ Serialize set bucket property request and deserialize response """ - req = riak_pb.RpbSetBucketReq() + req = riak.riak_pb.riak_pb2.RpbSetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) @@ -358,8 +298,9 @@ def set_bucket_props(self, bucket, props): self._encode_bucket_props(props, req) - msg_code, resp = self._request(MSG_CODE_SET_BUCKET_REQ, req, - MSG_CODE_SET_BUCKET_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_SET_BUCKET_REQ, req, + riak.riak_pb.messages.MSG_CODE_SET_BUCKET_RESP) return True def clear_bucket_props(self, bucket): @@ -369,11 +310,12 @@ def clear_bucket_props(self, bucket): if not self.pb_clear_bucket_props(): return False - req = riak_pb.RpbResetBucketReq() + req = riak.riak_pb.riak_pb2.RpbResetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) - self._request(MSG_CODE_RESET_BUCKET_REQ, req, - MSG_CODE_RESET_BUCKET_RESP) + self._request( + riak.riak_pb.messages.MSG_CODE_RESET_BUCKET_REQ, req, + riak.riak_pb.messages.MSG_CODE_RESET_BUCKET_RESP) return True def get_bucket_type_props(self, bucket_type): @@ -382,11 +324,12 @@ def get_bucket_type_props(self, bucket_type): """ self._check_bucket_types(bucket_type) - 
req = riak_pb.RpbGetBucketTypeReq() + req = riak.riak_pb.riak_pb2.RpbGetBucketTypeReq() req.type = str_to_bytes(bucket_type.name) - msg_code, resp = self._request(MSG_CODE_GET_BUCKET_TYPE_REQ, req, - MSG_CODE_GET_BUCKET_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ, req, + riak.riak_pb.messages.MSG_CODE_GET_BUCKET_RESP) return self._decode_bucket_props(resp.props) @@ -396,13 +339,15 @@ def set_bucket_type_props(self, bucket_type, props): """ self._check_bucket_types(bucket_type) - req = riak_pb.RpbSetBucketTypeReq() + req = riak.riak_pb.riak_pb2.RpbSetBucketTypeReq() req.type = str_to_bytes(bucket_type.name) self._encode_bucket_props(props, req) - msg_code, resp = self._request(MSG_CODE_SET_BUCKET_TYPE_REQ, req, - MSG_CODE_SET_BUCKET_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ, req, + riak.riak_pb.messages.MSG_CODE_SET_BUCKET_RESP) + return True def mapred(self, inputs, query, timeout=None): @@ -428,11 +373,11 @@ def stream_mapred(self, inputs, query, timeout=None): # Construct the job, optionally set the timeout... content = self._construct_mapred_json(inputs, query, timeout) - req = riak_pb.RpbMapRedReq() + req = riak.riak_pb.riak_kv_pb2.RpbMapRedReq() req.request = str_to_bytes(content) req.content_type = str_to_bytes("application/json") - self._send_msg(MSG_CODE_MAP_RED_REQ, req) + self._send_msg(riak.riak_pb.messages.MSG_CODE_MAP_RED_REQ, req) return RiakPbcMapredStream(self) @@ -450,8 +395,9 @@ def get_index(self, bucket, index, startkey, endkey=None, return_terms, max_results, continuation, timeout, term_regex) - msg_code, resp = self._request(MSG_CODE_INDEX_REQ, req, - MSG_CODE_INDEX_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_INDEX_REQ, req, + riak.riak_pb.messages.MSG_CODE_INDEX_RESP) if return_terms and resp.results: results = [(decode_index_value(index, pair.key), @@ -483,7 +429,7 @@ def stream_index(self, bucket, index, startkey, endkey=None, timeout, term_regex) req.stream = True - self._send_msg(MSG_CODE_INDEX_REQ, req) + self._send_msg(riak.riak_pb.messages.MSG_CODE_INDEX_REQ, req) return RiakPbcIndexStream(self, index, return_terms) @@ -493,27 +439,31 @@ def create_search_index(self, index, schema=None, n_val=None, raise NotImplementedError("Search 2.0 administration is not " "supported for this version") index = str_to_bytes(index) - idx = riak_pb.RpbYokozunaIndex(name=index) + idx = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index) if schema: idx.schema = str_to_bytes(schema) if n_val: idx.n_val = n_val - req = riak_pb.RpbYokozunaIndexPutReq(index=idx) + req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx) if timeout is not None: req.timeout = timeout - self._request(MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, req, - MSG_CODE_PUT_RESP) + self._request( + riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, req, + riak.riak_pb.messages.MSG_CODE_PUT_RESP) + return True def get_search_index(self, index): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak_pb.RpbYokozunaIndexGetReq(name=str_to_bytes(index)) + req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq( + name=str_to_bytes(index)) - msg_code, resp = self._request(MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, - MSG_CODE_YOKOZUNA_INDEX_GET_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, + 
riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) if len(resp.index) > 0: return self._decode_search_index(resp.index[0]) else: @@ -523,10 +473,11 @@ def list_search_indexes(self): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak_pb.RpbYokozunaIndexGetReq() + req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() - msg_code, resp = self._request(MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, - MSG_CODE_YOKOZUNA_INDEX_GET_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, + riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) return [self._decode_search_index(index) for index in resp.index] @@ -534,10 +485,12 @@ def delete_search_index(self, index): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak_pb.RpbYokozunaIndexDeleteReq(name=str_to_bytes(index)) + req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( + name=str_to_bytes(index)) - self._request(MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, req, - MSG_CODE_DEL_RESP) + self._request( + riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, req, + riak.riak_pb.messages.MSG_CODE_DEL_RESP) return True @@ -545,22 +498,29 @@ def create_search_schema(self, schema, content): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - scma = riak_pb.RpbYokozunaSchema(name=str_to_bytes(schema), - content=str_to_bytes(content)) - req = riak_pb.RpbYokozunaSchemaPutReq(schema=scma) + scma = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchema( + name=str_to_bytes(schema), + content=str_to_bytes(content)) + req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq( + schema=scma) + + self._request( + riak.riak_pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, req, + riak.riak_pb.messages.MSG_CODE_PUT_RESP) - self._request(MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, req, - MSG_CODE_PUT_RESP) return True def get_search_schema(self, schema): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak_pb.RpbYokozunaSchemaGetReq(name=str_to_bytes(schema)) + req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq( + name=str_to_bytes(schema)) + + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, req, + riak.riak_pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP) - msg_code, resp = self._request(MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, req, - MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP) result = {} result['name'] = bytes_to_str(resp.schema.name) result['content'] = bytes_to_str(resp.schema.content) @@ -573,12 +533,14 @@ def search(self, index, query, **params): if PY2 and isinstance(query, unicode): # noqa query = query.encode('utf8') - req = riak_pb.RpbSearchQueryReq(index=str_to_bytes(index), - q=str_to_bytes(query)) + req = riak.riak_pb.riak_search_pb2.RpbSearchQueryReq( + index=str_to_bytes(index), + q=str_to_bytes(query)) self._encode_search_query(req, params) - msg_code, resp = self._request(MSG_CODE_SEARCH_QUERY_REQ, req, - MSG_CODE_SEARCH_QUERY_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_SEARCH_QUERY_REQ, req, + riak.riak_pb.messages.MSG_CODE_SEARCH_QUERY_RESP) result = {} if resp.HasField('max_score'): @@ -597,7 +559,7 @@ def get_counter(self, bucket, key, **params): if not self.counters(): raise NotImplementedError("Counters are not 
supported") - req = riak_pb.RpbCounterGetReq() + req = riak.riak_pb.riak_kv_pb2.RpbCounterGetReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) if params.get('r') is not None: @@ -609,8 +571,9 @@ def get_counter(self, bucket, key, **params): if params.get('notfound_ok') is not None: req.notfound_ok = params['notfound_ok'] - msg_code, resp = self._request(MSG_CODE_COUNTER_GET_REQ, req, - MSG_CODE_COUNTER_GET_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_COUNTER_GET_REQ, req, + riak.riak_pb.messages.MSG_CODE_COUNTER_GET_RESP) if resp.HasField('value'): return resp.value else: @@ -625,7 +588,7 @@ def update_counter(self, bucket, key, value, **params): if not self.counters(): raise NotImplementedError("Counters are not supported") - req = riak_pb.RpbCounterUpdateReq() + req = riak.riak_pb.riak_kv_pb2.RpbCounterUpdateReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) req.amount = value @@ -638,8 +601,9 @@ def update_counter(self, bucket, key, value, **params): if params.get('returnvalue') is not None: req.returnvalue = params['returnvalue'] - msg_code, resp = self._request(MSG_CODE_COUNTER_UPDATE_REQ, req, - MSG_CODE_COUNTER_UPDATE_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_COUNTER_UPDATE_REQ, req, + riak.riak_pb.messages.MSG_CODE_COUNTER_UPDATE_RESP) if resp.HasField('value'): return resp.value else: @@ -654,14 +618,15 @@ def fetch_datatype(self, bucket, key, **options): if not self.datatypes(): raise NotImplementedError("Datatypes are not supported.") - req = riak_pb.DtFetchReq() + req = riak.riak_pb.riak_dt_pb2.DtFetchReq() req.type = str_to_bytes(bucket.bucket_type.name) req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) self._encode_dt_options(req, options) - msg_code, resp = self._request(MSG_CODE_DT_FETCH_REQ, req, - MSG_CODE_DT_FETCH_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_DT_FETCH_REQ, req, + riak.riak_pb.messages.MSG_CODE_DT_FETCH_RESP) return self._decode_dt_fetch(resp) @@ -680,7 +645,7 @@ def update_datatype(self, datatype, **options): raise ValueError("No operation to send on datatype {!r}". 
format(datatype)) - req = riak_pb.DtUpdateReq() + req = riak.riak_pb.riak_dt_pb2.DtUpdateReq() req.bucket = str_to_bytes(datatype.bucket.name) req.type = str_to_bytes(datatype.bucket.bucket_type.name) @@ -693,8 +658,9 @@ def update_datatype(self, datatype, **options): self._encode_dt_op(type_name, req, op) - msg_code, resp = self._request(MSG_CODE_DT_UPDATE_REQ, req, - MSG_CODE_DT_UPDATE_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_DT_UPDATE_REQ, req, + riak.riak_pb.messages.MSG_CODE_DT_UPDATE_RESP) if resp.HasField('key'): datatype.key = resp.key[:] if resp.HasField('context'): @@ -715,13 +681,13 @@ def get_preflist(self, bucket, key): :type key: string :rtype: list of dicts """ - req = riak_pb.RpbGetBucketKeyPreflistReq() + req = riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) req.type = str_to_bytes(bucket.bucket_type.name) - msg_code, resp = self._request(MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, - req, - MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP) + msg_code, resp = self._request( + riak.riak_pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, req, + riak.riak_pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP) return [self._decode_preflist(item) for item in resp.preflist] diff --git a/riak_pb b/riak_pb new file mode 160000 index 00000000..f4f30571 --- /dev/null +++ b/riak_pb @@ -0,0 +1 @@ +Subproject commit f4f30571ee14e3456416d0048f2b7c4d9fd84c59 diff --git a/setup.py b/setup.py index 549f2799..152f2537 100755 --- a/setup.py +++ b/setup.py @@ -3,19 +3,22 @@ from setuptools import setup, find_packages from version import get_version from commands import preconfigure, configure, create_bucket_types, \ - setup_security, enable_security, disable_security + setup_security, enable_security, disable_security, \ + build_messages install_requires = ['six >= 1.8.0'] requires = ['six(>=1.8.0)'] if sys.version_info < (2, 7, 9): install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") + if sys.version_info < (3, ): - install_requires.append("riak_pb >=2.0.0") - requires.append("riak_pb(>=2.0.0)") + requires.append('protobuf(>=2.4.1,<2.7.0)') + install_requires.append('protobuf >=2.4.1, <2.7.0') else: - install_requires.append("python3_riak_pb >=2.0.0") - requires.append("python3_riak_pb(>=2.0.0)") + requires.append('python3_protobuf(>=2.4.1,<2.6.0)') + install_requires.append('python3_protobuf >=2.4.1, <2.6.0') + tests_require = [] if sys.version_info < (2, 7): tests_require.append("unittest2") @@ -38,12 +41,15 @@ author_email='clients@basho.com', test_suite='riak.tests.suite', url='https://github.com/basho/riak-python-client', - cmdclass={'create_bucket_types': create_bucket_types, - 'setup_security': setup_security, - 'preconfigure': preconfigure, - 'configure': configure, - 'enable_security': enable_security, - 'disable_security': disable_security}, + cmdclass={ + 'build_messages': build_messages, + 'create_bucket_types': create_bucket_types, + 'setup_security': setup_security, + 'preconfigure': preconfigure, + 'configure': configure, + 'enable_security': enable_security, + 'disable_security': disable_security + }, classifiers=['License :: OSI Approved :: Apache Software License', 'Intended Audience :: Developers', 'Operating System :: OS Independent', diff --git a/tox.ini b/tox.ini index 1bb27de4..03c15cbd 100644 --- a/tox.ini +++ b/tox.ini @@ -7,6 +7,12 @@ envlist = py26, py279, py27, py33, py34 [testenv] +basepython = + py26: python2.6 + py279: 
{env:HOME}/.pyenv/versions/riak-py279/bin/python2.7 + py27: python2.7 + py33: python3.3 + py34: python3.4 install_command = pip install --upgrade {packages} commands = {envpython} setup.py test deps = six From 498e0930317dd33af09bde2fa6c38d115ca8b817 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 9 Dec 2015 09:06:35 -0800 Subject: [PATCH 045/324] No need to be redundant in namespace. pb instead of riak_pb --- Makefile | 24 +- commands.py | 27 +- riak/riak_pb/__init__.py | 0 riak/riak_pb/messages.py | 152 --- riak/riak_pb/riak_dt_pb2.py | 863 -------------- riak/riak_pb/riak_kv_pb2.py | 1747 ----------------------------- riak/riak_pb/riak_pb2.py | 786 ------------- riak/riak_pb/riak_search_pb2.py | 210 ---- riak/riak_pb/riak_yokozuna_pb2.py | 372 ------ riak/transports/pbc/codec.py | 84 +- riak/transports/pbc/connection.py | 22 +- riak/transports/pbc/stream.py | 10 +- riak/transports/pbc/transport.py | 172 +-- 13 files changed, 160 insertions(+), 4309 deletions(-) delete mode 100644 riak/riak_pb/__init__.py delete mode 100644 riak/riak_pb/messages.py delete mode 100644 riak/riak_pb/riak_dt_pb2.py delete mode 100644 riak/riak_pb/riak_kv_pb2.py delete mode 100644 riak/riak_pb/riak_pb2.py delete mode 100644 riak/riak_pb/riak_search_pb2.py delete mode 100644 riak/riak_pb/riak_yokozuna_pb2.py diff --git a/Makefile b/Makefile index 0cd2c971..71c05631 100644 --- a/Makefile +++ b/Makefile @@ -11,30 +11,30 @@ release: python_release python3_release # Python 2.x specific build steps python_compile: @echo "==> Python (compile)" - @protoc -I ./riak_pb/src --python_out=./riak/riak_pb ./riak_pb/src/*.proto + @protoc -I riak_pb/src --python_out=riak/pb riak_pb/src/*.proto @python2 setup.py build_messages python_clean: @echo "==> Python (clean)" @python2 setup.py clean_messages - @rm -rf ./riak/riak_pb/*.pyc ./riak/riak_pb/*_pb2.py ./riak/riak_pb/*.pyc + @rm -rf riak/pb/*.pyc riak/pb/*_pb2.py riak/pb/*.pyc python_release: python_clean ifeq ($(RELEASE_GPG_KEYNAME),) @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" else @echo "==> Python (release)" - @protoc -Isrc --python_out=riak_pb src/*.proto + @protoc -Isrc --python_out=riak/pb src/*.proto @python2.7 setup.py build_messages build --build-base=riak @python2.7 setup.py build --build-base=python bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @python2.7 setup.py clean --build-base=python clean_messages @rm -rf *.pyc riak_pb/*_pb2.py riak_pb/*.pyc riak_pb.egg-info python - @protoc -Isrc --python_out=riak_pb src/*.proto + @protoc -Isrc --python_out=riak/pb src/*.proto @python2.7 setup.py build_messages build --build-base=riak @python2.7 setup.py build --build-base=python sdist upload -s -i $(RELEASE_GPG_KEYNAME) @python2.6 setup.py clean --build-base=python clean_messages @rm -rf riak_pb/*_pb2.pyc *.pyc python_riak_pb.egg-info python - @protoc -Isrc --python_out=riak_pb src/*.proto + @protoc -Isrc --python_out=riak/pb src/*.proto @python2.6 setup.py build_messages build --build-base=riak @python2.6 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) endif @@ -46,30 +46,30 @@ python_install: python_compile # Python 3.x specific build steps python3_compile: @echo "==> Python 3 (compile)" - @protoc -Isrc --python_out=riak_pb src/*.proto + @protoc -Isrc --python_out=riak/pb src/*.proto @python3 setup.py build_messages build --build-base=riak python3_clean: @echo "==> Python 3 (clean)" @python3 setup.py clean --build-base=riak clean_messages - @rm -rf riak_pb/*_pb2.py riak_pb/__pycache__ __pycache__ 
python3_riak_pb.egg-info python3 + @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 python3_release: python3_clean ifeq ($(RELEASE_GPG_KEYNAME),) @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" else @echo "==> Python 3 (release)" - @protoc -Isrc --python_out=riak_pb src/*.proto + @protoc -Isrc --python_out=riak/pb src/*.proto @python3.4 setup.py build_messages build --build-base=riak @python3.4 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @python3.4 setup.py clean --build-base=riak clean_messages - @rm -rf riak_pb/*_pb2.py riak_pb/__pycache__ __pycache__ python3_riak_pb.egg-info python3 - @protoc -Isrc --python_out=riak_pb src/*.proto + @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 + @protoc -Isrc --python_out=riak/pb src/*.proto @python3.4 setup.py build_messages build --build-base=riak @python3.4 setup.py build --build-base=riak sdist upload -s -i $(RELEASE_GPG_KEYNAME) @python3.4 setup.py clean --build-base=riak clean_messages - @rm -rf riak_pb/*_pb2.py riak_pb/__pycache__ __pycache__ python3_riak_pb.egg-info python3 - @protoc -Isrc --python_out=riak_pb src/*.proto + @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 + @protoc -Isrc --python_out=riak/pb src/*.proto @python3.3 setup.py build_messages build --build-base=riak @python3.3 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) endif diff --git a/commands.py b/commands.py index 2ca53b7b..9f3c108b 100644 --- a/commands.py +++ b/commands.py @@ -18,24 +18,6 @@ 'preconfigure', 'configure'] -LICENSE = """# Copyright {0} Basho Technologies, Inc. -# -# This file is provided to you under the Apache License, -# Version 2.0 (the "License"); you may not use this file -# except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. -""".format(date.today().year) - - # Exception classes used by this module. 
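
Editorial note: the commands.py hunks in this patch move the generated protobuf modules from riak.riak_pb to riak.pb; further down, the message-code helper (whose __init__ takes code, message, proto) builds a module name like 'riak.pb.riak_kv_pb2' for each row of riak_pb_messages.csv. The following is a minimal, hypothetical sketch of resolving such a (proto, class name) pair under the new layout; it assumes the riak.pb package from this patch is installed and is not the patch's own implementation.

    import importlib

    def message_class_for(proto, class_name):
        # proto selects the generated module under the new package layout,
        # e.g. 'riak_kv' -> riak.pb.riak_kv_pb2 (module-name format taken
        # from this patch); class_name, e.g. 'RpbGetReq', is a hypothetical
        # input used only for illustration.
        module = importlib.import_module('riak.pb.{0}_pb2'.format(proto))
        return getattr(module, class_name, None)
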
class CalledProcessError(Exception): """This exception is raised when a process run by check_call() or @@ -529,7 +511,7 @@ def __init__(self, code, message, proto): self.message = message self.proto = proto self.message_code_name = self._message_code_name() - self.module_name = 'riak.riak_pb.{0}_pb2'.format(self.proto) + self.module_name = 'riak.pb.{0}_pb2'.format(self.proto) self.message_class = self._message_class() def _cmpkey(self): @@ -610,7 +592,7 @@ def finalize_options(self): if self.source is None: self.source = 'riak_pb/src/riak_pb_messages.csv' if self.destination is None: - self.destination = 'riak/riak_pb/messages.py' + self.destination = 'riak/pb/messages.py' def run(self): self.force = True @@ -640,7 +622,6 @@ def _generate(self): def _generate_doc(self): # Write the license and docstring header - self._contents.append(LICENSE) self._contents.extend(self._docstring) def _generate_imports(self): @@ -689,13 +670,13 @@ def _format_python2_or_3(self): reader = csv.reader(csvfile) for row in reader: _, _, proto = row - pb_files.add('riak/riak_pb/{0}_pb2.py'.format(proto)) + pb_files.add('riak/pb/{0}_pb2.py'.format(proto)) for im in sorted(pb_files): with open(im, 'r', buffering=1) as pbfile: contents = 'from six import *\n' + pbfile.read() contents = re.sub(r'riak_pb2', - r'riak.riak_pb.riak_pb2', + r'riak.pb.riak_pb2', contents) # Look for this pattern in the protoc-generated file: # diff --git a/riak/riak_pb/__init__.py b/riak/riak_pb/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/riak/riak_pb/messages.py b/riak/riak_pb/messages.py deleted file mode 100644 index 7d7f8b91..00000000 --- a/riak/riak_pb/messages.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2015 Basho Technologies, Inc. -# -# This file is provided to you under the Apache License, -# Version 2.0 (the "License"); you may not use this file -# except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# This is a generated file. DO NOT EDIT. - -""" -Constants and mappings between Riak protocol codes and messages. 
-""" - -import riak.riak_pb.riak_dt_pb2 -import riak.riak_pb.riak_kv_pb2 -import riak.riak_pb.riak_pb2 -import riak.riak_pb.riak_search_pb2 -import riak.riak_pb.riak_yokozuna_pb2 - -# Protocol codes -MSG_CODE_ERROR_RESP = 0 -MSG_CODE_PING_REQ = 1 -MSG_CODE_PING_RESP = 2 -MSG_CODE_GET_CLIENT_ID_REQ = 3 -MSG_CODE_GET_CLIENT_ID_RESP = 4 -MSG_CODE_SET_CLIENT_ID_REQ = 5 -MSG_CODE_SET_CLIENT_ID_RESP = 6 -MSG_CODE_GET_SERVER_INFO_REQ = 7 -MSG_CODE_GET_SERVER_INFO_RESP = 8 -MSG_CODE_GET_REQ = 9 -MSG_CODE_GET_RESP = 10 -MSG_CODE_PUT_REQ = 11 -MSG_CODE_PUT_RESP = 12 -MSG_CODE_DEL_REQ = 13 -MSG_CODE_DEL_RESP = 14 -MSG_CODE_LIST_BUCKETS_REQ = 15 -MSG_CODE_LIST_BUCKETS_RESP = 16 -MSG_CODE_LIST_KEYS_REQ = 17 -MSG_CODE_LIST_KEYS_RESP = 18 -MSG_CODE_GET_BUCKET_REQ = 19 -MSG_CODE_GET_BUCKET_RESP = 20 -MSG_CODE_SET_BUCKET_REQ = 21 -MSG_CODE_SET_BUCKET_RESP = 22 -MSG_CODE_MAP_RED_REQ = 23 -MSG_CODE_MAP_RED_RESP = 24 -MSG_CODE_INDEX_REQ = 25 -MSG_CODE_INDEX_RESP = 26 -MSG_CODE_SEARCH_QUERY_REQ = 27 -MSG_CODE_SEARCH_QUERY_RESP = 28 -MSG_CODE_RESET_BUCKET_REQ = 29 -MSG_CODE_RESET_BUCKET_RESP = 30 -MSG_CODE_GET_BUCKET_TYPE_REQ = 31 -MSG_CODE_SET_BUCKET_TYPE_REQ = 32 -MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ = 33 -MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP = 34 -MSG_CODE_CS_BUCKET_REQ = 40 -MSG_CODE_CS_BUCKET_RESP = 41 -MSG_CODE_COUNTER_UPDATE_REQ = 50 -MSG_CODE_COUNTER_UPDATE_RESP = 51 -MSG_CODE_COUNTER_GET_REQ = 52 -MSG_CODE_COUNTER_GET_RESP = 53 -MSG_CODE_YOKOZUNA_INDEX_GET_REQ = 54 -MSG_CODE_YOKOZUNA_INDEX_GET_RESP = 55 -MSG_CODE_YOKOZUNA_INDEX_PUT_REQ = 56 -MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ = 57 -MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ = 58 -MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP = 59 -MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ = 60 -MSG_CODE_DT_FETCH_REQ = 80 -MSG_CODE_DT_FETCH_RESP = 81 -MSG_CODE_DT_UPDATE_REQ = 82 -MSG_CODE_DT_UPDATE_RESP = 83 -MSG_CODE_AUTH_REQ = 253 -MSG_CODE_AUTH_RESP = 254 -MSG_CODE_START_TLS = 255 - -# Mapping from code to protobuf class -MESSAGE_CLASSES = { - MSG_CODE_ERROR_RESP: riak.riak_pb.riak_pb2.RpbErrorResp, - MSG_CODE_PING_REQ: None, - MSG_CODE_PING_RESP: None, - MSG_CODE_GET_CLIENT_ID_REQ: None, - MSG_CODE_GET_CLIENT_ID_RESP: riak.riak_pb.riak_kv_pb2.RpbGetClientIdResp, - MSG_CODE_SET_CLIENT_ID_REQ: riak.riak_pb.riak_kv_pb2.RpbSetClientIdReq, - MSG_CODE_SET_CLIENT_ID_RESP: None, - MSG_CODE_GET_SERVER_INFO_REQ: None, - MSG_CODE_GET_SERVER_INFO_RESP: riak.riak_pb.riak_pb2.RpbGetServerInfoResp, - MSG_CODE_GET_REQ: riak.riak_pb.riak_kv_pb2.RpbGetReq, - MSG_CODE_GET_RESP: riak.riak_pb.riak_kv_pb2.RpbGetResp, - MSG_CODE_PUT_REQ: riak.riak_pb.riak_kv_pb2.RpbPutReq, - MSG_CODE_PUT_RESP: riak.riak_pb.riak_kv_pb2.RpbPutResp, - MSG_CODE_DEL_REQ: riak.riak_pb.riak_kv_pb2.RpbDelReq, - MSG_CODE_DEL_RESP: None, - MSG_CODE_LIST_BUCKETS_REQ: riak.riak_pb.riak_kv_pb2.RpbListBucketsReq, - MSG_CODE_LIST_BUCKETS_RESP: riak.riak_pb.riak_kv_pb2.RpbListBucketsResp, - MSG_CODE_LIST_KEYS_REQ: riak.riak_pb.riak_kv_pb2.RpbListKeysReq, - MSG_CODE_LIST_KEYS_RESP: riak.riak_pb.riak_kv_pb2.RpbListKeysResp, - MSG_CODE_GET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbGetBucketReq, - MSG_CODE_GET_BUCKET_RESP: riak.riak_pb.riak_pb2.RpbGetBucketResp, - MSG_CODE_SET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbSetBucketReq, - MSG_CODE_SET_BUCKET_RESP: None, - MSG_CODE_MAP_RED_REQ: riak.riak_pb.riak_kv_pb2.RpbMapRedReq, - MSG_CODE_MAP_RED_RESP: riak.riak_pb.riak_kv_pb2.RpbMapRedResp, - MSG_CODE_INDEX_REQ: riak.riak_pb.riak_kv_pb2.RpbIndexReq, - MSG_CODE_INDEX_RESP: riak.riak_pb.riak_kv_pb2.RpbIndexResp, - MSG_CODE_SEARCH_QUERY_REQ: 
riak.riak_pb.riak_search_pb2.RpbSearchQueryReq, - MSG_CODE_SEARCH_QUERY_RESP: riak.riak_pb.riak_search_pb2.RpbSearchQueryResp, - MSG_CODE_RESET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbResetBucketReq, - MSG_CODE_RESET_BUCKET_RESP: None, - MSG_CODE_GET_BUCKET_TYPE_REQ: riak.riak_pb.riak_pb2.RpbGetBucketTypeReq, - MSG_CODE_SET_BUCKET_TYPE_REQ: riak.riak_pb.riak_pb2.RpbSetBucketTypeReq, - MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ: - riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq, - MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP: - riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistResp, - MSG_CODE_CS_BUCKET_REQ: riak.riak_pb.riak_kv_pb2.RpbCSBucketReq, - MSG_CODE_CS_BUCKET_RESP: riak.riak_pb.riak_kv_pb2.RpbCSBucketResp, - MSG_CODE_COUNTER_UPDATE_REQ: riak.riak_pb.riak_kv_pb2.RpbCounterUpdateReq, - MSG_CODE_COUNTER_UPDATE_RESP: riak.riak_pb.riak_kv_pb2.RpbCounterUpdateResp, - MSG_CODE_COUNTER_GET_REQ: riak.riak_pb.riak_kv_pb2.RpbCounterGetReq, - MSG_CODE_COUNTER_GET_RESP: riak.riak_pb.riak_kv_pb2.RpbCounterGetResp, - MSG_CODE_YOKOZUNA_INDEX_GET_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq, - MSG_CODE_YOKOZUNA_INDEX_GET_RESP: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetResp, - MSG_CODE_YOKOZUNA_INDEX_PUT_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq, - MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq, - MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq, - MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetResp, - MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq, - MSG_CODE_DT_FETCH_REQ: riak.riak_pb.riak_dt_pb2.DtFetchReq, - MSG_CODE_DT_FETCH_RESP: riak.riak_pb.riak_dt_pb2.DtFetchResp, - MSG_CODE_DT_UPDATE_REQ: riak.riak_pb.riak_dt_pb2.DtUpdateReq, - MSG_CODE_DT_UPDATE_RESP: riak.riak_pb.riak_dt_pb2.DtUpdateResp, - MSG_CODE_AUTH_REQ: riak.riak_pb.riak_pb2.RpbAuthReq, - MSG_CODE_AUTH_RESP: None, - MSG_CODE_START_TLS: None -} diff --git a/riak/riak_pb/riak_dt_pb2.py b/riak/riak_pb/riak_dt_pb2.py deleted file mode 100644 index 58a2f54b..00000000 --- a/riak/riak_pb/riak_dt_pb2.py +++ /dev/null @@ -1,863 +0,0 @@ -from six import * -# Generated by the protocol buffer compiler. DO NOT EDIT! 
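
Editorial note: the messages.py module deleted above defined the protocol-code constants and the MESSAGE_CLASSES map from message code to generated protobuf class (None for bodiless messages such as ping). A minimal sketch of how such a mapping is typically used to decode a response body follows; decode_response is a hypothetical helper, and the import reflects the pre-rename riak.riak_pb layout shown in the deleted file.

    from riak.riak_pb import messages

    def decode_response(msg_code, payload):
        # Look up the generated class registered for this protocol code.
        cls = messages.MESSAGE_CLASSES[msg_code]
        if cls is None:
            # Codes such as MSG_CODE_PING_RESP carry no protobuf body.
            return None
        resp = cls()
        resp.ParseFromString(payload)  # standard protobuf deserialization
        return resp

After this patch the regenerated module lives at riak/pb/messages.py (per the new build_messages destination), so the equivalent import becomes riak.pb.messages.
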
-# source: riak_dt.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='riak_dt.proto', - package='', - serialized_pb='\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"Q\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\"\x87\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\")\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"V\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"t\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 \x03(\x0c\x12\x1c\n\tmap_value\x18\x05 
\x03(\x0b\x32\t.MapEntryB#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') - - - -_MAPFIELD_MAPFIELDTYPE = _descriptor.EnumDescriptor( - name='MapFieldType', - full_name='MapField.MapFieldType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='COUNTER', index=0, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='SET', index=1, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='REGISTER', index=2, number=3, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='FLAG', index=3, number=4, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MAP', index=4, number=5, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=82, - serialized_end=151, -) - -_DTFETCHRESP_DATATYPE = _descriptor.EnumDescriptor( - name='DataType', - full_name='DtFetchResp.DataType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='COUNTER', index=0, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='SET', index=1, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='MAP', index=2, number=3, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=696, - serialized_end=737, -) - -_MAPUPDATE_FLAGOP = _descriptor.EnumDescriptor( - name='FlagOp', - full_name='MapUpdate.FlagOp', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='ENABLE', index=0, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='DISABLE', index=1, number=2, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=988, - serialized_end=1021, -) - - -_MAPFIELD = _descriptor.Descriptor( - name='MapField', - full_name='MapField', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='MapField.name', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='MapField.type', index=1, - number=2, type=14, cpp_type=8, label=2, - has_default_value=False, default_value=1, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _MAPFIELD_MAPFIELDTYPE, - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=18, - serialized_end=151, -) - - -_MAPENTRY = _descriptor.Descriptor( - name='MapEntry', - full_name='MapEntry', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field', full_name='MapEntry.field', index=0, - number=1, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='counter_value', full_name='MapEntry.counter_value', index=1, - number=2, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - 
name='set_value', full_name='MapEntry.set_value', index=2, - number=3, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='register_value', full_name='MapEntry.register_value', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='flag_value', full_name='MapEntry.flag_value', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='map_value', full_name='MapEntry.map_value', index=5, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=154, - serialized_end=306, -) - - -_DTFETCHREQ = _descriptor.Descriptor( - name='DtFetchReq', - full_name='DtFetchReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='DtFetchReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='DtFetchReq.key', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='DtFetchReq.type', index=2, - number=3, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='r', full_name='DtFetchReq.r', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pr', full_name='DtFetchReq.pr', index=4, - number=5, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='basic_quorum', full_name='DtFetchReq.basic_quorum', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='notfound_ok', full_name='DtFetchReq.notfound_ok', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, 
extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='DtFetchReq.timeout', index=7, - number=8, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='sloppy_quorum', full_name='DtFetchReq.sloppy_quorum', index=8, - number=9, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='n_val', full_name='DtFetchReq.n_val', index=9, - number=10, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='include_context', full_name='DtFetchReq.include_context', index=10, - number=11, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=True, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=309, - serialized_end=516, -) - - -_DTVALUE = _descriptor.Descriptor( - name='DtValue', - full_name='DtValue', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='counter_value', full_name='DtValue.counter_value', index=0, - number=1, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='set_value', full_name='DtValue.set_value', index=1, - number=2, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='map_value', full_name='DtValue.map_value', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=518, - serialized_end=599, -) - - -_DTFETCHRESP = _descriptor.Descriptor( - name='DtFetchResp', - full_name='DtFetchResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='context', full_name='DtFetchResp.context', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='DtFetchResp.type', index=1, - number=2, type=14, cpp_type=8, label=2, - has_default_value=False, default_value=1, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='value', full_name='DtFetchResp.value', index=2, - number=3, type=11, cpp_type=10, label=1, - 
has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _DTFETCHRESP_DATATYPE, - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=602, - serialized_end=737, -) - - -_COUNTEROP = _descriptor.Descriptor( - name='CounterOp', - full_name='CounterOp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='increment', full_name='CounterOp.increment', index=0, - number=1, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=739, - serialized_end=769, -) - - -_SETOP = _descriptor.Descriptor( - name='SetOp', - full_name='SetOp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='adds', full_name='SetOp.adds', index=0, - number=1, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='removes', full_name='SetOp.removes', index=1, - number=2, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=771, - serialized_end=809, -) - - -_MAPUPDATE = _descriptor.Descriptor( - name='MapUpdate', - full_name='MapUpdate', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='field', full_name='MapUpdate.field', index=0, - number=1, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='counter_op', full_name='MapUpdate.counter_op', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='set_op', full_name='MapUpdate.set_op', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='register_op', full_name='MapUpdate.register_op', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='flag_op', full_name='MapUpdate.flag_op', index=4, - number=5, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=1, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - 
_descriptor.FieldDescriptor( - name='map_op', full_name='MapUpdate.map_op', index=5, - number=6, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _MAPUPDATE_FLAGOP, - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=812, - serialized_end=1021, -) - - -_MAPOP = _descriptor.Descriptor( - name='MapOp', - full_name='MapOp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='removes', full_name='MapOp.removes', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='updates', full_name='MapOp.updates', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1023, - serialized_end=1087, -) - - -_DTOP = _descriptor.Descriptor( - name='DtOp', - full_name='DtOp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='counter_op', full_name='DtOp.counter_op', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='set_op', full_name='DtOp.set_op', index=1, - number=2, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='map_op', full_name='DtOp.map_op', index=2, - number=3, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1089, - serialized_end=1175, -) - - -_DTUPDATEREQ = _descriptor.Descriptor( - name='DtUpdateReq', - full_name='DtUpdateReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='DtUpdateReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='DtUpdateReq.key', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='DtUpdateReq.type', index=2, - number=3, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='context', full_name='DtUpdateReq.context', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='op', full_name='DtUpdateReq.op', index=4, - number=5, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='w', full_name='DtUpdateReq.w', index=5, - number=6, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='dw', full_name='DtUpdateReq.dw', index=6, - number=7, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pw', full_name='DtUpdateReq.pw', index=7, - number=8, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='return_body', full_name='DtUpdateReq.return_body', index=8, - number=9, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='DtUpdateReq.timeout', index=9, - number=10, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='sloppy_quorum', full_name='DtUpdateReq.sloppy_quorum', index=10, - number=11, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='n_val', full_name='DtUpdateReq.n_val', index=11, - number=12, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='include_context', full_name='DtUpdateReq.include_context', index=12, - number=13, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=True, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1178, - serialized_end=1419, -) - - -_DTUPDATERESP = _descriptor.Descriptor( - name='DtUpdateResp', - full_name='DtUpdateResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='DtUpdateResp.key', index=0, - number=1, type=12, cpp_type=9, label=1, - 
has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='context', full_name='DtUpdateResp.context', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='counter_value', full_name='DtUpdateResp.counter_value', index=2, - number=3, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='set_value', full_name='DtUpdateResp.set_value', index=3, - number=4, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='map_value', full_name='DtUpdateResp.map_value', index=4, - number=5, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1421, - serialized_end=1537, -) - -_MAPFIELD.fields_by_name['type'].enum_type = _MAPFIELD_MAPFIELDTYPE -_MAPFIELD_MAPFIELDTYPE.containing_type = _MAPFIELD; -_MAPENTRY.fields_by_name['field'].message_type = _MAPFIELD -_MAPENTRY.fields_by_name['map_value'].message_type = _MAPENTRY -_DTVALUE.fields_by_name['map_value'].message_type = _MAPENTRY -_DTFETCHRESP.fields_by_name['type'].enum_type = _DTFETCHRESP_DATATYPE -_DTFETCHRESP.fields_by_name['value'].message_type = _DTVALUE -_DTFETCHRESP_DATATYPE.containing_type = _DTFETCHRESP; -_MAPUPDATE.fields_by_name['field'].message_type = _MAPFIELD -_MAPUPDATE.fields_by_name['counter_op'].message_type = _COUNTEROP -_MAPUPDATE.fields_by_name['set_op'].message_type = _SETOP -_MAPUPDATE.fields_by_name['flag_op'].enum_type = _MAPUPDATE_FLAGOP -_MAPUPDATE.fields_by_name['map_op'].message_type = _MAPOP -_MAPUPDATE_FLAGOP.containing_type = _MAPUPDATE; -_MAPOP.fields_by_name['removes'].message_type = _MAPFIELD -_MAPOP.fields_by_name['updates'].message_type = _MAPUPDATE -_DTOP.fields_by_name['counter_op'].message_type = _COUNTEROP -_DTOP.fields_by_name['set_op'].message_type = _SETOP -_DTOP.fields_by_name['map_op'].message_type = _MAPOP -_DTUPDATEREQ.fields_by_name['op'].message_type = _DTOP -_DTUPDATERESP.fields_by_name['map_value'].message_type = _MAPENTRY -DESCRIPTOR.message_types_by_name['MapField'] = _MAPFIELD -DESCRIPTOR.message_types_by_name['MapEntry'] = _MAPENTRY -DESCRIPTOR.message_types_by_name['DtFetchReq'] = _DTFETCHREQ -DESCRIPTOR.message_types_by_name['DtValue'] = _DTVALUE -DESCRIPTOR.message_types_by_name['DtFetchResp'] = _DTFETCHRESP -DESCRIPTOR.message_types_by_name['CounterOp'] = _COUNTEROP -DESCRIPTOR.message_types_by_name['SetOp'] = _SETOP -DESCRIPTOR.message_types_by_name['MapUpdate'] = _MAPUPDATE -DESCRIPTOR.message_types_by_name['MapOp'] = _MAPOP -DESCRIPTOR.message_types_by_name['DtOp'] = _DTOP -DESCRIPTOR.message_types_by_name['DtUpdateReq'] = _DTUPDATEREQ -DESCRIPTOR.message_types_by_name['DtUpdateResp'] = _DTUPDATERESP - 
-@add_metaclass(_reflection.GeneratedProtocolMessageType) -class MapField(_message.Message): - DESCRIPTOR = _MAPFIELD - - # @@protoc_insertion_point(class_scope:MapField) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class MapEntry(_message.Message): - DESCRIPTOR = _MAPENTRY - - # @@protoc_insertion_point(class_scope:MapEntry) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtFetchReq(_message.Message): - DESCRIPTOR = _DTFETCHREQ - - # @@protoc_insertion_point(class_scope:DtFetchReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtValue(_message.Message): - DESCRIPTOR = _DTVALUE - - # @@protoc_insertion_point(class_scope:DtValue) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtFetchResp(_message.Message): - DESCRIPTOR = _DTFETCHRESP - - # @@protoc_insertion_point(class_scope:DtFetchResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class CounterOp(_message.Message): - DESCRIPTOR = _COUNTEROP - - # @@protoc_insertion_point(class_scope:CounterOp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class SetOp(_message.Message): - DESCRIPTOR = _SETOP - - # @@protoc_insertion_point(class_scope:SetOp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class MapUpdate(_message.Message): - DESCRIPTOR = _MAPUPDATE - - # @@protoc_insertion_point(class_scope:MapUpdate) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class MapOp(_message.Message): - DESCRIPTOR = _MAPOP - - # @@protoc_insertion_point(class_scope:MapOp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtOp(_message.Message): - DESCRIPTOR = _DTOP - - # @@protoc_insertion_point(class_scope:DtOp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtUpdateReq(_message.Message): - DESCRIPTOR = _DTUPDATEREQ - - # @@protoc_insertion_point(class_scope:DtUpdateReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtUpdateResp(_message.Message): - DESCRIPTOR = _DTUPDATERESP - - # @@protoc_insertion_point(class_scope:DtUpdateResp) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakDtPB') -# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_kv_pb2.py b/riak/riak_pb/riak_kv_pb2.py deleted file mode 100644 index c8411e06..00000000 --- a/riak/riak_pb/riak_kv_pb2.py +++ /dev/null @@ -1,1747 +0,0 @@ -from six import * -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: riak_kv.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - - -import riak.riak_pb.riak_pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='riak_kv.proto', - package='', - serialized_pb='\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xcd\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 \x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 \x01(\x0c\x12\x11\n\trange_min\x18\x05 \x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 
\x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"\xc1\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') - - - -_RPBINDEXREQ_INDEXQUERYTYPE = _descriptor.EnumDescriptor( - name='IndexQueryType', - full_name='RpbIndexReq.IndexQueryType', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='eq', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='range', index=1, number=1, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1644, - serialized_end=1679, -) - - -_RPBGETCLIENTIDRESP = _descriptor.Descriptor( - 
name='RpbGetClientIdResp', - full_name='RpbGetClientIdResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='client_id', full_name='RpbGetClientIdResp.client_id', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=29, - serialized_end=68, -) - - -_RPBSETCLIENTIDREQ = _descriptor.Descriptor( - name='RpbSetClientIdReq', - full_name='RpbSetClientIdReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='client_id', full_name='RpbSetClientIdReq.client_id', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=70, - serialized_end=108, -) - - -_RPBGETREQ = _descriptor.Descriptor( - name='RpbGetReq', - full_name='RpbGetReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbGetReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='RpbGetReq.key', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='r', full_name='RpbGetReq.r', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pr', full_name='RpbGetReq.pr', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='basic_quorum', full_name='RpbGetReq.basic_quorum', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='notfound_ok', full_name='RpbGetReq.notfound_ok', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='if_modified', full_name='RpbGetReq.if_modified', index=6, - number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='head', 
full_name='RpbGetReq.head', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='deletedvclock', full_name='RpbGetReq.deletedvclock', index=8, - number=9, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='RpbGetReq.timeout', index=9, - number=10, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='sloppy_quorum', full_name='RpbGetReq.sloppy_quorum', index=10, - number=11, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='n_val', full_name='RpbGetReq.n_val', index=11, - number=12, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbGetReq.type', index=12, - number=13, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=111, - serialized_end=344, -) - - -_RPBGETRESP = _descriptor.Descriptor( - name='RpbGetResp', - full_name='RpbGetResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='content', full_name='RpbGetResp.content', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='vclock', full_name='RpbGetResp.vclock', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='unchanged', full_name='RpbGetResp.unchanged', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=346, - serialized_end=423, -) - - -_RPBPUTREQ = _descriptor.Descriptor( - name='RpbPutReq', - full_name='RpbPutReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbPutReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - 
is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='RpbPutReq.key', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='vclock', full_name='RpbPutReq.vclock', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='content', full_name='RpbPutReq.content', index=3, - number=4, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='w', full_name='RpbPutReq.w', index=4, - number=5, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='dw', full_name='RpbPutReq.dw', index=5, - number=6, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='return_body', full_name='RpbPutReq.return_body', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pw', full_name='RpbPutReq.pw', index=7, - number=8, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='if_not_modified', full_name='RpbPutReq.if_not_modified', index=8, - number=9, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='if_none_match', full_name='RpbPutReq.if_none_match', index=9, - number=10, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='return_head', full_name='RpbPutReq.return_head', index=10, - number=11, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='RpbPutReq.timeout', index=11, - number=12, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='asis', full_name='RpbPutReq.asis', index=12, - number=13, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='sloppy_quorum', full_name='RpbPutReq.sloppy_quorum', index=13, - number=14, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='n_val', full_name='RpbPutReq.n_val', index=14, - number=15, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbPutReq.type', index=15, - number=16, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=426, - serialized_end=720, -) - - -_RPBPUTRESP = _descriptor.Descriptor( - name='RpbPutResp', - full_name='RpbPutResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='content', full_name='RpbPutResp.content', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='vclock', full_name='RpbPutResp.vclock', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='RpbPutResp.key', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=722, - serialized_end=793, -) - - -_RPBDELREQ = _descriptor.Descriptor( - name='RpbDelReq', - full_name='RpbDelReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbDelReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='RpbDelReq.key', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='rw', full_name='RpbDelReq.rw', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='vclock', full_name='RpbDelReq.vclock', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, 
default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='r', full_name='RpbDelReq.r', index=4, - number=5, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='w', full_name='RpbDelReq.w', index=5, - number=6, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pr', full_name='RpbDelReq.pr', index=6, - number=7, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pw', full_name='RpbDelReq.pw', index=7, - number=8, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='dw', full_name='RpbDelReq.dw', index=8, - number=9, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='RpbDelReq.timeout', index=9, - number=10, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='sloppy_quorum', full_name='RpbDelReq.sloppy_quorum', index=10, - number=11, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='n_val', full_name='RpbDelReq.n_val', index=11, - number=12, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbDelReq.type', index=12, - number=13, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=796, - serialized_end=991, -) - - -_RPBLISTBUCKETSREQ = _descriptor.Descriptor( - name='RpbListBucketsReq', - full_name='RpbListBucketsReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='timeout', full_name='RpbListBucketsReq.timeout', index=0, - number=1, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='stream', full_name='RpbListBucketsReq.stream', index=1, - number=2, type=8, cpp_type=7, label=1, - 
has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbListBucketsReq.type', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=993, - serialized_end=1059, -) - - -_RPBLISTBUCKETSRESP = _descriptor.Descriptor( - name='RpbListBucketsResp', - full_name='RpbListBucketsResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='buckets', full_name='RpbListBucketsResp.buckets', index=0, - number=1, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='done', full_name='RpbListBucketsResp.done', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1061, - serialized_end=1112, -) - - -_RPBLISTKEYSREQ = _descriptor.Descriptor( - name='RpbListKeysReq', - full_name='RpbListKeysReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbListKeysReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='RpbListKeysReq.timeout', index=1, - number=2, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbListKeysReq.type', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1114, - serialized_end=1177, -) - - -_RPBLISTKEYSRESP = _descriptor.Descriptor( - name='RpbListKeysResp', - full_name='RpbListKeysResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='keys', full_name='RpbListKeysResp.keys', index=0, - number=1, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='done', full_name='RpbListKeysResp.done', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1179, - serialized_end=1224, -) - - -_RPBMAPREDREQ = _descriptor.Descriptor( - name='RpbMapRedReq', - full_name='RpbMapRedReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='request', full_name='RpbMapRedReq.request', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='content_type', full_name='RpbMapRedReq.content_type', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1226, - serialized_end=1279, -) - - -_RPBMAPREDRESP = _descriptor.Descriptor( - name='RpbMapRedResp', - full_name='RpbMapRedResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='phase', full_name='RpbMapRedResp.phase', index=0, - number=1, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='response', full_name='RpbMapRedResp.response', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='done', full_name='RpbMapRedResp.done', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1281, - serialized_end=1343, -) - - -_RPBINDEXREQ = _descriptor.Descriptor( - name='RpbIndexReq', - full_name='RpbIndexReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbIndexReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='index', full_name='RpbIndexReq.index', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='qtype', full_name='RpbIndexReq.qtype', index=2, - number=3, type=14, cpp_type=8, label=2, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', 
full_name='RpbIndexReq.key', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='range_min', full_name='RpbIndexReq.range_min', index=4, - number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='range_max', full_name='RpbIndexReq.range_max', index=5, - number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='return_terms', full_name='RpbIndexReq.return_terms', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='stream', full_name='RpbIndexReq.stream', index=7, - number=8, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='max_results', full_name='RpbIndexReq.max_results', index=8, - number=9, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='continuation', full_name='RpbIndexReq.continuation', index=9, - number=10, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='RpbIndexReq.timeout', index=10, - number=11, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbIndexReq.type', index=11, - number=12, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='term_regex', full_name='RpbIndexReq.term_regex', index=12, - number=13, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pagination_sort', full_name='RpbIndexReq.pagination_sort', index=13, - number=14, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _RPBINDEXREQ_INDEXQUERYTYPE, - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1346, - serialized_end=1679, -) - - -_RPBINDEXRESP = _descriptor.Descriptor( - 
name='RpbIndexResp', - full_name='RpbIndexResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='keys', full_name='RpbIndexResp.keys', index=0, - number=1, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='results', full_name='RpbIndexResp.results', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='continuation', full_name='RpbIndexResp.continuation', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='done', full_name='RpbIndexResp.done', index=3, - number=4, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1681, - serialized_end=1772, -) - - -_RPBCSBUCKETREQ = _descriptor.Descriptor( - name='RpbCSBucketReq', - full_name='RpbCSBucketReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbCSBucketReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='start_key', full_name='RpbCSBucketReq.start_key', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='end_key', full_name='RpbCSBucketReq.end_key', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='start_incl', full_name='RpbCSBucketReq.start_incl', index=3, - number=4, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=True, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='end_incl', full_name='RpbCSBucketReq.end_incl', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='continuation', full_name='RpbCSBucketReq.continuation', index=5, - number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='max_results', 
full_name='RpbCSBucketReq.max_results', index=6, - number=7, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='RpbCSBucketReq.timeout', index=7, - number=8, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbCSBucketReq.type', index=8, - number=9, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1775, - serialized_end=1968, -) - - -_RPBCSBUCKETRESP = _descriptor.Descriptor( - name='RpbCSBucketResp', - full_name='RpbCSBucketResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='objects', full_name='RpbCSBucketResp.objects', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='continuation', full_name='RpbCSBucketResp.continuation', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='done', full_name='RpbCSBucketResp.done', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1970, - serialized_end=2057, -) - - -_RPBINDEXOBJECT = _descriptor.Descriptor( - name='RpbIndexObject', - full_name='RpbIndexObject', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='RpbIndexObject.key', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='object', full_name='RpbIndexObject.object', index=1, - number=2, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2059, - serialized_end=2117, -) - - -_RPBCONTENT = _descriptor.Descriptor( - name='RpbContent', - full_name='RpbContent', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='value', full_name='RpbContent.value', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, 
default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='content_type', full_name='RpbContent.content_type', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='charset', full_name='RpbContent.charset', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='content_encoding', full_name='RpbContent.content_encoding', index=3, - number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='vtag', full_name='RpbContent.vtag', index=4, - number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='links', full_name='RpbContent.links', index=5, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='last_mod', full_name='RpbContent.last_mod', index=6, - number=7, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='last_mod_usecs', full_name='RpbContent.last_mod_usecs', index=7, - number=8, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='usermeta', full_name='RpbContent.usermeta', index=8, - number=9, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='indexes', full_name='RpbContent.indexes', index=9, - number=10, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='deleted', full_name='RpbContent.deleted', index=10, - number=11, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2120, - serialized_end=2365, -) - - -_RPBLINK = _descriptor.Descriptor( - name='RpbLink', - full_name='RpbLink', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbLink.bucket', 
index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='RpbLink.key', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='tag', full_name='RpbLink.tag', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2367, - serialized_end=2418, -) - - -_RPBCOUNTERUPDATEREQ = _descriptor.Descriptor( - name='RpbCounterUpdateReq', - full_name='RpbCounterUpdateReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbCounterUpdateReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='RpbCounterUpdateReq.key', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='amount', full_name='RpbCounterUpdateReq.amount', index=2, - number=3, type=18, cpp_type=2, label=2, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='w', full_name='RpbCounterUpdateReq.w', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='dw', full_name='RpbCounterUpdateReq.dw', index=4, - number=5, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pw', full_name='RpbCounterUpdateReq.pw', index=5, - number=6, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='returnvalue', full_name='RpbCounterUpdateReq.returnvalue', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2420, - serialized_end=2542, -) - - -_RPBCOUNTERUPDATERESP = _descriptor.Descriptor( - name='RpbCounterUpdateResp', - full_name='RpbCounterUpdateResp', 
- filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='value', full_name='RpbCounterUpdateResp.value', index=0, - number=1, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2544, - serialized_end=2581, -) - - -_RPBCOUNTERGETREQ = _descriptor.Descriptor( - name='RpbCounterGetReq', - full_name='RpbCounterGetReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbCounterGetReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='RpbCounterGetReq.key', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='r', full_name='RpbCounterGetReq.r', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pr', full_name='RpbCounterGetReq.pr', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='basic_quorum', full_name='RpbCounterGetReq.basic_quorum', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='notfound_ok', full_name='RpbCounterGetReq.notfound_ok', index=5, - number=6, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2583, - serialized_end=2696, -) - - -_RPBCOUNTERGETRESP = _descriptor.Descriptor( - name='RpbCounterGetResp', - full_name='RpbCounterGetResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='value', full_name='RpbCounterGetResp.value', index=0, - number=1, type=18, cpp_type=2, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2698, - serialized_end=2732, -) - - -_RPBGETBUCKETKEYPREFLISTREQ = _descriptor.Descriptor( - name='RpbGetBucketKeyPreflistReq', - full_name='RpbGetBucketKeyPreflistReq', - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbGetBucketKeyPreflistReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='key', full_name='RpbGetBucketKeyPreflistReq.key', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbGetBucketKeyPreflistReq.type', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2734, - serialized_end=2805, -) - - -_RPBGETBUCKETKEYPREFLISTRESP = _descriptor.Descriptor( - name='RpbGetBucketKeyPreflistResp', - full_name='RpbGetBucketKeyPreflistResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='preflist', full_name='RpbGetBucketKeyPreflistResp.preflist', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2807, - serialized_end=2881, -) - - -_RPBBUCKETKEYPREFLISTITEM = _descriptor.Descriptor( - name='RpbBucketKeyPreflistItem', - full_name='RpbBucketKeyPreflistItem', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='partition', full_name='RpbBucketKeyPreflistItem.partition', index=0, - number=1, type=3, cpp_type=2, label=2, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='node', full_name='RpbBucketKeyPreflistItem.node', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='primary', full_name='RpbBucketKeyPreflistItem.primary', index=2, - number=3, type=8, cpp_type=7, label=2, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=2883, - serialized_end=2959, -) - -_RPBGETRESP.fields_by_name['content'].message_type = _RPBCONTENT -_RPBPUTREQ.fields_by_name['content'].message_type = _RPBCONTENT -_RPBPUTRESP.fields_by_name['content'].message_type = _RPBCONTENT -_RPBINDEXREQ.fields_by_name['qtype'].enum_type = _RPBINDEXREQ_INDEXQUERYTYPE -_RPBINDEXREQ_INDEXQUERYTYPE.containing_type = _RPBINDEXREQ; -_RPBINDEXRESP.fields_by_name['results'].message_type = 
riak.riak_pb.riak_pb2._RPBPAIR -_RPBCSBUCKETRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT -_RPBINDEXOBJECT.fields_by_name['object'].message_type = _RPBGETRESP -_RPBCONTENT.fields_by_name['links'].message_type = _RPBLINK -_RPBCONTENT.fields_by_name['usermeta'].message_type = riak.riak_pb.riak_pb2._RPBPAIR -_RPBCONTENT.fields_by_name['indexes'].message_type = riak.riak_pb.riak_pb2._RPBPAIR -_RPBGETBUCKETKEYPREFLISTRESP.fields_by_name['preflist'].message_type = _RPBBUCKETKEYPREFLISTITEM -DESCRIPTOR.message_types_by_name['RpbGetClientIdResp'] = _RPBGETCLIENTIDRESP -DESCRIPTOR.message_types_by_name['RpbSetClientIdReq'] = _RPBSETCLIENTIDREQ -DESCRIPTOR.message_types_by_name['RpbGetReq'] = _RPBGETREQ -DESCRIPTOR.message_types_by_name['RpbGetResp'] = _RPBGETRESP -DESCRIPTOR.message_types_by_name['RpbPutReq'] = _RPBPUTREQ -DESCRIPTOR.message_types_by_name['RpbPutResp'] = _RPBPUTRESP -DESCRIPTOR.message_types_by_name['RpbDelReq'] = _RPBDELREQ -DESCRIPTOR.message_types_by_name['RpbListBucketsReq'] = _RPBLISTBUCKETSREQ -DESCRIPTOR.message_types_by_name['RpbListBucketsResp'] = _RPBLISTBUCKETSRESP -DESCRIPTOR.message_types_by_name['RpbListKeysReq'] = _RPBLISTKEYSREQ -DESCRIPTOR.message_types_by_name['RpbListKeysResp'] = _RPBLISTKEYSRESP -DESCRIPTOR.message_types_by_name['RpbMapRedReq'] = _RPBMAPREDREQ -DESCRIPTOR.message_types_by_name['RpbMapRedResp'] = _RPBMAPREDRESP -DESCRIPTOR.message_types_by_name['RpbIndexReq'] = _RPBINDEXREQ -DESCRIPTOR.message_types_by_name['RpbIndexResp'] = _RPBINDEXRESP -DESCRIPTOR.message_types_by_name['RpbCSBucketReq'] = _RPBCSBUCKETREQ -DESCRIPTOR.message_types_by_name['RpbCSBucketResp'] = _RPBCSBUCKETRESP -DESCRIPTOR.message_types_by_name['RpbIndexObject'] = _RPBINDEXOBJECT -DESCRIPTOR.message_types_by_name['RpbContent'] = _RPBCONTENT -DESCRIPTOR.message_types_by_name['RpbLink'] = _RPBLINK -DESCRIPTOR.message_types_by_name['RpbCounterUpdateReq'] = _RPBCOUNTERUPDATEREQ -DESCRIPTOR.message_types_by_name['RpbCounterUpdateResp'] = _RPBCOUNTERUPDATERESP -DESCRIPTOR.message_types_by_name['RpbCounterGetReq'] = _RPBCOUNTERGETREQ -DESCRIPTOR.message_types_by_name['RpbCounterGetResp'] = _RPBCOUNTERGETRESP -DESCRIPTOR.message_types_by_name['RpbGetBucketKeyPreflistReq'] = _RPBGETBUCKETKEYPREFLISTREQ -DESCRIPTOR.message_types_by_name['RpbGetBucketKeyPreflistResp'] = _RPBGETBUCKETKEYPREFLISTRESP -DESCRIPTOR.message_types_by_name['RpbBucketKeyPreflistItem'] = _RPBBUCKETKEYPREFLISTITEM - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetClientIdResp(_message.Message): - DESCRIPTOR = _RPBGETCLIENTIDRESP - - # @@protoc_insertion_point(class_scope:RpbGetClientIdResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSetClientIdReq(_message.Message): - DESCRIPTOR = _RPBSETCLIENTIDREQ - - # @@protoc_insertion_point(class_scope:RpbSetClientIdReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetReq(_message.Message): - DESCRIPTOR = _RPBGETREQ - - # @@protoc_insertion_point(class_scope:RpbGetReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetResp(_message.Message): - DESCRIPTOR = _RPBGETRESP - - # @@protoc_insertion_point(class_scope:RpbGetResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbPutReq(_message.Message): - DESCRIPTOR = _RPBPUTREQ - - # @@protoc_insertion_point(class_scope:RpbPutReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbPutResp(_message.Message): - DESCRIPTOR = _RPBPUTRESP - - # 
@@protoc_insertion_point(class_scope:RpbPutResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbDelReq(_message.Message): - DESCRIPTOR = _RPBDELREQ - - # @@protoc_insertion_point(class_scope:RpbDelReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbListBucketsReq(_message.Message): - DESCRIPTOR = _RPBLISTBUCKETSREQ - - # @@protoc_insertion_point(class_scope:RpbListBucketsReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbListBucketsResp(_message.Message): - DESCRIPTOR = _RPBLISTBUCKETSRESP - - # @@protoc_insertion_point(class_scope:RpbListBucketsResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbListKeysReq(_message.Message): - DESCRIPTOR = _RPBLISTKEYSREQ - - # @@protoc_insertion_point(class_scope:RpbListKeysReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbListKeysResp(_message.Message): - DESCRIPTOR = _RPBLISTKEYSRESP - - # @@protoc_insertion_point(class_scope:RpbListKeysResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbMapRedReq(_message.Message): - DESCRIPTOR = _RPBMAPREDREQ - - # @@protoc_insertion_point(class_scope:RpbMapRedReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbMapRedResp(_message.Message): - DESCRIPTOR = _RPBMAPREDRESP - - # @@protoc_insertion_point(class_scope:RpbMapRedResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbIndexReq(_message.Message): - DESCRIPTOR = _RPBINDEXREQ - - # @@protoc_insertion_point(class_scope:RpbIndexReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbIndexResp(_message.Message): - DESCRIPTOR = _RPBINDEXRESP - - # @@protoc_insertion_point(class_scope:RpbIndexResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCSBucketReq(_message.Message): - DESCRIPTOR = _RPBCSBUCKETREQ - - # @@protoc_insertion_point(class_scope:RpbCSBucketReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCSBucketResp(_message.Message): - DESCRIPTOR = _RPBCSBUCKETRESP - - # @@protoc_insertion_point(class_scope:RpbCSBucketResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbIndexObject(_message.Message): - DESCRIPTOR = _RPBINDEXOBJECT - - # @@protoc_insertion_point(class_scope:RpbIndexObject) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbContent(_message.Message): - DESCRIPTOR = _RPBCONTENT - - # @@protoc_insertion_point(class_scope:RpbContent) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbLink(_message.Message): - DESCRIPTOR = _RPBLINK - - # @@protoc_insertion_point(class_scope:RpbLink) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCounterUpdateReq(_message.Message): - DESCRIPTOR = _RPBCOUNTERUPDATEREQ - - # @@protoc_insertion_point(class_scope:RpbCounterUpdateReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCounterUpdateResp(_message.Message): - DESCRIPTOR = _RPBCOUNTERUPDATERESP - - # @@protoc_insertion_point(class_scope:RpbCounterUpdateResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCounterGetReq(_message.Message): - DESCRIPTOR = _RPBCOUNTERGETREQ - - # @@protoc_insertion_point(class_scope:RpbCounterGetReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCounterGetResp(_message.Message): - DESCRIPTOR = _RPBCOUNTERGETRESP - - # @@protoc_insertion_point(class_scope:RpbCounterGetResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) 
-class RpbGetBucketKeyPreflistReq(_message.Message): - DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTREQ - - # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketKeyPreflistResp(_message.Message): - DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTRESP - - # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbBucketKeyPreflistItem(_message.Message): - DESCRIPTOR = _RPBBUCKETKEYPREFLISTITEM - - # @@protoc_insertion_point(class_scope:RpbBucketKeyPreflistItem) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakKvPB') -# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_pb2.py b/riak/riak_pb/riak_pb2.py deleted file mode 100644 index a757940a..00000000 --- a/riak/riak_pb/riak_pb2.py +++ /dev/null @@ -1,786 +0,0 @@ -from six import * -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: riak.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='riak.proto', - package='', - serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 
\x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') - - - -_RPBBUCKETPROPS_RPBREPLMODE = _descriptor.EnumDescriptor( - name='RpbReplMode', - full_name='RpbBucketProps.RpbReplMode', - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name='FALSE', index=0, number=0, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='REALTIME', index=1, number=1, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='FULLSYNC', index=2, number=2, - options=None, - type=None), - _descriptor.EnumValueDescriptor( - name='TRUE', index=3, number=3, - options=None, - type=None), - ], - containing_type=None, - options=None, - serialized_start=1236, - serialized_end=1298, -) - - -_RPBERRORRESP = _descriptor.Descriptor( - name='RpbErrorResp', - full_name='RpbErrorResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='errmsg', full_name='RpbErrorResp.errmsg', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='errcode', full_name='RpbErrorResp.errcode', index=1, - number=2, type=13, cpp_type=3, label=2, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=14, - serialized_end=61, -) - - -_RPBGETSERVERINFORESP = _descriptor.Descriptor( - name='RpbGetServerInfoResp', - full_name='RpbGetServerInfoResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='node', full_name='RpbGetServerInfoResp.node', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='server_version', full_name='RpbGetServerInfoResp.server_version', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=63, - serialized_end=123, -) - - -_RPBPAIR = _descriptor.Descriptor( - name='RpbPair', - full_name='RpbPair', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='key', full_name='RpbPair.key', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, 
enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='value', full_name='RpbPair.value', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=125, - serialized_end=162, -) - - -_RPBGETBUCKETREQ = _descriptor.Descriptor( - name='RpbGetBucketReq', - full_name='RpbGetBucketReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbGetBucketReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbGetBucketReq.type', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=164, - serialized_end=211, -) - - -_RPBGETBUCKETRESP = _descriptor.Descriptor( - name='RpbGetBucketResp', - full_name='RpbGetBucketResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='props', full_name='RpbGetBucketResp.props', index=0, - number=1, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=213, - serialized_end=263, -) - - -_RPBSETBUCKETREQ = _descriptor.Descriptor( - name='RpbSetBucketReq', - full_name='RpbSetBucketReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbSetBucketReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='props', full_name='RpbSetBucketReq.props', index=1, - number=2, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbSetBucketReq.type', index=2, - number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=265, - serialized_end=344, -) - - -_RPBRESETBUCKETREQ = _descriptor.Descriptor( - name='RpbResetBucketReq', - full_name='RpbResetBucketReq', - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='bucket', full_name='RpbResetBucketReq.bucket', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='type', full_name='RpbResetBucketReq.type', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=346, - serialized_end=395, -) - - -_RPBGETBUCKETTYPEREQ = _descriptor.Descriptor( - name='RpbGetBucketTypeReq', - full_name='RpbGetBucketTypeReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='type', full_name='RpbGetBucketTypeReq.type', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=397, - serialized_end=432, -) - - -_RPBSETBUCKETTYPEREQ = _descriptor.Descriptor( - name='RpbSetBucketTypeReq', - full_name='RpbSetBucketTypeReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='type', full_name='RpbSetBucketTypeReq.type', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='props', full_name='RpbSetBucketTypeReq.props', index=1, - number=2, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=434, - serialized_end=501, -) - - -_RPBMODFUN = _descriptor.Descriptor( - name='RpbModFun', - full_name='RpbModFun', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='module', full_name='RpbModFun.module', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='function', full_name='RpbModFun.function', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=503, - serialized_end=548, -) - - -_RPBCOMMITHOOK = _descriptor.Descriptor( - name='RpbCommitHook', - full_name='RpbCommitHook', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name='modfun', full_name='RpbCommitHook.modfun', index=0, - number=1, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='name', full_name='RpbCommitHook.name', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=550, - serialized_end=607, -) - - -_RPBBUCKETPROPS = _descriptor.Descriptor( - name='RpbBucketProps', - full_name='RpbBucketProps', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='n_val', full_name='RpbBucketProps.n_val', index=0, - number=1, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='allow_mult', full_name='RpbBucketProps.allow_mult', index=1, - number=2, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='last_write_wins', full_name='RpbBucketProps.last_write_wins', index=2, - number=3, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='precommit', full_name='RpbBucketProps.precommit', index=3, - number=4, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='has_precommit', full_name='RpbBucketProps.has_precommit', index=4, - number=5, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='postcommit', full_name='RpbBucketProps.postcommit', index=5, - number=6, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='has_postcommit', full_name='RpbBucketProps.has_postcommit', index=6, - number=7, type=8, cpp_type=7, label=1, - has_default_value=True, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='chash_keyfun', full_name='RpbBucketProps.chash_keyfun', index=7, - number=8, type=11, cpp_type=10, label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='linkfun', full_name='RpbBucketProps.linkfun', index=8, - number=9, type=11, cpp_type=10, 
label=1, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='old_vclock', full_name='RpbBucketProps.old_vclock', index=9, - number=10, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='young_vclock', full_name='RpbBucketProps.young_vclock', index=10, - number=11, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='big_vclock', full_name='RpbBucketProps.big_vclock', index=11, - number=12, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='small_vclock', full_name='RpbBucketProps.small_vclock', index=12, - number=13, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pr', full_name='RpbBucketProps.pr', index=13, - number=14, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='r', full_name='RpbBucketProps.r', index=14, - number=15, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='w', full_name='RpbBucketProps.w', index=15, - number=16, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='pw', full_name='RpbBucketProps.pw', index=16, - number=17, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='dw', full_name='RpbBucketProps.dw', index=17, - number=18, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='rw', full_name='RpbBucketProps.rw', index=18, - number=19, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='basic_quorum', full_name='RpbBucketProps.basic_quorum', index=19, - number=20, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='notfound_ok', 
full_name='RpbBucketProps.notfound_ok', index=20, - number=21, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='backend', full_name='RpbBucketProps.backend', index=21, - number=22, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='search', full_name='RpbBucketProps.search', index=22, - number=23, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='repl', full_name='RpbBucketProps.repl', index=23, - number=24, type=14, cpp_type=8, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='search_index', full_name='RpbBucketProps.search_index', index=24, - number=25, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='datatype', full_name='RpbBucketProps.datatype', index=25, - number=26, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='consistent', full_name='RpbBucketProps.consistent', index=26, - number=27, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='write_once', full_name='RpbBucketProps.write_once', index=27, - number=28, type=8, cpp_type=7, label=1, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - _RPBBUCKETPROPS_RPBREPLMODE, - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=610, - serialized_end=1298, -) - - -_RPBAUTHREQ = _descriptor.Descriptor( - name='RpbAuthReq', - full_name='RpbAuthReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='user', full_name='RpbAuthReq.user', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='password', full_name='RpbAuthReq.password', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1300, - serialized_end=1344, -) - 
-_RPBGETBUCKETRESP.fields_by_name['props'].message_type = _RPBBUCKETPROPS -_RPBSETBUCKETREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS -_RPBSETBUCKETTYPEREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS -_RPBCOMMITHOOK.fields_by_name['modfun'].message_type = _RPBMODFUN -_RPBBUCKETPROPS.fields_by_name['precommit'].message_type = _RPBCOMMITHOOK -_RPBBUCKETPROPS.fields_by_name['postcommit'].message_type = _RPBCOMMITHOOK -_RPBBUCKETPROPS.fields_by_name['chash_keyfun'].message_type = _RPBMODFUN -_RPBBUCKETPROPS.fields_by_name['linkfun'].message_type = _RPBMODFUN -_RPBBUCKETPROPS.fields_by_name['repl'].enum_type = _RPBBUCKETPROPS_RPBREPLMODE -_RPBBUCKETPROPS_RPBREPLMODE.containing_type = _RPBBUCKETPROPS; -DESCRIPTOR.message_types_by_name['RpbErrorResp'] = _RPBERRORRESP -DESCRIPTOR.message_types_by_name['RpbGetServerInfoResp'] = _RPBGETSERVERINFORESP -DESCRIPTOR.message_types_by_name['RpbPair'] = _RPBPAIR -DESCRIPTOR.message_types_by_name['RpbGetBucketReq'] = _RPBGETBUCKETREQ -DESCRIPTOR.message_types_by_name['RpbGetBucketResp'] = _RPBGETBUCKETRESP -DESCRIPTOR.message_types_by_name['RpbSetBucketReq'] = _RPBSETBUCKETREQ -DESCRIPTOR.message_types_by_name['RpbResetBucketReq'] = _RPBRESETBUCKETREQ -DESCRIPTOR.message_types_by_name['RpbGetBucketTypeReq'] = _RPBGETBUCKETTYPEREQ -DESCRIPTOR.message_types_by_name['RpbSetBucketTypeReq'] = _RPBSETBUCKETTYPEREQ -DESCRIPTOR.message_types_by_name['RpbModFun'] = _RPBMODFUN -DESCRIPTOR.message_types_by_name['RpbCommitHook'] = _RPBCOMMITHOOK -DESCRIPTOR.message_types_by_name['RpbBucketProps'] = _RPBBUCKETPROPS -DESCRIPTOR.message_types_by_name['RpbAuthReq'] = _RPBAUTHREQ - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbErrorResp(_message.Message): - DESCRIPTOR = _RPBERRORRESP - - # @@protoc_insertion_point(class_scope:RpbErrorResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetServerInfoResp(_message.Message): - DESCRIPTOR = _RPBGETSERVERINFORESP - - # @@protoc_insertion_point(class_scope:RpbGetServerInfoResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbPair(_message.Message): - DESCRIPTOR = _RPBPAIR - - # @@protoc_insertion_point(class_scope:RpbPair) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketReq(_message.Message): - DESCRIPTOR = _RPBGETBUCKETREQ - - # @@protoc_insertion_point(class_scope:RpbGetBucketReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketResp(_message.Message): - DESCRIPTOR = _RPBGETBUCKETRESP - - # @@protoc_insertion_point(class_scope:RpbGetBucketResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSetBucketReq(_message.Message): - DESCRIPTOR = _RPBSETBUCKETREQ - - # @@protoc_insertion_point(class_scope:RpbSetBucketReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbResetBucketReq(_message.Message): - DESCRIPTOR = _RPBRESETBUCKETREQ - - # @@protoc_insertion_point(class_scope:RpbResetBucketReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketTypeReq(_message.Message): - DESCRIPTOR = _RPBGETBUCKETTYPEREQ - - # @@protoc_insertion_point(class_scope:RpbGetBucketTypeReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSetBucketTypeReq(_message.Message): - DESCRIPTOR = _RPBSETBUCKETTYPEREQ - - # @@protoc_insertion_point(class_scope:RpbSetBucketTypeReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbModFun(_message.Message): - DESCRIPTOR = _RPBMODFUN - - # 
@@protoc_insertion_point(class_scope:RpbModFun) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCommitHook(_message.Message): - DESCRIPTOR = _RPBCOMMITHOOK - - # @@protoc_insertion_point(class_scope:RpbCommitHook) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbBucketProps(_message.Message): - DESCRIPTOR = _RPBBUCKETPROPS - - # @@protoc_insertion_point(class_scope:RpbBucketProps) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbAuthReq(_message.Message): - DESCRIPTOR = _RPBAUTHREQ - - # @@protoc_insertion_point(class_scope:RpbAuthReq) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\006RiakPB') -# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_search_pb2.py b/riak/riak_pb/riak_search_pb2.py deleted file mode 100644 index 1608f575..00000000 --- a/riak/riak_pb/riak_search_pb2.py +++ /dev/null @@ -1,210 +0,0 @@ -from six import * -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: riak_search.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - - -import riak.riak_pb.riak_pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='riak_search.proto', - package='', - serialized_pb='\n\x11riak_search.proto\x1a\nriak.proto\"(\n\x0cRpbSearchDoc\x12\x18\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x08.RpbPair\"\x9d\x01\n\x11RpbSearchQueryReq\x12\t\n\x01q\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12\x0c\n\x04rows\x18\x03 \x01(\r\x12\r\n\x05start\x18\x04 \x01(\r\x12\x0c\n\x04sort\x18\x05 \x01(\x0c\x12\x0e\n\x06\x66ilter\x18\x06 \x01(\x0c\x12\n\n\x02\x64\x66\x18\x07 \x01(\x0c\x12\n\n\x02op\x18\x08 \x01(\x0c\x12\n\n\x02\x66l\x18\t \x03(\x0c\x12\x0f\n\x07presort\x18\n \x01(\x0c\"W\n\x12RpbSearchQueryResp\x12\x1b\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\r.RpbSearchDoc\x12\x11\n\tmax_score\x18\x02 \x01(\x02\x12\x11\n\tnum_found\x18\x03 \x01(\rB\'\n\x17\x63om.basho.riak.protobufB\x0cRiakSearchPB') - - - - -_RPBSEARCHDOC = _descriptor.Descriptor( - name='RpbSearchDoc', - full_name='RpbSearchDoc', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='fields', full_name='RpbSearchDoc.fields', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=33, - serialized_end=73, -) - - -_RPBSEARCHQUERYREQ = _descriptor.Descriptor( - name='RpbSearchQueryReq', - full_name='RpbSearchQueryReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='q', full_name='RpbSearchQueryReq.q', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='index', full_name='RpbSearchQueryReq.index', index=1, - number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, 
containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='rows', full_name='RpbSearchQueryReq.rows', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='start', full_name='RpbSearchQueryReq.start', index=3, - number=4, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='sort', full_name='RpbSearchQueryReq.sort', index=4, - number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='filter', full_name='RpbSearchQueryReq.filter', index=5, - number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='df', full_name='RpbSearchQueryReq.df', index=6, - number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='op', full_name='RpbSearchQueryReq.op', index=7, - number=8, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='fl', full_name='RpbSearchQueryReq.fl', index=8, - number=9, type=12, cpp_type=9, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='presort', full_name='RpbSearchQueryReq.presort', index=9, - number=10, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=76, - serialized_end=233, -) - - -_RPBSEARCHQUERYRESP = _descriptor.Descriptor( - name='RpbSearchQueryResp', - full_name='RpbSearchQueryResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='docs', full_name='RpbSearchQueryResp.docs', index=0, - number=1, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='max_score', full_name='RpbSearchQueryResp.max_score', index=1, - number=2, type=2, cpp_type=6, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='num_found', full_name='RpbSearchQueryResp.num_found', index=2, - number=3, type=13, 
cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=235, - serialized_end=322, -) - -_RPBSEARCHDOC.fields_by_name['fields'].message_type = riak.riak_pb.riak_pb2._RPBPAIR -_RPBSEARCHQUERYRESP.fields_by_name['docs'].message_type = _RPBSEARCHDOC -DESCRIPTOR.message_types_by_name['RpbSearchDoc'] = _RPBSEARCHDOC -DESCRIPTOR.message_types_by_name['RpbSearchQueryReq'] = _RPBSEARCHQUERYREQ -DESCRIPTOR.message_types_by_name['RpbSearchQueryResp'] = _RPBSEARCHQUERYRESP - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSearchDoc(_message.Message): - DESCRIPTOR = _RPBSEARCHDOC - - # @@protoc_insertion_point(class_scope:RpbSearchDoc) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSearchQueryReq(_message.Message): - DESCRIPTOR = _RPBSEARCHQUERYREQ - - # @@protoc_insertion_point(class_scope:RpbSearchQueryReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSearchQueryResp(_message.Message): - DESCRIPTOR = _RPBSEARCHQUERYRESP - - # @@protoc_insertion_point(class_scope:RpbSearchQueryResp) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\014RiakSearchPB') -# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_yokozuna_pb2.py b/riak/riak_pb/riak_yokozuna_pb2.py deleted file mode 100644 index 1673f538..00000000 --- a/riak/riak_pb/riak_yokozuna_pb2.py +++ /dev/null @@ -1,372 +0,0 @@ -from six import * -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: riak_yokozuna.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import descriptor_pb2 -# @@protoc_insertion_point(imports) - - - - -DESCRIPTOR = _descriptor.FileDescriptor( - name='riak_yokozuna.proto', - package='', - serialized_pb='\n\x13riak_yokozuna.proto\"?\n\x10RpbYokozunaIndex\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0e\n\x06schema\x18\x02 \x01(\x0c\x12\r\n\x05n_val\x18\x03 \x01(\r\"&\n\x16RpbYokozunaIndexGetReq\x12\x0c\n\x04name\x18\x01 \x01(\x0c\";\n\x17RpbYokozunaIndexGetResp\x12 \n\x05index\x18\x01 \x03(\x0b\x32\x11.RpbYokozunaIndex\"K\n\x16RpbYokozunaIndexPutReq\x12 \n\x05index\x18\x01 \x02(\x0b\x32\x11.RpbYokozunaIndex\x12\x0f\n\x07timeout\x18\x02 \x01(\r\")\n\x19RpbYokozunaIndexDeleteReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\"2\n\x11RpbYokozunaSchema\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"=\n\x17RpbYokozunaSchemaPutReq\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchema\"\'\n\x17RpbYokozunaSchemaGetReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\">\n\x18RpbYokozunaSchemaGetResp\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchemaB)\n\x17\x63om.basho.riak.protobufB\x0eRiakYokozunaPB') - - - - -_RPBYOKOZUNAINDEX = _descriptor.Descriptor( - name='RpbYokozunaIndex', - full_name='RpbYokozunaIndex', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='RpbYokozunaIndex.name', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='schema', full_name='RpbYokozunaIndex.schema', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='n_val', full_name='RpbYokozunaIndex.n_val', index=2, - number=3, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=23, - serialized_end=86, -) - - -_RPBYOKOZUNAINDEXGETREQ = _descriptor.Descriptor( - name='RpbYokozunaIndexGetReq', - full_name='RpbYokozunaIndexGetReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='RpbYokozunaIndexGetReq.name', index=0, - number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=88, - serialized_end=126, -) - - -_RPBYOKOZUNAINDEXGETRESP = _descriptor.Descriptor( - name='RpbYokozunaIndexGetResp', - full_name='RpbYokozunaIndexGetResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='index', full_name='RpbYokozunaIndexGetResp.index', index=0, - number=1, 
type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=128, - serialized_end=187, -) - - -_RPBYOKOZUNAINDEXPUTREQ = _descriptor.Descriptor( - name='RpbYokozunaIndexPutReq', - full_name='RpbYokozunaIndexPutReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='index', full_name='RpbYokozunaIndexPutReq.index', index=0, - number=1, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='timeout', full_name='RpbYokozunaIndexPutReq.timeout', index=1, - number=2, type=13, cpp_type=3, label=1, - has_default_value=False, default_value=0, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=189, - serialized_end=264, -) - - -_RPBYOKOZUNAINDEXDELETEREQ = _descriptor.Descriptor( - name='RpbYokozunaIndexDeleteReq', - full_name='RpbYokozunaIndexDeleteReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='RpbYokozunaIndexDeleteReq.name', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=266, - serialized_end=307, -) - - -_RPBYOKOZUNASCHEMA = _descriptor.Descriptor( - name='RpbYokozunaSchema', - full_name='RpbYokozunaSchema', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='RpbYokozunaSchema.name', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='content', full_name='RpbYokozunaSchema.content', index=1, - number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=309, - serialized_end=359, -) - - -_RPBYOKOZUNASCHEMAPUTREQ = _descriptor.Descriptor( - name='RpbYokozunaSchemaPutReq', - full_name='RpbYokozunaSchemaPutReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='schema', full_name='RpbYokozunaSchemaPutReq.schema', index=0, - number=1, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - 
nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=361, - serialized_end=422, -) - - -_RPBYOKOZUNASCHEMAGETREQ = _descriptor.Descriptor( - name='RpbYokozunaSchemaGetReq', - full_name='RpbYokozunaSchemaGetReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='name', full_name='RpbYokozunaSchemaGetReq.name', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=424, - serialized_end=463, -) - - -_RPBYOKOZUNASCHEMAGETRESP = _descriptor.Descriptor( - name='RpbYokozunaSchemaGetResp', - full_name='RpbYokozunaSchemaGetResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='schema', full_name='RpbYokozunaSchemaGetResp.schema', index=0, - number=1, type=11, cpp_type=10, label=2, - has_default_value=False, default_value=None, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=465, - serialized_end=527, -) - -_RPBYOKOZUNAINDEXGETRESP.fields_by_name['index'].message_type = _RPBYOKOZUNAINDEX -_RPBYOKOZUNAINDEXPUTREQ.fields_by_name['index'].message_type = _RPBYOKOZUNAINDEX -_RPBYOKOZUNASCHEMAPUTREQ.fields_by_name['schema'].message_type = _RPBYOKOZUNASCHEMA -_RPBYOKOZUNASCHEMAGETRESP.fields_by_name['schema'].message_type = _RPBYOKOZUNASCHEMA -DESCRIPTOR.message_types_by_name['RpbYokozunaIndex'] = _RPBYOKOZUNAINDEX -DESCRIPTOR.message_types_by_name['RpbYokozunaIndexGetReq'] = _RPBYOKOZUNAINDEXGETREQ -DESCRIPTOR.message_types_by_name['RpbYokozunaIndexGetResp'] = _RPBYOKOZUNAINDEXGETRESP -DESCRIPTOR.message_types_by_name['RpbYokozunaIndexPutReq'] = _RPBYOKOZUNAINDEXPUTREQ -DESCRIPTOR.message_types_by_name['RpbYokozunaIndexDeleteReq'] = _RPBYOKOZUNAINDEXDELETEREQ -DESCRIPTOR.message_types_by_name['RpbYokozunaSchema'] = _RPBYOKOZUNASCHEMA -DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaPutReq'] = _RPBYOKOZUNASCHEMAPUTREQ -DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetReq'] = _RPBYOKOZUNASCHEMAGETREQ -DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetResp'] = _RPBYOKOZUNASCHEMAGETRESP - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndex(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEX - - # @@protoc_insertion_point(class_scope:RpbYokozunaIndex) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndexGetReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEXGETREQ - - # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndexGetResp(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEXGETRESP - - # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetResp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndexPutReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEXPUTREQ - - # @@protoc_insertion_point(class_scope:RpbYokozunaIndexPutReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class 
RpbYokozunaIndexDeleteReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEXDELETEREQ - - # @@protoc_insertion_point(class_scope:RpbYokozunaIndexDeleteReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaSchema(_message.Message): - DESCRIPTOR = _RPBYOKOZUNASCHEMA - - # @@protoc_insertion_point(class_scope:RpbYokozunaSchema) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaSchemaPutReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNASCHEMAPUTREQ - - # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaPutReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaSchemaGetReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNASCHEMAGETREQ - - # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaSchemaGetResp(_message.Message): - DESCRIPTOR = _RPBYOKOZUNASCHEMAGETRESP - - # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetResp) - - -DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\016RiakYokozunaPB') -# @@protoc_insertion_point(module_scope) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index ec5f9a58..1afcc7ba 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -1,7 +1,7 @@ -import riak.riak_pb -import riak.riak_pb.riak_pb2 -import riak.riak_pb.riak_dt_pb2 -import riak.riak_pb.riak_kv_pb2 +import riak.pb +import riak.pb.riak_pb2 +import riak.pb.riak_dt_pb2 +import riak.pb.riak_kv_pb2 from riak import RiakError from riak.content import RiakContent @@ -19,10 +19,10 @@ def _invert(d): return out REPL_TO_PY = { - riak.riak_pb.riak_pb2.RpbBucketProps.FALSE: False, - riak.riak_pb.riak_pb2.RpbBucketProps.TRUE: True, - riak.riak_pb.riak_pb2.RpbBucketProps.REALTIME: 'realtime', - riak.riak_pb.riak_pb2.RpbBucketProps.FULLSYNC: 'fullsync' + riak.pb.riak_pb2.RpbBucketProps.FALSE: False, + riak.pb.riak_pb2.RpbBucketProps.TRUE: True, + riak.pb.riak_pb2.RpbBucketProps.REALTIME: 'realtime', + riak.pb.riak_pb2.RpbBucketProps.FULLSYNC: 'fullsync' } REPL_TO_PB = _invert(REPL_TO_PY) @@ -48,22 +48,22 @@ def _invert(d): QUORUM_PROPS = ['r', 'pr', 'w', 'pw', 'dw', 'rw'] MAP_FIELD_TYPES = { - riak.riak_pb.riak_dt_pb2.MapField.COUNTER: 'counter', - riak.riak_pb.riak_dt_pb2.MapField.SET: 'set', - riak.riak_pb.riak_dt_pb2.MapField.REGISTER: 'register', - riak.riak_pb.riak_dt_pb2.MapField.FLAG: 'flag', - riak.riak_pb.riak_dt_pb2.MapField.MAP: 'map', - 'counter': riak.riak_pb.riak_dt_pb2.MapField.COUNTER, - 'set': riak.riak_pb.riak_dt_pb2.MapField.SET, - 'register': riak.riak_pb.riak_dt_pb2.MapField.REGISTER, - 'flag': riak.riak_pb.riak_dt_pb2.MapField.FLAG, - 'map': riak.riak_pb.riak_dt_pb2.MapField.MAP + riak.pb.riak_dt_pb2.MapField.COUNTER: 'counter', + riak.pb.riak_dt_pb2.MapField.SET: 'set', + riak.pb.riak_dt_pb2.MapField.REGISTER: 'register', + riak.pb.riak_dt_pb2.MapField.FLAG: 'flag', + riak.pb.riak_dt_pb2.MapField.MAP: 'map', + 'counter': riak.pb.riak_dt_pb2.MapField.COUNTER, + 'set': riak.pb.riak_dt_pb2.MapField.SET, + 'register': riak.pb.riak_dt_pb2.MapField.REGISTER, + 'flag': riak.pb.riak_dt_pb2.MapField.FLAG, + 'map': riak.pb.riak_dt_pb2.MapField.MAP } DT_FETCH_TYPES = { - riak.riak_pb.riak_dt_pb2.DtFetchResp.COUNTER: 'counter', - riak.riak_pb.riak_dt_pb2.DtFetchResp.SET: 'set', - riak.riak_pb.riak_dt_pb2.DtFetchResp.MAP: 'map' + riak.pb.riak_dt_pb2.DtFetchResp.COUNTER: 'counter', + 
riak.pb.riak_dt_pb2.DtFetchResp.SET: 'set', + riak.pb.riak_dt_pb2.DtFetchResp.MAP: 'map' } @@ -73,7 +73,7 @@ class RiakPbcCodec(object): """ def __init__(self, **unused_args): - if riak.riak_pb is None: + if riak.pb is None: raise NotImplementedError("this transport is not available") super(RiakPbcCodec, self).__init__(**unused_args) @@ -131,7 +131,7 @@ def _decode_content(self, rpb_content, sibling): a RiakObject. :param rpb_content: a single RpbContent message - :type rpb_content: riak.riak_pb.riak_pb2.RpbContent + :type rpb_content: riak.pb.riak_pb2.RpbContent :param sibling: a RiakContent sibling container :type sibling: RiakContent :rtype: RiakContent @@ -176,7 +176,7 @@ def _encode_content(self, robj, rpb_content): :param robj: a RiakObject :type robj: RiakObject :param rpb_content: the protobuf message to fill - :type rpb_content: riak.riak_pb.riak_pb2.RpbContent + :type rpb_content: riak.pb.riak_pb2.RpbContent """ if robj.content_type: rpb_content.content_type = str_to_bytes(robj.content_type) @@ -218,7 +218,7 @@ def _decode_link(self, link): Decodes an RpbLink message into a tuple :param link: an RpbLink message - :type link: riak.riak_pb.riak_pb2.RpbLink + :type link: riak.pb.riak_pb2.RpbLink :rtype tuple """ @@ -258,7 +258,7 @@ def _encode_bucket_props(self, props, msg): :param props: bucket properties :type props: dict :param msg: the protobuf message to fill - :type msg: riak.riak_pb.riak_pb2.RpbSetBucketReq + :type msg: riak.pb.riak_pb2.RpbSetBucketReq """ for prop in NORMAL_PROPS: if prop in props and props[prop] is not None: @@ -291,7 +291,7 @@ def _decode_bucket_props(self, msg): Decodes the protobuf bucket properties message into a dict. :param msg: the protobuf message to decode - :type msg: riak.riak_pb.riak_pb2.RpbBucketProps + :type msg: riak.pb.riak_pb2.RpbBucketProps :rtype dict """ props = {} @@ -321,7 +321,7 @@ def _decode_modfun(self, modfun): 'fun' keys. Used in bucket properties. :param modfun: the protobuf message to decode - :type modfun: riak.riak_pb.riak_pb2.RpbModFun + :type modfun: riak.pb.riak_pb2.RpbModFun :rtype dict """ return {'mod': bytes_to_str(modfun.module), @@ -335,11 +335,11 @@ def _encode_modfun(self, props, msg=None): :param props: the module/function pair :type props: dict :param msg: the protobuf message to fill - :type msg: riak.riak_pb.riak_pb2.RpbModFun - :rtype riak.riak_pb.riak_pb2.RpbModFun + :type msg: riak.pb.riak_pb2.RpbModFun + :rtype riak.pb.riak_pb2.RpbModFun """ if msg is None: - msg = riak.riak_pb.riak_pb2.RpbModFun() + msg = riak.pb.riak_pb2.RpbModFun() msg.module = str_to_bytes(props['mod']) msg.function = str_to_bytes(props['fun']) return msg @@ -374,7 +374,7 @@ def _decode_hook(self, hook): bucket properties. 
:param hook: the hook to decode - :type hook: riak.riak_pb.riak_pb2.RpbCommitHook + :type hook: riak.pb.riak_pb2.RpbCommitHook :rtype dict """ if hook.HasField('modfun'): @@ -390,8 +390,8 @@ def _encode_hook(self, hook, msg): :param hook: the hook to encode :type hook: dict :param msg: the protobuf message to fill - :type msg: riak.riak_pb.riak_pb2.RpbCommitHook - :rtype riak.riak_pb.riak_pb2.RpbCommitHook + :type msg: riak.pb.riak_pb2.RpbCommitHook + :rtype riak.pb.riak_pb2.RpbCommitHook """ if 'name' in hook: msg.name = str_to_bytes(hook['name']) @@ -424,18 +424,18 @@ def _encode_index_req(self, bucket, index, startkey, endkey=None, :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string - :rtype riak.riak_pb.riak_kv_pb2.RpbIndexReq + :rtype riak.pb.riak_kv_pb2.RpbIndexReq """ - req = riak.riak_pb.riak_kv_pb2.RpbIndexReq( + req = riak.pb.riak_kv_pb2.RpbIndexReq( bucket=str_to_bytes(bucket.name), index=str_to_bytes(index)) self._add_bucket_type(req, bucket.bucket_type) if endkey is not None: - req.qtype = riak.riak_pb.riak_kv_pb2.RpbIndexReq.range + req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.range req.range_min = str_to_bytes(str(startkey)) req.range_max = str_to_bytes(str(endkey)) else: - req.qtype = riak.riak_pb.riak_kv_pb2.RpbIndexReq.eq + req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.eq req.key = str_to_bytes(str(startkey)) if return_terms is not None: req.return_terms = return_terms @@ -457,7 +457,7 @@ def _decode_search_index(self, index): Fills an RpbYokozunaIndex message with the appropriate data. :param index: a yz index message - :type index: riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndex + :type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex :rtype dict """ result = {} @@ -611,9 +611,9 @@ def _encode_map_update(self, dtype, msg, op): msg.register_op = str_to_bytes(op[1]) elif dtype == 'flag': if op == 'enable': - msg.flag_op = riak.riak_pb.riak_dt_pb2.MapUpdate.ENABLE + msg.flag_op = riak.pb.riak_dt_pb2.MapUpdate.ENABLE else: - msg.flag_op = riak.riak_pb.riak_dt_pb2.MapUpdate.DISABLE + msg.flag_op = riak.pb.riak_dt_pb2.MapUpdate.DISABLE def _decode_preflist(self, item): """ @@ -621,7 +621,7 @@ def _decode_preflist(self, item): :param preflist: a bucket/key preflist :type preflist: list of - riak.riak_pb.riak_kv_pb2.RpbBucketKeyPreflistItem + riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem :rtype dict """ result = {'partition': item.partition, diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index f864dc1c..6d9e3b94 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -1,7 +1,7 @@ import socket import struct -import riak.riak_pb.riak_pb2 -import riak.riak_pb.messages +import riak.pb.riak_pb2 +import riak.pb.messages from riak.security import SecurityError, USE_STDLIB_SSL from riak import RiakError @@ -69,8 +69,8 @@ def _starttls(self): return True is Riak responds with a STARTTLS response, False otherwise """ msg_code, _ = self._non_connect_request( - riak.riak_pb.messages.MSG_CODE_START_TLS) - if msg_code == riak.riak_pb.messages.MSG_CODE_START_TLS: + riak.pb.messages.MSG_CODE_START_TLS) + if msg_code == riak.pb.messages.MSG_CODE_START_TLS: return True else: return False @@ -82,14 +82,14 @@ def _auth(self): Note: Riak will sleep for a short period of time upon a failed auth request/response to prevent denial of service attacks """ - req = riak.riak_pb.riak_pb2.RpbAuthReq() + req = riak.pb.riak_pb2.RpbAuthReq() req.user = 
str_to_bytes(self._client._credentials.username) req.password = str_to_bytes(self._client._credentials.password) msg_code, _ = self._non_connect_request( - riak.riak_pb.messages.MSG_CODE_AUTH_REQ, + riak.pb.messages.MSG_CODE_AUTH_REQ, req, - riak.riak_pb.messages.MSG_CODE_AUTH_RESP) - if msg_code == riak.riak_pb.messages.MSG_CODE_AUTH_RESP: + riak.pb.messages.MSG_CODE_AUTH_RESP) + if msg_code == riak.pb.messages.MSG_CODE_AUTH_RESP: return True else: return False @@ -154,10 +154,10 @@ def _ssl_handshake(self): def _recv_msg(self, expect=None): self._recv_pkt() msg_code, = struct.unpack("B", self._inbuf[:1]) - if msg_code is riak.riak_pb.messages.MSG_CODE_ERROR_RESP: + if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: err = self._parse_msg(msg_code, self._inbuf[1:]) raise RiakError(bytes_to_str(err.errmsg)) - elif msg_code in riak.riak_pb.messages.MESSAGE_CLASSES: + elif msg_code in riak.pb.messages.MESSAGE_CLASSES: msg = self._parse_msg(msg_code, self._inbuf[1:]) else: raise Exception("unknown msg code %s" % msg_code) @@ -214,7 +214,7 @@ def close(self): def _parse_msg(self, code, packet): try: - pbclass = riak.riak_pb.messages.MESSAGE_CLASSES[code] + pbclass = riak.pb.messages.MESSAGE_CLASSES[code] except KeyError: pbclass = None diff --git a/riak/transports/pbc/stream.py b/riak/transports/pbc/stream.py index 5cb71cbb..ca645e17 100644 --- a/riak/transports/pbc/stream.py +++ b/riak/transports/pbc/stream.py @@ -1,5 +1,5 @@ import json -import riak.riak_pb.messages +import riak.pb.messages from riak.util import decode_index_value, bytes_to_str from riak.client.index_page import CONTINUATION from six import PY2 @@ -65,7 +65,7 @@ class RiakPbcKeyStream(RiakPbcStream): Used internally by RiakPbcTransport to implement key-list streams. """ - _expect = riak.riak_pb.messages.MSG_CODE_LIST_KEYS_RESP + _expect = riak.pb.messages.MSG_CODE_LIST_KEYS_RESP def next(self): response = super(RiakPbcKeyStream, self).next() @@ -86,7 +86,7 @@ class RiakPbcMapredStream(RiakPbcStream): streams. """ - _expect = riak.riak_pb.messages.MSG_CODE_MAP_RED_RESP + _expect = riak.pb.messages.MSG_CODE_MAP_RED_RESP def next(self): response = super(RiakPbcMapredStream, self).next() @@ -106,7 +106,7 @@ class RiakPbcBucketStream(RiakPbcStream): Used internally by RiakPbcTransport to implement key-list streams. """ - _expect = riak.riak_pb.messages.MSG_CODE_LIST_BUCKETS_RESP + _expect = riak.pb.messages.MSG_CODE_LIST_BUCKETS_RESP def next(self): response = super(RiakPbcBucketStream, self).next() @@ -127,7 +127,7 @@ class RiakPbcIndexStream(RiakPbcStream): streams. 
""" - _expect = riak.riak_pb.messages.MSG_CODE_INDEX_RESP + _expect = riak.pb.messages.MSG_CODE_INDEX_RESP def __init__(self, transport, index, return_terms=False): super(RiakPbcIndexStream, self).__init__(transport) diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index a158a284..64086883 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -1,6 +1,6 @@ -import riak.riak_pb.messages -import riak.riak_pb.riak_pb2 -import riak.riak_pb.riak_kv_pb2 +import riak.pb.messages +import riak.pb.riak_pb2 +import riak.pb.riak_kv_pb2 from riak import RiakError from riak.transports.transport import RiakTransport @@ -46,8 +46,8 @@ def ping(self): Ping the remote server """ - msg_code, msg = self._request(riak.riak_pb.messages.MSG_CODE_PING_REQ) - if msg_code == riak.riak_pb.messages.MSG_CODE_PING_RESP: + msg_code, msg = self._request(riak.pb.messages.MSG_CODE_PING_REQ) + if msg_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: return False @@ -57,24 +57,24 @@ def get_server_info(self): Get information about the server """ msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, - expect=riak.riak_pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) + riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, + expect=riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) return {'node': bytes_to_str(resp.node), 'server_version': bytes_to_str(resp.server_version)} def _get_client_id(self): msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_GET_CLIENT_ID_REQ, - expect=riak.riak_pb.messages.MSG_CODE_GET_CLIENT_ID_RESP) + riak.pb.messages.MSG_CODE_GET_CLIENT_ID_REQ, + expect=riak.pb.messages.MSG_CODE_GET_CLIENT_ID_RESP) return bytes_to_str(resp.client_id) def _set_client_id(self, client_id): - req = riak.riak_pb.riak_kv_pb2.RpbSetClientIdReq() + req = riak.pb.riak_kv_pb2.RpbSetClientIdReq() req.client_id = str_to_bytes(client_id) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_SET_CLIENT_ID_REQ, req, - riak.riak_pb.messages.MSG_CODE_SET_CLIENT_ID_RESP) + riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ, req, + riak.pb.messages.MSG_CODE_SET_CLIENT_ID_RESP) self._client_id = client_id @@ -88,7 +88,7 @@ def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, """ bucket = robj.bucket - req = riak.riak_pb.riak_kv_pb2.RpbGetReq() + req = riak.pb.riak_kv_pb2.RpbGetReq() if r: req.r = self._encode_quorum(r) if self.quorum_controls(): @@ -109,8 +109,8 @@ def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, req.key = str_to_bytes(robj.key) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_GET_REQ, req, - riak.riak_pb.messages.MSG_CODE_GET_RESP) + riak.pb.messages.MSG_CODE_GET_REQ, req, + riak.pb.messages.MSG_CODE_GET_RESP) if resp is not None: if resp.HasField('vclock'): @@ -129,7 +129,7 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, if_none_match=False, timeout=None): bucket = robj.bucket - req = riak.riak_pb.riak_kv_pb2.RpbPutReq() + req = riak.pb.riak_kv_pb2.RpbPutReq() if w: req.w = self._encode_quorum(w) if dw: @@ -155,8 +155,8 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, self._encode_content(robj, req.content) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_PUT_REQ, req, - riak.riak_pb.messages.MSG_CODE_PUT_RESP) + riak.pb.messages.MSG_CODE_PUT_REQ, req, + riak.pb.messages.MSG_CODE_PUT_RESP) if resp is not None: if resp.HasField('key'): @@ -172,7 +172,7 @@ def put(self, robj, w=None, dw=None, pw=None, 
return_body=True, def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): - req = riak.riak_pb.riak_kv_pb2.RpbDelReq() + req = riak.pb.riak_kv_pb2.RpbDelReq() if rw: req.rw = self._encode_quorum(rw) if r: @@ -202,8 +202,8 @@ def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, req.key = str_to_bytes(robj.key) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_DEL_REQ, req, - riak.riak_pb.messages.MSG_CODE_DEL_RESP) + riak.pb.messages.MSG_CODE_DEL_REQ, req, + riak.pb.messages.MSG_CODE_DEL_RESP) return self def get_keys(self, bucket, timeout=None): @@ -222,13 +222,13 @@ def stream_keys(self, bucket, timeout=None): Streams keys from a bucket, returning an iterator that yields lists of keys. """ - req = riak.riak_pb.riak_kv_pb2.RpbListKeysReq() + req = riak.pb.riak_kv_pb2.RpbListKeysReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) if self.client_timeouts() and timeout: req.timeout = timeout - self._send_msg(riak.riak_pb.messages.MSG_CODE_LIST_KEYS_REQ, req) + self._send_msg(riak.pb.messages.MSG_CODE_LIST_KEYS_REQ, req) return RiakPbcKeyStream(self) @@ -236,15 +236,15 @@ def get_buckets(self, bucket_type=None, timeout=None): """ Serialize bucket listing request and deserialize response """ - req = riak.riak_pb.riak_kv_pb2.RpbListBucketsReq() + req = riak.pb.riak_kv_pb2.RpbListBucketsReq() self._add_bucket_type(req, bucket_type) if self.client_timeouts() and timeout: req.timeout = timeout msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req, - riak.riak_pb.messages.MSG_CODE_LIST_BUCKETS_RESP) + riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req, + riak.pb.messages.MSG_CODE_LIST_BUCKETS_RESP) return resp.buckets def stream_buckets(self, bucket_type=None, timeout=None): @@ -256,7 +256,7 @@ def stream_buckets(self, bucket_type=None, timeout=None): raise NotImplementedError('Streaming list-buckets is not ' 'supported') - req = riak.riak_pb.riak_kv_pb2.RpbListBucketsReq() + req = riak.pb.riak_kv_pb2.RpbListBucketsReq() req.stream = True self._add_bucket_type(req, bucket_type) # Bucket streaming landed in the same release as timeouts, so @@ -264,7 +264,7 @@ def stream_buckets(self, bucket_type=None, timeout=None): if timeout: req.timeout = timeout - self._send_msg(riak.riak_pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req) + self._send_msg(riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req) return RiakPbcBucketStream(self) @@ -272,13 +272,13 @@ def get_bucket_props(self, bucket): """ Serialize bucket property request and deserialize response """ - req = riak.riak_pb.riak_pb2.RpbGetBucketReq() + req = riak.pb.riak_pb2.RpbGetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_GET_BUCKET_REQ, req, - riak.riak_pb.messages.MSG_CODE_GET_BUCKET_RESP) + riak.pb.messages.MSG_CODE_GET_BUCKET_REQ, req, + riak.pb.messages.MSG_CODE_GET_BUCKET_RESP) return self._decode_bucket_props(resp.props) @@ -286,7 +286,7 @@ def set_bucket_props(self, bucket, props): """ Serialize set bucket property request and deserialize response """ - req = riak.riak_pb.riak_pb2.RpbSetBucketReq() + req = riak.pb.riak_pb2.RpbSetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) @@ -299,8 +299,8 @@ def set_bucket_props(self, bucket, props): self._encode_bucket_props(props, req) msg_code, resp = self._request( - 
riak.riak_pb.messages.MSG_CODE_SET_BUCKET_REQ, req, - riak.riak_pb.messages.MSG_CODE_SET_BUCKET_RESP) + riak.pb.messages.MSG_CODE_SET_BUCKET_REQ, req, + riak.pb.messages.MSG_CODE_SET_BUCKET_RESP) return True def clear_bucket_props(self, bucket): @@ -310,12 +310,12 @@ def clear_bucket_props(self, bucket): if not self.pb_clear_bucket_props(): return False - req = riak.riak_pb.riak_pb2.RpbResetBucketReq() + req = riak.pb.riak_pb2.RpbResetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) self._request( - riak.riak_pb.messages.MSG_CODE_RESET_BUCKET_REQ, req, - riak.riak_pb.messages.MSG_CODE_RESET_BUCKET_RESP) + riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ, req, + riak.pb.messages.MSG_CODE_RESET_BUCKET_RESP) return True def get_bucket_type_props(self, bucket_type): @@ -324,12 +324,12 @@ def get_bucket_type_props(self, bucket_type): """ self._check_bucket_types(bucket_type) - req = riak.riak_pb.riak_pb2.RpbGetBucketTypeReq() + req = riak.pb.riak_pb2.RpbGetBucketTypeReq() req.type = str_to_bytes(bucket_type.name) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ, req, - riak.riak_pb.messages.MSG_CODE_GET_BUCKET_RESP) + riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ, req, + riak.pb.messages.MSG_CODE_GET_BUCKET_RESP) return self._decode_bucket_props(resp.props) @@ -339,14 +339,14 @@ def set_bucket_type_props(self, bucket_type, props): """ self._check_bucket_types(bucket_type) - req = riak.riak_pb.riak_pb2.RpbSetBucketTypeReq() + req = riak.pb.riak_pb2.RpbSetBucketTypeReq() req.type = str_to_bytes(bucket_type.name) self._encode_bucket_props(props, req) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ, req, - riak.riak_pb.messages.MSG_CODE_SET_BUCKET_RESP) + riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ, req, + riak.pb.messages.MSG_CODE_SET_BUCKET_RESP) return True @@ -373,11 +373,11 @@ def stream_mapred(self, inputs, query, timeout=None): # Construct the job, optionally set the timeout... 
content = self._construct_mapred_json(inputs, query, timeout) - req = riak.riak_pb.riak_kv_pb2.RpbMapRedReq() + req = riak.pb.riak_kv_pb2.RpbMapRedReq() req.request = str_to_bytes(content) req.content_type = str_to_bytes("application/json") - self._send_msg(riak.riak_pb.messages.MSG_CODE_MAP_RED_REQ, req) + self._send_msg(riak.pb.messages.MSG_CODE_MAP_RED_REQ, req) return RiakPbcMapredStream(self) @@ -396,8 +396,8 @@ def get_index(self, bucket, index, startkey, endkey=None, timeout, term_regex) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_INDEX_REQ, req, - riak.riak_pb.messages.MSG_CODE_INDEX_RESP) + riak.pb.messages.MSG_CODE_INDEX_REQ, req, + riak.pb.messages.MSG_CODE_INDEX_RESP) if return_terms and resp.results: results = [(decode_index_value(index, pair.key), @@ -429,7 +429,7 @@ def stream_index(self, bucket, index, startkey, endkey=None, timeout, term_regex) req.stream = True - self._send_msg(riak.riak_pb.messages.MSG_CODE_INDEX_REQ, req) + self._send_msg(riak.pb.messages.MSG_CODE_INDEX_REQ, req) return RiakPbcIndexStream(self, index, return_terms) @@ -439,18 +439,18 @@ def create_search_index(self, index, schema=None, n_val=None, raise NotImplementedError("Search 2.0 administration is not " "supported for this version") index = str_to_bytes(index) - idx = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index) + idx = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index) if schema: idx.schema = str_to_bytes(schema) if n_val: idx.n_val = n_val - req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx) + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx) if timeout is not None: req.timeout = timeout self._request( - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, req, - riak.riak_pb.messages.MSG_CODE_PUT_RESP) + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, req, + riak.pb.messages.MSG_CODE_PUT_RESP) return True @@ -458,12 +458,12 @@ def get_search_index(self, index): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq( + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq( name=str_to_bytes(index)) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) if len(resp.index) > 0: return self._decode_search_index(resp.index[0]) else: @@ -473,11 +473,11 @@ def list_search_indexes(self): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) return [self._decode_search_index(index) for index in resp.index] @@ -485,12 +485,12 @@ def delete_search_index(self, index): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( name=str_to_bytes(index)) 
self._request( - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, req, - riak.riak_pb.messages.MSG_CODE_DEL_RESP) + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, req, + riak.pb.messages.MSG_CODE_DEL_RESP) return True @@ -498,15 +498,15 @@ def create_search_schema(self, schema, content): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - scma = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchema( + scma = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema( name=str_to_bytes(schema), content=str_to_bytes(content)) - req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq( + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq( schema=scma) self._request( - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, req, - riak.riak_pb.messages.MSG_CODE_PUT_RESP) + riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, req, + riak.pb.messages.MSG_CODE_PUT_RESP) return True @@ -514,12 +514,12 @@ def get_search_schema(self, schema): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq( + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq( name=str_to_bytes(schema)) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, req, - riak.riak_pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP) + riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP) result = {} result['name'] = bytes_to_str(resp.schema.name) @@ -533,14 +533,14 @@ def search(self, index, query, **params): if PY2 and isinstance(query, unicode): # noqa query = query.encode('utf8') - req = riak.riak_pb.riak_search_pb2.RpbSearchQueryReq( + req = riak.pb.riak_search_pb2.RpbSearchQueryReq( index=str_to_bytes(index), q=str_to_bytes(query)) self._encode_search_query(req, params) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_SEARCH_QUERY_REQ, req, - riak.riak_pb.messages.MSG_CODE_SEARCH_QUERY_RESP) + riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ, req, + riak.pb.messages.MSG_CODE_SEARCH_QUERY_RESP) result = {} if resp.HasField('max_score'): @@ -559,7 +559,7 @@ def get_counter(self, bucket, key, **params): if not self.counters(): raise NotImplementedError("Counters are not supported") - req = riak.riak_pb.riak_kv_pb2.RpbCounterGetReq() + req = riak.pb.riak_kv_pb2.RpbCounterGetReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) if params.get('r') is not None: @@ -572,8 +572,8 @@ def get_counter(self, bucket, key, **params): req.notfound_ok = params['notfound_ok'] msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_COUNTER_GET_REQ, req, - riak.riak_pb.messages.MSG_CODE_COUNTER_GET_RESP) + riak.pb.messages.MSG_CODE_COUNTER_GET_REQ, req, + riak.pb.messages.MSG_CODE_COUNTER_GET_RESP) if resp.HasField('value'): return resp.value else: @@ -588,7 +588,7 @@ def update_counter(self, bucket, key, value, **params): if not self.counters(): raise NotImplementedError("Counters are not supported") - req = riak.riak_pb.riak_kv_pb2.RpbCounterUpdateReq() + req = riak.pb.riak_kv_pb2.RpbCounterUpdateReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) req.amount = value @@ -602,8 +602,8 @@ def update_counter(self, bucket, key, value, **params): req.returnvalue = params['returnvalue'] msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_COUNTER_UPDATE_REQ, req, - 
riak.riak_pb.messages.MSG_CODE_COUNTER_UPDATE_RESP) + riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ, req, + riak.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP) if resp.HasField('value'): return resp.value else: @@ -618,15 +618,15 @@ def fetch_datatype(self, bucket, key, **options): if not self.datatypes(): raise NotImplementedError("Datatypes are not supported.") - req = riak.riak_pb.riak_dt_pb2.DtFetchReq() + req = riak.pb.riak_dt_pb2.DtFetchReq() req.type = str_to_bytes(bucket.bucket_type.name) req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) self._encode_dt_options(req, options) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_DT_FETCH_REQ, req, - riak.riak_pb.messages.MSG_CODE_DT_FETCH_RESP) + riak.pb.messages.MSG_CODE_DT_FETCH_REQ, req, + riak.pb.messages.MSG_CODE_DT_FETCH_RESP) return self._decode_dt_fetch(resp) @@ -645,7 +645,7 @@ def update_datatype(self, datatype, **options): raise ValueError("No operation to send on datatype {!r}". format(datatype)) - req = riak.riak_pb.riak_dt_pb2.DtUpdateReq() + req = riak.pb.riak_dt_pb2.DtUpdateReq() req.bucket = str_to_bytes(datatype.bucket.name) req.type = str_to_bytes(datatype.bucket.bucket_type.name) @@ -659,8 +659,8 @@ def update_datatype(self, datatype, **options): self._encode_dt_op(type_name, req, op) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_DT_UPDATE_REQ, req, - riak.riak_pb.messages.MSG_CODE_DT_UPDATE_RESP) + riak.pb.messages.MSG_CODE_DT_UPDATE_REQ, req, + riak.pb.messages.MSG_CODE_DT_UPDATE_RESP) if resp.HasField('key'): datatype.key = resp.key[:] if resp.HasField('context'): @@ -681,13 +681,13 @@ def get_preflist(self, bucket, key): :type key: string :rtype: list of dicts """ - req = riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq() + req = riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) req.type = str_to_bytes(bucket.bucket_type.name) msg_code, resp = self._request( - riak.riak_pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, req, - riak.riak_pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP) + riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, req, + riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP) return [self._decode_preflist(item) for item in resp.preflist] From a48cb919612e39da6929f94d4e725970b928dfd4 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 9 Dec 2015 09:42:56 -0800 Subject: [PATCH 046/324] Add ListKeys encoding unit test --- riak/tests/test_timeseries.py | 6 +++ riak/transports/pbc/codec.py | 97 ++++++++++++++++++----------------- 2 files changed, 57 insertions(+), 46 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 7baf310f..9a74b6d4 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -81,6 +81,12 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) + def test_encode_data_for_listkeys(self): + req = riak_pb.TsListKeysReq() + self.c._encode_timeseries_listkeysreq(self.table, req, 1234) + self.assertEqual(self.table.name, bytes_to_str(req.table)) + self.assertEqual(1234, req.timeout) + def test_decode_data_from_query(self): tqr = riak_pb.TsQueryResp() diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 98668345..e56adce7 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -663,24 +663,29 @@ def _encode_timeseries_keyreq(self, table, key, req): 
ts_cell = req.key.add() self._encode_to_ts_cell(cell, ts_cell) - def _encode_timeseries_put(self, tsobj, ts_put_req): + def _encode_timeseries_listkeysreq(self, table, req, timeout=None): + req.table = str_to_bytes(table.name) + if timeout is not None: + req.timeout = timeout + + def _encode_timeseries_put(self, tsobj, req): """ Fills an TsPutReq message with the appropriate data and metadata from a TsObject. :param tsobj: a TsObject :type tsobj: TsObject - :param ts_put_req: the protobuf message to fill - :type ts_put_req: riak_pb.TsPutReq + :param req: the protobuf message to fill + :type req: riak_pb.TsPutReq """ - ts_put_req.table = str_to_bytes(tsobj.table.name) + req.table = str_to_bytes(tsobj.table.name) if tsobj.columns: raise NotImplementedError("columns are not implemented yet") if tsobj.rows and isinstance(tsobj.rows, list): for row in tsobj.rows: - tsr = ts_put_req.rows.add() # NB: type riak_pb.TsRow + tsr = req.rows.add() # NB: type riak_pb.TsRow if not isinstance(row, list): raise ValueError("TsObject row must be a list of values") for cell in row: @@ -689,68 +694,68 @@ def _encode_timeseries_put(self, tsobj, ts_put_req): else: raise RiakError("TsObject requires a list of rows") - def _decode_timeseries(self, ts_rsp, tsobj): + def _decode_timeseries(self, resp, tsobj): """ Fills an TsObject with the appropriate data and metadata from a TsQueryResp. - :param ts_rsp: the protobuf message from which to process data - :type ts_rsp: riak_pb.TsQueryRsp or riak_pb.TsGetResp + :param resp: the protobuf message from which to process data + :type resp: riak_pb.TsQueryRsp or riak_pb.TsGetResp :param tsobj: a TsObject :type tsobj: TsObject """ if tsobj.columns is not None: - for ts_col in ts_rsp.columns: - col_name = bytes_to_str(ts_col.name) - col_type = ts_col.type + for col in resp.columns: + col_name = bytes_to_str(col.name) + col_type = col.type col = (col_name, col_type) logging.debug("column: '%s'", col) tsobj.columns.append(col) - for ts_row in ts_rsp.rows: - tsobj.rows.append(self._decode_timeseries_row(ts_row, - ts_rsp.columns)) + for row in resp.rows: + tsobj.rows.append( + self._decode_timeseries_row(row, resp.columns)) - def _decode_timeseries_row(self, ts_row, ts_columns): + def _decode_timeseries_row(self, tsrow, tscols): """ Decodes a TsRow into a list - :param ts_row: the protobuf TsRow to decode. - :type ts_row: riak_pb.TsRow - :param ts_columns: the protobuf TsColumn data to help decode. - :type ts_columns: list + :param tsrow: the protobuf TsRow to decode. + :type tsrow: riak_pb.TsRow + :param tscols: the protobuf TsColumn data to help decode. 
+ :type tscols: list :rtype list """ row = [] - for i, ts_cell in enumerate(ts_row.cells): - ts_col = ts_columns[i] - logging.debug("ts_cell: '%s', ts_col: '%d'", ts_cell, ts_col.type) - if ts_col.type == riak_pb.TsColumnType.Value('VARCHAR')\ - and ts_cell.HasField('varchar_value'): - logging.debug("ts_cell.varchar_value: '%s'", - ts_cell.varchar_value) - row.append(ts_cell.varchar_value) - elif ts_col.type == riak_pb.TsColumnType.Value('SINT64')\ - and ts_cell.HasField('sint64_value'): - logging.debug("ts_cell.sint64_value: '%s'", - ts_cell.sint64_value) - row.append(ts_cell.sint64_value) - elif ts_col.type == riak_pb.TsColumnType.Value('DOUBLE')\ - and ts_cell.HasField('double_value'): - logging.debug("ts_cell.double_value: '%d'", - ts_cell.double_value) - row.append(ts_cell.double_value) - elif ts_col.type == riak_pb.TsColumnType.Value('TIMESTAMP')\ - and ts_cell.HasField('timestamp_value'): + for i, cell in enumerate(tsrow.cells): + col = tscols[i] + logging.debug("cell: '%s', col: '%d'", cell, col.type) + if col.type == riak_pb.TsColumnType.Value('VARCHAR')\ + and cell.HasField('varchar_value'): + logging.debug("cell.varchar_value: '%s'", + cell.varchar_value) + row.append(cell.varchar_value) + elif col.type == riak_pb.TsColumnType.Value('SINT64')\ + and cell.HasField('sint64_value'): + logging.debug("cell.sint64_value: '%s'", + cell.sint64_value) + row.append(cell.sint64_value) + elif col.type == riak_pb.TsColumnType.Value('DOUBLE')\ + and cell.HasField('double_value'): + logging.debug("cell.double_value: '%d'", + cell.double_value) + row.append(cell.double_value) + elif col.type == riak_pb.TsColumnType.Value('TIMESTAMP')\ + and cell.HasField('timestamp_value'): dt = self._datetime_from_unix_time_millis( - ts_cell.timestamp_value) - logging.debug("ts_cell datetime: '%s'", dt) + cell.timestamp_value) + logging.debug("cell datetime: '%s'", dt) row.append(dt) - elif ts_col.type == riak_pb.TsColumnType.Value('BOOLEAN')\ - and ts_cell.HasField('boolean_value'): - logging.debug("ts_cell.boolean_value: '%s'", - ts_cell.boolean_value) - row.append(ts_cell.boolean_value) + elif col.type == riak_pb.TsColumnType.Value('BOOLEAN')\ + and cell.HasField('boolean_value'): + logging.debug("cell.boolean_value: '%s'", + cell.boolean_value) + row.append(cell.boolean_value) else: row.append(None) return row From 33862c177171ae95318515f57eecffc5da377e21 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 9 Dec 2015 13:52:22 -0800 Subject: [PATCH 047/324] TS streaming list keys done and with integration test --- riak/client/operations.py | 42 +++++++++++++++++++ riak/table.py | 23 ++++++----- riak/tests/test_timeseries.py | 25 +++++++++-- riak/transports/pbc/codec.py | 71 ++++++++++++++------------------ riak/transports/pbc/stream.py | 49 ++++++++++++---------- riak/transports/pbc/transport.py | 19 ++++++++- riak/transports/transport.py | 6 +++ 7 files changed, 161 insertions(+), 74 deletions(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index 0d0c7848..064c1d16 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -615,6 +615,48 @@ def ts_query(self, transport, table, query, interpolations=None): t = Table(self, table) return transport.ts_query(t, query, interpolations) + def ts_stream_keys(self, table, timeout=None): + """ + Lists all keys in a time series table via a stream. This is a + generator method which should be iterated over. 
+ + The caller should explicitly close the returned iterator, + either using :func:`contextlib.closing` or calling ``close()`` + explicitly. Consuming the entire iterator will also close the + stream. If it does not, the associated connection might + not be returned to the pool. Example:: + + from contextlib import closing + + # Using contextlib.closing + with closing(client.ts_stream_keys(mytable)) as keys: + for key_list in keys: + do_something(key_list) + + # Explicit close() + stream = client.ts_stream_keys(mytable) + for key_list in stream: + do_something(key_list) + stream.close() + + :param table: the table from which to stream keys + :type table: Table + :param timeout: a timeout value in milliseconds + :type timeout: int + :rtype: iterator + """ + _validate_timeout(timeout) + resource = self._acquire() + transport = resource.object + stream = transport.ts_stream_keys(table, timeout) + stream.attach(resource) + try: + for keylist in stream: + if len(keylist) > 0: + yield keylist + finally: + stream.close() + @retryable def get(self, transport, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None): diff --git a/riak/table.py b/riak/table.py index 27312d66..c477a32b 100644 --- a/riak/table.py +++ b/riak/table.py @@ -16,7 +16,6 @@ def __init__(self, client, name): :param name: The table's name :type name: string """ - if not isinstance(name, string_types): raise TypeError('Table name must be a string') @@ -50,29 +49,25 @@ def new(self, rows, columns=None): return TsObject(self._client, self, rows, columns) - def get(self, table, key): + def get(self, key): """ Gets a value from a timeseries table. - :param table: The timeseries table. - :type table: string or :class:`Table ` :param key: The timeseries value's key. :type key: list :rtype: :class:`TsObject ` """ - return self.client.ts_get(self, table, key) + return self._client.ts_get(self, key) - def delete(self, table, key): + def delete(self, key): """ Deletes a value from a timeseries table. - :param table: The timeseries table. - :type table: string or :class:`Table ` :param key: The timeseries value's key. :type key: list or dict :rtype: boolean """ - return self.client.ts_delete(self, table, key) + return self._client.ts_delete(self, key) def query(self, query, interpolations=None): """ @@ -82,4 +77,12 @@ def query(self, query, interpolations=None): :type query: string :rtype: :class:`TsObject ` """ - return self.client.ts_query(self, query, interpolations) + return self._client.ts_query(self, query, interpolations) + + def stream_keys(self, timeout=None): + """ + Streams keys from a timeseries table. 
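+        A short usage sketch (``mytable`` is a placeholder for a Table
+        bound to an existing time series table; see
+        RiakClient.ts_stream_keys for notes on closing the stream)::
+
+            from contextlib import closing
+
+            with closing(mytable.stream_keys()) as keys:
+                for key_list in keys:
+                    do_something(key_list)
+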
+ + :rtype: list + """ + return self._client.ts_stream_keys(self, timeout) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 9a74b6d4..76f4bf22 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import datetime -import logging import os import platform import riak_pb @@ -249,7 +248,6 @@ def test_query_that_matches_more_data(self): table=table_name, t1=self.twentyMinsAgoMsec, t2=self.nowMsec) - logging.debug("all data query: %s", query) ts_obj = self.client.ts_query('GeoCheckin', query) j = 0 for i, want in enumerate(self.rows): @@ -257,7 +255,6 @@ def test_query_that_matches_more_data(self): continue got = ts_obj.rows[j] j += 1 - logging.debug("got: %s want: %s", got, want) self.assertListEqual(got, want) def test_get_with_invalid_key(self): @@ -271,6 +268,28 @@ def test_get_single_value(self): self.assertIsNotNone(ts_obj) self.validate_data(ts_obj) + def test_get_single_value_via_table(self): + key = ['hash1', 'user2', self.fiveMinsAgo] + table = Table(self.client, 'GeoCheckin') + ts_obj = table.get(key) + self.assertIsNotNone(ts_obj) + self.validate_data(ts_obj) + + def test_stream_keys(self): + table = Table(self.client, 'GeoCheckin') + streamed_keys = [] + for keylist in table.stream_keys(): + self.assertNotEqual([], keylist) + streamed_keys += keylist + for key in keylist: + self.assertIsInstance(key, list) + self.assertEqual(len(key), 3) + self.assertEqual('hash1', key[0]) + self.assertEqual('user2', key[1]) + # TODO RTS-367 ENABLE + # self.assertIsInstance(key[2], datetime.datetime) + self.assertEqual(len(streamed_keys), 5) + def test_delete_single_value(self): key = ['hash1', 'user2', self.twentyFiveMinsAgo] rslt = self.client.ts_delete('GeoCheckin', key) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index e56adce7..0a870ab7 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -1,5 +1,4 @@ import datetime -import logging import riak_pb from riak import RiakError @@ -628,23 +627,16 @@ def _encode_map_update(self, dtype, msg, op): def _encode_to_ts_cell(self, cell, ts_cell): if cell is not None: if isinstance(cell, bytes) or isinstance(cell, bytearray): - logging.debug("cell -> varchar_value: '%s'", cell) ts_cell.varchar_value = cell elif isinstance(cell, datetime.datetime): ts_cell.timestamp_value = self._unix_time_millis(cell) - logging.debug("cell -> timestamp: '%s', timestamp_value '%d'", - cell, ts_cell.timestamp_value) elif isinstance(cell, bool): - logging.debug("cell -> boolean: '%s'", cell) ts_cell.boolean_value = cell elif isinstance(cell, str): - logging.debug("cell -> str: '%s'", cell) ts_cell.varchar_value = str_to_bytes(cell) elif isinstance(cell, int) or isinstance(cell, long): # noqa - logging.debug("cell -> int/long: '%s'", cell) ts_cell.sint64_value = cell elif isinstance(cell, float): - logging.debug("cell -> double: '%s'", cell) ts_cell.double_value = cell else: t = type(cell) @@ -665,7 +657,7 @@ def _encode_timeseries_keyreq(self, table, key, req): def _encode_timeseries_listkeysreq(self, table, req, timeout=None): req.table = str_to_bytes(table.name) - if timeout is not None: + if timeout: req.timeout = timeout def _encode_timeseries_put(self, tsobj, req): @@ -709,14 +701,13 @@ def _decode_timeseries(self, resp, tsobj): col_name = bytes_to_str(col.name) col_type = col.type col = (col_name, col_type) - logging.debug("column: '%s'", col) tsobj.columns.append(col) for row in resp.rows: tsobj.rows.append( 
self._decode_timeseries_row(row, resp.columns)) - def _decode_timeseries_row(self, tsrow, tscols): + def _decode_timeseries_row(self, tsrow, tscols=None): """ Decodes a TsRow into a list @@ -728,34 +719,36 @@ def _decode_timeseries_row(self, tsrow, tscols): """ row = [] for i, cell in enumerate(tsrow.cells): - col = tscols[i] - logging.debug("cell: '%s', col: '%d'", cell, col.type) - if col.type == riak_pb.TsColumnType.Value('VARCHAR')\ - and cell.HasField('varchar_value'): - logging.debug("cell.varchar_value: '%s'", - cell.varchar_value) - row.append(cell.varchar_value) - elif col.type == riak_pb.TsColumnType.Value('SINT64')\ - and cell.HasField('sint64_value'): - logging.debug("cell.sint64_value: '%s'", - cell.sint64_value) - row.append(cell.sint64_value) - elif col.type == riak_pb.TsColumnType.Value('DOUBLE')\ - and cell.HasField('double_value'): - logging.debug("cell.double_value: '%d'", - cell.double_value) - row.append(cell.double_value) - elif col.type == riak_pb.TsColumnType.Value('TIMESTAMP')\ - and cell.HasField('timestamp_value'): - dt = self._datetime_from_unix_time_millis( - cell.timestamp_value) - logging.debug("cell datetime: '%s'", dt) - row.append(dt) - elif col.type == riak_pb.TsColumnType.Value('BOOLEAN')\ - and cell.HasField('boolean_value'): - logging.debug("cell.boolean_value: '%s'", - cell.boolean_value) - row.append(cell.boolean_value) + col = None + if tscols is not None: + col = tscols[i] + if cell.HasField('varchar_value'): + if col and col.type != riak_pb.TsColumnType.Value('VARCHAR'): + raise TypeError('expected VARCHAR column') + else: + row.append(cell.varchar_value) + elif cell.HasField('sint64_value'): + if col and col.type != riak_pb.TsColumnType.Value('SINT64'): + raise TypeError('expected SINT64 column') + else: + row.append(cell.sint64_value) + elif cell.HasField('double_value'): + if col and col.type != riak_pb.TsColumnType.Value('DOUBLE'): + raise TypeError('expected DOUBLE column') + else: + row.append(cell.double_value) + elif cell.HasField('timestamp_value'): + if col and col.type != riak_pb.TsColumnType.Value('TIMESTAMP'): + raise TypeError('expected TIMESTAMP column') + else: + dt = self._datetime_from_unix_time_millis( + cell.timestamp_value) + row.append(dt) + elif cell.HasField('boolean_value'): + if col and col.type != riak_pb.TsColumnType.Value('BOOLEAN'): + raise TypeError('expected BOOLEAN column') + else: + row.append(cell.boolean_value) else: row.append(None) return row diff --git a/riak/transports/pbc/stream.py b/riak/transports/pbc/stream.py index 88e7abac..6231d481 100644 --- a/riak/transports/pbc/stream.py +++ b/riak/transports/pbc/stream.py @@ -1,31 +1,14 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - - import json from riak_pb.messages import ( MSG_CODE_LIST_KEYS_RESP, MSG_CODE_MAP_RED_RESP, MSG_CODE_LIST_BUCKETS_RESP, - MSG_CODE_INDEX_RESP + MSG_CODE_INDEX_RESP, + MSG_CODE_TS_LIST_KEYS_RESP, ) from riak.util import decode_index_value, bytes_to_str from riak.client.index_page import CONTINUATION +from riak.transports.pbc.codec import RiakPbcCodec from six import PY2 @@ -55,7 +38,7 @@ def next(self): self.finished = True raise - if(self._is_done(resp)): + if self._is_done(resp): self.finished = True return resp @@ -181,3 +164,27 @@ def next(self): def __next__(self): # Python 3.x Version return self.next() + + +class RiakPbcTsKeyStream(RiakPbcStream, RiakPbcCodec): + """ + Used internally by RiakPbcTransport to implement key-list streams. + """ + + _expect = MSG_CODE_TS_LIST_KEYS_RESP + + def next(self): + response = super(RiakPbcTsKeyStream, self).next() + + if response.done and len(response.keys) is 0: + raise StopIteration + + keys = [] + for tsrow in response.keys: + keys.append(self._decode_timeseries_row(tsrow)) + + return keys + + def __next__(self): + # Python 3.x Version + return self.next() diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 4d642fce..5038310b 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -8,7 +8,8 @@ from riak.transports.pbc.stream import (RiakPbcKeyStream, RiakPbcMapredStream, RiakPbcBucketStream, - RiakPbcIndexStream) + RiakPbcIndexStream, + RiakPbcTsKeyStream) from riak.transports.pbc.codec import RiakPbcCodec from six import PY2, PY3 @@ -64,6 +65,7 @@ MSG_CODE_TS_PUT_RESP, MSG_CODE_TS_QUERY_REQ, MSG_CODE_TS_QUERY_RESP, + MSG_CODE_TS_LIST_KEYS_REQ, MSG_CODE_TS_GET_REQ, MSG_CODE_TS_GET_RESP, MSG_CODE_TS_DEL_REQ, @@ -267,6 +269,21 @@ def ts_query(self, table, query, interpolations=None): self._decode_timeseries(ts_query_resp, tsobj) return tsobj + def ts_stream_keys(self, table, timeout=None): + """ + Streams keys from a timeseries table, returning an iterator that + yields lists of keys. + """ + req = riak_pb.TsListKeysReq() + t = None + if self.client_timeouts() and timeout: + t = timeout + self._encode_timeseries_listkeysreq(table, req, t) + + self._send_msg(MSG_CODE_TS_LIST_KEYS_REQ, req) + + return RiakPbcTsKeyStream(self) + def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): req = riak_pb.RpbDelReq() diff --git a/riak/transports/transport.py b/riak/transports/transport.py index 5e2e5f3e..f9fcae6d 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -104,6 +104,12 @@ def ts_query(self, table, query, interpolations=None): """ raise NotImplementedError + def ts_stream_keys(self, table, timeout=None): + """ + Streams the list of keys for the table through an iterator. + """ + raise NotImplementedError + def get_buckets(self, bucket_type=None, timeout=None): """ Gets the list of buckets as strings. From e2266839776482e5c4633a8ba23c930aa489910a Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Thu, 10 Dec 2015 15:28:45 -0700 Subject: [PATCH 048/324] Tweak support for Python 3 by removing ability to have binary varchars due to Python 3's in limitation on binary encoded strings. Also tweak a few version compatibility features and clean up PEP8/pyflakes warnings. 
--- riak/client/operations.py | 6 +++--- riak/tests/test_timeseries.py | 15 ++++++++------- riak/transports/pbc/codec.py | 16 +++++++++------- riak/transports/pbc/transport.py | 2 +- 4 files changed, 21 insertions(+), 18 deletions(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index 064c1d16..aaecae7d 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -553,7 +553,7 @@ def ts_get(self, transport, table, key): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, str): + if isinstance(t, string_types): t = Table(self, table) return transport.ts_get(t, key) @@ -590,7 +590,7 @@ def ts_delete(self, transport, table, key): :rtype: boolean """ t = table - if isinstance(t, str): + if isinstance(t, string_types): t = Table(self, table) return transport.ts_delete(t, key) @@ -611,7 +611,7 @@ def ts_query(self, transport, table, query, interpolations=None): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, str): + if isinstance(t, string_types): t = Table(self, table) return transport.ts_query(t, query, interpolations) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 76f4bf22..8835e21d 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- import datetime -import os import platform import riak_pb @@ -19,8 +18,8 @@ table_name = 'GeoCheckin' -bd0 = os.urandom(16) -bd1 = os.urandom(16) +bd0 = '时间序列' +bd1 = 'временные ряды' fiveMins = datetime.timedelta(0, 300) ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) @@ -67,14 +66,16 @@ def test_encode_data_for_put(self): self.assertEqual(len(self.rows), len(ts_put_req.rows)) r0 = ts_put_req.rows[0] - self.assertEqual(r0.cells[0].varchar_value, self.rows[0][0]) + self.assertEqual(bytes_to_str(r0.cells[0].varchar_value), + self.rows[0][0]) self.assertEqual(r0.cells[1].sint64_value, self.rows[0][1]) self.assertEqual(r0.cells[2].double_value, self.rows[0][2]) self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) r1 = ts_put_req.rows[1] - self.assertEqual(r1.cells[0].varchar_value, self.rows[1][0]) + self.assertEqual(bytes_to_str(r1.cells[0].varchar_value), + self.rows[1][0]) self.assertEqual(r1.cells[1].sint64_value, self.rows[1][1]) self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) @@ -107,7 +108,7 @@ def test_decode_data_from_query(self): r0 = tqr.rows.add() r0c0 = r0.cells.add() - r0c0.varchar_value = self.rows[0][0] + r0c0.varchar_value = str_to_bytes(self.rows[0][0]) r0c1 = r0.cells.add() r0c1.sint64_value = self.rows[0][1] r0c2 = r0.cells.add() @@ -119,7 +120,7 @@ def test_decode_data_from_query(self): r1 = tqr.rows.add() r1c0 = r1.cells.add() - r1c0.varchar_value = self.rows[1][0] + r1c0.varchar_value = str_to_bytes(self.rows[1][0]) r1c1 = r1.cells.add() r1c1.sint64_value = self.rows[1][1] r1c2 = r1.cells.add() diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 0a870ab7..6ec030b7 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -1,5 +1,6 @@ -import datetime +import logging import riak_pb +import datetime from riak import RiakError from riak.content import RiakContent @@ -626,15 +627,16 @@ def _encode_map_update(self, dtype, msg, op): def _encode_to_ts_cell(self, cell, ts_cell): if cell is not None: - if isinstance(cell, bytes) or isinstance(cell, bytearray): - ts_cell.varchar_value = cell - elif isinstance(cell, 
datetime.datetime): + if isinstance(cell, datetime.datetime): ts_cell.timestamp_value = self._unix_time_millis(cell) elif isinstance(cell, bool): ts_cell.boolean_value = cell - elif isinstance(cell, str): + elif isinstance(cell, string_types): + logging.debug("cell -> str: '%s'", cell) ts_cell.varchar_value = str_to_bytes(cell) - elif isinstance(cell, int) or isinstance(cell, long): # noqa + elif (isinstance(cell, int) or + (PY2 and isinstance(cell, long))): # noqa + logging.debug("cell -> int/long: '%s'", cell) ts_cell.sint64_value = cell elif isinstance(cell, float): ts_cell.double_value = cell @@ -726,7 +728,7 @@ def _decode_timeseries_row(self, tsrow, tscols=None): if col and col.type != riak_pb.TsColumnType.Value('VARCHAR'): raise TypeError('expected VARCHAR column') else: - row.append(cell.varchar_value) + row.append(bytes_to_str(cell.varchar_value)) elif cell.HasField('sint64_value'): if col and col.type != riak_pb.TsColumnType.Value('SINT64'): raise TypeError('expected SINT64 column') diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 5038310b..517df987 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -260,7 +260,7 @@ def ts_delete(self, table, key): def ts_query(self, table, query, interpolations=None): req = riak_pb.TsQueryReq() - req.query.base = bytes_to_str(query) + req.query.base = str_to_bytes(query) msg_code, ts_query_resp = self._request(MSG_CODE_TS_QUERY_REQ, req, MSG_CODE_TS_QUERY_RESP) From 19454a2b052d6bf7417092e4d8bcaa02438cb816 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 10 Dec 2015 14:52:13 -0800 Subject: [PATCH 049/324] Python 2.6 changes --- buildbot/Makefile | 8 ++--- buildbot/tox_cleanup.sh | 13 +++++++ riak/tests/test_timeseries.py | 68 +++++++++++++++++------------------ setup.py | 2 +- tox.ini | 6 ++++ 5 files changed, 57 insertions(+), 40 deletions(-) create mode 100755 buildbot/tox_cleanup.sh diff --git a/buildbot/Makefile b/buildbot/Makefile index 890e3984..c57f5c9e 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -26,14 +26,14 @@ test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @../setup.py disable_security --riak-admin=${RIAK_ADMIN} - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=0 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RUN_YZ=0 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. test_security: @echo "Testing Riak Python Client (with security)" @../setup.py enable_security --riak-admin=${RIAK_ADMIN} - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=0 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RUN_YZ=0 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. 
# These are required to actually build all the Python versions: # * pip install tox diff --git a/buildbot/tox_cleanup.sh b/buildbot/tox_cleanup.sh new file mode 100755 index 00000000..bd5324c7 --- /dev/null +++ b/buildbot/tox_cleanup.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env bash + +for pbin in .tox/*/bin +do + echo $pbin + pip="$pbin/pip" + $pip uninstall riak_pb --yes + $pip uninstall riak --yes + $pip uninstall protobuf --yes + $pip uninstall python3-riak-pb --yes + $pip uninstall python3-protobuf --yes + echo ----- +done diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 8835e21d..7e15b7d9 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -25,6 +25,26 @@ ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) ts1 = ts0 + fiveMins +now = datetime.datetime.utcfromtimestamp(144379690) +fiveMinsAgo = now - fiveMins +tenMinsAgo = fiveMinsAgo - fiveMins +fifteenMinsAgo = tenMinsAgo - fiveMins +twentyMinsAgo = fifteenMinsAgo - fiveMins +twentyFiveMinsAgo = twentyMinsAgo - fiveMins + +codec = RiakPbcCodec() +nowMsec = codec._unix_time_millis(now) +tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) +twentyMinsAgoMsec = codec._unix_time_millis(twentyMinsAgo) + +rows = [ + ['hash1', 'user2', twentyFiveMinsAgo, 'typhoon', 90.3], + ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], + ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0], + ['hash1', 'user2', fiveMinsAgo, 'wind', None], + ['hash1', 'user2', now, 'snow', 20.1] +] + @unittest.skipUnless(RUN_TIMESERIES, 'RUN_TIMESERIES is 0') class TimeseriesUnitTests(unittest.TestCase): @@ -169,46 +189,23 @@ class TimeseriesTests(IntegrationTestBase, unittest.TestCase): @classmethod def setUpClass(cls): super(TimeseriesTests, cls).setUpClass() - cls.now = datetime.datetime.utcfromtimestamp(144379690) - fiveMinsAgo = cls.now - fiveMins - tenMinsAgo = fiveMinsAgo - fiveMins - fifteenMinsAgo = tenMinsAgo - fiveMins - twentyMinsAgo = fifteenMinsAgo - fiveMins - twentyFiveMinsAgo = twentyMinsAgo - fiveMins client = cls.create_client() table = client.table(table_name) - rows = [ - ['hash1', 'user2', twentyFiveMinsAgo, 'typhoon', 90.3], - ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], - ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0], - ['hash1', 'user2', fiveMinsAgo, 'wind', None], - ['hash1', 'user2', cls.now, 'snow', 20.1] - ] ts_obj = table.new(rows) result = ts_obj.store() if not result: raise AssertionError("expected success") client.close() - codec = RiakPbcCodec() - cls.nowMsec = codec._unix_time_millis(cls.now) - cls.fiveMinsAgo = fiveMinsAgo - cls.twentyMinsAgo = twentyMinsAgo - cls.twentyFiveMinsAgo = twentyFiveMinsAgo - cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) - cls.twentyMinsAgoMsec = codec._unix_time_millis(twentyMinsAgo) - cls.numCols = len(rows[0]) - cls.rows = rows - def validate_data(self, ts_obj): if ts_obj.columns is not None: - self.assertEqual(len(ts_obj.columns), self.numCols) + self.assertEqual(len(ts_obj.columns), 5) self.assertEqual(len(ts_obj.rows), 1) row = ts_obj.rows[0] self.assertEqual(row[0], 'hash1') self.assertEqual(row[1], 'user2') - self.assertEqual(row[2], self.fiveMinsAgo) + self.assertEqual(row[2], fiveMinsAgo) self.assertEqual(row[3], 'wind') self.assertIsNone(row[4]) @@ -233,8 +230,8 @@ def test_query_that_matches_some_data(self): """ query = fmt.format( table=table_name, - t1=self.tenMinsAgoMsec, - t2=self.nowMsec) + t1=tenMinsAgoMsec, + t2=nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) self.validate_data(ts_obj) @@ -247,12 +244,12 @@ 
def test_query_that_matches_more_data(self): """ query = fmt.format( table=table_name, - t1=self.twentyMinsAgoMsec, - t2=self.nowMsec) + t1=twentyMinsAgoMsec, + t2=nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) j = 0 - for i, want in enumerate(self.rows): - if want[2] == self.twentyFiveMinsAgo: + for i, want in enumerate(rows): + if want[2] == twentyFiveMinsAgo: continue got = ts_obj.rows[j] j += 1 @@ -264,13 +261,13 @@ def test_get_with_invalid_key(self): self.client.ts_get('GeoCheckin', key) def test_get_single_value(self): - key = ['hash1', 'user2', self.fiveMinsAgo] + key = ['hash1', 'user2', fiveMinsAgo] ts_obj = self.client.ts_get('GeoCheckin', key) self.assertIsNotNone(ts_obj) self.validate_data(ts_obj) def test_get_single_value_via_table(self): - key = ['hash1', 'user2', self.fiveMinsAgo] + key = ['hash1', 'user2', fiveMinsAgo] table = Table(self.client, 'GeoCheckin') ts_obj = table.get(key) self.assertIsNotNone(ts_obj) @@ -289,10 +286,11 @@ def test_stream_keys(self): self.assertEqual('user2', key[1]) # TODO RTS-367 ENABLE # self.assertIsInstance(key[2], datetime.datetime) - self.assertEqual(len(streamed_keys), 5) + keylen = len(streamed_keys) + self.assertTrue(keylen == 5 or keylen == 4) def test_delete_single_value(self): - key = ['hash1', 'user2', self.twentyFiveMinsAgo] + key = ['hash1', 'user2', twentyFiveMinsAgo] rslt = self.client.ts_delete('GeoCheckin', key) self.assertTrue(rslt) ts_obj = self.client.ts_get('GeoCheckin', key) diff --git a/setup.py b/setup.py index 3e2b84d1..d97f75b4 100755 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ os_env_pythonpath = os.environ.get('PYTHONPATH') if os_env_pythonpath is not None: for ppath in os_env_pythonpath.split(os.pathsep): - if ppath.find('riak_pb/python/lib') != -1: + if ppath.find('riak_pb/python/lib') != -1 or ppath.find('riak_pb/python3/lib') != -1: riak_pb_messages = os.path.join(ppath, 'riak_pb', 'messages.py') if os.path.exists(riak_pb_messages): riak_pb_in_pythonpath = True diff --git a/tox.ini b/tox.ini index 1bb27de4..03c15cbd 100644 --- a/tox.ini +++ b/tox.ini @@ -7,6 +7,12 @@ envlist = py26, py279, py27, py33, py34 [testenv] +basepython = + py26: python2.6 + py279: {env:HOME}/.pyenv/versions/riak-py279/bin/python2.7 + py27: python2.7 + py33: python3.3 + py34: python3.4 install_command = pip install --upgrade {packages} commands = {envpython} setup.py test deps = six From f4e111e31567de4ea3d7351c45da34b1b97669f4 Mon Sep 17 00:00:00 2001 From: Patrick Ellul Date: Fri, 11 Dec 2015 14:31:54 +1100 Subject: [PATCH 050/324] adding keepalive options to pbc transport connection socket --- riak/transports/pbc/connection.py | 8 ++++++++ riak/transports/pbc/transport.py | 4 ++++ 2 files changed, 12 insertions(+) diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 0bc58232..4d15387b 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -220,6 +220,12 @@ def _connect(self): self._timeout) else: self._socket = socket.create_connection(self._address) + if self._socket_keepalive: + self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + ka_opts = self._socket_keepalive_options or {} + for k, v in ka_opts.iteritems(): + self._socket.setsockopt(socket.SOL_TCP, k, v) + if self._client._credentials: self._init_security() @@ -247,3 +253,5 @@ def _parse_msg(self, code, packet): # These are set in the RiakPbcTransport initializer _address = None _timeout = None + _socket_keepalive = None + _socket_keepalive_options = None diff --git 
a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index e385c698..14ef2f75 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -93,6 +93,8 @@ def __init__(self, node=None, client=None, timeout=None, + socket_keepalive=False, + socket_keepalive_options=None, *unused_options): """ Construct a new RiakPbcTransport object. @@ -104,6 +106,8 @@ def __init__(self, self._address = (node.host, node.pb_port) self._timeout = timeout self._socket = None + self._socket_keepalive = socket_keepalive + self._socket_keepalive_options = socket_keepalive_options # FeatureDetection API def _server_version(self): From a48020d860b4225a9cf2d52e39e6c794c09839bf Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Fri, 11 Dec 2015 04:08:56 +0000 Subject: [PATCH 051/324] Update supported Python versions - Retire Python 2.6 - Swap 2.7.8 for 2.7.9 (PyOpenSSL version) - Add in Python 3.5.1 --- buildbot/Makefile | 13 +++++++++---- buildbot/tox_setup.sh | 20 ++++++++++---------- 2 files changed, 19 insertions(+), 14 deletions(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index c57f5c9e..4be4564d 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -26,14 +26,19 @@ test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @../setup.py disable_security --riak-admin=${RIAK_ADMIN} - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=0 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RUN_YZ=0 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. test_security: @echo "Testing Riak Python Client (with security)" @../setup.py enable_security --riak-admin=${RIAK_ADMIN} - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=0 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RUN_YZ=0 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. + +test_timeseries: + @echo "Testing Riak Python Client (timeseries)" + @../setup.py disable_security --riak-admin=${RIAK_ADMIN} + @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=0 RUN_DATATYPES=0 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. 
# These are required to actually build all the Python versions: # * pip install tox diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index 1dc3f72c..05a1fe49 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -47,19 +47,19 @@ if [[ -z $(pyenv versions | grep riak_3.3.6) ]]; then VERSION_ALIAS="riak_3.3.6" pyenv install 3.3.6 pyenv virtualenv riak_3.3.6 riak-py33 fi -if [[ -z $(pyenv versions | grep riak_2.7.10) ]]; then - VERSION_ALIAS="riak_2.7.10" pyenv install 2.7.10 - pyenv virtualenv riak_2.7.10 riak-py27 +if [[ -z $(pyenv versions | grep riak_3.5.1) ]]; then + VERSION_ALIAS="riak_3.5.1" pyenv install 3.5.1 + pyenv virtualenv riak_3.5.1 riak-py35 fi -if [[ -z $(pyenv versions | grep riak_2.7.9) ]]; then - VERSION_ALIAS="riak_2.7.9" pyenv install 2.7.9 - pyenv virtualenv riak_2.7.9 riak-py279 +if [[ -z $(pyenv versions | grep riak_2.7.11) ]]; then + VERSION_ALIAS="riak_2.7.11" pyenv install 2.7.11 + pyenv virtualenv riak_2.7.11 riak-py27 fi -if [[ -z $(pyenv versions | grep riak_2.6.9) ]]; then - VERSION_ALIAS="riak_2.6.9" pyenv install 2.6.9 - pyenv virtualenv riak_2.6.9 riak-py26 +if [[ -z $(pyenv versions | grep riak_2.7.8) ]]; then + VERSION_ALIAS="riak_2.7.8" pyenv install 2.7.8 + pyenv virtualenv riak_2.7.8 riak-py278 fi -pyenv global riak-py34 riak-py33 riak-py27 riak-py279 riak-py26 +pyenv global riak-py34 riak-py33 riak-py35 riak-py27 riak-py278 pyenv versions # Now install tox From 6ae0494e5583b525619dca40dacf5057f7ebfb6f Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Fri, 11 Dec 2015 04:10:43 +0000 Subject: [PATCH 052/324] Update supported Python versions - Retire Python 2.6 - Swap 2.7.8 for 2.7.9 (PyOpenSSL version) - Add in Python 3.5.1 --- setup.py | 2 +- tox.ini | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/setup.py b/setup.py index d97f75b4..3e2b84d1 100755 --- a/setup.py +++ b/setup.py @@ -16,7 +16,7 @@ os_env_pythonpath = os.environ.get('PYTHONPATH') if os_env_pythonpath is not None: for ppath in os_env_pythonpath.split(os.pathsep): - if ppath.find('riak_pb/python/lib') != -1 or ppath.find('riak_pb/python3/lib') != -1: + if ppath.find('riak_pb/python/lib') != -1: riak_pb_messages = os.path.join(ppath, 'riak_pb', 'messages.py') if os.path.exists(riak_pb_messages): riak_pb_in_pythonpath = True diff --git a/tox.ini b/tox.ini index 03c15cbd..4950d459 100644 --- a/tox.ini +++ b/tox.ini @@ -4,15 +4,15 @@ # and then run "tox" from this directory. 
[tox] -envlist = py26, py279, py27, py33, py34 +envlist = py278, py27, py33, py34, py35 [testenv] basepython = - py26: python2.6 - py279: {env:HOME}/.pyenv/versions/riak-py279/bin/python2.7 + py278: {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 py27: python2.7 py33: python3.3 py34: python3.4 + py35: python3.5 install_command = pip install --upgrade {packages} commands = {envpython} setup.py test deps = six From fdec4412bf6b87adf95e0682bd90f309b9b6d698 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Fri, 11 Dec 2015 04:15:57 +0000 Subject: [PATCH 053/324] Revert to 2.7+ version since 2.6 has been retired --- riak/tests/test_timeseries.py | 68 ++++++++++++++++++----------------- 1 file changed, 35 insertions(+), 33 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 7e15b7d9..8835e21d 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -25,26 +25,6 @@ ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) ts1 = ts0 + fiveMins -now = datetime.datetime.utcfromtimestamp(144379690) -fiveMinsAgo = now - fiveMins -tenMinsAgo = fiveMinsAgo - fiveMins -fifteenMinsAgo = tenMinsAgo - fiveMins -twentyMinsAgo = fifteenMinsAgo - fiveMins -twentyFiveMinsAgo = twentyMinsAgo - fiveMins - -codec = RiakPbcCodec() -nowMsec = codec._unix_time_millis(now) -tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) -twentyMinsAgoMsec = codec._unix_time_millis(twentyMinsAgo) - -rows = [ - ['hash1', 'user2', twentyFiveMinsAgo, 'typhoon', 90.3], - ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], - ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0], - ['hash1', 'user2', fiveMinsAgo, 'wind', None], - ['hash1', 'user2', now, 'snow', 20.1] -] - @unittest.skipUnless(RUN_TIMESERIES, 'RUN_TIMESERIES is 0') class TimeseriesUnitTests(unittest.TestCase): @@ -189,23 +169,46 @@ class TimeseriesTests(IntegrationTestBase, unittest.TestCase): @classmethod def setUpClass(cls): super(TimeseriesTests, cls).setUpClass() + cls.now = datetime.datetime.utcfromtimestamp(144379690) + fiveMinsAgo = cls.now - fiveMins + tenMinsAgo = fiveMinsAgo - fiveMins + fifteenMinsAgo = tenMinsAgo - fiveMins + twentyMinsAgo = fifteenMinsAgo - fiveMins + twentyFiveMinsAgo = twentyMinsAgo - fiveMins client = cls.create_client() table = client.table(table_name) + rows = [ + ['hash1', 'user2', twentyFiveMinsAgo, 'typhoon', 90.3], + ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], + ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0], + ['hash1', 'user2', fiveMinsAgo, 'wind', None], + ['hash1', 'user2', cls.now, 'snow', 20.1] + ] ts_obj = table.new(rows) result = ts_obj.store() if not result: raise AssertionError("expected success") client.close() + codec = RiakPbcCodec() + cls.nowMsec = codec._unix_time_millis(cls.now) + cls.fiveMinsAgo = fiveMinsAgo + cls.twentyMinsAgo = twentyMinsAgo + cls.twentyFiveMinsAgo = twentyFiveMinsAgo + cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) + cls.twentyMinsAgoMsec = codec._unix_time_millis(twentyMinsAgo) + cls.numCols = len(rows[0]) + cls.rows = rows + def validate_data(self, ts_obj): if ts_obj.columns is not None: - self.assertEqual(len(ts_obj.columns), 5) + self.assertEqual(len(ts_obj.columns), self.numCols) self.assertEqual(len(ts_obj.rows), 1) row = ts_obj.rows[0] self.assertEqual(row[0], 'hash1') self.assertEqual(row[1], 'user2') - self.assertEqual(row[2], fiveMinsAgo) + self.assertEqual(row[2], self.fiveMinsAgo) self.assertEqual(row[3], 'wind') self.assertIsNone(row[4]) @@ -230,8 +233,8 @@ def test_query_that_matches_some_data(self): """ 
query = fmt.format( table=table_name, - t1=tenMinsAgoMsec, - t2=nowMsec) + t1=self.tenMinsAgoMsec, + t2=self.nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) self.validate_data(ts_obj) @@ -244,12 +247,12 @@ def test_query_that_matches_more_data(self): """ query = fmt.format( table=table_name, - t1=twentyMinsAgoMsec, - t2=nowMsec) + t1=self.twentyMinsAgoMsec, + t2=self.nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) j = 0 - for i, want in enumerate(rows): - if want[2] == twentyFiveMinsAgo: + for i, want in enumerate(self.rows): + if want[2] == self.twentyFiveMinsAgo: continue got = ts_obj.rows[j] j += 1 @@ -261,13 +264,13 @@ def test_get_with_invalid_key(self): self.client.ts_get('GeoCheckin', key) def test_get_single_value(self): - key = ['hash1', 'user2', fiveMinsAgo] + key = ['hash1', 'user2', self.fiveMinsAgo] ts_obj = self.client.ts_get('GeoCheckin', key) self.assertIsNotNone(ts_obj) self.validate_data(ts_obj) def test_get_single_value_via_table(self): - key = ['hash1', 'user2', fiveMinsAgo] + key = ['hash1', 'user2', self.fiveMinsAgo] table = Table(self.client, 'GeoCheckin') ts_obj = table.get(key) self.assertIsNotNone(ts_obj) @@ -286,11 +289,10 @@ def test_stream_keys(self): self.assertEqual('user2', key[1]) # TODO RTS-367 ENABLE # self.assertIsInstance(key[2], datetime.datetime) - keylen = len(streamed_keys) - self.assertTrue(keylen == 5 or keylen == 4) + self.assertEqual(len(streamed_keys), 5) def test_delete_single_value(self): - key = ['hash1', 'user2', twentyFiveMinsAgo] + key = ['hash1', 'user2', self.twentyFiveMinsAgo] rslt = self.client.ts_delete('GeoCheckin', key) self.assertTrue(rslt) ts_obj = self.client.ts_get('GeoCheckin', key) From c4eaa20564593673473e1d2ea46646584cbc6b17 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Fri, 11 Dec 2015 15:40:30 +0000 Subject: [PATCH 054/324] Simplify tox.ini for Python 2.7.8 --- tox.ini | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/tox.ini b/tox.ini index 4950d459..87e74bb1 100644 --- a/tox.ini +++ b/tox.ini @@ -6,13 +6,10 @@ [tox] envlist = py278, py27, py33, py34, py35 +[testenv:py278] +basepython = {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 + [testenv] -basepython = - py278: {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 - py27: python2.7 - py33: python3.3 - py34: python3.4 - py35: python3.5 install_command = pip install --upgrade {packages} commands = {envpython} setup.py test deps = six From e8ff23d73d1f967237dfd2ba0761f92012e8929e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 10:07:32 -0800 Subject: [PATCH 055/324] Revert file deletion. 
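For orientation, the messages.py file restored by this patch maps each Riak
protocol message code to its generated protobuf class via MESSAGE_CLASSES.
A minimal sketch of how such a registry is typically consumed when decoding a
framed protocol-buffers response follows; it assumes the usual Riak wire
framing (a 4-byte big-endian length covering a 1-byte message code plus the
encoded payload), and the decode_frame helper below is illustrative only, not
part of this patch series:

    import struct

    from riak.riak_pb.messages import MESSAGE_CLASSES

    def decode_frame(data):
        # 4-byte big-endian length = 1 (message code byte) + len(payload)
        (length,) = struct.unpack('!I', data[:4])
        code = data[4] if isinstance(data[4], int) else ord(data[4])  # py2/py3
        payload = data[5:4 + length]
        pb_class = MESSAGE_CLASSES[code]
        if pb_class is None:
            # Some responses (e.g. ping) carry no protobuf body at all.
            return code, None
        msg = pb_class()
        msg.ParseFromString(payload)  # standard protobuf deserialization
        return code, msg

Looking up the class by message code and calling ParseFromString on the
payload is the kind of lookup the PBC transport can perform again once these
vendored modules are back in place.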
--- riak/riak_pb/__init__.py | 0 riak/riak_pb/messages.py | 152 +++ riak/riak_pb/riak_dt_pb2.py | 863 ++++++++++++++ riak/riak_pb/riak_kv_pb2.py | 1747 +++++++++++++++++++++++++++++ riak/riak_pb/riak_pb2.py | 786 +++++++++++++ riak/riak_pb/riak_search_pb2.py | 210 ++++ riak/riak_pb/riak_yokozuna_pb2.py | 372 ++++++ 7 files changed, 4130 insertions(+) create mode 100644 riak/riak_pb/__init__.py create mode 100644 riak/riak_pb/messages.py create mode 100644 riak/riak_pb/riak_dt_pb2.py create mode 100644 riak/riak_pb/riak_kv_pb2.py create mode 100644 riak/riak_pb/riak_pb2.py create mode 100644 riak/riak_pb/riak_search_pb2.py create mode 100644 riak/riak_pb/riak_yokozuna_pb2.py diff --git a/riak/riak_pb/__init__.py b/riak/riak_pb/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/riak/riak_pb/messages.py b/riak/riak_pb/messages.py new file mode 100644 index 00000000..7d7f8b91 --- /dev/null +++ b/riak/riak_pb/messages.py @@ -0,0 +1,152 @@ +# Copyright 2015 Basho Technologies, Inc. +# +# This file is provided to you under the Apache License, +# Version 2.0 (the "License"); you may not use this file +# except in compliance with the License. You may obtain +# a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# This is a generated file. DO NOT EDIT. + +""" +Constants and mappings between Riak protocol codes and messages. +""" + +import riak.riak_pb.riak_dt_pb2 +import riak.riak_pb.riak_kv_pb2 +import riak.riak_pb.riak_pb2 +import riak.riak_pb.riak_search_pb2 +import riak.riak_pb.riak_yokozuna_pb2 + +# Protocol codes +MSG_CODE_ERROR_RESP = 0 +MSG_CODE_PING_REQ = 1 +MSG_CODE_PING_RESP = 2 +MSG_CODE_GET_CLIENT_ID_REQ = 3 +MSG_CODE_GET_CLIENT_ID_RESP = 4 +MSG_CODE_SET_CLIENT_ID_REQ = 5 +MSG_CODE_SET_CLIENT_ID_RESP = 6 +MSG_CODE_GET_SERVER_INFO_REQ = 7 +MSG_CODE_GET_SERVER_INFO_RESP = 8 +MSG_CODE_GET_REQ = 9 +MSG_CODE_GET_RESP = 10 +MSG_CODE_PUT_REQ = 11 +MSG_CODE_PUT_RESP = 12 +MSG_CODE_DEL_REQ = 13 +MSG_CODE_DEL_RESP = 14 +MSG_CODE_LIST_BUCKETS_REQ = 15 +MSG_CODE_LIST_BUCKETS_RESP = 16 +MSG_CODE_LIST_KEYS_REQ = 17 +MSG_CODE_LIST_KEYS_RESP = 18 +MSG_CODE_GET_BUCKET_REQ = 19 +MSG_CODE_GET_BUCKET_RESP = 20 +MSG_CODE_SET_BUCKET_REQ = 21 +MSG_CODE_SET_BUCKET_RESP = 22 +MSG_CODE_MAP_RED_REQ = 23 +MSG_CODE_MAP_RED_RESP = 24 +MSG_CODE_INDEX_REQ = 25 +MSG_CODE_INDEX_RESP = 26 +MSG_CODE_SEARCH_QUERY_REQ = 27 +MSG_CODE_SEARCH_QUERY_RESP = 28 +MSG_CODE_RESET_BUCKET_REQ = 29 +MSG_CODE_RESET_BUCKET_RESP = 30 +MSG_CODE_GET_BUCKET_TYPE_REQ = 31 +MSG_CODE_SET_BUCKET_TYPE_REQ = 32 +MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ = 33 +MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP = 34 +MSG_CODE_CS_BUCKET_REQ = 40 +MSG_CODE_CS_BUCKET_RESP = 41 +MSG_CODE_COUNTER_UPDATE_REQ = 50 +MSG_CODE_COUNTER_UPDATE_RESP = 51 +MSG_CODE_COUNTER_GET_REQ = 52 +MSG_CODE_COUNTER_GET_RESP = 53 +MSG_CODE_YOKOZUNA_INDEX_GET_REQ = 54 +MSG_CODE_YOKOZUNA_INDEX_GET_RESP = 55 +MSG_CODE_YOKOZUNA_INDEX_PUT_REQ = 56 +MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ = 57 +MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ = 58 +MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP = 59 +MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ = 60 +MSG_CODE_DT_FETCH_REQ = 80 +MSG_CODE_DT_FETCH_RESP = 81 +MSG_CODE_DT_UPDATE_REQ = 82 +MSG_CODE_DT_UPDATE_RESP = 83 
+MSG_CODE_AUTH_REQ = 253 +MSG_CODE_AUTH_RESP = 254 +MSG_CODE_START_TLS = 255 + +# Mapping from code to protobuf class +MESSAGE_CLASSES = { + MSG_CODE_ERROR_RESP: riak.riak_pb.riak_pb2.RpbErrorResp, + MSG_CODE_PING_REQ: None, + MSG_CODE_PING_RESP: None, + MSG_CODE_GET_CLIENT_ID_REQ: None, + MSG_CODE_GET_CLIENT_ID_RESP: riak.riak_pb.riak_kv_pb2.RpbGetClientIdResp, + MSG_CODE_SET_CLIENT_ID_REQ: riak.riak_pb.riak_kv_pb2.RpbSetClientIdReq, + MSG_CODE_SET_CLIENT_ID_RESP: None, + MSG_CODE_GET_SERVER_INFO_REQ: None, + MSG_CODE_GET_SERVER_INFO_RESP: riak.riak_pb.riak_pb2.RpbGetServerInfoResp, + MSG_CODE_GET_REQ: riak.riak_pb.riak_kv_pb2.RpbGetReq, + MSG_CODE_GET_RESP: riak.riak_pb.riak_kv_pb2.RpbGetResp, + MSG_CODE_PUT_REQ: riak.riak_pb.riak_kv_pb2.RpbPutReq, + MSG_CODE_PUT_RESP: riak.riak_pb.riak_kv_pb2.RpbPutResp, + MSG_CODE_DEL_REQ: riak.riak_pb.riak_kv_pb2.RpbDelReq, + MSG_CODE_DEL_RESP: None, + MSG_CODE_LIST_BUCKETS_REQ: riak.riak_pb.riak_kv_pb2.RpbListBucketsReq, + MSG_CODE_LIST_BUCKETS_RESP: riak.riak_pb.riak_kv_pb2.RpbListBucketsResp, + MSG_CODE_LIST_KEYS_REQ: riak.riak_pb.riak_kv_pb2.RpbListKeysReq, + MSG_CODE_LIST_KEYS_RESP: riak.riak_pb.riak_kv_pb2.RpbListKeysResp, + MSG_CODE_GET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbGetBucketReq, + MSG_CODE_GET_BUCKET_RESP: riak.riak_pb.riak_pb2.RpbGetBucketResp, + MSG_CODE_SET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbSetBucketReq, + MSG_CODE_SET_BUCKET_RESP: None, + MSG_CODE_MAP_RED_REQ: riak.riak_pb.riak_kv_pb2.RpbMapRedReq, + MSG_CODE_MAP_RED_RESP: riak.riak_pb.riak_kv_pb2.RpbMapRedResp, + MSG_CODE_INDEX_REQ: riak.riak_pb.riak_kv_pb2.RpbIndexReq, + MSG_CODE_INDEX_RESP: riak.riak_pb.riak_kv_pb2.RpbIndexResp, + MSG_CODE_SEARCH_QUERY_REQ: riak.riak_pb.riak_search_pb2.RpbSearchQueryReq, + MSG_CODE_SEARCH_QUERY_RESP: riak.riak_pb.riak_search_pb2.RpbSearchQueryResp, + MSG_CODE_RESET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbResetBucketReq, + MSG_CODE_RESET_BUCKET_RESP: None, + MSG_CODE_GET_BUCKET_TYPE_REQ: riak.riak_pb.riak_pb2.RpbGetBucketTypeReq, + MSG_CODE_SET_BUCKET_TYPE_REQ: riak.riak_pb.riak_pb2.RpbSetBucketTypeReq, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ: + riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP: + riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistResp, + MSG_CODE_CS_BUCKET_REQ: riak.riak_pb.riak_kv_pb2.RpbCSBucketReq, + MSG_CODE_CS_BUCKET_RESP: riak.riak_pb.riak_kv_pb2.RpbCSBucketResp, + MSG_CODE_COUNTER_UPDATE_REQ: riak.riak_pb.riak_kv_pb2.RpbCounterUpdateReq, + MSG_CODE_COUNTER_UPDATE_RESP: riak.riak_pb.riak_kv_pb2.RpbCounterUpdateResp, + MSG_CODE_COUNTER_GET_REQ: riak.riak_pb.riak_kv_pb2.RpbCounterGetReq, + MSG_CODE_COUNTER_GET_RESP: riak.riak_pb.riak_kv_pb2.RpbCounterGetResp, + MSG_CODE_YOKOZUNA_INDEX_GET_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq, + MSG_CODE_YOKOZUNA_INDEX_GET_RESP: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetResp, + MSG_CODE_YOKOZUNA_INDEX_PUT_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq, + MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq, + MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq, + MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetResp, + MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ: + riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq, + MSG_CODE_DT_FETCH_REQ: riak.riak_pb.riak_dt_pb2.DtFetchReq, + MSG_CODE_DT_FETCH_RESP: riak.riak_pb.riak_dt_pb2.DtFetchResp, + MSG_CODE_DT_UPDATE_REQ: 
riak.riak_pb.riak_dt_pb2.DtUpdateReq, + MSG_CODE_DT_UPDATE_RESP: riak.riak_pb.riak_dt_pb2.DtUpdateResp, + MSG_CODE_AUTH_REQ: riak.riak_pb.riak_pb2.RpbAuthReq, + MSG_CODE_AUTH_RESP: None, + MSG_CODE_START_TLS: None +} diff --git a/riak/riak_pb/riak_dt_pb2.py b/riak/riak_pb/riak_dt_pb2.py new file mode 100644 index 00000000..58a2f54b --- /dev/null +++ b/riak/riak_pb/riak_dt_pb2.py @@ -0,0 +1,863 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: riak_dt.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_dt.proto', + package='', + serialized_pb='\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"Q\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\"\x87\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\")\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"V\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 
\x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"t\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 \x03(\x0c\x12\x1c\n\tmap_value\x18\x05 \x03(\x0b\x32\t.MapEntryB#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') + + + +_MAPFIELD_MAPFIELDTYPE = _descriptor.EnumDescriptor( + name='MapFieldType', + full_name='MapField.MapFieldType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='COUNTER', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SET', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REGISTER', index=2, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FLAG', index=3, number=4, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAP', index=4, number=5, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=82, + serialized_end=151, +) + +_DTFETCHRESP_DATATYPE = _descriptor.EnumDescriptor( + name='DataType', + full_name='DtFetchResp.DataType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='COUNTER', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SET', index=1, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='MAP', index=2, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=696, + serialized_end=737, +) + +_MAPUPDATE_FLAGOP = _descriptor.EnumDescriptor( + name='FlagOp', + full_name='MapUpdate.FlagOp', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='ENABLE', index=0, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DISABLE', index=1, number=2, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=988, + serialized_end=1021, +) + + +_MAPFIELD = _descriptor.Descriptor( + name='MapField', + full_name='MapField', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='MapField.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='MapField.type', index=1, + number=2, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _MAPFIELD_MAPFIELDTYPE, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=18, + serialized_end=151, +) + + +_MAPENTRY = _descriptor.Descriptor( + name='MapEntry', + full_name='MapEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field', full_name='MapEntry.field', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='counter_value', full_name='MapEntry.counter_value', index=1, + number=2, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_value', full_name='MapEntry.set_value', index=2, + number=3, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='register_value', full_name='MapEntry.register_value', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='flag_value', full_name='MapEntry.flag_value', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_value', full_name='MapEntry.map_value', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=154, + serialized_end=306, +) + + +_DTFETCHREQ = _descriptor.Descriptor( + name='DtFetchReq', + full_name='DtFetchReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='DtFetchReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='DtFetchReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='DtFetchReq.type', index=2, + number=3, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='DtFetchReq.r', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='DtFetchReq.pr', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='basic_quorum', full_name='DtFetchReq.basic_quorum', index=5, + number=6, type=8, cpp_type=7, label=1, + 
has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='notfound_ok', full_name='DtFetchReq.notfound_ok', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='DtFetchReq.timeout', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='DtFetchReq.sloppy_quorum', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='DtFetchReq.n_val', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='include_context', full_name='DtFetchReq.include_context', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=309, + serialized_end=516, +) + + +_DTVALUE = _descriptor.Descriptor( + name='DtValue', + full_name='DtValue', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='counter_value', full_name='DtValue.counter_value', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_value', full_name='DtValue.set_value', index=1, + number=2, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_value', full_name='DtValue.map_value', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=518, + serialized_end=599, +) + + +_DTFETCHRESP = _descriptor.Descriptor( + name='DtFetchResp', + full_name='DtFetchResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='context', full_name='DtFetchResp.context', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), 
+ _descriptor.FieldDescriptor( + name='type', full_name='DtFetchResp.type', index=1, + number=2, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='DtFetchResp.value', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _DTFETCHRESP_DATATYPE, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=602, + serialized_end=737, +) + + +_COUNTEROP = _descriptor.Descriptor( + name='CounterOp', + full_name='CounterOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='increment', full_name='CounterOp.increment', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=739, + serialized_end=769, +) + + +_SETOP = _descriptor.Descriptor( + name='SetOp', + full_name='SetOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='adds', full_name='SetOp.adds', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='removes', full_name='SetOp.removes', index=1, + number=2, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=771, + serialized_end=809, +) + + +_MAPUPDATE = _descriptor.Descriptor( + name='MapUpdate', + full_name='MapUpdate', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field', full_name='MapUpdate.field', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='counter_op', full_name='MapUpdate.counter_op', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_op', full_name='MapUpdate.set_op', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='register_op', full_name='MapUpdate.register_op', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, 
default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='flag_op', full_name='MapUpdate.flag_op', index=4, + number=5, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=1, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_op', full_name='MapUpdate.map_op', index=5, + number=6, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _MAPUPDATE_FLAGOP, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=812, + serialized_end=1021, +) + + +_MAPOP = _descriptor.Descriptor( + name='MapOp', + full_name='MapOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='removes', full_name='MapOp.removes', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='updates', full_name='MapOp.updates', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1023, + serialized_end=1087, +) + + +_DTOP = _descriptor.Descriptor( + name='DtOp', + full_name='DtOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='counter_op', full_name='DtOp.counter_op', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_op', full_name='DtOp.set_op', index=1, + number=2, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_op', full_name='DtOp.map_op', index=2, + number=3, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1089, + serialized_end=1175, +) + + +_DTUPDATEREQ = _descriptor.Descriptor( + name='DtUpdateReq', + full_name='DtUpdateReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='DtUpdateReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', 
full_name='DtUpdateReq.key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='DtUpdateReq.type', index=2, + number=3, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='context', full_name='DtUpdateReq.context', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='op', full_name='DtUpdateReq.op', index=4, + number=5, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='DtUpdateReq.w', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='DtUpdateReq.dw', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='DtUpdateReq.pw', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_body', full_name='DtUpdateReq.return_body', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='DtUpdateReq.timeout', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='DtUpdateReq.sloppy_quorum', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='DtUpdateReq.n_val', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='include_context', full_name='DtUpdateReq.include_context', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1178, + serialized_end=1419, +) + + +_DTUPDATERESP = _descriptor.Descriptor( + name='DtUpdateResp', + full_name='DtUpdateResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='DtUpdateResp.key', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='context', full_name='DtUpdateResp.context', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='counter_value', full_name='DtUpdateResp.counter_value', index=2, + number=3, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='set_value', full_name='DtUpdateResp.set_value', index=3, + number=4, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='map_value', full_name='DtUpdateResp.map_value', index=4, + number=5, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1421, + serialized_end=1537, +) + +_MAPFIELD.fields_by_name['type'].enum_type = _MAPFIELD_MAPFIELDTYPE +_MAPFIELD_MAPFIELDTYPE.containing_type = _MAPFIELD; +_MAPENTRY.fields_by_name['field'].message_type = _MAPFIELD +_MAPENTRY.fields_by_name['map_value'].message_type = _MAPENTRY +_DTVALUE.fields_by_name['map_value'].message_type = _MAPENTRY +_DTFETCHRESP.fields_by_name['type'].enum_type = _DTFETCHRESP_DATATYPE +_DTFETCHRESP.fields_by_name['value'].message_type = _DTVALUE +_DTFETCHRESP_DATATYPE.containing_type = _DTFETCHRESP; +_MAPUPDATE.fields_by_name['field'].message_type = _MAPFIELD +_MAPUPDATE.fields_by_name['counter_op'].message_type = _COUNTEROP +_MAPUPDATE.fields_by_name['set_op'].message_type = _SETOP +_MAPUPDATE.fields_by_name['flag_op'].enum_type = _MAPUPDATE_FLAGOP +_MAPUPDATE.fields_by_name['map_op'].message_type = _MAPOP +_MAPUPDATE_FLAGOP.containing_type = _MAPUPDATE; +_MAPOP.fields_by_name['removes'].message_type = _MAPFIELD +_MAPOP.fields_by_name['updates'].message_type = _MAPUPDATE +_DTOP.fields_by_name['counter_op'].message_type = _COUNTEROP +_DTOP.fields_by_name['set_op'].message_type = _SETOP +_DTOP.fields_by_name['map_op'].message_type = _MAPOP +_DTUPDATEREQ.fields_by_name['op'].message_type = _DTOP +_DTUPDATERESP.fields_by_name['map_value'].message_type = _MAPENTRY +DESCRIPTOR.message_types_by_name['MapField'] = _MAPFIELD +DESCRIPTOR.message_types_by_name['MapEntry'] = _MAPENTRY +DESCRIPTOR.message_types_by_name['DtFetchReq'] = _DTFETCHREQ +DESCRIPTOR.message_types_by_name['DtValue'] = _DTVALUE 
+DESCRIPTOR.message_types_by_name['DtFetchResp'] = _DTFETCHRESP +DESCRIPTOR.message_types_by_name['CounterOp'] = _COUNTEROP +DESCRIPTOR.message_types_by_name['SetOp'] = _SETOP +DESCRIPTOR.message_types_by_name['MapUpdate'] = _MAPUPDATE +DESCRIPTOR.message_types_by_name['MapOp'] = _MAPOP +DESCRIPTOR.message_types_by_name['DtOp'] = _DTOP +DESCRIPTOR.message_types_by_name['DtUpdateReq'] = _DTUPDATEREQ +DESCRIPTOR.message_types_by_name['DtUpdateResp'] = _DTUPDATERESP + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapField(_message.Message): + DESCRIPTOR = _MAPFIELD + + # @@protoc_insertion_point(class_scope:MapField) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapEntry(_message.Message): + DESCRIPTOR = _MAPENTRY + + # @@protoc_insertion_point(class_scope:MapEntry) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtFetchReq(_message.Message): + DESCRIPTOR = _DTFETCHREQ + + # @@protoc_insertion_point(class_scope:DtFetchReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtValue(_message.Message): + DESCRIPTOR = _DTVALUE + + # @@protoc_insertion_point(class_scope:DtValue) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtFetchResp(_message.Message): + DESCRIPTOR = _DTFETCHRESP + + # @@protoc_insertion_point(class_scope:DtFetchResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class CounterOp(_message.Message): + DESCRIPTOR = _COUNTEROP + + # @@protoc_insertion_point(class_scope:CounterOp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class SetOp(_message.Message): + DESCRIPTOR = _SETOP + + # @@protoc_insertion_point(class_scope:SetOp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapUpdate(_message.Message): + DESCRIPTOR = _MAPUPDATE + + # @@protoc_insertion_point(class_scope:MapUpdate) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapOp(_message.Message): + DESCRIPTOR = _MAPOP + + # @@protoc_insertion_point(class_scope:MapOp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtOp(_message.Message): + DESCRIPTOR = _DTOP + + # @@protoc_insertion_point(class_scope:DtOp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtUpdateReq(_message.Message): + DESCRIPTOR = _DTUPDATEREQ + + # @@protoc_insertion_point(class_scope:DtUpdateReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtUpdateResp(_message.Message): + DESCRIPTOR = _DTUPDATERESP + + # @@protoc_insertion_point(class_scope:DtUpdateResp) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakDtPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_kv_pb2.py b/riak/riak_pb/riak_kv_pb2.py new file mode 100644 index 00000000..c8411e06 --- /dev/null +++ b/riak/riak_pb/riak_kv_pb2.py @@ -0,0 +1,1747 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: riak_kv.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + +import riak.riak_pb.riak_pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_kv.proto', + package='', + serialized_pb='\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xcd\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 \x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 \x01(\x0c\x12\x11\n\trange_min\x18\x05 \x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 
\x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"\xc1\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') + + + +_RPBINDEXREQ_INDEXQUERYTYPE = _descriptor.EnumDescriptor( + name='IndexQueryType', + full_name='RpbIndexReq.IndexQueryType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='eq', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='range', index=1, number=1, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1644, + serialized_end=1679, +) + + +_RPBGETCLIENTIDRESP = _descriptor.Descriptor( + 
name='RpbGetClientIdResp', + full_name='RpbGetClientIdResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='client_id', full_name='RpbGetClientIdResp.client_id', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=29, + serialized_end=68, +) + + +_RPBSETCLIENTIDREQ = _descriptor.Descriptor( + name='RpbSetClientIdReq', + full_name='RpbSetClientIdReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='client_id', full_name='RpbSetClientIdReq.client_id', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=70, + serialized_end=108, +) + + +_RPBGETREQ = _descriptor.Descriptor( + name='RpbGetReq', + full_name='RpbGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbGetReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbGetReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='RpbGetReq.r', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='RpbGetReq.pr', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='basic_quorum', full_name='RpbGetReq.basic_quorum', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='notfound_ok', full_name='RpbGetReq.notfound_ok', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='if_modified', full_name='RpbGetReq.if_modified', index=6, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='head', 
full_name='RpbGetReq.head', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deletedvclock', full_name='RpbGetReq.deletedvclock', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbGetReq.timeout', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='RpbGetReq.sloppy_quorum', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbGetReq.n_val', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbGetReq.type', index=12, + number=13, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=111, + serialized_end=344, +) + + +_RPBGETRESP = _descriptor.Descriptor( + name='RpbGetResp', + full_name='RpbGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='content', full_name='RpbGetResp.content', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='RpbGetResp.vclock', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unchanged', full_name='RpbGetResp.unchanged', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=346, + serialized_end=423, +) + + +_RPBPUTREQ = _descriptor.Descriptor( + name='RpbPutReq', + full_name='RpbPutReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbPutReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + 
is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbPutReq.key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='RpbPutReq.vclock', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content', full_name='RpbPutReq.content', index=3, + number=4, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='RpbPutReq.w', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='RpbPutReq.dw', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_body', full_name='RpbPutReq.return_body', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='RpbPutReq.pw', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='if_not_modified', full_name='RpbPutReq.if_not_modified', index=8, + number=9, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='if_none_match', full_name='RpbPutReq.if_none_match', index=9, + number=10, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_head', full_name='RpbPutReq.return_head', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbPutReq.timeout', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='asis', full_name='RpbPutReq.asis', index=12, + number=13, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='RpbPutReq.sloppy_quorum', index=13, + number=14, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbPutReq.n_val', index=14, + number=15, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbPutReq.type', index=15, + number=16, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=426, + serialized_end=720, +) + + +_RPBPUTRESP = _descriptor.Descriptor( + name='RpbPutResp', + full_name='RpbPutResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='content', full_name='RpbPutResp.content', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='RpbPutResp.vclock', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbPutResp.key', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=722, + serialized_end=793, +) + + +_RPBDELREQ = _descriptor.Descriptor( + name='RpbDelReq', + full_name='RpbDelReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbDelReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbDelReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rw', full_name='RpbDelReq.rw', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='RpbDelReq.vclock', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, 
default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='RpbDelReq.r', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='RpbDelReq.w', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='RpbDelReq.pr', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='RpbDelReq.pw', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='RpbDelReq.dw', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbDelReq.timeout', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sloppy_quorum', full_name='RpbDelReq.sloppy_quorum', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbDelReq.n_val', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbDelReq.type', index=12, + number=13, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=796, + serialized_end=991, +) + + +_RPBLISTBUCKETSREQ = _descriptor.Descriptor( + name='RpbListBucketsReq', + full_name='RpbListBucketsReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbListBucketsReq.timeout', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream', full_name='RpbListBucketsReq.stream', index=1, + number=2, type=8, cpp_type=7, label=1, + 
has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbListBucketsReq.type', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=993, + serialized_end=1059, +) + + +_RPBLISTBUCKETSRESP = _descriptor.Descriptor( + name='RpbListBucketsResp', + full_name='RpbListBucketsResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='buckets', full_name='RpbListBucketsResp.buckets', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbListBucketsResp.done', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1061, + serialized_end=1112, +) + + +_RPBLISTKEYSREQ = _descriptor.Descriptor( + name='RpbListKeysReq', + full_name='RpbListKeysReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbListKeysReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbListKeysReq.timeout', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbListKeysReq.type', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1114, + serialized_end=1177, +) + + +_RPBLISTKEYSRESP = _descriptor.Descriptor( + name='RpbListKeysResp', + full_name='RpbListKeysResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='keys', full_name='RpbListKeysResp.keys', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbListKeysResp.done', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1179, + serialized_end=1224, +) + + +_RPBMAPREDREQ = _descriptor.Descriptor( + name='RpbMapRedReq', + full_name='RpbMapRedReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='request', full_name='RpbMapRedReq.request', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content_type', full_name='RpbMapRedReq.content_type', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1226, + serialized_end=1279, +) + + +_RPBMAPREDRESP = _descriptor.Descriptor( + name='RpbMapRedResp', + full_name='RpbMapRedResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='phase', full_name='RpbMapRedResp.phase', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='response', full_name='RpbMapRedResp.response', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbMapRedResp.done', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1281, + serialized_end=1343, +) + + +_RPBINDEXREQ = _descriptor.Descriptor( + name='RpbIndexReq', + full_name='RpbIndexReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbIndexReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='index', full_name='RpbIndexReq.index', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='qtype', full_name='RpbIndexReq.qtype', index=2, + number=3, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', 
full_name='RpbIndexReq.key', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='range_min', full_name='RpbIndexReq.range_min', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='range_max', full_name='RpbIndexReq.range_max', index=5, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_terms', full_name='RpbIndexReq.return_terms', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream', full_name='RpbIndexReq.stream', index=7, + number=8, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_results', full_name='RpbIndexReq.max_results', index=8, + number=9, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbIndexReq.continuation', index=9, + number=10, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbIndexReq.timeout', index=10, + number=11, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbIndexReq.type', index=11, + number=12, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='term_regex', full_name='RpbIndexReq.term_regex', index=12, + number=13, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pagination_sort', full_name='RpbIndexReq.pagination_sort', index=13, + number=14, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _RPBINDEXREQ_INDEXQUERYTYPE, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1346, + serialized_end=1679, +) + + +_RPBINDEXRESP = _descriptor.Descriptor( + 
name='RpbIndexResp', + full_name='RpbIndexResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='keys', full_name='RpbIndexResp.keys', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='results', full_name='RpbIndexResp.results', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbIndexResp.continuation', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbIndexResp.done', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1681, + serialized_end=1772, +) + + +_RPBCSBUCKETREQ = _descriptor.Descriptor( + name='RpbCSBucketReq', + full_name='RpbCSBucketReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbCSBucketReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_key', full_name='RpbCSBucketReq.start_key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_key', full_name='RpbCSBucketReq.end_key', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start_incl', full_name='RpbCSBucketReq.start_incl', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='end_incl', full_name='RpbCSBucketReq.end_incl', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbCSBucketReq.continuation', index=5, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_results', 
full_name='RpbCSBucketReq.max_results', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbCSBucketReq.timeout', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbCSBucketReq.type', index=8, + number=9, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1775, + serialized_end=1968, +) + + +_RPBCSBUCKETRESP = _descriptor.Descriptor( + name='RpbCSBucketResp', + full_name='RpbCSBucketResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='objects', full_name='RpbCSBucketResp.objects', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbCSBucketResp.continuation', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbCSBucketResp.done', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1970, + serialized_end=2057, +) + + +_RPBINDEXOBJECT = _descriptor.Descriptor( + name='RpbIndexObject', + full_name='RpbIndexObject', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='RpbIndexObject.key', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='object', full_name='RpbIndexObject.object', index=1, + number=2, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2059, + serialized_end=2117, +) + + +_RPBCONTENT = _descriptor.Descriptor( + name='RpbContent', + full_name='RpbContent', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='RpbContent.value', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, 
default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content_type', full_name='RpbContent.content_type', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='charset', full_name='RpbContent.charset', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content_encoding', full_name='RpbContent.content_encoding', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vtag', full_name='RpbContent.vtag', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='links', full_name='RpbContent.links', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last_mod', full_name='RpbContent.last_mod', index=6, + number=7, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last_mod_usecs', full_name='RpbContent.last_mod_usecs', index=7, + number=8, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='usermeta', full_name='RpbContent.usermeta', index=8, + number=9, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='indexes', full_name='RpbContent.indexes', index=9, + number=10, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='deleted', full_name='RpbContent.deleted', index=10, + number=11, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2120, + serialized_end=2365, +) + + +_RPBLINK = _descriptor.Descriptor( + name='RpbLink', + full_name='RpbLink', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbLink.bucket', 
index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbLink.key', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='tag', full_name='RpbLink.tag', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2367, + serialized_end=2418, +) + + +_RPBCOUNTERUPDATEREQ = _descriptor.Descriptor( + name='RpbCounterUpdateReq', + full_name='RpbCounterUpdateReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbCounterUpdateReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbCounterUpdateReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='amount', full_name='RpbCounterUpdateReq.amount', index=2, + number=3, type=18, cpp_type=2, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='RpbCounterUpdateReq.w', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='RpbCounterUpdateReq.dw', index=4, + number=5, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='RpbCounterUpdateReq.pw', index=5, + number=6, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='returnvalue', full_name='RpbCounterUpdateReq.returnvalue', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2420, + serialized_end=2542, +) + + +_RPBCOUNTERUPDATERESP = _descriptor.Descriptor( + name='RpbCounterUpdateResp', + full_name='RpbCounterUpdateResp', 
+ filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='RpbCounterUpdateResp.value', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2544, + serialized_end=2581, +) + + +_RPBCOUNTERGETREQ = _descriptor.Descriptor( + name='RpbCounterGetReq', + full_name='RpbCounterGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbCounterGetReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbCounterGetReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='RpbCounterGetReq.r', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='RpbCounterGetReq.pr', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='basic_quorum', full_name='RpbCounterGetReq.basic_quorum', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='notfound_ok', full_name='RpbCounterGetReq.notfound_ok', index=5, + number=6, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2583, + serialized_end=2696, +) + + +_RPBCOUNTERGETRESP = _descriptor.Descriptor( + name='RpbCounterGetResp', + full_name='RpbCounterGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='value', full_name='RpbCounterGetResp.value', index=0, + number=1, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2698, + serialized_end=2732, +) + + +_RPBGETBUCKETKEYPREFLISTREQ = _descriptor.Descriptor( + name='RpbGetBucketKeyPreflistReq', + full_name='RpbGetBucketKeyPreflistReq', + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbGetBucketKeyPreflistReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='RpbGetBucketKeyPreflistReq.key', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbGetBucketKeyPreflistReq.type', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2734, + serialized_end=2805, +) + + +_RPBGETBUCKETKEYPREFLISTRESP = _descriptor.Descriptor( + name='RpbGetBucketKeyPreflistResp', + full_name='RpbGetBucketKeyPreflistResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='preflist', full_name='RpbGetBucketKeyPreflistResp.preflist', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2807, + serialized_end=2881, +) + + +_RPBBUCKETKEYPREFLISTITEM = _descriptor.Descriptor( + name='RpbBucketKeyPreflistItem', + full_name='RpbBucketKeyPreflistItem', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='partition', full_name='RpbBucketKeyPreflistItem.partition', index=0, + number=1, type=3, cpp_type=2, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='node', full_name='RpbBucketKeyPreflistItem.node', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='primary', full_name='RpbBucketKeyPreflistItem.primary', index=2, + number=3, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=2883, + serialized_end=2959, +) + +_RPBGETRESP.fields_by_name['content'].message_type = _RPBCONTENT +_RPBPUTREQ.fields_by_name['content'].message_type = _RPBCONTENT +_RPBPUTRESP.fields_by_name['content'].message_type = _RPBCONTENT +_RPBINDEXREQ.fields_by_name['qtype'].enum_type = _RPBINDEXREQ_INDEXQUERYTYPE +_RPBINDEXREQ_INDEXQUERYTYPE.containing_type = _RPBINDEXREQ; +_RPBINDEXRESP.fields_by_name['results'].message_type = 
riak.riak_pb.riak_pb2._RPBPAIR +_RPBCSBUCKETRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT +_RPBINDEXOBJECT.fields_by_name['object'].message_type = _RPBGETRESP +_RPBCONTENT.fields_by_name['links'].message_type = _RPBLINK +_RPBCONTENT.fields_by_name['usermeta'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBCONTENT.fields_by_name['indexes'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBGETBUCKETKEYPREFLISTRESP.fields_by_name['preflist'].message_type = _RPBBUCKETKEYPREFLISTITEM +DESCRIPTOR.message_types_by_name['RpbGetClientIdResp'] = _RPBGETCLIENTIDRESP +DESCRIPTOR.message_types_by_name['RpbSetClientIdReq'] = _RPBSETCLIENTIDREQ +DESCRIPTOR.message_types_by_name['RpbGetReq'] = _RPBGETREQ +DESCRIPTOR.message_types_by_name['RpbGetResp'] = _RPBGETRESP +DESCRIPTOR.message_types_by_name['RpbPutReq'] = _RPBPUTREQ +DESCRIPTOR.message_types_by_name['RpbPutResp'] = _RPBPUTRESP +DESCRIPTOR.message_types_by_name['RpbDelReq'] = _RPBDELREQ +DESCRIPTOR.message_types_by_name['RpbListBucketsReq'] = _RPBLISTBUCKETSREQ +DESCRIPTOR.message_types_by_name['RpbListBucketsResp'] = _RPBLISTBUCKETSRESP +DESCRIPTOR.message_types_by_name['RpbListKeysReq'] = _RPBLISTKEYSREQ +DESCRIPTOR.message_types_by_name['RpbListKeysResp'] = _RPBLISTKEYSRESP +DESCRIPTOR.message_types_by_name['RpbMapRedReq'] = _RPBMAPREDREQ +DESCRIPTOR.message_types_by_name['RpbMapRedResp'] = _RPBMAPREDRESP +DESCRIPTOR.message_types_by_name['RpbIndexReq'] = _RPBINDEXREQ +DESCRIPTOR.message_types_by_name['RpbIndexResp'] = _RPBINDEXRESP +DESCRIPTOR.message_types_by_name['RpbCSBucketReq'] = _RPBCSBUCKETREQ +DESCRIPTOR.message_types_by_name['RpbCSBucketResp'] = _RPBCSBUCKETRESP +DESCRIPTOR.message_types_by_name['RpbIndexObject'] = _RPBINDEXOBJECT +DESCRIPTOR.message_types_by_name['RpbContent'] = _RPBCONTENT +DESCRIPTOR.message_types_by_name['RpbLink'] = _RPBLINK +DESCRIPTOR.message_types_by_name['RpbCounterUpdateReq'] = _RPBCOUNTERUPDATEREQ +DESCRIPTOR.message_types_by_name['RpbCounterUpdateResp'] = _RPBCOUNTERUPDATERESP +DESCRIPTOR.message_types_by_name['RpbCounterGetReq'] = _RPBCOUNTERGETREQ +DESCRIPTOR.message_types_by_name['RpbCounterGetResp'] = _RPBCOUNTERGETRESP +DESCRIPTOR.message_types_by_name['RpbGetBucketKeyPreflistReq'] = _RPBGETBUCKETKEYPREFLISTREQ +DESCRIPTOR.message_types_by_name['RpbGetBucketKeyPreflistResp'] = _RPBGETBUCKETKEYPREFLISTRESP +DESCRIPTOR.message_types_by_name['RpbBucketKeyPreflistItem'] = _RPBBUCKETKEYPREFLISTITEM + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetClientIdResp(_message.Message): + DESCRIPTOR = _RPBGETCLIENTIDRESP + + # @@protoc_insertion_point(class_scope:RpbGetClientIdResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetClientIdReq(_message.Message): + DESCRIPTOR = _RPBSETCLIENTIDREQ + + # @@protoc_insertion_point(class_scope:RpbSetClientIdReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetReq(_message.Message): + DESCRIPTOR = _RPBGETREQ + + # @@protoc_insertion_point(class_scope:RpbGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetResp(_message.Message): + DESCRIPTOR = _RPBGETRESP + + # @@protoc_insertion_point(class_scope:RpbGetResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPutReq(_message.Message): + DESCRIPTOR = _RPBPUTREQ + + # @@protoc_insertion_point(class_scope:RpbPutReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPutResp(_message.Message): + DESCRIPTOR = _RPBPUTRESP + + # 
@@protoc_insertion_point(class_scope:RpbPutResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbDelReq(_message.Message): + DESCRIPTOR = _RPBDELREQ + + # @@protoc_insertion_point(class_scope:RpbDelReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListBucketsReq(_message.Message): + DESCRIPTOR = _RPBLISTBUCKETSREQ + + # @@protoc_insertion_point(class_scope:RpbListBucketsReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListBucketsResp(_message.Message): + DESCRIPTOR = _RPBLISTBUCKETSRESP + + # @@protoc_insertion_point(class_scope:RpbListBucketsResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListKeysReq(_message.Message): + DESCRIPTOR = _RPBLISTKEYSREQ + + # @@protoc_insertion_point(class_scope:RpbListKeysReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListKeysResp(_message.Message): + DESCRIPTOR = _RPBLISTKEYSRESP + + # @@protoc_insertion_point(class_scope:RpbListKeysResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbMapRedReq(_message.Message): + DESCRIPTOR = _RPBMAPREDREQ + + # @@protoc_insertion_point(class_scope:RpbMapRedReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbMapRedResp(_message.Message): + DESCRIPTOR = _RPBMAPREDRESP + + # @@protoc_insertion_point(class_scope:RpbMapRedResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexReq(_message.Message): + DESCRIPTOR = _RPBINDEXREQ + + # @@protoc_insertion_point(class_scope:RpbIndexReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexResp(_message.Message): + DESCRIPTOR = _RPBINDEXRESP + + # @@protoc_insertion_point(class_scope:RpbIndexResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCSBucketReq(_message.Message): + DESCRIPTOR = _RPBCSBUCKETREQ + + # @@protoc_insertion_point(class_scope:RpbCSBucketReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCSBucketResp(_message.Message): + DESCRIPTOR = _RPBCSBUCKETRESP + + # @@protoc_insertion_point(class_scope:RpbCSBucketResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexObject(_message.Message): + DESCRIPTOR = _RPBINDEXOBJECT + + # @@protoc_insertion_point(class_scope:RpbIndexObject) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbContent(_message.Message): + DESCRIPTOR = _RPBCONTENT + + # @@protoc_insertion_point(class_scope:RpbContent) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbLink(_message.Message): + DESCRIPTOR = _RPBLINK + + # @@protoc_insertion_point(class_scope:RpbLink) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterUpdateReq(_message.Message): + DESCRIPTOR = _RPBCOUNTERUPDATEREQ + + # @@protoc_insertion_point(class_scope:RpbCounterUpdateReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterUpdateResp(_message.Message): + DESCRIPTOR = _RPBCOUNTERUPDATERESP + + # @@protoc_insertion_point(class_scope:RpbCounterUpdateResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterGetReq(_message.Message): + DESCRIPTOR = _RPBCOUNTERGETREQ + + # @@protoc_insertion_point(class_scope:RpbCounterGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterGetResp(_message.Message): + DESCRIPTOR = _RPBCOUNTERGETRESP + + # @@protoc_insertion_point(class_scope:RpbCounterGetResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) 
+class RpbGetBucketKeyPreflistReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTREQ + + # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketKeyPreflistResp(_message.Message): + DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTRESP + + # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbBucketKeyPreflistItem(_message.Message): + DESCRIPTOR = _RPBBUCKETKEYPREFLISTITEM + + # @@protoc_insertion_point(class_scope:RpbBucketKeyPreflistItem) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakKvPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_pb2.py b/riak/riak_pb/riak_pb2.py new file mode 100644 index 00000000..a757940a --- /dev/null +++ b/riak/riak_pb/riak_pb2.py @@ -0,0 +1,786 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: riak.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak.proto', + package='', + serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 
\x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') + + + +_RPBBUCKETPROPS_RPBREPLMODE = _descriptor.EnumDescriptor( + name='RpbReplMode', + full_name='RpbBucketProps.RpbReplMode', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='FALSE', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='REALTIME', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='FULLSYNC', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TRUE', index=3, number=3, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=1236, + serialized_end=1298, +) + + +_RPBERRORRESP = _descriptor.Descriptor( + name='RpbErrorResp', + full_name='RpbErrorResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='errmsg', full_name='RpbErrorResp.errmsg', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='errcode', full_name='RpbErrorResp.errcode', index=1, + number=2, type=13, cpp_type=3, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=14, + serialized_end=61, +) + + +_RPBGETSERVERINFORESP = _descriptor.Descriptor( + name='RpbGetServerInfoResp', + full_name='RpbGetServerInfoResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='node', full_name='RpbGetServerInfoResp.node', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='server_version', full_name='RpbGetServerInfoResp.server_version', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=63, + serialized_end=123, +) + + +_RPBPAIR = _descriptor.Descriptor( + name='RpbPair', + full_name='RpbPair', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='key', full_name='RpbPair.key', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, 
enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='value', full_name='RpbPair.value', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=125, + serialized_end=162, +) + + +_RPBGETBUCKETREQ = _descriptor.Descriptor( + name='RpbGetBucketReq', + full_name='RpbGetBucketReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbGetBucketReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbGetBucketReq.type', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=164, + serialized_end=211, +) + + +_RPBGETBUCKETRESP = _descriptor.Descriptor( + name='RpbGetBucketResp', + full_name='RpbGetBucketResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='props', full_name='RpbGetBucketResp.props', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=213, + serialized_end=263, +) + + +_RPBSETBUCKETREQ = _descriptor.Descriptor( + name='RpbSetBucketReq', + full_name='RpbSetBucketReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbSetBucketReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='props', full_name='RpbSetBucketReq.props', index=1, + number=2, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbSetBucketReq.type', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=265, + serialized_end=344, +) + + +_RPBRESETBUCKETREQ = _descriptor.Descriptor( + name='RpbResetBucketReq', + full_name='RpbResetBucketReq', + filename=None, + 
file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbResetBucketReq.bucket', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='RpbResetBucketReq.type', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=346, + serialized_end=395, +) + + +_RPBGETBUCKETTYPEREQ = _descriptor.Descriptor( + name='RpbGetBucketTypeReq', + full_name='RpbGetBucketTypeReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='RpbGetBucketTypeReq.type', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=397, + serialized_end=432, +) + + +_RPBSETBUCKETTYPEREQ = _descriptor.Descriptor( + name='RpbSetBucketTypeReq', + full_name='RpbSetBucketTypeReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='RpbSetBucketTypeReq.type', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='props', full_name='RpbSetBucketTypeReq.props', index=1, + number=2, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=434, + serialized_end=501, +) + + +_RPBMODFUN = _descriptor.Descriptor( + name='RpbModFun', + full_name='RpbModFun', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='module', full_name='RpbModFun.module', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='function', full_name='RpbModFun.function', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=503, + serialized_end=548, +) + + +_RPBCOMMITHOOK = _descriptor.Descriptor( + name='RpbCommitHook', + full_name='RpbCommitHook', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='modfun', full_name='RpbCommitHook.modfun', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='name', full_name='RpbCommitHook.name', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=550, + serialized_end=607, +) + + +_RPBBUCKETPROPS = _descriptor.Descriptor( + name='RpbBucketProps', + full_name='RpbBucketProps', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbBucketProps.n_val', index=0, + number=1, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='allow_mult', full_name='RpbBucketProps.allow_mult', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='last_write_wins', full_name='RpbBucketProps.last_write_wins', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='precommit', full_name='RpbBucketProps.precommit', index=3, + number=4, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='has_precommit', full_name='RpbBucketProps.has_precommit', index=4, + number=5, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='postcommit', full_name='RpbBucketProps.postcommit', index=5, + number=6, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='has_postcommit', full_name='RpbBucketProps.has_postcommit', index=6, + number=7, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='chash_keyfun', full_name='RpbBucketProps.chash_keyfun', index=7, + number=8, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='linkfun', full_name='RpbBucketProps.linkfun', index=8, + number=9, type=11, cpp_type=10, 
label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='old_vclock', full_name='RpbBucketProps.old_vclock', index=9, + number=10, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='young_vclock', full_name='RpbBucketProps.young_vclock', index=10, + number=11, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='big_vclock', full_name='RpbBucketProps.big_vclock', index=11, + number=12, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='small_vclock', full_name='RpbBucketProps.small_vclock', index=12, + number=13, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pr', full_name='RpbBucketProps.pr', index=13, + number=14, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='r', full_name='RpbBucketProps.r', index=14, + number=15, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='w', full_name='RpbBucketProps.w', index=15, + number=16, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='pw', full_name='RpbBucketProps.pw', index=16, + number=17, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='dw', full_name='RpbBucketProps.dw', index=17, + number=18, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rw', full_name='RpbBucketProps.rw', index=18, + number=19, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='basic_quorum', full_name='RpbBucketProps.basic_quorum', index=19, + number=20, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='notfound_ok', 
full_name='RpbBucketProps.notfound_ok', index=20, + number=21, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='backend', full_name='RpbBucketProps.backend', index=21, + number=22, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='search', full_name='RpbBucketProps.search', index=22, + number=23, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='repl', full_name='RpbBucketProps.repl', index=23, + number=24, type=14, cpp_type=8, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='search_index', full_name='RpbBucketProps.search_index', index=24, + number=25, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='datatype', full_name='RpbBucketProps.datatype', index=25, + number=26, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='consistent', full_name='RpbBucketProps.consistent', index=26, + number=27, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='write_once', full_name='RpbBucketProps.write_once', index=27, + number=28, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + _RPBBUCKETPROPS_RPBREPLMODE, + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=610, + serialized_end=1298, +) + + +_RPBAUTHREQ = _descriptor.Descriptor( + name='RpbAuthReq', + full_name='RpbAuthReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='user', full_name='RpbAuthReq.user', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='password', full_name='RpbAuthReq.password', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1300, + serialized_end=1344, +) + 
+_RPBGETBUCKETRESP.fields_by_name['props'].message_type = _RPBBUCKETPROPS +_RPBSETBUCKETREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS +_RPBSETBUCKETTYPEREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS +_RPBCOMMITHOOK.fields_by_name['modfun'].message_type = _RPBMODFUN +_RPBBUCKETPROPS.fields_by_name['precommit'].message_type = _RPBCOMMITHOOK +_RPBBUCKETPROPS.fields_by_name['postcommit'].message_type = _RPBCOMMITHOOK +_RPBBUCKETPROPS.fields_by_name['chash_keyfun'].message_type = _RPBMODFUN +_RPBBUCKETPROPS.fields_by_name['linkfun'].message_type = _RPBMODFUN +_RPBBUCKETPROPS.fields_by_name['repl'].enum_type = _RPBBUCKETPROPS_RPBREPLMODE +_RPBBUCKETPROPS_RPBREPLMODE.containing_type = _RPBBUCKETPROPS; +DESCRIPTOR.message_types_by_name['RpbErrorResp'] = _RPBERRORRESP +DESCRIPTOR.message_types_by_name['RpbGetServerInfoResp'] = _RPBGETSERVERINFORESP +DESCRIPTOR.message_types_by_name['RpbPair'] = _RPBPAIR +DESCRIPTOR.message_types_by_name['RpbGetBucketReq'] = _RPBGETBUCKETREQ +DESCRIPTOR.message_types_by_name['RpbGetBucketResp'] = _RPBGETBUCKETRESP +DESCRIPTOR.message_types_by_name['RpbSetBucketReq'] = _RPBSETBUCKETREQ +DESCRIPTOR.message_types_by_name['RpbResetBucketReq'] = _RPBRESETBUCKETREQ +DESCRIPTOR.message_types_by_name['RpbGetBucketTypeReq'] = _RPBGETBUCKETTYPEREQ +DESCRIPTOR.message_types_by_name['RpbSetBucketTypeReq'] = _RPBSETBUCKETTYPEREQ +DESCRIPTOR.message_types_by_name['RpbModFun'] = _RPBMODFUN +DESCRIPTOR.message_types_by_name['RpbCommitHook'] = _RPBCOMMITHOOK +DESCRIPTOR.message_types_by_name['RpbBucketProps'] = _RPBBUCKETPROPS +DESCRIPTOR.message_types_by_name['RpbAuthReq'] = _RPBAUTHREQ + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbErrorResp(_message.Message): + DESCRIPTOR = _RPBERRORRESP + + # @@protoc_insertion_point(class_scope:RpbErrorResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetServerInfoResp(_message.Message): + DESCRIPTOR = _RPBGETSERVERINFORESP + + # @@protoc_insertion_point(class_scope:RpbGetServerInfoResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPair(_message.Message): + DESCRIPTOR = _RPBPAIR + + # @@protoc_insertion_point(class_scope:RpbPair) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETREQ + + # @@protoc_insertion_point(class_scope:RpbGetBucketReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketResp(_message.Message): + DESCRIPTOR = _RPBGETBUCKETRESP + + # @@protoc_insertion_point(class_scope:RpbGetBucketResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetBucketReq(_message.Message): + DESCRIPTOR = _RPBSETBUCKETREQ + + # @@protoc_insertion_point(class_scope:RpbSetBucketReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbResetBucketReq(_message.Message): + DESCRIPTOR = _RPBRESETBUCKETREQ + + # @@protoc_insertion_point(class_scope:RpbResetBucketReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketTypeReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETTYPEREQ + + # @@protoc_insertion_point(class_scope:RpbGetBucketTypeReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetBucketTypeReq(_message.Message): + DESCRIPTOR = _RPBSETBUCKETTYPEREQ + + # @@protoc_insertion_point(class_scope:RpbSetBucketTypeReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbModFun(_message.Message): + DESCRIPTOR = _RPBMODFUN + + # 
@@protoc_insertion_point(class_scope:RpbModFun) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCommitHook(_message.Message): + DESCRIPTOR = _RPBCOMMITHOOK + + # @@protoc_insertion_point(class_scope:RpbCommitHook) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbBucketProps(_message.Message): + DESCRIPTOR = _RPBBUCKETPROPS + + # @@protoc_insertion_point(class_scope:RpbBucketProps) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbAuthReq(_message.Message): + DESCRIPTOR = _RPBAUTHREQ + + # @@protoc_insertion_point(class_scope:RpbAuthReq) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\006RiakPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_search_pb2.py b/riak/riak_pb/riak_search_pb2.py new file mode 100644 index 00000000..1608f575 --- /dev/null +++ b/riak/riak_pb/riak_search_pb2.py @@ -0,0 +1,210 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: riak_search.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + +import riak.riak_pb.riak_pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_search.proto', + package='', + serialized_pb='\n\x11riak_search.proto\x1a\nriak.proto\"(\n\x0cRpbSearchDoc\x12\x18\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x08.RpbPair\"\x9d\x01\n\x11RpbSearchQueryReq\x12\t\n\x01q\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12\x0c\n\x04rows\x18\x03 \x01(\r\x12\r\n\x05start\x18\x04 \x01(\r\x12\x0c\n\x04sort\x18\x05 \x01(\x0c\x12\x0e\n\x06\x66ilter\x18\x06 \x01(\x0c\x12\n\n\x02\x64\x66\x18\x07 \x01(\x0c\x12\n\n\x02op\x18\x08 \x01(\x0c\x12\n\n\x02\x66l\x18\t \x03(\x0c\x12\x0f\n\x07presort\x18\n \x01(\x0c\"W\n\x12RpbSearchQueryResp\x12\x1b\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\r.RpbSearchDoc\x12\x11\n\tmax_score\x18\x02 \x01(\x02\x12\x11\n\tnum_found\x18\x03 \x01(\rB\'\n\x17\x63om.basho.riak.protobufB\x0cRiakSearchPB') + + + + +_RPBSEARCHDOC = _descriptor.Descriptor( + name='RpbSearchDoc', + full_name='RpbSearchDoc', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='fields', full_name='RpbSearchDoc.fields', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=33, + serialized_end=73, +) + + +_RPBSEARCHQUERYREQ = _descriptor.Descriptor( + name='RpbSearchQueryReq', + full_name='RpbSearchQueryReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='q', full_name='RpbSearchQueryReq.q', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='index', full_name='RpbSearchQueryReq.index', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rows', full_name='RpbSearchQueryReq.rows', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='start', full_name='RpbSearchQueryReq.start', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sort', full_name='RpbSearchQueryReq.sort', index=4, + number=5, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='filter', full_name='RpbSearchQueryReq.filter', index=5, + number=6, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='df', full_name='RpbSearchQueryReq.df', index=6, + number=7, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='op', full_name='RpbSearchQueryReq.op', index=7, + number=8, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='fl', full_name='RpbSearchQueryReq.fl', index=8, + number=9, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='presort', full_name='RpbSearchQueryReq.presort', index=9, + number=10, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=76, + serialized_end=233, +) + + +_RPBSEARCHQUERYRESP = _descriptor.Descriptor( + name='RpbSearchQueryResp', + full_name='RpbSearchQueryResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='docs', full_name='RpbSearchQueryResp.docs', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='max_score', full_name='RpbSearchQueryResp.max_score', index=1, + number=2, type=2, cpp_type=6, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='num_found', full_name='RpbSearchQueryResp.num_found', index=2, + number=3, type=13, 
cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=235, + serialized_end=322, +) + +_RPBSEARCHDOC.fields_by_name['fields'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBSEARCHQUERYRESP.fields_by_name['docs'].message_type = _RPBSEARCHDOC +DESCRIPTOR.message_types_by_name['RpbSearchDoc'] = _RPBSEARCHDOC +DESCRIPTOR.message_types_by_name['RpbSearchQueryReq'] = _RPBSEARCHQUERYREQ +DESCRIPTOR.message_types_by_name['RpbSearchQueryResp'] = _RPBSEARCHQUERYRESP + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchDoc(_message.Message): + DESCRIPTOR = _RPBSEARCHDOC + + # @@protoc_insertion_point(class_scope:RpbSearchDoc) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchQueryReq(_message.Message): + DESCRIPTOR = _RPBSEARCHQUERYREQ + + # @@protoc_insertion_point(class_scope:RpbSearchQueryReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchQueryResp(_message.Message): + DESCRIPTOR = _RPBSEARCHQUERYRESP + + # @@protoc_insertion_point(class_scope:RpbSearchQueryResp) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\014RiakSearchPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_yokozuna_pb2.py b/riak/riak_pb/riak_yokozuna_pb2.py new file mode 100644 index 00000000..1673f538 --- /dev/null +++ b/riak/riak_pb/riak_yokozuna_pb2.py @@ -0,0 +1,372 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: riak_yokozuna.proto + +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_yokozuna.proto', + package='', + serialized_pb='\n\x13riak_yokozuna.proto\"?\n\x10RpbYokozunaIndex\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0e\n\x06schema\x18\x02 \x01(\x0c\x12\r\n\x05n_val\x18\x03 \x01(\r\"&\n\x16RpbYokozunaIndexGetReq\x12\x0c\n\x04name\x18\x01 \x01(\x0c\";\n\x17RpbYokozunaIndexGetResp\x12 \n\x05index\x18\x01 \x03(\x0b\x32\x11.RpbYokozunaIndex\"K\n\x16RpbYokozunaIndexPutReq\x12 \n\x05index\x18\x01 \x02(\x0b\x32\x11.RpbYokozunaIndex\x12\x0f\n\x07timeout\x18\x02 \x01(\r\")\n\x19RpbYokozunaIndexDeleteReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\"2\n\x11RpbYokozunaSchema\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"=\n\x17RpbYokozunaSchemaPutReq\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchema\"\'\n\x17RpbYokozunaSchemaGetReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\">\n\x18RpbYokozunaSchemaGetResp\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchemaB)\n\x17\x63om.basho.riak.protobufB\x0eRiakYokozunaPB') + + + + +_RPBYOKOZUNAINDEX = _descriptor.Descriptor( + name='RpbYokozunaIndex', + full_name='RpbYokozunaIndex', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaIndex.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='schema', full_name='RpbYokozunaIndex.schema', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='n_val', full_name='RpbYokozunaIndex.n_val', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=23, + serialized_end=86, +) + + +_RPBYOKOZUNAINDEXGETREQ = _descriptor.Descriptor( + name='RpbYokozunaIndexGetReq', + full_name='RpbYokozunaIndexGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaIndexGetReq.name', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=88, + serialized_end=126, +) + + +_RPBYOKOZUNAINDEXGETRESP = _descriptor.Descriptor( + name='RpbYokozunaIndexGetResp', + full_name='RpbYokozunaIndexGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='index', full_name='RpbYokozunaIndexGetResp.index', index=0, + number=1, 
type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=128, + serialized_end=187, +) + + +_RPBYOKOZUNAINDEXPUTREQ = _descriptor.Descriptor( + name='RpbYokozunaIndexPutReq', + full_name='RpbYokozunaIndexPutReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='index', full_name='RpbYokozunaIndexPutReq.index', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='RpbYokozunaIndexPutReq.timeout', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=189, + serialized_end=264, +) + + +_RPBYOKOZUNAINDEXDELETEREQ = _descriptor.Descriptor( + name='RpbYokozunaIndexDeleteReq', + full_name='RpbYokozunaIndexDeleteReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaIndexDeleteReq.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=266, + serialized_end=307, +) + + +_RPBYOKOZUNASCHEMA = _descriptor.Descriptor( + name='RpbYokozunaSchema', + full_name='RpbYokozunaSchema', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaSchema.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='content', full_name='RpbYokozunaSchema.content', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=309, + serialized_end=359, +) + + +_RPBYOKOZUNASCHEMAPUTREQ = _descriptor.Descriptor( + name='RpbYokozunaSchemaPutReq', + full_name='RpbYokozunaSchemaPutReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='schema', full_name='RpbYokozunaSchemaPutReq.schema', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + 
nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=361, + serialized_end=422, +) + + +_RPBYOKOZUNASCHEMAGETREQ = _descriptor.Descriptor( + name='RpbYokozunaSchemaGetReq', + full_name='RpbYokozunaSchemaGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='RpbYokozunaSchemaGetReq.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=424, + serialized_end=463, +) + + +_RPBYOKOZUNASCHEMAGETRESP = _descriptor.Descriptor( + name='RpbYokozunaSchemaGetResp', + full_name='RpbYokozunaSchemaGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='schema', full_name='RpbYokozunaSchemaGetResp.schema', index=0, + number=1, type=11, cpp_type=10, label=2, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=465, + serialized_end=527, +) + +_RPBYOKOZUNAINDEXGETRESP.fields_by_name['index'].message_type = _RPBYOKOZUNAINDEX +_RPBYOKOZUNAINDEXPUTREQ.fields_by_name['index'].message_type = _RPBYOKOZUNAINDEX +_RPBYOKOZUNASCHEMAPUTREQ.fields_by_name['schema'].message_type = _RPBYOKOZUNASCHEMA +_RPBYOKOZUNASCHEMAGETRESP.fields_by_name['schema'].message_type = _RPBYOKOZUNASCHEMA +DESCRIPTOR.message_types_by_name['RpbYokozunaIndex'] = _RPBYOKOZUNAINDEX +DESCRIPTOR.message_types_by_name['RpbYokozunaIndexGetReq'] = _RPBYOKOZUNAINDEXGETREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaIndexGetResp'] = _RPBYOKOZUNAINDEXGETRESP +DESCRIPTOR.message_types_by_name['RpbYokozunaIndexPutReq'] = _RPBYOKOZUNAINDEXPUTREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaIndexDeleteReq'] = _RPBYOKOZUNAINDEXDELETEREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaSchema'] = _RPBYOKOZUNASCHEMA +DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaPutReq'] = _RPBYOKOZUNASCHEMAPUTREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetReq'] = _RPBYOKOZUNASCHEMAGETREQ +DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetResp'] = _RPBYOKOZUNASCHEMAGETRESP + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndex(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEX + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndex) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexGetReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXGETREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexGetResp(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXGETRESP + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexPutReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXPUTREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexPutReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class 
RpbYokozunaIndexDeleteReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXDELETEREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexDeleteReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchema(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMA + + # @@protoc_insertion_point(class_scope:RpbYokozunaSchema) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaPutReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAPUTREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaPutReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaGetReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAGETREQ + + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaGetResp(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAGETRESP + + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetResp) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\016RiakYokozunaPB') +# @@protoc_insertion_point(module_scope) From 55dec060c149d7c940f0bb7b2357f179183d8a12 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 10:07:59 -0800 Subject: [PATCH 056/324] Working on Makefile --- Makefile | 72 ++++++++++++++++---------------------------------------- riak_pb | 2 +- 2 files changed, 21 insertions(+), 53 deletions(-) diff --git a/Makefile b/Makefile index 71c05631..5562a9f2 100644 --- a/Makefile +++ b/Makefile @@ -1,25 +1,21 @@ -.PHONY: all compile clean release -.PHONY: python_compile python_clean python_release python_install -.PHONY: python3_compile python3_clean python3_release python3_install +.PHONY: all pb_compile pb_clean release install +# TODO: git submodule -all: python_compile python3_compile +all: pb_compile -clean: python_clean python3_clean +clean: pb_clean -release: python_release python3_release +pb_compile: + echo "==> Python (compile)" + protoc -I riak_pb/src --python_out=riak/pb riak_pb/src/*.proto + python setup.py build_messages -# Python 2.x specific build steps -python_compile: - @echo "==> Python (compile)" - @protoc -I riak_pb/src --python_out=riak/pb riak_pb/src/*.proto - @python2 setup.py build_messages +pb_clean: + echo "==> Python (clean)" + rm -rf riak/pb/*.pyc riak/pb/*_pb2.py + rm -rf riak/pb/__pycache__ __pycache__ -python_clean: - @echo "==> Python (clean)" - @python2 setup.py clean_messages - @rm -rf riak/pb/*.pyc riak/pb/*_pb2.py riak/pb/*.pyc - -python_release: python_clean +release: pb_clean ifeq ($(RELEASE_GPG_KEYNAME),) @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" else @@ -27,53 +23,25 @@ else @protoc -Isrc --python_out=riak/pb src/*.proto @python2.7 setup.py build_messages build --build-base=riak @python2.7 setup.py build --build-base=python bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) - @python2.7 setup.py clean --build-base=python clean_messages @rm -rf *.pyc riak_pb/*_pb2.py riak_pb/*.pyc riak_pb.egg-info python - @protoc -Isrc --python_out=riak/pb src/*.proto - @python2.7 setup.py build_messages build --build-base=riak - @python2.7 setup.py build --build-base=python sdist upload -s -i $(RELEASE_GPG_KEYNAME) - @python2.6 setup.py clean --build-base=python clean_messages - @rm -rf riak_pb/*_pb2.pyc *.pyc python_riak_pb.egg-info python - @protoc -Isrc --python_out=riak/pb src/*.proto - @python2.6 setup.py build_messages build 
--build-base=riak - @python2.6 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) -endif -python_install: python_compile - @echo "==> Python (install)" - @./setup.py build_messages build --build-base=riak install - -# Python 3.x specific build steps -python3_compile: - @echo "==> Python 3 (compile)" + @echo "==> Python 3.3 (release)" @protoc -Isrc --python_out=riak/pb src/*.proto - @python3 setup.py build_messages build --build-base=riak - -python3_clean: - @echo "==> Python 3 (clean)" - @python3 setup.py clean --build-base=riak clean_messages + @python3.3 setup.py build_messages build --build-base=riak + @python3.3 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 -python3_release: python3_clean -ifeq ($(RELEASE_GPG_KEYNAME),) - @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" -else - @echo "==> Python 3 (release)" @protoc -Isrc --python_out=riak/pb src/*.proto @python3.4 setup.py build_messages build --build-base=riak @python3.4 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) - @python3.4 setup.py clean --build-base=riak clean_messages @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 + @protoc -Isrc --python_out=riak/pb src/*.proto @python3.4 setup.py build_messages build --build-base=riak @python3.4 setup.py build --build-base=riak sdist upload -s -i $(RELEASE_GPG_KEYNAME) - @python3.4 setup.py clean --build-base=riak clean_messages @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 - @protoc -Isrc --python_out=riak/pb src/*.proto - @python3.3 setup.py build_messages build --build-base=riak - @python3.3 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) endif -python3_install: python3_compile - @echo "==> Python 3 (install)" - @python3 setup.py build_messages build --build-base=riak install +install: pb_compile + @echo "==> Python (install)" + @python setup.py build_messages build --build-base=riak install diff --git a/riak_pb b/riak_pb index f4f30571..7fffa81b 160000 --- a/riak_pb +++ b/riak_pb @@ -1 +1 @@ -Subproject commit f4f30571ee14e3456416d0048f2b7c4d9fd84c59 +Subproject commit 7fffa81b38804c18fffbec8d1677966c37d49d55 From 0946771c8eca6c8bbed5a2bee63f61d3f5c1f1ce Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 10:10:33 -0800 Subject: [PATCH 057/324] Use pb subdir instead of riak_pb --- riak/{riak_pb => pb}/__init__.py | 0 riak/pb/messages.py | 167 ++++++ riak/{riak_pb => pb}/riak_dt_pb2.py | 0 riak/{riak_pb => pb}/riak_kv_pb2.py | 294 +++++++-- riak/{riak_pb => pb}/riak_pb2.py | 72 ++- riak/{riak_pb => pb}/riak_search_pb2.py | 4 +- riak/pb/riak_ts_pb2.py | 688 ++++++++++++++++++++++ riak/{riak_pb => pb}/riak_yokozuna_pb2.py | 0 riak/riak_pb/messages.py | 152 ----- 9 files changed, 1188 insertions(+), 189 deletions(-) rename riak/{riak_pb => pb}/__init__.py (100%) create mode 100644 riak/pb/messages.py rename riak/{riak_pb => pb}/riak_dt_pb2.py (100%) rename riak/{riak_pb => pb}/riak_kv_pb2.py (84%) rename riak/{riak_pb => pb}/riak_pb2.py (92%) rename riak/{riak_pb => pb}/riak_search_pb2.py (98%) create mode 100644 riak/pb/riak_ts_pb2.py rename riak/{riak_pb => pb}/riak_yokozuna_pb2.py (100%) delete mode 100644 riak/riak_pb/messages.py diff --git a/riak/riak_pb/__init__.py b/riak/pb/__init__.py similarity index 100% rename from riak/riak_pb/__init__.py rename to 
riak/pb/__init__.py diff --git a/riak/pb/messages.py b/riak/pb/messages.py new file mode 100644 index 00000000..0fea1f49 --- /dev/null +++ b/riak/pb/messages.py @@ -0,0 +1,167 @@ +# This is a generated file. DO NOT EDIT. + +""" +Constants and mappings between Riak protocol codes and messages. +""" + +import riak.pb.riak_dt_pb2 +import riak.pb.riak_kv_pb2 +import riak.pb.riak_pb2 +import riak.pb.riak_search_pb2 +import riak.pb.riak_ts_pb2 +import riak.pb.riak_yokozuna_pb2 + +# Protocol codes +MSG_CODE_ERROR_RESP = 0 +MSG_CODE_PING_REQ = 1 +MSG_CODE_PING_RESP = 2 +MSG_CODE_GET_CLIENT_ID_REQ = 3 +MSG_CODE_GET_CLIENT_ID_RESP = 4 +MSG_CODE_SET_CLIENT_ID_REQ = 5 +MSG_CODE_SET_CLIENT_ID_RESP = 6 +MSG_CODE_GET_SERVER_INFO_REQ = 7 +MSG_CODE_GET_SERVER_INFO_RESP = 8 +MSG_CODE_GET_REQ = 9 +MSG_CODE_GET_RESP = 10 +MSG_CODE_PUT_REQ = 11 +MSG_CODE_PUT_RESP = 12 +MSG_CODE_DEL_REQ = 13 +MSG_CODE_DEL_RESP = 14 +MSG_CODE_LIST_BUCKETS_REQ = 15 +MSG_CODE_LIST_BUCKETS_RESP = 16 +MSG_CODE_LIST_KEYS_REQ = 17 +MSG_CODE_LIST_KEYS_RESP = 18 +MSG_CODE_GET_BUCKET_REQ = 19 +MSG_CODE_GET_BUCKET_RESP = 20 +MSG_CODE_SET_BUCKET_REQ = 21 +MSG_CODE_SET_BUCKET_RESP = 22 +MSG_CODE_MAP_RED_REQ = 23 +MSG_CODE_MAP_RED_RESP = 24 +MSG_CODE_INDEX_REQ = 25 +MSG_CODE_INDEX_RESP = 26 +MSG_CODE_SEARCH_QUERY_REQ = 27 +MSG_CODE_SEARCH_QUERY_RESP = 28 +MSG_CODE_RESET_BUCKET_REQ = 29 +MSG_CODE_RESET_BUCKET_RESP = 30 +MSG_CODE_GET_BUCKET_TYPE_REQ = 31 +MSG_CODE_SET_BUCKET_TYPE_REQ = 32 +MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ = 33 +MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP = 34 +MSG_CODE_CS_BUCKET_REQ = 40 +MSG_CODE_CS_BUCKET_RESP = 41 +MSG_CODE_INDEX_BODY_RESP = 42 +MSG_CODE_COUNTER_UPDATE_REQ = 50 +MSG_CODE_COUNTER_UPDATE_RESP = 51 +MSG_CODE_COUNTER_GET_REQ = 52 +MSG_CODE_COUNTER_GET_RESP = 53 +MSG_CODE_YOKOZUNA_INDEX_GET_REQ = 54 +MSG_CODE_YOKOZUNA_INDEX_GET_RESP = 55 +MSG_CODE_YOKOZUNA_INDEX_PUT_REQ = 56 +MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ = 57 +MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ = 58 +MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP = 59 +MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ = 60 +MSG_CODE_COVERAGE_REQ = 70 +MSG_CODE_COVERAGE_RESP = 71 +MSG_CODE_DT_FETCH_REQ = 80 +MSG_CODE_DT_FETCH_RESP = 81 +MSG_CODE_DT_UPDATE_REQ = 82 +MSG_CODE_DT_UPDATE_RESP = 83 +MSG_CODE_TS_QUERY_REQ = 90 +MSG_CODE_TS_QUERY_RESP = 91 +MSG_CODE_TS_PUT_REQ = 92 +MSG_CODE_TS_PUT_RESP = 93 +MSG_CODE_TS_DEL_REQ = 94 +MSG_CODE_TS_DEL_RESP = 95 +MSG_CODE_TS_GET_REQ = 96 +MSG_CODE_TS_GET_RESP = 97 +MSG_CODE_TS_LIST_KEYS_REQ = 98 +MSG_CODE_TS_LIST_KEYS_RESP = 99 +MSG_CODE_TOGGLE_ENCODING_REQ = 110 +MSG_CODE_TOGGLE_ENCODING_RESP = 111 +MSG_CODE_AUTH_REQ = 253 +MSG_CODE_AUTH_RESP = 254 +MSG_CODE_START_TLS = 255 + +# Mapping from code to protobuf class +MESSAGE_CLASSES = { + MSG_CODE_ERROR_RESP: riak.pb.riak_pb2.RpbErrorResp, + MSG_CODE_PING_REQ: None, + MSG_CODE_PING_RESP: None, + MSG_CODE_GET_CLIENT_ID_REQ: None, + MSG_CODE_GET_CLIENT_ID_RESP: riak.pb.riak_kv_pb2.RpbGetClientIdResp, + MSG_CODE_SET_CLIENT_ID_REQ: riak.pb.riak_kv_pb2.RpbSetClientIdReq, + MSG_CODE_SET_CLIENT_ID_RESP: None, + MSG_CODE_GET_SERVER_INFO_REQ: None, + MSG_CODE_GET_SERVER_INFO_RESP: riak.pb.riak_pb2.RpbGetServerInfoResp, + MSG_CODE_GET_REQ: riak.pb.riak_kv_pb2.RpbGetReq, + MSG_CODE_GET_RESP: riak.pb.riak_kv_pb2.RpbGetResp, + MSG_CODE_PUT_REQ: riak.pb.riak_kv_pb2.RpbPutReq, + MSG_CODE_PUT_RESP: riak.pb.riak_kv_pb2.RpbPutResp, + MSG_CODE_DEL_REQ: riak.pb.riak_kv_pb2.RpbDelReq, + MSG_CODE_DEL_RESP: None, + MSG_CODE_LIST_BUCKETS_REQ: riak.pb.riak_kv_pb2.RpbListBucketsReq, + MSG_CODE_LIST_BUCKETS_RESP: 
riak.pb.riak_kv_pb2.RpbListBucketsResp, + MSG_CODE_LIST_KEYS_REQ: riak.pb.riak_kv_pb2.RpbListKeysReq, + MSG_CODE_LIST_KEYS_RESP: riak.pb.riak_kv_pb2.RpbListKeysResp, + MSG_CODE_GET_BUCKET_REQ: riak.pb.riak_pb2.RpbGetBucketReq, + MSG_CODE_GET_BUCKET_RESP: riak.pb.riak_pb2.RpbGetBucketResp, + MSG_CODE_SET_BUCKET_REQ: riak.pb.riak_pb2.RpbSetBucketReq, + MSG_CODE_SET_BUCKET_RESP: None, + MSG_CODE_MAP_RED_REQ: riak.pb.riak_kv_pb2.RpbMapRedReq, + MSG_CODE_MAP_RED_RESP: riak.pb.riak_kv_pb2.RpbMapRedResp, + MSG_CODE_INDEX_REQ: riak.pb.riak_kv_pb2.RpbIndexReq, + MSG_CODE_INDEX_RESP: riak.pb.riak_kv_pb2.RpbIndexResp, + MSG_CODE_SEARCH_QUERY_REQ: riak.pb.riak_search_pb2.RpbSearchQueryReq, + MSG_CODE_SEARCH_QUERY_RESP: riak.pb.riak_search_pb2.RpbSearchQueryResp, + MSG_CODE_RESET_BUCKET_REQ: riak.pb.riak_pb2.RpbResetBucketReq, + MSG_CODE_RESET_BUCKET_RESP: None, + MSG_CODE_GET_BUCKET_TYPE_REQ: riak.pb.riak_pb2.RpbGetBucketTypeReq, + MSG_CODE_SET_BUCKET_TYPE_REQ: riak.pb.riak_pb2.RpbSetBucketTypeReq, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ: + riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq, + MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP: + riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistResp, + MSG_CODE_CS_BUCKET_REQ: riak.pb.riak_kv_pb2.RpbCSBucketReq, + MSG_CODE_CS_BUCKET_RESP: riak.pb.riak_kv_pb2.RpbCSBucketResp, + MSG_CODE_INDEX_BODY_RESP: riak.pb.riak_kv_pb2.RpbIndexBodyResp, + MSG_CODE_COUNTER_UPDATE_REQ: riak.pb.riak_kv_pb2.RpbCounterUpdateReq, + MSG_CODE_COUNTER_UPDATE_RESP: riak.pb.riak_kv_pb2.RpbCounterUpdateResp, + MSG_CODE_COUNTER_GET_REQ: riak.pb.riak_kv_pb2.RpbCounterGetReq, + MSG_CODE_COUNTER_GET_RESP: riak.pb.riak_kv_pb2.RpbCounterGetResp, + MSG_CODE_YOKOZUNA_INDEX_GET_REQ: + riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq, + MSG_CODE_YOKOZUNA_INDEX_GET_RESP: + riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetResp, + MSG_CODE_YOKOZUNA_INDEX_PUT_REQ: + riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq, + MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ: + riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq, + MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ: + riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq, + MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP: + riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetResp, + MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ: + riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq, + MSG_CODE_COVERAGE_REQ: riak.pb.riak_kv_pb2.RpbCoverageReq, + MSG_CODE_COVERAGE_RESP: riak.pb.riak_kv_pb2.RpbCoverageResp, + MSG_CODE_DT_FETCH_REQ: riak.pb.riak_dt_pb2.DtFetchReq, + MSG_CODE_DT_FETCH_RESP: riak.pb.riak_dt_pb2.DtFetchResp, + MSG_CODE_DT_UPDATE_REQ: riak.pb.riak_dt_pb2.DtUpdateReq, + MSG_CODE_DT_UPDATE_RESP: riak.pb.riak_dt_pb2.DtUpdateResp, + MSG_CODE_TS_QUERY_REQ: riak.pb.riak_ts_pb2.TsQueryReq, + MSG_CODE_TS_QUERY_RESP: riak.pb.riak_ts_pb2.TsQueryResp, + MSG_CODE_TS_PUT_REQ: riak.pb.riak_ts_pb2.TsPutReq, + MSG_CODE_TS_PUT_RESP: riak.pb.riak_ts_pb2.TsPutResp, + MSG_CODE_TS_DEL_REQ: riak.pb.riak_ts_pb2.TsDelReq, + MSG_CODE_TS_DEL_RESP: riak.pb.riak_ts_pb2.TsDelResp, + MSG_CODE_TS_GET_REQ: riak.pb.riak_ts_pb2.TsGetReq, + MSG_CODE_TS_GET_RESP: riak.pb.riak_ts_pb2.TsGetResp, + MSG_CODE_TS_LIST_KEYS_REQ: riak.pb.riak_ts_pb2.TsListKeysReq, + MSG_CODE_TS_LIST_KEYS_RESP: riak.pb.riak_ts_pb2.TsListKeysResp, + MSG_CODE_TOGGLE_ENCODING_REQ: riak.pb.riak_pb2.RpbToggleEncodingReq, + MSG_CODE_TOGGLE_ENCODING_RESP: riak.pb.riak_pb2.RpbToggleEncodingResp, + MSG_CODE_AUTH_REQ: riak.pb.riak_pb2.RpbAuthReq, + MSG_CODE_AUTH_RESP: None, + MSG_CODE_START_TLS: None +} diff --git a/riak/riak_pb/riak_dt_pb2.py b/riak/pb/riak_dt_pb2.py similarity index 100% 
rename from riak/riak_pb/riak_dt_pb2.py rename to riak/pb/riak_dt_pb2.py diff --git a/riak/riak_pb/riak_kv_pb2.py b/riak/pb/riak_kv_pb2.py similarity index 84% rename from riak/riak_pb/riak_kv_pb2.py rename to riak/pb/riak_kv_pb2.py index c8411e06..09797052 100644 --- a/riak/riak_pb/riak_kv_pb2.py +++ b/riak/pb/riak_kv_pb2.py @@ -9,13 +9,13 @@ # @@protoc_insertion_point(imports) -import riak.riak_pb.riak_pb2 +import riak.pb.riak_pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='riak_kv.proto', package='', - serialized_pb='\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xcd\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 \x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 
\x01(\x0c\x12\x11\n\trange_min\x18\x05 \x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 \x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"\xc1\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') + serialized_pb='\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 
\x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xf9\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 \x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 \x01(\x0c\x12\x11\n\trange_min\x18\x05 \x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 \x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\x12\x15\n\rcover_context\x18\x0f \x01(\x0c\x12\x13\n\x0breturn_body\x18\x10 \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"X\n\x10RpbIndexBodyResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 
\x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xd8\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\x12\x15\n\rcover_context\x18\n \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\"x\n\x0eRpbCoverageReq\x12\x0c\n\x04type\x18\x01 \x01(\x0c\x12\x0e\n\x06\x62ucket\x18\x02 \x02(\x0c\x12\x16\n\x0emin_partitions\x18\x03 \x01(\r\x12\x15\n\rreplace_cover\x18\x04 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x05 \x03(\x0c\"5\n\x0fRpbCoverageResp\x12\"\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x11.RpbCoverageEntry\"Z\n\x10RpbCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rkeyspace_desc\x18\x03 \x01(\x0c\x12\x15\n\rcover_context\x18\x04 \x02(\x0c\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') @@ -36,8 +36,8 @@ ], containing_type=None, options=None, - serialized_start=1644, - serialized_end=1679, + serialized_start=1688, + serialized_end=1723, ) @@ -874,6 +874,20 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='cover_context', full_name='RpbIndexReq.cover_context', index=14, + number=15, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + 
message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='return_body', full_name='RpbIndexReq.return_body', index=15, + number=16, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -885,7 +899,7 @@ is_extendable=False, extension_ranges=[], serialized_start=1346, - serialized_end=1679, + serialized_end=1723, ) @@ -933,8 +947,50 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1681, - serialized_end=1772, + serialized_start=1725, + serialized_end=1816, +) + + +_RPBINDEXBODYRESP = _descriptor.Descriptor( + name='RpbIndexBodyResp', + full_name='RpbIndexBodyResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='objects', full_name='RpbIndexBodyResp.objects', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='continuation', full_name='RpbIndexBodyResp.continuation', index=1, + number=2, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='RpbIndexBodyResp.done', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1818, + serialized_end=1906, ) @@ -1008,6 +1064,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='cover_context', full_name='RpbCSBucketReq.cover_context', index=9, + number=10, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -1017,8 +1080,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1775, - serialized_end=1968, + serialized_start=1909, + serialized_end=2125, ) @@ -1059,8 +1122,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1970, - serialized_end=2057, + serialized_start=2127, + serialized_end=2214, ) @@ -1094,8 +1157,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2059, - serialized_end=2117, + serialized_start=2216, + serialized_end=2274, ) @@ -1192,8 +1255,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2120, - serialized_end=2365, + serialized_start=2277, + serialized_end=2522, ) @@ -1234,8 +1297,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2367, - serialized_end=2418, + serialized_start=2524, + serialized_end=2575, ) @@ -1304,8 +1367,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2420, - serialized_end=2542, + serialized_start=2577, + serialized_end=2699, ) 
@@ -1332,8 +1395,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2544, - serialized_end=2581, + serialized_start=2701, + serialized_end=2738, ) @@ -1395,8 +1458,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2583, - serialized_end=2696, + serialized_start=2740, + serialized_end=2853, ) @@ -1423,8 +1486,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2698, - serialized_end=2732, + serialized_start=2855, + serialized_end=2889, ) @@ -1465,8 +1528,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2734, - serialized_end=2805, + serialized_start=2891, + serialized_end=2962, ) @@ -1493,8 +1556,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2807, - serialized_end=2881, + serialized_start=2964, + serialized_end=3038, ) @@ -1535,8 +1598,141 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=2883, - serialized_end=2959, + serialized_start=3040, + serialized_end=3116, +) + + +_RPBCOVERAGEREQ = _descriptor.Descriptor( + name='RpbCoverageReq', + full_name='RpbCoverageReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='type', full_name='RpbCoverageReq.type', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='bucket', full_name='RpbCoverageReq.bucket', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='min_partitions', full_name='RpbCoverageReq.min_partitions', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='replace_cover', full_name='RpbCoverageReq.replace_cover', index=3, + number=4, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unavailable_cover', full_name='RpbCoverageReq.unavailable_cover', index=4, + number=5, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=3118, + serialized_end=3238, +) + + +_RPBCOVERAGERESP = _descriptor.Descriptor( + name='RpbCoverageResp', + full_name='RpbCoverageResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entries', full_name='RpbCoverageResp.entries', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + 
extension_ranges=[], + serialized_start=3240, + serialized_end=3293, +) + + +_RPBCOVERAGEENTRY = _descriptor.Descriptor( + name='RpbCoverageEntry', + full_name='RpbCoverageEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ip', full_name='RpbCoverageEntry.ip', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='port', full_name='RpbCoverageEntry.port', index=1, + number=2, type=13, cpp_type=3, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='keyspace_desc', full_name='RpbCoverageEntry.keyspace_desc', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cover_context', full_name='RpbCoverageEntry.cover_context', index=3, + number=4, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=3295, + serialized_end=3385, ) _RPBGETRESP.fields_by_name['content'].message_type = _RPBCONTENT @@ -1544,13 +1740,15 @@ _RPBPUTRESP.fields_by_name['content'].message_type = _RPBCONTENT _RPBINDEXREQ.fields_by_name['qtype'].enum_type = _RPBINDEXREQ_INDEXQUERYTYPE _RPBINDEXREQ_INDEXQUERYTYPE.containing_type = _RPBINDEXREQ; -_RPBINDEXRESP.fields_by_name['results'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBINDEXRESP.fields_by_name['results'].message_type = riak.pb.riak_pb2._RPBPAIR +_RPBINDEXBODYRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT _RPBCSBUCKETRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT _RPBINDEXOBJECT.fields_by_name['object'].message_type = _RPBGETRESP _RPBCONTENT.fields_by_name['links'].message_type = _RPBLINK -_RPBCONTENT.fields_by_name['usermeta'].message_type = riak.riak_pb.riak_pb2._RPBPAIR -_RPBCONTENT.fields_by_name['indexes'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBCONTENT.fields_by_name['usermeta'].message_type = riak.pb.riak_pb2._RPBPAIR +_RPBCONTENT.fields_by_name['indexes'].message_type = riak.pb.riak_pb2._RPBPAIR _RPBGETBUCKETKEYPREFLISTRESP.fields_by_name['preflist'].message_type = _RPBBUCKETKEYPREFLISTITEM +_RPBCOVERAGERESP.fields_by_name['entries'].message_type = _RPBCOVERAGEENTRY DESCRIPTOR.message_types_by_name['RpbGetClientIdResp'] = _RPBGETCLIENTIDRESP DESCRIPTOR.message_types_by_name['RpbSetClientIdReq'] = _RPBSETCLIENTIDREQ DESCRIPTOR.message_types_by_name['RpbGetReq'] = _RPBGETREQ @@ -1566,6 +1764,7 @@ DESCRIPTOR.message_types_by_name['RpbMapRedResp'] = _RPBMAPREDRESP DESCRIPTOR.message_types_by_name['RpbIndexReq'] = _RPBINDEXREQ DESCRIPTOR.message_types_by_name['RpbIndexResp'] = _RPBINDEXRESP +DESCRIPTOR.message_types_by_name['RpbIndexBodyResp'] = _RPBINDEXBODYRESP DESCRIPTOR.message_types_by_name['RpbCSBucketReq'] = _RPBCSBUCKETREQ DESCRIPTOR.message_types_by_name['RpbCSBucketResp'] = _RPBCSBUCKETRESP 
DESCRIPTOR.message_types_by_name['RpbIndexObject'] = _RPBINDEXOBJECT @@ -1578,6 +1777,9 @@ DESCRIPTOR.message_types_by_name['RpbGetBucketKeyPreflistReq'] = _RPBGETBUCKETKEYPREFLISTREQ DESCRIPTOR.message_types_by_name['RpbGetBucketKeyPreflistResp'] = _RPBGETBUCKETKEYPREFLISTRESP DESCRIPTOR.message_types_by_name['RpbBucketKeyPreflistItem'] = _RPBBUCKETKEYPREFLISTITEM +DESCRIPTOR.message_types_by_name['RpbCoverageReq'] = _RPBCOVERAGEREQ +DESCRIPTOR.message_types_by_name['RpbCoverageResp'] = _RPBCOVERAGERESP +DESCRIPTOR.message_types_by_name['RpbCoverageEntry'] = _RPBCOVERAGEENTRY @add_metaclass(_reflection.GeneratedProtocolMessageType) class RpbGetClientIdResp(_message.Message): @@ -1669,6 +1871,12 @@ class RpbIndexResp(_message.Message): # @@protoc_insertion_point(class_scope:RpbIndexResp) +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexBodyResp(_message.Message): + DESCRIPTOR = _RPBINDEXBODYRESP + + # @@protoc_insertion_point(class_scope:RpbIndexBodyResp) + @add_metaclass(_reflection.GeneratedProtocolMessageType) class RpbCSBucketReq(_message.Message): DESCRIPTOR = _RPBCSBUCKETREQ @@ -1741,6 +1949,24 @@ class RpbBucketKeyPreflistItem(_message.Message): # @@protoc_insertion_point(class_scope:RpbBucketKeyPreflistItem) +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCoverageReq(_message.Message): + DESCRIPTOR = _RPBCOVERAGEREQ + + # @@protoc_insertion_point(class_scope:RpbCoverageReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCoverageResp(_message.Message): + DESCRIPTOR = _RPBCOVERAGERESP + + # @@protoc_insertion_point(class_scope:RpbCoverageResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCoverageEntry(_message.Message): + DESCRIPTOR = _RPBCOVERAGEENTRY + + # @@protoc_insertion_point(class_scope:RpbCoverageEntry) + DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakKvPB') diff --git a/riak/riak_pb/riak_pb2.py b/riak/pb/riak_pb2.py similarity index 92% rename from riak/riak_pb/riak_pb2.py rename to riak/pb/riak_pb2.py index a757940a..d55a142c 100644 --- a/riak/riak_pb/riak_pb2.py +++ b/riak/pb/riak_pb2.py @@ -14,7 +14,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak.proto', package='', - serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 
\x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') + serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 
\x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\"*\n\x14RpbToggleEncodingReq\x12\x12\n\nuse_native\x18\x01 \x02(\x08\"+\n\x15RpbToggleEncodingResp\x12\x12\n\nuse_native\x18\x01 \x02(\x08\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') @@ -678,6 +678,62 @@ serialized_end=1344, ) + +_RPBTOGGLEENCODINGREQ = _descriptor.Descriptor( + name='RpbToggleEncodingReq', + full_name='RpbToggleEncodingReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='use_native', full_name='RpbToggleEncodingReq.use_native', index=0, + number=1, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1346, + serialized_end=1388, +) + + +_RPBTOGGLEENCODINGRESP = _descriptor.Descriptor( + name='RpbToggleEncodingResp', + full_name='RpbToggleEncodingResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='use_native', full_name='RpbToggleEncodingResp.use_native', index=0, + number=1, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1390, + serialized_end=1433, +) + _RPBGETBUCKETRESP.fields_by_name['props'].message_type = _RPBBUCKETPROPS _RPBSETBUCKETREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS _RPBSETBUCKETTYPEREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS @@ -701,6 +757,8 @@ DESCRIPTOR.message_types_by_name['RpbCommitHook'] = _RPBCOMMITHOOK DESCRIPTOR.message_types_by_name['RpbBucketProps'] = _RPBBUCKETPROPS DESCRIPTOR.message_types_by_name['RpbAuthReq'] = _RPBAUTHREQ +DESCRIPTOR.message_types_by_name['RpbToggleEncodingReq'] = _RPBTOGGLEENCODINGREQ +DESCRIPTOR.message_types_by_name['RpbToggleEncodingResp'] = _RPBTOGGLEENCODINGRESP @add_metaclass(_reflection.GeneratedProtocolMessageType) class RpbErrorResp(_message.Message): @@ -780,6 +838,18 @@ class RpbAuthReq(_message.Message): # @@protoc_insertion_point(class_scope:RpbAuthReq) +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbToggleEncodingReq(_message.Message): + DESCRIPTOR = _RPBTOGGLEENCODINGREQ + + # @@protoc_insertion_point(class_scope:RpbToggleEncodingReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbToggleEncodingResp(_message.Message): + DESCRIPTOR = _RPBTOGGLEENCODINGRESP + + # @@protoc_insertion_point(class_scope:RpbToggleEncodingResp) + DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), 
'\n\027com.basho.riak.protobufB\006RiakPB') diff --git a/riak/riak_pb/riak_search_pb2.py b/riak/pb/riak_search_pb2.py similarity index 98% rename from riak/riak_pb/riak_search_pb2.py rename to riak/pb/riak_search_pb2.py index 1608f575..788b7cda 100644 --- a/riak/riak_pb/riak_search_pb2.py +++ b/riak/pb/riak_search_pb2.py @@ -9,7 +9,7 @@ # @@protoc_insertion_point(imports) -import riak.riak_pb.riak_pb2 +import riak.pb.riak_pb2 DESCRIPTOR = _descriptor.FileDescriptor( @@ -180,7 +180,7 @@ serialized_end=322, ) -_RPBSEARCHDOC.fields_by_name['fields'].message_type = riak.riak_pb.riak_pb2._RPBPAIR +_RPBSEARCHDOC.fields_by_name['fields'].message_type = riak.pb.riak_pb2._RPBPAIR _RPBSEARCHQUERYRESP.fields_by_name['docs'].message_type = _RPBSEARCHDOC DESCRIPTOR.message_types_by_name['RpbSearchDoc'] = _RPBSEARCHDOC DESCRIPTOR.message_types_by_name['RpbSearchQueryReq'] = _RPBSEARCHQUERYREQ diff --git a/riak/pb/riak_ts_pb2.py b/riak/pb/riak_ts_pb2.py new file mode 100644 index 00000000..b371fdea --- /dev/null +++ b/riak/pb/riak_ts_pb2.py @@ -0,0 +1,688 @@ +from six import * +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: riak_ts.proto + +from google.protobuf.internal import enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import descriptor_pb2 +# @@protoc_insertion_point(imports) + + +import riak.pb.riak_pb2 + + +DESCRIPTOR = _descriptor.FileDescriptor( + name='riak_ts.proto', + package='', + serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"D\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 
\x01(\x08*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') + +_TSCOLUMNTYPE = _descriptor.EnumDescriptor( + name='TsColumnType', + full_name='TsColumnType', + filename=None, + file=DESCRIPTOR, + values=[ + _descriptor.EnumValueDescriptor( + name='VARCHAR', index=0, number=0, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='SINT64', index=1, number=1, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='DOUBLE', index=2, number=2, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='TIMESTAMP', index=3, number=3, + options=None, + type=None), + _descriptor.EnumValueDescriptor( + name='BOOLEAN', index=4, number=4, + options=None, + type=None), + ], + containing_type=None, + options=None, + serialized_start=925, + serialized_end=1004, +) + +TsColumnType = enum_type_wrapper.EnumTypeWrapper(_TSCOLUMNTYPE) +VARCHAR = 0 +SINT64 = 1 +DOUBLE = 2 +TIMESTAMP = 3 +BOOLEAN = 4 + + + +_TSQUERYREQ = _descriptor.Descriptor( + name='TsQueryReq', + full_name='TsQueryReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='query', full_name='TsQueryReq.query', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='stream', full_name='TsQueryReq.stream', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=29, + serialized_end=97, +) + + +_TSQUERYRESP = _descriptor.Descriptor( + name='TsQueryResp', + full_name='TsQueryResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='columns', full_name='TsQueryResp.columns', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rows', full_name='TsQueryResp.rows', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='TsQueryResp.done', index=2, + number=3, type=8, cpp_type=7, label=1, + has_default_value=True, default_value=True, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=99, + serialized_end=193, +) + + +_TSGETREQ = _descriptor.Descriptor( + name='TsGetReq', + full_name='TsGetReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table', full_name='TsGetReq.table', index=0, + number=1, type=12, cpp_type=9, label=2, + 
has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='TsGetReq.key', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='TsGetReq.timeout', index=2, + number=3, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=195, + serialized_end=259, +) + + +_TSGETRESP = _descriptor.Descriptor( + name='TsGetResp', + full_name='TsGetResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='columns', full_name='TsGetResp.columns', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rows', full_name='TsGetResp.rows', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=261, + serialized_end=333, +) + + +_TSPUTREQ = _descriptor.Descriptor( + name='TsPutReq', + full_name='TsPutReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table', full_name='TsPutReq.table', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='columns', full_name='TsPutReq.columns', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rows', full_name='TsPutReq.rows', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=335, + serialized_end=421, +) + + +_TSPUTRESP = _descriptor.Descriptor( + name='TsPutResp', + full_name='TsPutResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=423, + serialized_end=434, +) + + +_TSDELREQ = _descriptor.Descriptor( + name='TsDelReq', + full_name='TsDelReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + 
_descriptor.FieldDescriptor( + name='table', full_name='TsDelReq.table', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='key', full_name='TsDelReq.key', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='vclock', full_name='TsDelReq.vclock', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='TsDelReq.timeout', index=3, + number=4, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=436, + serialized_end=516, +) + + +_TSDELRESP = _descriptor.Descriptor( + name='TsDelResp', + full_name='TsDelResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=518, + serialized_end=529, +) + + +_TSINTERPOLATION = _descriptor.Descriptor( + name='TsInterpolation', + full_name='TsInterpolation', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='base', full_name='TsInterpolation.base', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='interpolations', full_name='TsInterpolation.interpolations', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=531, + serialized_end=596, +) + + +_TSCOLUMNDESCRIPTION = _descriptor.Descriptor( + name='TsColumnDescription', + full_name='TsColumnDescription', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='name', full_name='TsColumnDescription.name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='type', full_name='TsColumnDescription.type', index=1, + number=2, type=14, cpp_type=8, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + 
serialized_start=598, + serialized_end=662, +) + + +_TSROW = _descriptor.Descriptor( + name='TsRow', + full_name='TsRow', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='cells', full_name='TsRow.cells', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=664, + serialized_end=695, +) + + +_TSCELL = _descriptor.Descriptor( + name='TsCell', + full_name='TsCell', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='varchar_value', full_name='TsCell.varchar_value', index=0, + number=1, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='sint64_value', full_name='TsCell.sint64_value', index=1, + number=2, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timestamp_value', full_name='TsCell.timestamp_value', index=2, + number=3, type=18, cpp_type=2, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='boolean_value', full_name='TsCell.boolean_value', index=3, + number=4, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='double_value', full_name='TsCell.double_value', index=4, + number=5, type=1, cpp_type=5, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=697, + serialized_end=820, +) + + +_TSLISTKEYSREQ = _descriptor.Descriptor( + name='TsListKeysReq', + full_name='TsListKeysReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table', full_name='TsListKeysReq.table', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='timeout', full_name='TsListKeysReq.timeout', index=1, + number=2, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=822, + serialized_end=869, +) + + +_TSLISTKEYSRESP = _descriptor.Descriptor( + name='TsListKeysResp', + full_name='TsListKeysResp', + 
filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='keys', full_name='TsListKeysResp.keys', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='done', full_name='TsListKeysResp.done', index=1, + number=2, type=8, cpp_type=7, label=1, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=871, + serialized_end=923, +) + +_TSQUERYREQ.fields_by_name['query'].message_type = _TSINTERPOLATION +_TSQUERYRESP.fields_by_name['columns'].message_type = _TSCOLUMNDESCRIPTION +_TSQUERYRESP.fields_by_name['rows'].message_type = _TSROW +_TSGETREQ.fields_by_name['key'].message_type = _TSCELL +_TSGETRESP.fields_by_name['columns'].message_type = _TSCOLUMNDESCRIPTION +_TSGETRESP.fields_by_name['rows'].message_type = _TSROW +_TSPUTREQ.fields_by_name['columns'].message_type = _TSCOLUMNDESCRIPTION +_TSPUTREQ.fields_by_name['rows'].message_type = _TSROW +_TSDELREQ.fields_by_name['key'].message_type = _TSCELL +_TSINTERPOLATION.fields_by_name['interpolations'].message_type = riak.pb.riak_pb2._RPBPAIR +_TSCOLUMNDESCRIPTION.fields_by_name['type'].enum_type = _TSCOLUMNTYPE +_TSROW.fields_by_name['cells'].message_type = _TSCELL +_TSLISTKEYSRESP.fields_by_name['keys'].message_type = _TSROW +DESCRIPTOR.message_types_by_name['TsQueryReq'] = _TSQUERYREQ +DESCRIPTOR.message_types_by_name['TsQueryResp'] = _TSQUERYRESP +DESCRIPTOR.message_types_by_name['TsGetReq'] = _TSGETREQ +DESCRIPTOR.message_types_by_name['TsGetResp'] = _TSGETRESP +DESCRIPTOR.message_types_by_name['TsPutReq'] = _TSPUTREQ +DESCRIPTOR.message_types_by_name['TsPutResp'] = _TSPUTRESP +DESCRIPTOR.message_types_by_name['TsDelReq'] = _TSDELREQ +DESCRIPTOR.message_types_by_name['TsDelResp'] = _TSDELRESP +DESCRIPTOR.message_types_by_name['TsInterpolation'] = _TSINTERPOLATION +DESCRIPTOR.message_types_by_name['TsColumnDescription'] = _TSCOLUMNDESCRIPTION +DESCRIPTOR.message_types_by_name['TsRow'] = _TSROW +DESCRIPTOR.message_types_by_name['TsCell'] = _TSCELL +DESCRIPTOR.message_types_by_name['TsListKeysReq'] = _TSLISTKEYSREQ +DESCRIPTOR.message_types_by_name['TsListKeysResp'] = _TSLISTKEYSRESP + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsQueryReq(_message.Message): + DESCRIPTOR = _TSQUERYREQ + + # @@protoc_insertion_point(class_scope:TsQueryReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsQueryResp(_message.Message): + DESCRIPTOR = _TSQUERYRESP + + # @@protoc_insertion_point(class_scope:TsQueryResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsGetReq(_message.Message): + DESCRIPTOR = _TSGETREQ + + # @@protoc_insertion_point(class_scope:TsGetReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsGetResp(_message.Message): + DESCRIPTOR = _TSGETRESP + + # @@protoc_insertion_point(class_scope:TsGetResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsPutReq(_message.Message): + DESCRIPTOR = _TSPUTREQ + + # @@protoc_insertion_point(class_scope:TsPutReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsPutResp(_message.Message): + 
DESCRIPTOR = _TSPUTRESP + + # @@protoc_insertion_point(class_scope:TsPutResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsDelReq(_message.Message): + DESCRIPTOR = _TSDELREQ + + # @@protoc_insertion_point(class_scope:TsDelReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsDelResp(_message.Message): + DESCRIPTOR = _TSDELRESP + + # @@protoc_insertion_point(class_scope:TsDelResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsInterpolation(_message.Message): + DESCRIPTOR = _TSINTERPOLATION + + # @@protoc_insertion_point(class_scope:TsInterpolation) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsColumnDescription(_message.Message): + DESCRIPTOR = _TSCOLUMNDESCRIPTION + + # @@protoc_insertion_point(class_scope:TsColumnDescription) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsRow(_message.Message): + DESCRIPTOR = _TSROW + + # @@protoc_insertion_point(class_scope:TsRow) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsCell(_message.Message): + DESCRIPTOR = _TSCELL + + # @@protoc_insertion_point(class_scope:TsCell) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsListKeysReq(_message.Message): + DESCRIPTOR = _TSLISTKEYSREQ + + # @@protoc_insertion_point(class_scope:TsListKeysReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsListKeysResp(_message.Message): + DESCRIPTOR = _TSLISTKEYSRESP + + # @@protoc_insertion_point(class_scope:TsListKeysResp) + + +DESCRIPTOR.has_options = True +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakTsPB') +# @@protoc_insertion_point(module_scope) diff --git a/riak/riak_pb/riak_yokozuna_pb2.py b/riak/pb/riak_yokozuna_pb2.py similarity index 100% rename from riak/riak_pb/riak_yokozuna_pb2.py rename to riak/pb/riak_yokozuna_pb2.py diff --git a/riak/riak_pb/messages.py b/riak/riak_pb/messages.py deleted file mode 100644 index 7d7f8b91..00000000 --- a/riak/riak_pb/messages.py +++ /dev/null @@ -1,152 +0,0 @@ -# Copyright 2015 Basho Technologies, Inc. -# -# This file is provided to you under the Apache License, -# Version 2.0 (the "License"); you may not use this file -# except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# This is a generated file. DO NOT EDIT. - -""" -Constants and mappings between Riak protocol codes and messages. 
-""" - -import riak.riak_pb.riak_dt_pb2 -import riak.riak_pb.riak_kv_pb2 -import riak.riak_pb.riak_pb2 -import riak.riak_pb.riak_search_pb2 -import riak.riak_pb.riak_yokozuna_pb2 - -# Protocol codes -MSG_CODE_ERROR_RESP = 0 -MSG_CODE_PING_REQ = 1 -MSG_CODE_PING_RESP = 2 -MSG_CODE_GET_CLIENT_ID_REQ = 3 -MSG_CODE_GET_CLIENT_ID_RESP = 4 -MSG_CODE_SET_CLIENT_ID_REQ = 5 -MSG_CODE_SET_CLIENT_ID_RESP = 6 -MSG_CODE_GET_SERVER_INFO_REQ = 7 -MSG_CODE_GET_SERVER_INFO_RESP = 8 -MSG_CODE_GET_REQ = 9 -MSG_CODE_GET_RESP = 10 -MSG_CODE_PUT_REQ = 11 -MSG_CODE_PUT_RESP = 12 -MSG_CODE_DEL_REQ = 13 -MSG_CODE_DEL_RESP = 14 -MSG_CODE_LIST_BUCKETS_REQ = 15 -MSG_CODE_LIST_BUCKETS_RESP = 16 -MSG_CODE_LIST_KEYS_REQ = 17 -MSG_CODE_LIST_KEYS_RESP = 18 -MSG_CODE_GET_BUCKET_REQ = 19 -MSG_CODE_GET_BUCKET_RESP = 20 -MSG_CODE_SET_BUCKET_REQ = 21 -MSG_CODE_SET_BUCKET_RESP = 22 -MSG_CODE_MAP_RED_REQ = 23 -MSG_CODE_MAP_RED_RESP = 24 -MSG_CODE_INDEX_REQ = 25 -MSG_CODE_INDEX_RESP = 26 -MSG_CODE_SEARCH_QUERY_REQ = 27 -MSG_CODE_SEARCH_QUERY_RESP = 28 -MSG_CODE_RESET_BUCKET_REQ = 29 -MSG_CODE_RESET_BUCKET_RESP = 30 -MSG_CODE_GET_BUCKET_TYPE_REQ = 31 -MSG_CODE_SET_BUCKET_TYPE_REQ = 32 -MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ = 33 -MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP = 34 -MSG_CODE_CS_BUCKET_REQ = 40 -MSG_CODE_CS_BUCKET_RESP = 41 -MSG_CODE_COUNTER_UPDATE_REQ = 50 -MSG_CODE_COUNTER_UPDATE_RESP = 51 -MSG_CODE_COUNTER_GET_REQ = 52 -MSG_CODE_COUNTER_GET_RESP = 53 -MSG_CODE_YOKOZUNA_INDEX_GET_REQ = 54 -MSG_CODE_YOKOZUNA_INDEX_GET_RESP = 55 -MSG_CODE_YOKOZUNA_INDEX_PUT_REQ = 56 -MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ = 57 -MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ = 58 -MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP = 59 -MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ = 60 -MSG_CODE_DT_FETCH_REQ = 80 -MSG_CODE_DT_FETCH_RESP = 81 -MSG_CODE_DT_UPDATE_REQ = 82 -MSG_CODE_DT_UPDATE_RESP = 83 -MSG_CODE_AUTH_REQ = 253 -MSG_CODE_AUTH_RESP = 254 -MSG_CODE_START_TLS = 255 - -# Mapping from code to protobuf class -MESSAGE_CLASSES = { - MSG_CODE_ERROR_RESP: riak.riak_pb.riak_pb2.RpbErrorResp, - MSG_CODE_PING_REQ: None, - MSG_CODE_PING_RESP: None, - MSG_CODE_GET_CLIENT_ID_REQ: None, - MSG_CODE_GET_CLIENT_ID_RESP: riak.riak_pb.riak_kv_pb2.RpbGetClientIdResp, - MSG_CODE_SET_CLIENT_ID_REQ: riak.riak_pb.riak_kv_pb2.RpbSetClientIdReq, - MSG_CODE_SET_CLIENT_ID_RESP: None, - MSG_CODE_GET_SERVER_INFO_REQ: None, - MSG_CODE_GET_SERVER_INFO_RESP: riak.riak_pb.riak_pb2.RpbGetServerInfoResp, - MSG_CODE_GET_REQ: riak.riak_pb.riak_kv_pb2.RpbGetReq, - MSG_CODE_GET_RESP: riak.riak_pb.riak_kv_pb2.RpbGetResp, - MSG_CODE_PUT_REQ: riak.riak_pb.riak_kv_pb2.RpbPutReq, - MSG_CODE_PUT_RESP: riak.riak_pb.riak_kv_pb2.RpbPutResp, - MSG_CODE_DEL_REQ: riak.riak_pb.riak_kv_pb2.RpbDelReq, - MSG_CODE_DEL_RESP: None, - MSG_CODE_LIST_BUCKETS_REQ: riak.riak_pb.riak_kv_pb2.RpbListBucketsReq, - MSG_CODE_LIST_BUCKETS_RESP: riak.riak_pb.riak_kv_pb2.RpbListBucketsResp, - MSG_CODE_LIST_KEYS_REQ: riak.riak_pb.riak_kv_pb2.RpbListKeysReq, - MSG_CODE_LIST_KEYS_RESP: riak.riak_pb.riak_kv_pb2.RpbListKeysResp, - MSG_CODE_GET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbGetBucketReq, - MSG_CODE_GET_BUCKET_RESP: riak.riak_pb.riak_pb2.RpbGetBucketResp, - MSG_CODE_SET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbSetBucketReq, - MSG_CODE_SET_BUCKET_RESP: None, - MSG_CODE_MAP_RED_REQ: riak.riak_pb.riak_kv_pb2.RpbMapRedReq, - MSG_CODE_MAP_RED_RESP: riak.riak_pb.riak_kv_pb2.RpbMapRedResp, - MSG_CODE_INDEX_REQ: riak.riak_pb.riak_kv_pb2.RpbIndexReq, - MSG_CODE_INDEX_RESP: riak.riak_pb.riak_kv_pb2.RpbIndexResp, - MSG_CODE_SEARCH_QUERY_REQ: 
riak.riak_pb.riak_search_pb2.RpbSearchQueryReq, - MSG_CODE_SEARCH_QUERY_RESP: riak.riak_pb.riak_search_pb2.RpbSearchQueryResp, - MSG_CODE_RESET_BUCKET_REQ: riak.riak_pb.riak_pb2.RpbResetBucketReq, - MSG_CODE_RESET_BUCKET_RESP: None, - MSG_CODE_GET_BUCKET_TYPE_REQ: riak.riak_pb.riak_pb2.RpbGetBucketTypeReq, - MSG_CODE_SET_BUCKET_TYPE_REQ: riak.riak_pb.riak_pb2.RpbSetBucketTypeReq, - MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ: - riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq, - MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP: - riak.riak_pb.riak_kv_pb2.RpbGetBucketKeyPreflistResp, - MSG_CODE_CS_BUCKET_REQ: riak.riak_pb.riak_kv_pb2.RpbCSBucketReq, - MSG_CODE_CS_BUCKET_RESP: riak.riak_pb.riak_kv_pb2.RpbCSBucketResp, - MSG_CODE_COUNTER_UPDATE_REQ: riak.riak_pb.riak_kv_pb2.RpbCounterUpdateReq, - MSG_CODE_COUNTER_UPDATE_RESP: riak.riak_pb.riak_kv_pb2.RpbCounterUpdateResp, - MSG_CODE_COUNTER_GET_REQ: riak.riak_pb.riak_kv_pb2.RpbCounterGetReq, - MSG_CODE_COUNTER_GET_RESP: riak.riak_pb.riak_kv_pb2.RpbCounterGetResp, - MSG_CODE_YOKOZUNA_INDEX_GET_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq, - MSG_CODE_YOKOZUNA_INDEX_GET_RESP: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexGetResp, - MSG_CODE_YOKOZUNA_INDEX_PUT_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq, - MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq, - MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq, - MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetResp, - MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ: - riak.riak_pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq, - MSG_CODE_DT_FETCH_REQ: riak.riak_pb.riak_dt_pb2.DtFetchReq, - MSG_CODE_DT_FETCH_RESP: riak.riak_pb.riak_dt_pb2.DtFetchResp, - MSG_CODE_DT_UPDATE_REQ: riak.riak_pb.riak_dt_pb2.DtUpdateReq, - MSG_CODE_DT_UPDATE_RESP: riak.riak_pb.riak_dt_pb2.DtUpdateResp, - MSG_CODE_AUTH_REQ: riak.riak_pb.riak_pb2.RpbAuthReq, - MSG_CODE_AUTH_RESP: None, - MSG_CODE_START_TLS: None -} From 508ad24a73cf64ff99ae7ca42069885e13b34e85 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 11:08:35 -0800 Subject: [PATCH 058/324] Remove Python 2.6 and 2.7.9, ensure latest version of 2.7, 3.3, 3.4 and 3.5 series are used --- Makefile | 48 +++++++--------- buildbot/Makefile | 9 --- buildbot/tox_setup.sh => env_setup.sh | 79 +++++++++++++++------------ setup.py | 1 + tox.ini | 8 +-- 5 files changed, 66 insertions(+), 79 deletions(-) rename buildbot/tox_setup.sh => env_setup.sh (51%) diff --git a/Makefile b/Makefile index 5562a9f2..fe88fb85 100644 --- a/Makefile +++ b/Makefile @@ -1,47 +1,41 @@ -.PHONY: all pb_compile pb_clean release install +.PHONY: pb_compile pb_clean release install # TODO: git submodule -all: pb_compile +CLEAN = rm -rf riak/pb/*.pyc riak/pb/__pycache__ __pycache__ py-build clean: pb_clean pb_compile: echo "==> Python (compile)" - protoc -I riak_pb/src --python_out=riak/pb riak_pb/src/*.proto + protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto python setup.py build_messages pb_clean: - echo "==> Python (clean)" - rm -rf riak/pb/*.pyc riak/pb/*_pb2.py - rm -rf riak/pb/__pycache__ __pycache__ + @echo "==> Python (clean)" + $(CLEAN) release: pb_clean ifeq ($(RELEASE_GPG_KEYNAME),) @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" else - @echo "==> Python (release)" - @protoc -Isrc --python_out=riak/pb src/*.proto - @python2.7 setup.py build_messages build --build-base=riak - @python2.7 setup.py build --build-base=python bdist_egg 
upload -s -i $(RELEASE_GPG_KEYNAME) - @rm -rf *.pyc riak_pb/*_pb2.py riak_pb/*.pyc riak_pb.egg-info python - + @echo "==> Python 2.7 (release)" + @python2.7 setup.py build_messages build --build-base=py-build + @python2.7 setup.py build --build-base=py-build bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + $(CLEAN) @echo "==> Python 3.3 (release)" - @protoc -Isrc --python_out=riak/pb src/*.proto - @python3.3 setup.py build_messages build --build-base=riak - @python3.3 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) - @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 - - @protoc -Isrc --python_out=riak/pb src/*.proto - @python3.4 setup.py build_messages build --build-base=riak - @python3.4 setup.py build --build-base=riak bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) - @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 - - @protoc -Isrc --python_out=riak/pb src/*.proto - @python3.4 setup.py build_messages build --build-base=riak - @python3.4 setup.py build --build-base=riak sdist upload -s -i $(RELEASE_GPG_KEYNAME) - @rm -rf riak/pb/*_pb2.py riak/pb/__pycache__ __pycache__ python3_riak/pb.egg-info python3 + @python3.3 setup.py build_messages build --build-base=py-build + @python3.3 setup.py build --build-base=py-build bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + $(CLEAN) + @echo "==> Python 3.4 (release)" + @python3.4 setup.py build_messages build --build-base=py-build + @python3.4 setup.py build --build-base=py-build bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + $(CLEAN) + @echo "==> Python 3.5 (release)" + @python3.5 setup.py build_messages build --build-base=py-build + @python3.5 setup.py build --build-base=py-build sdist upload -s -i $(RELEASE_GPG_KEYNAME) + $(CLEAN) endif install: pb_compile @echo "==> Python (install)" - @python setup.py build_messages build --build-base=riak install + @python setup.py build_messages build --build-base=py-build install diff --git a/buildbot/Makefile b/buildbot/Makefile index 341da322..9c93f1c2 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -32,12 +32,3 @@ test_security: @echo "Testing Riak Python Client (with security)" @../setup.py enable_security --riak-admin=${RIAK_ADMIN} @RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. - -# These are required to actually build all the Python versions: -# * pip install tox -# * pyenv - https://github.com/yyuu/pyenv -# And two pyenv plugins: -# * pyenv virtualenv - https://github.com/yyuu/pyenv-virtualenv -# * pyenv alias - https://github.com/s1341/pyenv-alias -setup: - ./tox_setup.sh diff --git a/buildbot/tox_setup.sh b/env_setup.sh similarity index 51% rename from buildbot/tox_setup.sh rename to env_setup.sh index 94246d0d..ceabe7d9 100755 --- a/buildbot/tox_setup.sh +++ b/env_setup.sh @@ -5,7 +5,17 @@ then export PYENV_ROOT="$HOME/.pyenv" fi -TEST_ROOT=$PWD/.. +declare -r PROJDIR="$PWD" +if [[ ! -s $PROJDIR/riak/__init__.py ]] +then + echo "[ERROR] script must be run from the root of a clone of github.com/basho/riak-python-client" 1>&2 + exit 1 +fi + +if [[ ! -d $PROJDIR/riak_pb/src ]] +then + git submodule update --init +fi # Install pyenv if it's missing if [[ ! 
-d $PYENV_ROOT ]] @@ -47,45 +57,42 @@ then fi # Now install (allthethings) versions for testing -if [[ -z $(pyenv versions | grep riak_3.4.3) ]] -then - VERSION_ALIAS="riak_3.4.3" pyenv install 3.4.3 - pyenv virtualenv riak_3.4.3 riak-py34 -fi -if [[ -z $(pyenv versions | grep riak_3.3.6) ]] -then - VERSION_ALIAS="riak_3.3.6" pyenv install 3.3.6 - pyenv virtualenv riak_3.3.6 riak-py33 -fi -if [[ -z $(pyenv versions | grep riak_2.7.10) ]] -then - VERSION_ALIAS="riak_2.7.10" pyenv install 2.7.10 - pyenv virtualenv riak_2.7.10 riak-py27 -fi -if [[ -z $(pyenv versions | grep riak_2.7.9) ]] -then - VERSION_ALIAS="riak_2.7.9" pyenv install 2.7.9 - pyenv virtualenv riak_2.7.9 riak-py279 -fi -if [[ -z $(pyenv versions | grep riak_2.6.9) ]] -then - VERSION_ALIAS="riak_2.6.9" pyenv install 2.6.9 - pyenv virtualenv riak_2.6.9 riak-py26 -fi +for pyver in 2.7 3.3 3.4 3.5 +do + if ! pyenv versions | fgrep "riak_$pyver" + then + declare -i pymaj="${pyver%.*}" + declare -i pymin="${pyver#*.}" + pyver_latest="$(pyenv install --list | grep -E "^[[:space:]]+$pymaj\\.$pymin\\.[[:digit:]]\$" | tail -n1 | sed -e 's/[[:space:]]//g')" + + echo "[INFO] installing Python $pyver_latest" + riak_pyver="riak_$pyver_latest" + VERSION_ALIAS="$riak_pyver" pyenv install "$pyver_latest" + pyenv virtualenv "$riak_pyver" "riak-py$pymaj$pymin" + fi +done -(cd $TEST_ROOT && pyenv local riak-py34 riak-py33 riak-py27 riak-py279 riak-py26) +(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27) pyenv versions -# Now install tox -pip install --upgrade pip -if [[ -z $(pip show tox) ]] +if [[ $(python --version) == Python\ 3.* ]] then - pip install -Iv tox - if [[ -z $(pip show tox) ]] - then - echo "[ERROR] install of tox failed" 1>&2 - exit 1 - fi + pip install --upgrade pip + for module in six tox python3-protobuf + do + if [[ -z $(pip show $module) ]] + then + pip install -Iv $module + if [[ -z $(pip show $module) ]] + then + echo "[ERROR] install of $module failed" 1>&2 + exit 1 + fi + fi + done pyenv rehash +else + echo "[ERROR] expected Python 3 to be 'python' at this point" 1>&2 + exit 1 fi diff --git a/setup.py b/setup.py index 152f2537..9e37db60 100755 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ #!/usr/bin/env python + import sys from setuptools import setup, find_packages from version import get_version diff --git a/tox.ini b/tox.ini index 03c15cbd..4cc3ab52 100644 --- a/tox.ini +++ b/tox.ini @@ -4,15 +4,9 @@ # and then run "tox" from this directory. 
[tox] -envlist = py26, py279, py27, py33, py34 +envlist = py27, py33, py34, py35 [testenv] -basepython = - py26: python2.6 - py279: {env:HOME}/.pyenv/versions/riak-py279/bin/python2.7 - py27: python2.7 - py33: python3.3 - py34: python3.4 install_command = pip install --upgrade {packages} commands = {envpython} setup.py test deps = six From 85c4ed70b2f45fd20a5dc2f5d994ceab57fe2bb0 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 11:10:05 -0800 Subject: [PATCH 059/324] Exclude pb files from lint, fix one lint error --- buildbot/Makefile | 4 ++-- commands.py | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index 9c93f1c2..398ec1c3 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -16,8 +16,8 @@ compile: lint: @pip install --upgrade pep8 flake8 - @cd ..; pep8 --exclude=riak_pb riak *.py - @cd ..; flake8 --exclude=riak_pb riak *.py + @cd ..; pep8 --exclude=riak/pb riak *.py + @cd ..; flake8 --exclude=riak/pb riak *.py @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/client.crt @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/server.crt diff --git a/commands.py b/commands.py index 9f3c108b..830a4773 100644 --- a/commands.py +++ b/commands.py @@ -4,7 +4,6 @@ import re import shutil -from datetime import date from distutils.core import Command from distutils.errors import DistutilsOptionError from distutils.file_util import write_file From d1de4bbaf6d900456caa2e5196869710f628dd57 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 11:12:52 -0800 Subject: [PATCH 060/324] Restore the buildbot setup target --- buildbot/Makefile | 3 ++ env_setup.sh | 98 ----------------------------------------------- 2 files changed, 3 insertions(+), 98 deletions(-) delete mode 100755 env_setup.sh diff --git a/buildbot/Makefile b/buildbot/Makefile index 398ec1c3..66ef8ec3 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -32,3 +32,6 @@ test_security: @echo "Testing Riak Python Client (with security)" @../setup.py enable_security --riak-admin=${RIAK_ADMIN} @RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. + +setup: + ./tox_setup.sh diff --git a/env_setup.sh b/env_setup.sh deleted file mode 100755 index ceabe7d9..00000000 --- a/env_setup.sh +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env bash - -if [[ ! -d $PYENV_ROOT ]] -then - export PYENV_ROOT="$HOME/.pyenv" -fi - -declare -r PROJDIR="$PWD" -if [[ ! -s $PROJDIR/riak/__init__.py ]] -then - echo "[ERROR] script must be run from the root of a clone of github.com/basho/riak-python-client" 1>&2 - exit 1 -fi - -if [[ ! -d $PROJDIR/riak_pb/src ]] -then - git submodule update --init -fi - -# Install pyenv if it's missing -if [[ ! -d $PYENV_ROOT ]] -then - git clone https://github.com/yyuu/pyenv.git $PYENV_ROOT - (cd $PYENV_ROOT && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) -fi - -# Upgrade it, if it's too old -if [[ -z $(pyenv install --list | grep 3.4.3) ]] -then - (cd $PYENV_ROOT && git pull -u origin master && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) -fi - -if [[ ! -d $PYENV_ROOT/plugins/pyenv-virtualenv ]] -then - git clone https://github.com/yyuu/pyenv-virtualenv.git $PYENV_ROOT/plugins/pyenv-virtualenv - (cd $PYENV_ROOT/plugins/pyenv-virtualenv && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) -fi - -if [[ ! 
-d $PYENV_ROOT/plugins/pyenv-alias ]] -then - git clone https://github.com/s1341/pyenv-alias.git $PYENV_ROOT/plugins/pyenv-alias -fi - -# Add pyenv root to PATH -# and initialize pyenv -if [[ $PATH != */.pyenv* ]] -then - echo "[INFO] adding $PYENV_ROOT/bin to PATH" - export PATH="$PYENV_ROOT/bin:$PATH" -fi - -if [[ $(type -t pyenv) != 'function' ]] -then - echo "[INFO] init pyenv" - eval "$(pyenv init -)" - eval "$(pyenv virtualenv-init -)" -fi - -# Now install (allthethings) versions for testing -for pyver in 2.7 3.3 3.4 3.5 -do - if ! pyenv versions | fgrep "riak_$pyver" - then - declare -i pymaj="${pyver%.*}" - declare -i pymin="${pyver#*.}" - pyver_latest="$(pyenv install --list | grep -E "^[[:space:]]+$pymaj\\.$pymin\\.[[:digit:]]\$" | tail -n1 | sed -e 's/[[:space:]]//g')" - - echo "[INFO] installing Python $pyver_latest" - riak_pyver="riak_$pyver_latest" - VERSION_ALIAS="$riak_pyver" pyenv install "$pyver_latest" - pyenv virtualenv "$riak_pyver" "riak-py$pymaj$pymin" - fi -done - -(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27) - -pyenv versions - -if [[ $(python --version) == Python\ 3.* ]] -then - pip install --upgrade pip - for module in six tox python3-protobuf - do - if [[ -z $(pip show $module) ]] - then - pip install -Iv $module - if [[ -z $(pip show $module) ]] - then - echo "[ERROR] install of $module failed" 1>&2 - exit 1 - fi - fi - done - pyenv rehash -else - echo "[ERROR] expected Python 3 to be 'python' at this point" 1>&2 - exit 1 -fi From 20cee683cb39d7c1753ae4e9dd60ed3f2e733a2a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 11:15:19 -0800 Subject: [PATCH 061/324] tox setup script --- buildbot/tox_setup.sh | 101 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 101 insertions(+) create mode 100755 buildbot/tox_setup.sh diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh new file mode 100755 index 00000000..e08aa459 --- /dev/null +++ b/buildbot/tox_setup.sh @@ -0,0 +1,101 @@ +#!/usr/bin/env bash + +if [[ ! -d $PYENV_ROOT ]] +then + export PYENV_ROOT="$HOME/.pyenv" +fi + +declare -r PROJDIR="$PWD/.." +if [[ ! -s $PROJDIR/riak/__init__.py ]] +then + echo "[ERROR] script must be run from the root of a clone of github.com/basho/riak-python-client" 1>&2 + exit 1 +fi + +if [[ ! -d $PROJDIR/riak_pb/src ]] +then + (cd $PROJDIR && git submodule update --init) +fi + +# Install pyenv if it's missing +if [[ ! -d $PYENV_ROOT ]] +then + git clone https://github.com/yyuu/pyenv.git $PYENV_ROOT +else + (cd $PYENV_ROOT && git fetch --all) +fi + +(cd $PYENV_ROOT && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) + +# Upgrade it, if it's too old +if [[ -z $(pyenv install --list | grep 3.4.3) ]] +then + (cd $PYENV_ROOT && git pull -u origin master && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) +fi + +if [[ ! -d $PYENV_ROOT/plugins/pyenv-virtualenv ]] +then + git clone https://github.com/yyuu/pyenv-virtualenv.git $PYENV_ROOT/plugins/pyenv-virtualenv + (cd $PYENV_ROOT/plugins/pyenv-virtualenv && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) +fi + +if [[ ! 
-d $PYENV_ROOT/plugins/pyenv-alias ]] +then + git clone https://github.com/s1341/pyenv-alias.git $PYENV_ROOT/plugins/pyenv-alias +fi + +# Add pyenv root to PATH +# and initialize pyenv +if [[ $PATH != */.pyenv* ]] +then + echo "[INFO] adding $PYENV_ROOT/bin to PATH" + export PATH="$PYENV_ROOT/bin:$PATH" +fi + +if [[ $(type -t pyenv) != 'function' ]] +then + echo "[INFO] init pyenv" + eval "$(pyenv init -)" + eval "$(pyenv virtualenv-init -)" +fi + +# Now install (allthethings) versions for testing +for pyver in 2.7 3.3 3.4 3.5 +do + if ! pyenv versions | fgrep "riak_$pyver" + then + declare -i pymaj="${pyver%.*}" + declare -i pymin="${pyver#*.}" + pyver_latest="$(pyenv install --list | grep -E "^[[:space:]]+$pymaj\\.$pymin\\.[[:digit:]]\$" | tail -n1 | sed -e 's/[[:space:]]//g')" + + echo "[INFO] installing Python $pyver_latest" + riak_pyver="riak_$pyver_latest" + VERSION_ALIAS="$riak_pyver" pyenv install "$pyver_latest" + pyenv virtualenv "$riak_pyver" "riak-py$pymaj$pymin" + fi +done + +(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27) + +pyenv versions + +if [[ $(python --version) == Python\ 3.* ]] +then + pip install --upgrade pip + for module in six tox python3-protobuf + do + if [[ -z $(pip show $module) ]] + then + pip install -Iv $module + if [[ -z $(pip show $module) ]] + then + echo "[ERROR] install of $module failed" 1>&2 + exit 1 + fi + fi + done + pyenv rehash +else + echo "[ERROR] expected Python 3 to be 'python' at this point" 1>&2 + exit 1 +fi From 3403f91a2b306491501b66980c17aac0cc304bac Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 11:33:44 -0800 Subject: [PATCH 062/324] Improvements to tox_setup.sh to ensure latest bits are used --- buildbot/tox_setup.sh | 27 ++++++++++++++------------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index e08aa459..e1a35c62 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -27,21 +27,22 @@ fi (cd $PYENV_ROOT && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) -# Upgrade it, if it's too old -if [[ -z $(pyenv install --list | grep 3.4.3) ]] +declare -r pyenv_virtualenv_dir="$PYENV_ROOT/plugins/pyenv-virtualenv" +if [[ ! -d $pyenv_virtualenv_dir ]] then - (cd $PYENV_ROOT && git pull -u origin master && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) + git clone https://github.com/yyuu/pyenv-virtualenv.git $pyenv_virtualenv_dir +else + (cd $pyenv_virtualenv_dir && git fetch --all) fi -if [[ ! -d $PYENV_ROOT/plugins/pyenv-virtualenv ]] -then - git clone https://github.com/yyuu/pyenv-virtualenv.git $PYENV_ROOT/plugins/pyenv-virtualenv - (cd $PYENV_ROOT/plugins/pyenv-virtualenv && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) -fi +(cd $pyenv_virtualenv_dir && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) -if [[ ! -d $PYENV_ROOT/plugins/pyenv-alias ]] +declare -r pyenv_alias_dir="$PYENV_ROOT/plugins/pyenv-alias" +if [[ ! -d $pyenv_alias_dir ]] then - git clone https://github.com/s1341/pyenv-alias.git $PYENV_ROOT/plugins/pyenv-alias + git clone https://github.com/s1341/pyenv-alias.git $pyenv_alias_dir +else + (cd $pyenv_alias_dir && git pull origin master) fi # Add pyenv root to PATH @@ -84,10 +85,10 @@ then pip install --upgrade pip for module in six tox python3-protobuf do - if [[ -z $(pip show $module) ]] + if ! 
pip show --quiet $module then - pip install -Iv $module - if [[ -z $(pip show $module) ]] + pip install --ignore-installed $module + if ! pip show --quiet $module then echo "[ERROR] install of $module failed" 1>&2 exit 1 From 4fe3d1695905e1907e93d29e9570dbb983ec827d Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 11:47:17 -0800 Subject: [PATCH 063/324] Improve Makefile --- .gitignore | 4 +++- Makefile | 33 +++++++++++++++++---------------- 2 files changed, 20 insertions(+), 17 deletions(-) diff --git a/.gitignore b/.gitignore index 24c0bded..88cbc3de 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,6 @@ *.pyc .python-version +__pycache__/ .tox/ @@ -8,8 +9,9 @@ docs/_build .*.swp .coverage -build/ +py-build/ dist/ + riak.egg-info/ *.egg .eggs/ diff --git a/Makefile b/Makefile index fe88fb85..ffb7421c 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,6 @@ -.PHONY: pb_compile pb_clean release install +.PHONY: pb_compile pb_clean pb_build release install # TODO: git submodule -CLEAN = rm -rf riak/pb/*.pyc riak/pb/__pycache__ __pycache__ py-build clean: pb_clean @@ -12,28 +11,30 @@ pb_compile: pb_clean: @echo "==> Python (clean)" - $(CLEAN) + @rm -rf riak/pb/*_pb2.py riak/pb/*.pyc riak/pb/__pycache__ __pycache__ py-build -release: pb_clean +pb_build: pb_clean pb_compile + @echo "==> Python 2.7 (build)" + @python2.7 setup.py build --build-base=py-build/2.7 + @echo "==> Python 3.3 (build)" + @python3.3 setup.py build --build-base=py-build/3.3 + @echo "==> Python 3.4 (build)" + @python3.4 setup.py build --build-base=py-build/3.4 + @echo "==> Python 3.5 (build)" + @python3.5 setup.py build --build-base=py-build/3.5 + +release: pb_build ifeq ($(RELEASE_GPG_KEYNAME),) @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" else @echo "==> Python 2.7 (release)" - @python2.7 setup.py build_messages build --build-base=py-build - @python2.7 setup.py build --build-base=py-build bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) - $(CLEAN) + @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.3 (release)" - @python3.3 setup.py build_messages build --build-base=py-build - @python3.3 setup.py build --build-base=py-build bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) - $(CLEAN) + @python3.3 setup.py build --build-base=py-build/3.3 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.4 (release)" - @python3.4 setup.py build_messages build --build-base=py-build - @python3.4 setup.py build --build-base=py-build bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) - $(CLEAN) + @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.5 (release)" - @python3.5 setup.py build_messages build --build-base=py-build - @python3.5 setup.py build --build-base=py-build sdist upload -s -i $(RELEASE_GPG_KEYNAME) - $(CLEAN) + @python3.5 setup.py build --build-base=py-build/3.5 sdist upload -s -i $(RELEASE_GPG_KEYNAME) endif install: pb_compile From 03792c94cc2cdddee8d2791f458daa01e5a46902 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 11:50:11 -0800 Subject: [PATCH 064/324] Remove install target as it is no longer useful --- Makefile | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index ffb7421c..a7f72da3 100644 --- a/Makefile +++ b/Makefile @@ -1,18 +1,16 @@ -.PHONY: pb_compile pb_clean pb_build release install -# TODO: git submodule - +.PHONY: pb_clean pb_compile pb_build release clean: pb_clean +pb_clean: 
+ @echo "==> Python (clean)" + @rm -rf riak/pb/*_pb2.py riak/pb/*.pyc riak/pb/__pycache__ __pycache__ py-build + pb_compile: echo "==> Python (compile)" protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto python setup.py build_messages -pb_clean: - @echo "==> Python (clean)" - @rm -rf riak/pb/*_pb2.py riak/pb/*.pyc riak/pb/__pycache__ __pycache__ py-build - pb_build: pb_clean pb_compile @echo "==> Python 2.7 (build)" @python2.7 setup.py build --build-base=py-build/2.7 @@ -36,7 +34,3 @@ else @echo "==> Python 3.5 (release)" @python3.5 setup.py build --build-base=py-build/3.5 sdist upload -s -i $(RELEASE_GPG_KEYNAME) endif - -install: pb_compile - @echo "==> Python (install)" - @python setup.py build_messages build --build-base=py-build install From c251a001e64cbb17e432aa07261f9351b3f1baec Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 16:39:48 -0800 Subject: [PATCH 065/324] Final makefile cleanup --- Makefile | 10 +++++----- buildbot/Makefile | 24 +++++++++++++----------- buildbot/tox_setup.sh | 2 +- 3 files changed, 19 insertions(+), 17 deletions(-) diff --git a/Makefile b/Makefile index a7f72da3..d1851078 100644 --- a/Makefile +++ b/Makefile @@ -6,12 +6,12 @@ pb_clean: @echo "==> Python (clean)" @rm -rf riak/pb/*_pb2.py riak/pb/*.pyc riak/pb/__pycache__ __pycache__ py-build -pb_compile: - echo "==> Python (compile)" - protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto - python setup.py build_messages +pb_compile: pb_clean + @echo "==> Python (compile)" + @protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto + @python setup.py build_messages -pb_build: pb_clean pb_compile +pb_build: pb_compile @echo "==> Python 2.7 (build)" @python2.7 setup.py build --build-base=py-build/2.7 @echo "==> Python 3.3 (build)" diff --git a/buildbot/Makefile b/buildbot/Makefile index 66ef8ec3..7d171141 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -1,15 +1,17 @@ -RIAK_CONF = ${RIAK_DIR}/etc/riak.conf -# ADVANCED_CONF = ${RIAK_DIR}/etc/advanced.config -# RIAK = ${RIAK_DIR}/bin/riak -RIAK_ADMIN = ${RIAK_DIR}/bin/riak-admin -CERTS_DIR = $(shell pwd)/../riak/tests/resources +ifndef RIAK_DIR +$(error RIAK_DIR is not set) +endif + +RIAK_CONF = $(RIAK_DIR)/etc/riak.conf +RIAK_ADMIN = $(RIAK_DIR)/bin/riak-admin +CERTS_DIR = $(realpath $(CURDIR))/../riak/tests/resources unexport PYENV_VERSION preconfigure: - @../setup.py preconfigure --riak-conf=${RIAK_CONF} + @../setup.py preconfigure --riak-conf=$(RIAK_CONF) configure: - @../setup.py configure --riak-admin=${RIAK_ADMIN} + @../setup.py configure --riak-admin=$(RIAK_ADMIN) compile: @../setup.py develop @@ -18,19 +20,19 @@ lint: @pip install --upgrade pep8 flake8 @cd ..; pep8 --exclude=riak/pb riak *.py @cd ..; flake8 --exclude=riak/pb riak *.py - @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/client.crt - @openssl verify -CAfile ${CERTS_DIR}/ca.crt ${CERTS_DIR}/server.crt + @openssl verify -CAfile $(CERTS_DIR)/ca.crt $(CERTS_DIR)/client.crt + @openssl verify -CAfile $(CERTS_DIR)/ca.crt $(CERTS_DIR)/server.crt test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" - @../setup.py disable_security --riak-admin=${RIAK_ADMIN} + @../setup.py disable_security --riak-admin=$(RIAK_ADMIN) @RUN_YZ=1 SKIP_DATATYPES=0 SKIP_INDEXES=0 ./tox_runner.sh .. 
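The `pb_compile` target earlier in this patch runs protoc over riak_pb/src and then `setup.py build_messages`, regenerating the riak/pb modules that the protocol-buffers transport imports. A minimal sketch of how those generated modules get used, assuming the package has been built as above — the exact contents of riak.pb.messages are produced by build_messages, so the MESSAGE_CLASSES lookup below is an assumption (mirroring the removed riak_pb.messages module), not documented API:

    # Sketch only: how the modules built by `pb_compile` are consumed.
    import riak.pb.messages
    import riak.pb.riak_kv_pb2

    # RpbGetReq is one of the protoc-generated request classes.
    req = riak.pb.riak_kv_pb2.RpbGetReq()
    req.bucket = b'mybucket'
    req.key = b'mykey'

    # Each request is framed with its message code plus the serialized
    # protobuf payload; MSG_CODE_GET_REQ is 9 per the message tables above.
    code = riak.pb.messages.MSG_CODE_GET_REQ
    payload = req.SerializeToString()

    # Code-to-class mapping (assumed) selects the class that decodes a reply.
    resp_cls = riak.pb.messages.MESSAGE_CLASSES[riak.pb.messages.MSG_CODE_GET_RESP]
    resp = resp_cls()  # would be filled via resp.ParseFromString(raw_bytes)

The transport performs this framing internally; the sketch only shows which pieces the generated modules contribute.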
test_security: @echo "Testing Riak Python Client (with security)" - @../setup.py enable_security --riak-admin=${RIAK_ADMIN} + @../setup.py enable_security --riak-admin=$(RIAK_ADMIN) @RUN_YZ=1 SKIP_INDEXES=0 RUN_SECURITY=1 SKIP_POOL=1 SKIP_RESOLVE=1 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. setup: diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index e1a35c62..b8da9208 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -67,7 +67,7 @@ do then declare -i pymaj="${pyver%.*}" declare -i pymin="${pyver#*.}" - pyver_latest="$(pyenv install --list | grep -E "^[[:space:]]+$pymaj\\.$pymin\\.[[:digit:]]\$" | tail -n1 | sed -e 's/[[:space:]]//g')" + pyver_latest="$(pyenv install --list | grep -E "^[[:space:]]+$pymaj\\.$pymin\\.[[:digit:]]+\$" | tail -n1 | sed -e 's/[[:space:]]//g')" echo "[INFO] installing Python $pyver_latest" riak_pyver="riak_$pyver_latest" From c1f0e372416d9cd733422087abbcc48d25d37782 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 12 Dec 2015 20:44:50 -0800 Subject: [PATCH 066/324] Modify error to use Gnu make error mechanism --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index d1851078..1e4e7dca 100644 --- a/Makefile +++ b/Makefile @@ -22,9 +22,7 @@ pb_build: pb_compile @python3.5 setup.py build --build-base=py-build/3.5 release: pb_build -ifeq ($(RELEASE_GPG_KEYNAME),) - @echo "RELEASE_GPG_KEYNAME must be set to release/deploy" -else +ifdef RELEASE_GPG_KEYNAME @echo "==> Python 2.7 (release)" @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.3 (release)" @@ -33,4 +31,6 @@ else @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.5 (release)" @python3.5 setup.py build --build-base=py-build/3.5 sdist upload -s -i $(RELEASE_GPG_KEYNAME) +else +$(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif From 57c47c02f946111038bd59d68fda2a2fb06baa43 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Sun, 13 Dec 2015 20:27:54 +0000 Subject: [PATCH 067/324] Add Google's protobuf library as a direct prerequisite --- setup.py | 35 ++++++++++++----------------------- 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/setup.py b/setup.py index 3e2b84d1..ba3474e1 100755 --- a/setup.py +++ b/setup.py @@ -1,6 +1,5 @@ #!/usr/bin/env python -import os -import sys +import platform from setuptools import setup, find_packages from version import get_version from commands import preconfigure, configure, create_bucket_types, \ @@ -8,33 +7,23 @@ install_requires = ['six >= 1.8.0'] requires = ['six(>=1.8.0)'] -if sys.version_info < (2, 7, 9): +if platform.python_version() < '2.7.9': install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") -riak_pb_in_pythonpath = False -os_env_pythonpath = os.environ.get('PYTHONPATH') -if os_env_pythonpath is not None: - for ppath in os_env_pythonpath.split(os.pathsep): - if ppath.find('riak_pb/python/lib') != -1: - riak_pb_messages = os.path.join(ppath, 'riak_pb', 'messages.py') - if os.path.exists(riak_pb_messages): - riak_pb_in_pythonpath = True - break - -if riak_pb_in_pythonpath: - install_requires.append("protobuf ==2.6.1") - requires.append("protobuf(==2.6.1)") +if platform.python_version() < '3.0': + install_requires.append('protobuf >=2.4.1, <2.7.0') + requires.append('protobuf(>=2.4.1,<2.7.0)') + install_requires.append("riak_pb >=2.0.0") + 
requires.append("riak_pb(>=2.0.0)") else: - if sys.version_info < (3, ): - install_requires.append("riak_pb >=2.0.0") - requires.append("riak_pb(>=2.0.0)") - else: - install_requires.append("python3_riak_pb >=2.0.0") - requires.append("python3_riak_pb(>=2.0.0)") + install_requires.append('python3_protobuf >=2.4.1, <2.6.0') + requires.append('python3_protobuf(>=2.4.1,<2.6.0)') + install_requires.append("python3_riak_pb >=2.0.0") + requires.append("python3_riak_pb(>=2.0.0)") tests_require = [] -if sys.version_info < (2, 7): +if platform.python_version() < '2.7.0': tests_require.append("unittest2") setup( From b4808304f80e60ce103f7d98cf8b6c082d3c1f0d Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Mon, 14 Dec 2015 04:51:33 +0000 Subject: [PATCH 068/324] Update the release notes for the 2.3.0 release --- RELEASE_NOTES.md | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 80fef083..3f35302a 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,5 +1,26 @@ # Riak Python Client Release Notes +## 2.3.0 Feature Release - 2015-12-14 + +Release 2.3.0 features support for new +[time series](https://github.com/basho/riak-python-client/pull/416) +functionality. + +This is release retires support for Python 2.6.x but adds support for +Python 3.5.x. + +There are also many bugfixes and new enhancements: + +* [Support for Preflists and Write-Once bucket types] + (https://github.com/basho/riak-python-client/pull/414) +* [Support Riak 2.1.1] + (https://github.com/basho/riak-python-client/pull/407) +* [Addition of new API entry point methods] + (https://github.com/basho/riak-python-client/pull/398) +* [Native SSL support for Python 2.7.9+] + (https://github.com/basho/riak-python-client/pull/397) + + ## 2.2.0 Feature Release - 2014-12-18 Release 2.2.0 features support for From 52ef8d7a2e613e3391745856106bcd1d4396257b Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 07:17:00 -0800 Subject: [PATCH 069/324] Detect PB corectly --- riak/tests/__init__.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 698a64ba..46fc2fb4 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -12,7 +12,7 @@ test_server.start() try: - __import__('riak.riak_pb') + __import__('riak.pb') HAVE_PROTO = True except ImportError: HAVE_PROTO = False From 88bf9c2445f537e8378dc9fab4ad974c766895ed Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 07:38:37 -0800 Subject: [PATCH 070/324] Small changes to the README.rst --- Makefile | 3 ++- README.rst | 16 ++++++---------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/Makefile b/Makefile index 1e4e7dca..357d6db2 100644 --- a/Makefile +++ b/Makefile @@ -21,7 +21,8 @@ pb_build: pb_compile @echo "==> Python 3.5 (build)" @python3.5 setup.py build --build-base=py-build/3.5 -release: pb_build + +release: ifdef RELEASE_GPG_KEYNAME @echo "==> Python 2.7 (release)" @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) diff --git a/README.rst b/README.rst index 99bfe27b..4adb0c14 100644 --- a/README.rst +++ b/README.rst @@ -17,9 +17,7 @@ Install ======= The recommended versions of Python for use with this client are Python -`2.7.x` and `3.3/3.4`. - -From the Riak Python Client root directory, execute +`2.7.x`, `3.3.x`, `3.4.x` and `3.5.x`. 
From Source ----------- @@ -28,7 +26,7 @@ From Source python setup.py install -There is an additional dependency on the Python package `setuptools`. +There are additional dependencies on Python packages `setuptools` and `protobuf`. From PyPI --------- @@ -64,16 +62,14 @@ variable to the root of your Riak installation. Then from the .. code-block:: console - cd buildbot - make preconfigure + make -C buildbot preconfigure -Start your Riak node with ``riak start`` from the the Riak directory, -then back in ``buildbot`` type +Start your Riak node with ``riak start`` from the the Riak directory, then .. code-block:: console - make configure - make test + make -C buildbot configure + make -C buildbot test That will run the test suite twice: once with security enabled and once without. From 8fe089ca4148743848061c43446bf80a8579c59a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 07:51:14 -0800 Subject: [PATCH 071/324] Fix the release target. --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 357d6db2..60065118 100644 --- a/Makefile +++ b/Makefile @@ -31,7 +31,7 @@ ifdef RELEASE_GPG_KEYNAME @echo "==> Python 3.4 (release)" @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.5 (release)" - @python3.5 setup.py build --build-base=py-build/3.5 sdist upload -s -i $(RELEASE_GPG_KEYNAME) + @python3.5 setup.py build --build-base=py-build/3.5 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) else $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif From 5dbad5fd4fbaea0bd8aa0250f94f49b23eb3ab41 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 07:53:35 -0800 Subject: [PATCH 072/324] Remove git submodule call in tox setup. --- buildbot/tox_setup.sh | 5 ----- 1 file changed, 5 deletions(-) diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index b8da9208..3156d5fe 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -12,11 +12,6 @@ then exit 1 fi -if [[ ! -d $PROJDIR/riak_pb/src ]] -then - (cd $PROJDIR && git submodule update --init) -fi - # Install pyenv if it's missing if [[ ! -d $PYENV_ROOT ]] then From 76122ef3fc28107de5ac8becace5fdc36406163f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 08:10:35 -0800 Subject: [PATCH 073/324] Improve error text. --- buildbot/tox_setup.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index 3156d5fe..9a63a17c 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -8,7 +8,7 @@ fi declare -r PROJDIR="$PWD/.." if [[ ! 
-s $PROJDIR/riak/__init__.py ]] then - echo "[ERROR] script must be run from the root of a clone of github.com/basho/riak-python-client" 1>&2 + echo "[ERROR] script must be run from the buildbot/ dir in github.com/basho/riak-python-client" 1>&2 exit 1 fi From 3235bee7a8e9f06e4fa6713052479838a4f77de3 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 09:15:45 -0800 Subject: [PATCH 074/324] Restore testing on 2.7.8, clean up from merge --- Makefile | 17 +++------------- buildbot/tox_setup.sh | 11 ++++++++-- riak/transports/pbc/codec.py | 22 +++++++++++--------- riak/transports/pbc/stream.py | 2 +- riak/transports/pbc/transport.py | 35 ++++++++++++++++++-------------- setup.py | 2 +- 6 files changed, 46 insertions(+), 43 deletions(-) diff --git a/Makefile b/Makefile index 60065118..5740a000 100644 --- a/Makefile +++ b/Makefile @@ -11,19 +11,10 @@ pb_compile: pb_clean @protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto @python setup.py build_messages -pb_build: pb_compile - @echo "==> Python 2.7 (build)" - @python2.7 setup.py build --build-base=py-build/2.7 - @echo "==> Python 3.3 (build)" - @python3.3 setup.py build --build-base=py-build/3.3 - @echo "==> Python 3.4 (build)" - @python3.4 setup.py build --build-base=py-build/3.4 - @echo "==> Python 3.5 (build)" - @python3.5 setup.py build --build-base=py-build/3.5 - - release: -ifdef RELEASE_GPG_KEYNAME +ifeq ($(RELEASE_GPG_KEYNAME),) + $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) +else @echo "==> Python 2.7 (release)" @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.3 (release)" @@ -32,6 +23,4 @@ ifdef RELEASE_GPG_KEYNAME @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.5 (release)" @python3.5 setup.py build --build-base=py-build/3.5 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) -else -$(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index 9a63a17c..2712f7be 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -55,7 +55,14 @@ then eval "$(pyenv virtualenv-init -)" fi -# Now install (allthethings) versions for testing +# 2.7.8 is special case +if ! pyenv versions | fgrep -q 'riak_2.7.8' +then + echo "[INFO] installing Python 2.7.8" + VERSION_ALIAS='riak_2.7.8' pyenv install '2.7.8' + pyenv virtualenv 'riak_2.7.8' 'riak-py278' +fi + for pyver in 2.7 3.3 3.4 3.5 do if ! 
pyenv versions | fgrep "riak_$pyver" @@ -71,7 +78,7 @@ do fi done -(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27) +(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278) pyenv versions diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 985c569a..ec173f29 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -2,6 +2,7 @@ import riak.pb.riak_pb2 import riak.pb.riak_dt_pb2 import riak.pb.riak_kv_pb2 +import riak.pb.riak_ts_pb2 import logging import datetime @@ -9,6 +10,7 @@ from riak.content import RiakContent from riak.util import decode_index_value, str_to_bytes, bytes_to_str from riak.multidict import MultiDict +from riak.pb.riak_ts_pb2 import TsColumnType from six import string_types, PY2 @@ -677,7 +679,7 @@ def _encode_timeseries_put(self, tsobj, req): :param tsobj: a TsObject :type tsobj: TsObject :param req: the protobuf message to fill - :type req: riak_pb.TsPutReq + :type req: riak.pb.riak_ts_pb2.TsPutReq """ req.table = str_to_bytes(tsobj.table.name) @@ -686,11 +688,11 @@ def _encode_timeseries_put(self, tsobj, req): if tsobj.rows and isinstance(tsobj.rows, list): for row in tsobj.rows: - tsr = req.rows.add() # NB: type riak_pb.TsRow + tsr = req.rows.add() # NB: type TsRow if not isinstance(row, list): raise ValueError("TsObject row must be a list of values") for cell in row: - tsc = tsr.cells.add() # NB: type riak_pb.TsCell + tsc = tsr.cells.add() # NB: type TsCell self._encode_to_ts_cell(cell, tsc) else: raise RiakError("TsObject requires a list of rows") @@ -701,7 +703,7 @@ def _decode_timeseries(self, resp, tsobj): metadata from a TsQueryResp. :param resp: the protobuf message from which to process data - :type resp: riak_pb.TsQueryRsp or riak_pb.TsGetResp + :type resp: riak.pb.TsQueryRsp or riak.pb.riak_ts_pb2.TsGetResp :param tsobj: a TsObject :type tsobj: TsObject """ @@ -721,7 +723,7 @@ def _decode_timeseries_row(self, tsrow, tscols=None): Decodes a TsRow into a list :param tsrow: the protobuf TsRow to decode. - :type tsrow: riak_pb.TsRow + :type tsrow: riak.pb.riak_ts_pb2.TsRow :param tscols: the protobuf TsColumn data to help decode. 
:type tscols: list :rtype list @@ -732,29 +734,29 @@ def _decode_timeseries_row(self, tsrow, tscols=None): if tscols is not None: col = tscols[i] if cell.HasField('varchar_value'): - if col and col.type != riak_pb.TsColumnType.Value('VARCHAR'): + if col and col.type != TsColumnType.Value('VARCHAR'): raise TypeError('expected VARCHAR column') else: row.append(bytes_to_str(cell.varchar_value)) elif cell.HasField('sint64_value'): - if col and col.type != riak_pb.TsColumnType.Value('SINT64'): + if col and col.type != TsColumnType.Value('SINT64'): raise TypeError('expected SINT64 column') else: row.append(cell.sint64_value) elif cell.HasField('double_value'): - if col and col.type != riak_pb.TsColumnType.Value('DOUBLE'): + if col and col.type != TsColumnType.Value('DOUBLE'): raise TypeError('expected DOUBLE column') else: row.append(cell.double_value) elif cell.HasField('timestamp_value'): - if col and col.type != riak_pb.TsColumnType.Value('TIMESTAMP'): + if col and col.type != TsColumnType.Value('TIMESTAMP'): raise TypeError('expected TIMESTAMP column') else: dt = self._datetime_from_unix_time_millis( cell.timestamp_value) row.append(dt) elif cell.HasField('boolean_value'): - if col and col.type != riak_pb.TsColumnType.Value('BOOLEAN'): + if col and col.type != TsColumnType.Value('BOOLEAN'): raise TypeError('expected BOOLEAN column') else: row.append(cell.boolean_value) diff --git a/riak/transports/pbc/stream.py b/riak/transports/pbc/stream.py index f4538f5f..ed649279 100644 --- a/riak/transports/pbc/stream.py +++ b/riak/transports/pbc/stream.py @@ -166,7 +166,7 @@ class RiakPbcTsKeyStream(RiakPbcStream, RiakPbcCodec): Used internally by RiakPbcTransport to implement key-list streams. """ - _expect = MSG_CODE_TS_LIST_KEYS_RESP + _expect = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_RESP def next(self): response = super(RiakPbcTsKeyStream, self).next() diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 87228721..3ad5ae1c 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -1,6 +1,8 @@ import riak.pb.messages import riak.pb.riak_pb2 import riak.pb.riak_kv_pb2 +import riak.pb.riak_ts_pb2 + from riak import RiakError from riak.transports.transport import RiakTransport from riak.riak_object import VClock @@ -16,7 +18,6 @@ from six import PY2, PY3 - class RiakPbcTransport(RiakTransport, RiakPbcConnection, RiakPbcCodec): """ The RiakPbcTransport object holds a connection to the protocol @@ -173,22 +174,24 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, return robj def ts_get(self, table, key): - req = riak_pb.TsGetReq() + req = riak.pb.riak_ts_pb2.TsGetReq() self._encode_timeseries_keyreq(table, key, req) - msg_code, ts_get_resp = self._request(MSG_CODE_TS_GET_REQ, req, - MSG_CODE_TS_GET_RESP) + msg_code, ts_get_resp = self._request( + riak.pb.messages.MSG_CODE_TS_GET_REQ, req, + riak.pb.messages.MSG_CODE_TS_GET_RESP) tsobj = TsObject(self._client, table, [], None) self._decode_timeseries(ts_get_resp, tsobj) return tsobj def ts_put(self, tsobj): - req = riak_pb.TsPutReq() + req = riak.pb.riak_ts_pb2.TsPutReq() self._encode_timeseries_put(tsobj, req) - msg_code, resp = self._request(MSG_CODE_TS_PUT_REQ, req, - MSG_CODE_TS_PUT_RESP) + msg_code, resp = self._request( + riak.pb.messages.MSG_CODE_TS_PUT_REQ, req, + riak.pb.messages.MSG_CODE_TS_PUT_RESP) if resp is not None: return True @@ -196,11 +199,12 @@ def ts_put(self, tsobj): raise RiakError("missing response object") def ts_delete(self, table, key): - req = 
riak_pb.TsDelReq() + req = riak.pb.riak_ts_pb2.TsDelReq() self._encode_timeseries_keyreq(table, key, req) - msg_code, ts_del_resp = self._request(MSG_CODE_TS_DEL_REQ, req, - MSG_CODE_TS_DEL_RESP) + msg_code, ts_del_resp = self._request( + riak.pb.messages.MSG_CODE_TS_DEL_REQ, req, + riak.pb.messages.MSG_CODE_TS_DEL_RESP) if ts_del_resp is not None: return True @@ -208,11 +212,12 @@ def ts_delete(self, table, key): raise RiakError("missing response object") def ts_query(self, table, query, interpolations=None): - req = riak_pb.TsQueryReq() + req = riak.pb.riak_ts_pb2.TsQueryReq() req.query.base = str_to_bytes(query) - msg_code, ts_query_resp = self._request(MSG_CODE_TS_QUERY_REQ, req, - MSG_CODE_TS_QUERY_RESP) + msg_code, ts_query_resp = self._request( + riak.pb.messages.MSG_CODE_TS_QUERY_REQ, req, + riak.pb.messages.MSG_CODE_TS_QUERY_RESP) tsobj = TsObject(self._client, table, [], []) self._decode_timeseries(ts_query_resp, tsobj) @@ -223,13 +228,13 @@ def ts_stream_keys(self, table, timeout=None): Streams keys from a timeseries table, returning an iterator that yields lists of keys. """ - req = riak_pb.TsListKeysReq() + req = riak.pb.riak_ts_pb2.TsListKeysReq() t = None if self.client_timeouts() and timeout: t = timeout self._encode_timeseries_listkeysreq(table, req, t) - self._send_msg(MSG_CODE_TS_LIST_KEYS_REQ, req) + self._send_msg(riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ, req) return RiakPbcTsKeyStream(self) diff --git a/setup.py b/setup.py index c74ad69a..9446ae96 100755 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ from setuptools import setup, find_packages from version import get_version from commands import preconfigure, configure, create_bucket_types, \ - setup_security, enable_security, disable_security, setup_timeseries + setup_security, enable_security, disable_security, setup_timeseries, \ build_messages install_requires = ['six >= 1.8.0'] From 6bbb3fc5f65d8e671ea60e66ff787d67042018b4 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 09:22:38 -0800 Subject: [PATCH 075/324] pb namespace cleanup --- riak/tests/test_timeseries.py | 33 +++++++++++++++++---------------- riak/transports/pbc/codec.py | 2 +- 2 files changed, 18 insertions(+), 17 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 8835e21d..23b54ca0 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- import datetime import platform -import riak_pb +import riak.pb.riak_ts_pb2 from riak import RiakError from riak.table import Table @@ -10,6 +10,7 @@ from riak.util import str_to_bytes, bytes_to_str from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase +from riak.pb.riak_ts_pb2 import TsColumnType if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -47,18 +48,18 @@ def validate_keyreq(self, req): self.assertEqual(self.ts0ms, req.key[2].timestamp_value) def test_encode_data_for_get(self): - req = riak_pb.TsGetReq() + req = riak.pb.riak_ts_pb2.TsGetReq() self.c._encode_timeseries_keyreq(self.table, self.test_key, req) self.validate_keyreq(req) def test_encode_data_for_delete(self): - req = riak_pb.TsDelReq() + req = riak.pb.riak_ts_pb2.TsDelReq() self.c._encode_timeseries_keyreq(self.table, self.test_key, req) self.validate_keyreq(req) def test_encode_data_for_put(self): tsobj = TsObject(None, self.table, self.rows, None) - ts_put_req = riak_pb.TsPutReq() + ts_put_req = riak.pb.riak_pb_ts2.TsPutReq() self.c._encode_timeseries_put(tsobj, 
ts_put_req) # NB: expected, actual @@ -82,29 +83,29 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) def test_encode_data_for_listkeys(self): - req = riak_pb.TsListKeysReq() + req = riak.pb.riak_pb_ts2.TsListKeysReq() self.c._encode_timeseries_listkeysreq(self.table, req, 1234) self.assertEqual(self.table.name, bytes_to_str(req.table)) self.assertEqual(1234, req.timeout) def test_decode_data_from_query(self): - tqr = riak_pb.TsQueryResp() + tqr = riak.pb.riak_pb_ts2.TsQueryResp() c0 = tqr.columns.add() c0.name = str_to_bytes('col_varchar') - c0.type = riak_pb.TsColumnType.Value('VARCHAR') + c0.type = riak.pb.riak_pb_ts2.TsColumnType.Value('VARCHAR') c1 = tqr.columns.add() c1.name = str_to_bytes('col_integer') - c1.type = riak_pb.TsColumnType.Value('SINT64') + c1.type = riak.pb.riak_pb_ts2.TsColumnType.Value('SINT64') c2 = tqr.columns.add() c2.name = str_to_bytes('col_double') - c2.type = riak_pb.TsColumnType.Value('DOUBLE') + c2.type = TsColumnType.Value('DOUBLE') c3 = tqr.columns.add() c3.name = str_to_bytes('col_timestamp') - c3.type = riak_pb.TsColumnType.Value('TIMESTAMP') + c3.type = TsColumnType.Value('TIMESTAMP') c4 = tqr.columns.add() c4.name = str_to_bytes('col_boolean') - c4.type = riak_pb.TsColumnType.Value('BOOLEAN') + c4.type = TsColumnType.Value('BOOLEAN') r0 = tqr.rows.add() r0c0 = r0.cells.add() @@ -139,15 +140,15 @@ def test_decode_data_from_query(self): c = tsobj.columns self.assertEqual(c[0][0], 'col_varchar') - self.assertEqual(c[0][1], riak_pb.TsColumnType.Value('VARCHAR')) + self.assertEqual(c[0][1], TsColumnType.Value('VARCHAR')) self.assertEqual(c[1][0], 'col_integer') - self.assertEqual(c[1][1], riak_pb.TsColumnType.Value('SINT64')) + self.assertEqual(c[1][1], TsColumnType.Value('SINT64')) self.assertEqual(c[2][0], 'col_double') - self.assertEqual(c[2][1], riak_pb.TsColumnType.Value('DOUBLE')) + self.assertEqual(c[2][1], TsColumnType.Value('DOUBLE')) self.assertEqual(c[3][0], 'col_timestamp') - self.assertEqual(c[3][1], riak_pb.TsColumnType.Value('TIMESTAMP')) + self.assertEqual(c[3][1], TsColumnType.Value('TIMESTAMP')) self.assertEqual(c[4][0], 'col_boolean') - self.assertEqual(c[4][1], riak_pb.TsColumnType.Value('BOOLEAN')) + self.assertEqual(c[4][1], TsColumnType.Value('BOOLEAN')) r0 = tsobj.rows[0] self.assertEqual(r0[0], self.rows[0][0]) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index ec173f29..a7be2a89 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -692,7 +692,7 @@ def _encode_timeseries_put(self, tsobj, req): if not isinstance(row, list): raise ValueError("TsObject row must be a list of values") for cell in row: - tsc = tsr.cells.add() # NB: type TsCell + tsc = tsr.cells.add() # NB: type TsCell self._encode_to_ts_cell(cell, tsc) else: raise RiakError("TsObject requires a list of rows") From 477a0ab78754e01181e8f948f888c2857197ccd5 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 09:51:51 -0800 Subject: [PATCH 076/324] Remove THANKS file from manifest --- MANIFEST.in | 1 - 1 file changed, 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index e573808c..db8a14a0 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,5 @@ include docs/* include riak/erl_src/* -include THANKS include README.rst include LICENSE include RELEASE_NOTES.md From 7885c90715a2e7ae95999db51b1d6102b8bd8467 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 10:00:01 -0800 Subject: [PATCH 077/324] special-case 2.7.8 --- buildbot/tox_setup.sh | 
18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index 2712f7be..df28ea34 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -55,17 +55,10 @@ then eval "$(pyenv virtualenv-init -)" fi -# 2.7.8 is special case -if ! pyenv versions | fgrep -q 'riak_2.7.8' -then - echo "[INFO] installing Python 2.7.8" - VERSION_ALIAS='riak_2.7.8' pyenv install '2.7.8' - pyenv virtualenv 'riak_2.7.8' 'riak-py278' -fi - +# NB: 2.7.8 is special-cased for pyver in 2.7 3.3 3.4 3.5 do - if ! pyenv versions | fgrep "riak_$pyver" + if ! pyenv versions | fgrep -v 'riak_2.7.8' | fgrep -q "riak_$pyver" then declare -i pymaj="${pyver%.*}" declare -i pymin="${pyver#*.}" @@ -78,6 +71,13 @@ do fi done +if ! pyenv versions | fgrep -q 'riak_2.7.8' +then + echo "[INFO] installing Python 2.7.8" + VERSION_ALIAS='riak_2.7.8' pyenv install '2.7.8' + pyenv virtualenv 'riak_2.7.8' 'riak-py278' +fi + (cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278) pyenv versions From 9c098ddbcf5362f5f0ff92f17dd427050feeaeb2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 14 Dec 2015 13:02:21 -0800 Subject: [PATCH 078/324] namespace fixes in timeseries tests --- riak/tests/test_timeseries.py | 15 +++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 23b54ca0..b0b28423 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -59,7 +59,7 @@ def test_encode_data_for_delete(self): def test_encode_data_for_put(self): tsobj = TsObject(None, self.table, self.rows, None) - ts_put_req = riak.pb.riak_pb_ts2.TsPutReq() + ts_put_req = riak.pb.riak_ts_pb2.TsPutReq() self.c._encode_timeseries_put(tsobj, ts_put_req) # NB: expected, actual @@ -83,20 +83,20 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) def test_encode_data_for_listkeys(self): - req = riak.pb.riak_pb_ts2.TsListKeysReq() + req = riak.pb.riak_ts_pb2.TsListKeysReq() self.c._encode_timeseries_listkeysreq(self.table, req, 1234) self.assertEqual(self.table.name, bytes_to_str(req.table)) self.assertEqual(1234, req.timeout) def test_decode_data_from_query(self): - tqr = riak.pb.riak_pb_ts2.TsQueryResp() + tqr = riak.pb.riak_ts_pb2.TsQueryResp() c0 = tqr.columns.add() c0.name = str_to_bytes('col_varchar') - c0.type = riak.pb.riak_pb_ts2.TsColumnType.Value('VARCHAR') + c0.type = TsColumnType.Value('VARCHAR') c1 = tqr.columns.add() c1.name = str_to_bytes('col_integer') - c1.type = riak.pb.riak_pb_ts2.TsColumnType.Value('SINT64') + c1.type = TsColumnType.Value('SINT64') c2 = tqr.columns.add() c2.name = str_to_bytes('col_double') c2.type = TsColumnType.Value('DOUBLE') @@ -288,9 +288,8 @@ def test_stream_keys(self): self.assertEqual(len(key), 3) self.assertEqual('hash1', key[0]) self.assertEqual('user2', key[1]) - # TODO RTS-367 ENABLE - # self.assertIsInstance(key[2], datetime.datetime) - self.assertEqual(len(streamed_keys), 5) + self.assertIsInstance(key[2], datetime.datetime) + self.assertGreater(len(streamed_keys), 0) def test_delete_single_value(self): key = ['hash1', 'user2', self.twentyFiveMinsAgo] From 44ae30bae7dd5273b2602bb07d3def2d8e25399e Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Mon, 14 Dec 2015 14:55:19 -0700 Subject: [PATCH 079/324] Add note about integrated protobufs --- RELEASE_NOTES.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 
3f35302a..f631939f 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -11,6 +11,8 @@ Python 3.5.x. There are also many bugfixes and new enhancements: +* [Protocol buffers are now integrated into the Python Client] + (https://github.com/basho/riak-python-client/pull/418) * [Support for Preflists and Write-Once bucket types] (https://github.com/basho/riak-python-client/pull/414) * [Support Riak 2.1.1] From 5cefff83fffb0f9484228ebf39e691756d02768b Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Mon, 14 Dec 2015 19:49:36 -0700 Subject: [PATCH 080/324] Update RELEASE_NOTES.md --- RELEASE_NOTES.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index f631939f..4f418cbd 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -17,8 +17,6 @@ There are also many bugfixes and new enhancements: (https://github.com/basho/riak-python-client/pull/414) * [Support Riak 2.1.1] (https://github.com/basho/riak-python-client/pull/407) -* [Addition of new API entry point methods] - (https://github.com/basho/riak-python-client/pull/398) * [Native SSL support for Python 2.7.9+] (https://github.com/basho/riak-python-client/pull/397) From 2955d9f56ebfbe6cf9694646668ffc0e1bb10cd5 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Tue, 15 Dec 2015 05:26:53 +0000 Subject: [PATCH 081/324] Update the list of supported platforms --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 9446ae96..5219948d 100755 --- a/setup.py +++ b/setup.py @@ -56,9 +56,9 @@ classifiers=['License :: OSI Approved :: Apache Software License', 'Intended Audience :: Developers', 'Operating System :: OS Independent', - 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', 'Topic :: Database'] ) From 244a7f2817ce134c16f38df7e1c4c99e0a42ab64 Mon Sep 17 00:00:00 2001 From: Nico Revin Date: Fri, 25 Sep 2015 12:24:23 +0300 Subject: [PATCH 082/324] Fix TypeError when working with nested maps --- riak/datatypes/map.py | 13 ++++++++----- riak/tests/test_datatypes.py | 6 ++++++ 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/riak/datatypes/map.py b/riak/datatypes/map.py index 4ea64f67..a2bbdf2b 100644 --- a/riak/datatypes/map.py +++ b/riak/datatypes/map.py @@ -257,12 +257,15 @@ def modified(self): """ Whether the map has staged local modifications. 
""" - values_modified = [self._value[v].modified for v in self._value] - modified = (any(values_modified) or self._removes or self._updates) - if modified: + if self._removes: return True - else: - return False + for v in self._value: + if self._value[v].modified: + return True + for v in self._updates: + if self._updates[v].modified: + return True + return False def to_op(self): """ diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 747de515..87ca1b20 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -122,6 +122,9 @@ def op(self, dtype): dtype.registers['b'].assign('testing') dtype.flags['c'].enable() dtype.maps['d'][('e', 'set')].add('deep value') + dtype.maps['f'].counters['g'] + dtype.maps['h'].maps['i'].flags['j'] + def check_op_output(self, op): self.assertIn(('update', ('a', 'counter'), ('increment', 2)), op) @@ -130,6 +133,9 @@ def check_op_output(self, op): self.assertIn(('update', ('d', 'map'), [('update', ('e', 'set'), {'adds': ['deep value']})]), op) + self.assertNotIn(('update', ('f', 'map'), None), op) + self.assertNotIn(('update', ('h', 'map'), [('update', ('i', 'map'), + None)]), op) def test_removes_require_context(self): dtype = self.dtype(self.bucket, 'key') From 733f10c833712c550aff99c81db0b019edfe7f1a Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Mon, 21 Dec 2015 19:45:55 -0700 Subject: [PATCH 083/324] PEP8 --- riak/tests/test_datatypes.py | 1 - 1 file changed, 1 deletion(-) diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 87ca1b20..c832583b 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -125,7 +125,6 @@ def op(self, dtype): dtype.maps['f'].counters['g'] dtype.maps['h'].maps['i'].flags['j'] - def check_op_output(self, op): self.assertIn(('update', ('a', 'counter'), ('increment', 2)), op) self.assertIn(('update', ('b', 'register'), ('assign', 'testing')), op) From 76eb4eddb10818fbe77f8c087c630b40d011aadf Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 22 Dec 2015 16:42:16 -0800 Subject: [PATCH 084/324] No need to run develop setup step --- buildbot/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index 32a23594..a32f3910 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -14,7 +14,7 @@ configure: @../setup.py configure --riak-admin=$(RIAK_ADMIN) compile: - @../setup.py develop + @echo NO-OP lint: @pip install --upgrade pep8 flake8 From 9eedc47b27778089c386d6b177f594c01716ccf6 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 4 Jan 2016 13:46:05 -0800 Subject: [PATCH 085/324] Add tests to ensure DESCRIBE statement returns columns and rows. Add feature to interpolate table name in queries if "{table}" is in query string. 
--- riak/tests/test_timeseries.py | 47 ++++++++++++++++++++++++++++++++ riak/transports/pbc/transport.py | 7 ++++- 2 files changed, 53 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index b0b28423..8261f170 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -213,6 +213,29 @@ def validate_data(self, ts_obj): self.assertEqual(row[3], 'wind') self.assertIsNone(row[4]) + def test_query_that_returns_table_description(self): + fmt = 'DESCRIBE {table}' + query = fmt.format(table=table_name) + ts_obj = self.client.ts_query('GeoCheckin', query) + self.assertIsNotNone(ts_obj) + self.assertGreater(len(ts_obj.columns), 0) + self.assertGreater(len(ts_obj.rows), 0) + + def test_query_that_returns_table_description_using_interpolation(self): + query = 'Describe {table}' + ts_obj = self.client.ts_query('GeoCheckin', query) + self.assertIsNotNone(ts_obj) + self.assertGreater(len(ts_obj.columns), 0) + self.assertGreater(len(ts_obj.rows), 0) + + def test_query_description_via_table(self): + query = 'describe {table}' + table = Table(self.client, 'GeoCheckin') + ts_obj = table.query(query) + self.assertIsNotNone(ts_obj) + self.assertGreater(len(ts_obj.columns), 0) + self.assertGreater(len(ts_obj.rows), 0) + def test_query_that_returns_no_data(self): fmt = """ select * from {table} where @@ -225,6 +248,17 @@ def test_query_that_returns_no_data(self): self.assertEqual(len(ts_obj.columns), 0) self.assertEqual(len(ts_obj.rows), 0) + def test_query_that_returns_no_data_using_interpolation(self): + query = """ + select * from {table} where + time > 0 and time < 10 and + geohash = 'hash1' and + user = 'user1' + """ + ts_obj = self.client.ts_query('GeoCheckin', query) + self.assertEqual(len(ts_obj.columns), 0) + self.assertEqual(len(ts_obj.rows), 0) + def test_query_that_matches_some_data(self): fmt = """ select * from {table} where @@ -239,6 +273,19 @@ def test_query_that_matches_some_data(self): ts_obj = self.client.ts_query('GeoCheckin', query) self.validate_data(ts_obj) + def test_query_that_matches_some_data_using_interpolation(self): + fmt = """ + select * from {table} where + time > {t1} and time < {t2} and + geohash = 'hash1' and + user = 'user2' + """ + query = fmt.format( + t1=self.tenMinsAgoMsec, + t2=self.nowMsec) + ts_obj = self.client.ts_query('GeoCheckin', query) + self.validate_data(ts_obj) + def test_query_that_matches_more_data(self): fmt = """ select * from {table} where diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 3ad5ae1c..9960cabb 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -213,7 +213,12 @@ def ts_delete(self, table, key): def ts_query(self, table, query, interpolations=None): req = riak.pb.riak_ts_pb2.TsQueryReq() - req.query.base = str_to_bytes(query) + + q = query + if '{table}' in q: + q = q.format(table=table.name) + + req.query.base = str_to_bytes(q) msg_code, ts_query_resp = self._request( riak.pb.messages.MSG_CODE_TS_QUERY_REQ, req, From fa12c25b6e4be47f7a636e141d14077815dbd394 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 4 Jan 2016 14:30:16 -0800 Subject: [PATCH 086/324] Add methods to retrieve a table description via client object and table object --- docs/client.rst | 2 ++ riak/client/operations.py | 19 +++++++++++++++++++ riak/table.py | 8 ++++++++ riak/tests/test_timeseries.py | 15 ++++++++++++++- riak/transports/pbc/transport.py | 4 ++++ riak/transports/transport.py | 18 ++++++++++++++++++ 6 files 
changed, 65 insertions(+), 1 deletion(-) diff --git a/docs/client.rst b/docs/client.rst index ae2f8e54..f014afd9 100644 --- a/docs/client.rst +++ b/docs/client.rst @@ -127,10 +127,12 @@ Key-level Operations Timeseries Operations -------------------- +.. automethod:: RiakClient.ts_describe .. automethod:: RiakClient.ts_get .. automethod:: RiakClient.ts_put .. automethod:: RiakClient.ts_delete .. automethod:: RiakClient.ts_query +.. automethod:: RiakClient.ts_stream_keys ---------------- Query Operations diff --git a/riak/client/operations.py b/riak/client/operations.py index aaecae7d..d3541b3c 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -536,6 +536,25 @@ def put(self, transport, robj, w=None, dw=None, pw=None, return_body=None, if_none_match=if_none_match, timeout=timeout) + @retryable + def ts_describe(self, transport, table): + """ + ts_describe(table) + + Retrieve a time series table description from the Riak cluster. + + .. note:: This request is automatically retried :attr:`retries` + times if it fails due to network error. + + :param table: The timeseries table. + :type table: string or :class:`Table ` + :rtype: :class:`TsObject ` + """ + t = table + if isinstance(t, string_types): + t = Table(self, table) + return transport.ts_describe(t) + @retryable def ts_get(self, transport, table, key): """ diff --git a/riak/table.py b/riak/table.py index c477a32b..d026bf18 100644 --- a/riak/table.py +++ b/riak/table.py @@ -49,6 +49,14 @@ def new(self, rows, columns=None): return TsObject(self._client, self, rows, columns) + def describe(self): + """ + Retrieves a timeseries table's description. + + :rtype: :class:`TsObject ` + """ + return self._client.ts_describe(self) + def get(self, key): """ Gets a value from a timeseries table. 
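For reference, the key-based read path documented above can be exercised as follows; this is a sketch only, assuming the GeoCheckin schema (geohash, user, time) used throughout the integration tests and a reachable node:

    import datetime
    from riak import RiakClient

    client = RiakClient(protocol='pbc')
    key = ['hash1', 'user2', datetime.datetime(2015, 1, 1, 12, 0, 0)]
    ts_obj = client.ts_get('GeoCheckin', key)       # via the client
    ts_obj = client.table('GeoCheckin').get(key)    # via a Table object
    print(ts_obj.rows)
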
diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 8261f170..5560512d 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -236,6 +236,19 @@ def test_query_description_via_table(self): self.assertGreater(len(ts_obj.columns), 0) self.assertGreater(len(ts_obj.rows), 0) + def test_get_description(self): + ts_obj = self.client.ts_describe('GeoCheckin') + self.assertIsNotNone(ts_obj) + self.assertGreater(len(ts_obj.columns), 0) + self.assertGreater(len(ts_obj.rows), 0) + + def test_get_description_via_table(self): + table = Table(self.client, 'GeoCheckin') + ts_obj = table.describe() + self.assertIsNotNone(ts_obj) + self.assertGreater(len(ts_obj.columns), 0) + self.assertGreater(len(ts_obj.rows), 0) + def test_query_that_returns_no_data(self): fmt = """ select * from {table} where @@ -275,7 +288,7 @@ def test_query_that_matches_some_data(self): def test_query_that_matches_some_data_using_interpolation(self): fmt = """ - select * from {table} where + select * from {{table}} where time > {t1} and time < {t2} and geohash = 'hash1' and user = 'user2' diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 9960cabb..53df2181 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -173,6 +173,10 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, return robj + def ts_describe(self, table): + query = 'DESCRIBE {table}'.format(table=table.name) + return self.ts_query(table, query) + def ts_get(self, table, key): req = riak.pb.riak_ts_pb2.TsGetReq() self._encode_timeseries_keyreq(table, key, req) diff --git a/riak/transports/transport.py b/riak/transports/transport.py index f9fcae6d..4f33168c 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -92,12 +92,30 @@ def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, """ raise NotImplementedError + def ts_describe(self, table): + """ + Retrieves a timeseries table description. + """ + raise NotImplementedError + + def ts_get(self, table, key): + """ + Retrieves a timeseries object. + """ + raise NotImplementedError + def ts_put(self, tsobj): """ Stores a timeseries object. """ raise NotImplementedError + def ts_delete(self, table, key): + """ + Deletes a timeseries object. + """ + raise NotImplementedError + def ts_query(self, table, query, interpolations=None): """ Query timeseries data. 
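Taken together, the two describe entry points added in this patch can be called as sketched below, assuming a reachable cluster with an existing GeoCheckin table:

    from riak import RiakClient
    from riak.table import Table

    client = RiakClient(protocol='pbc')
    desc = client.ts_describe('GeoCheckin')          # client-level call
    desc = Table(client, 'GeoCheckin').describe()    # table-level call
    # DESCRIBE returns one row per column of the table
    for row in desc.rows:
        print(row)
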
From a3929635b3b88cb88543bbebc26c515bdfa5bba5 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 5 Jan 2016 10:06:47 -0800 Subject: [PATCH 087/324] Add test to create a TS table via the query interface --- riak/tests/test_timeseries.py | 19 ++++++++++++++++++- 1 file changed, 18 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 5560512d..cef6406b 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -1,6 +1,8 @@ # -*- coding: utf-8 -*- import datetime import platform +import random +import string import riak.pb.riak_ts_pb2 from riak import RiakError @@ -27,7 +29,6 @@ ts1 = ts0 + fiveMins -@unittest.skipUnless(RUN_TIMESERIES, 'RUN_TIMESERIES is 0') class TimeseriesUnitTests(unittest.TestCase): def setUp(self): self.c = RiakPbcCodec() @@ -213,6 +214,22 @@ def validate_data(self, ts_obj): self.assertEqual(row[3], 'wind') self.assertIsNone(row[4]) + def test_query_that_creates_table_using_interpolation(self): + table = ''.join( + [random.choice(string.ascii_letters + string.digits) + for n in range(32)]) + query = """CREATE TABLE {table} ( + geohash varchar not null, + user varchar not null, + time timestamp not null, + weather varchar not null, + temperature double, + PRIMARY KEY((geohash, user, quantum(time, 15, m)), + geohash, user, time)) + """ + ts_obj = self.client.ts_query(table, query) + self.assertIsNotNone(ts_obj) + def test_query_that_returns_table_description(self): fmt = 'DESCRIBE {table}' query = fmt.format(table=table_name) From 44721f8273cf6c3d3be3466382b15e491fb2553b Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 5 Jan 2016 16:56:28 -0800 Subject: [PATCH 088/324] Validate the rows and columns returned from a DESCRIBE query --- riak/tests/test_timeseries.py | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index cef6406b..d819d146 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -235,36 +235,36 @@ def test_query_that_returns_table_description(self): query = fmt.format(table=table_name) ts_obj = self.client.ts_query('GeoCheckin', query) self.assertIsNotNone(ts_obj) - self.assertGreater(len(ts_obj.columns), 0) - self.assertGreater(len(ts_obj.rows), 0) + self.assertEqual(len(ts_obj.columns), 5) + self.assertEqual(len(ts_obj.rows), 5) def test_query_that_returns_table_description_using_interpolation(self): query = 'Describe {table}' ts_obj = self.client.ts_query('GeoCheckin', query) self.assertIsNotNone(ts_obj) - self.assertGreater(len(ts_obj.columns), 0) - self.assertGreater(len(ts_obj.rows), 0) + self.assertEqual(len(ts_obj.columns), 5) + self.assertEqual(len(ts_obj.rows), 5) def test_query_description_via_table(self): query = 'describe {table}' table = Table(self.client, 'GeoCheckin') ts_obj = table.query(query) self.assertIsNotNone(ts_obj) - self.assertGreater(len(ts_obj.columns), 0) - self.assertGreater(len(ts_obj.rows), 0) + self.assertEqual(len(ts_obj.columns), 5) + self.assertEqual(len(ts_obj.rows), 5) def test_get_description(self): ts_obj = self.client.ts_describe('GeoCheckin') self.assertIsNotNone(ts_obj) - self.assertGreater(len(ts_obj.columns), 0) - self.assertGreater(len(ts_obj.rows), 0) + self.assertEqual(len(ts_obj.columns), 5) + self.assertEqual(len(ts_obj.rows), 5) def test_get_description_via_table(self): table = Table(self.client, 'GeoCheckin') ts_obj = table.describe() self.assertIsNotNone(ts_obj) - 
self.assertGreater(len(ts_obj.columns), 0) - self.assertGreater(len(ts_obj.rows), 0) + self.assertEqual(len(ts_obj.columns), 5) + self.assertEqual(len(ts_obj.rows), 5) def test_query_that_returns_no_data(self): fmt = """ From f33059d74856b4599ba7b92942b8ac3298a853fb Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 8 Jan 2016 12:32:52 -0800 Subject: [PATCH 089/324] Ensure table name starts with alpha char --- riak/tests/test_timeseries.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index d819d146..21a0d3b9 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -218,7 +218,7 @@ def test_query_that_creates_table_using_interpolation(self): table = ''.join( [random.choice(string.ascii_letters + string.digits) for n in range(32)]) - query = """CREATE TABLE {table} ( + query = """CREATE TABLE test-{table} ( geohash varchar not null, user varchar not null, time timestamp not null, From 6f169a7bd58fa0de3d8152c0239f34dde6ac2691 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Mon, 11 Jan 2016 16:58:46 +0000 Subject: [PATCH 090/324] Add convenience Makefile target configure_timeseries in buildbot --- buildbot/Makefile | 3 +++ 1 file changed, 3 insertions(+) diff --git a/buildbot/Makefile b/buildbot/Makefile index a32f3910..fd0ee0ed 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -13,6 +13,9 @@ preconfigure: configure: @../setup.py configure --riak-admin=$(RIAK_ADMIN) +configure_timeseries: + @../setup.py setup_timeseries --riak-admin=$(RIAK_ADMIN) + compile: @echo NO-OP From ce238e7a8df2da41ad60ed6bb3a7966f85a8d0c2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 13 Jan 2016 11:40:04 -0800 Subject: [PATCH 091/324] Add assertions for CREATE TABLE statement --- riak/tests/test_timeseries.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 21a0d3b9..e85c28c1 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -229,6 +229,8 @@ def test_query_that_creates_table_using_interpolation(self): """ ts_obj = self.client.ts_query(table, query) self.assertIsNotNone(ts_obj) + self.assertEqual(len(ts_obj.columns), 0) + self.assertEqual(len(ts_obj.rows), 0) def test_query_that_returns_table_description(self): fmt = 'DESCRIBE {table}' From 14d8e4cc195513378df7ccea68bed3b322600bbc Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 13 Jan 2016 11:44:51 -0800 Subject: [PATCH 092/324] Verbosely clarify TS put result --- riak/tests/test_timeseries.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index e85c28c1..595dcffb 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -189,7 +189,7 @@ def setUpClass(cls): ] ts_obj = table.new(rows) result = ts_obj.store() - if not result: + if not result == True: raise AssertionError("expected success") client.close() From eb6e5ebda200cbd8987303cd285e9bb891ad393a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 13 Jan 2016 11:56:23 -0800 Subject: [PATCH 093/324] 2.4.0 Release Notes --- RELEASE_NOTES.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 4f418cbd..7bfea4f7 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,5 +1,11 @@ # Riak Python Client Release Notes +## 2.4.0 Feature Release - 2016-01-13 + +This release enhances Riak Time Series functionality. 
+ +* [Encapsulate table description](https://github.com/basho/riak-python-client/pull/422) + ## 2.3.0 Feature Release - 2015-12-14 Release 2.3.0 features support for new From c4c17da704a872da1150db7753b08b72fc04f26e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 13 Jan 2016 12:11:20 -0800 Subject: [PATCH 094/324] Make lint happy --- riak/tests/test_timeseries.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 595dcffb..e36cd95e 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -189,7 +189,7 @@ def setUpClass(cls): ] ts_obj = table.new(rows) result = ts_obj.store() - if not result == True: + if result is not True: raise AssertionError("expected success") client.close() From 3b20639cbeaf443cfb51ff2c15c39d005fe26513 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 28 Jan 2016 10:54:42 -0800 Subject: [PATCH 095/324] Get benchmarks in their own spot to add some for timeseries. --- riak/benchmarks/multiget.py | 40 ++++++++++++++++++++++++ riak/client/multiget.py | 61 ++----------------------------------- 2 files changed, 42 insertions(+), 59 deletions(-) create mode 100644 riak/benchmarks/multiget.py diff --git a/riak/benchmarks/multiget.py b/riak/benchmarks/multiget.py new file mode 100644 index 00000000..6baaba71 --- /dev/null +++ b/riak/benchmarks/multiget.py @@ -0,0 +1,40 @@ +from riak import RiakClient +from multiprocessing import cpu_count +import binascii +import os +import riak.benchmark as benchmark +import riak.client.multiget as mget + +client = RiakClient(protocol='pbc') +bkeys = [('default', 'multiget', str(key)) for key in range(10000)] + +data = binascii.b2a_hex(os.urandom(1024)) + +print("Benchmarking multiget:") +print(" CPUs: {0}".format(cpu_count())) +print(" Threads: {0}".format(mget.POOL_SIZE)) +print(" Keys: {0}".format(len(bkeys))) +print() + +with benchmark.measure() as b: + with b.report('populate'): + for _, bucket, key in bkeys: + client.bucket(bucket).new(key, encoded_data=data, + content_type='text/plain' + ).store() +for b in benchmark.measure_with_rehearsal(): + client.protocol = 'http' + with b.report('http seq'): + for _, bucket, key in bkeys: + client.bucket(bucket).get(key) + + with b.report('http multi'): + mget.multiget(client, bkeys) + + client.protocol = 'pbc' + with b.report('pbc seq'): + for _, bucket, key in bkeys: + client.bucket(bucket).get(key) + + with b.report('pbc multi'): + mget.multiget(client, bkeys) diff --git a/riak/client/multiget.py b/riak/client/multiget.py index 20d02801..9b5d7522 100644 --- a/riak/client/multiget.py +++ b/riak/client/multiget.py @@ -1,26 +1,9 @@ -""" -Copyright 2013 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - from __future__ import print_function from collections import namedtuple from threading import Thread, Lock, Event from multiprocessing import cpu_count from six import PY2 + if PY2: from Queue import Queue else: @@ -177,8 +160,8 @@ def multiget(client, keys, **options): :meth:`RiakBucket.get ` :type options: dict :rtype: list - """ + outq = Queue() if 'pool' in options: @@ -201,43 +184,3 @@ def multiget(client, keys, **options): outq.task_done() return results - -if __name__ == '__main__': - # Run a benchmark! - from riak import RiakClient - import riak.benchmark as benchmark - client = RiakClient(protocol='pbc') - bkeys = [('default', 'multiget', str(key)) for key in range(10000)] - - data = None - with open(__file__) as f: - data = f.read() - - print("Benchmarking multiget:") - print(" CPUs: {0}".format(cpu_count())) - print(" Threads: {0}".format(POOL_SIZE)) - print(" Keys: {0}".format(len(bkeys))) - print() - - with benchmark.measure() as b: - with b.report('populate'): - for _, bucket, key in bkeys: - client.bucket(bucket).new(key, encoded_data=data, - content_type='text/plain' - ).store() - for b in benchmark.measure_with_rehearsal(): - client.protocol = 'http' - with b.report('http seq'): - for _, bucket, key in bkeys: - client.bucket(bucket).get(key) - - with b.report('http multi'): - multiget(client, bkeys) - - client.protocol = 'pbc' - with b.report('pbc seq'): - for _, bucket, key in bkeys: - client.bucket(bucket).get(key) - - with b.report('pbc multi'): - multiget(client, bkeys) From 02f509ed1c840ccd3777c6ee80c26aeee799d134 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 28 Jan 2016 13:22:49 -0800 Subject: [PATCH 096/324] Begin timeseries benchmarks --- riak/benchmarks/timeseries.py | 39 +++++++++++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 riak/benchmarks/timeseries.py diff --git a/riak/benchmarks/timeseries.py b/riak/benchmarks/timeseries.py new file mode 100644 index 00000000..ef3d789c --- /dev/null +++ b/riak/benchmarks/timeseries.py @@ -0,0 +1,39 @@ +from riak import RiakClient +from multiprocessing import cpu_count +import binascii +import os +import riak.benchmark as benchmark + +client = RiakClient(protocol='pbc') +bkeys = [('default', 'multiget', str(key)) for key in range(10000)] + +data = binascii.b2a_hex(os.urandom(1024)) + +print("Benchmarking timeseries:") +print(" CPUs: {0}".format(cpu_count())) +print(" Keys: {0}".format(len(bkeys))) +print() + +with benchmark.measure() as b: + with b.report('populate'): + for _, bucket, key in bkeys: + client.bucket(bucket).new(key, encoded_data=data, + content_type='text/plain' + ).store() + +for b in benchmark.measure_with_rehearsal(): + client.protocol = 'http' + with b.report('http seq'): + for _, bucket, key in bkeys: + client.bucket(bucket).get(key) + + with b.report('http multi'): + mget.multiget(client, bkeys) + + client.protocol = 'pbc' + with b.report('pbc seq'): + for _, bucket, key in bkeys: + client.bucket(bucket).get(key) + + with b.report('pbc multi'): + mget.multiget(client, bkeys) From 2baadfb8cde23bb8ec521c7e7bf7ca80d58dd478 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 28 Jan 2016 15:51:18 -0800 Subject: [PATCH 097/324] No need to limit connections to localhost. 
--- commands.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/commands.py b/commands.py index c12b9191..73a83ce2 100644 --- a/commands.py +++ b/commands.py @@ -266,9 +266,9 @@ class setup_security(Command, security_commands): _commands = [ "add-user $USERNAME password=$PASSWORD", - "add-source $USERNAME 127.0.0.1/32 password", + "add-source $USERNAME 0.0.0.0/32 password", "add-user $CERTUSER password=$CERTPASS", - "add-source $CERTUSER 127.0.0.1/32 certificate" + "add-source $CERTUSER 0.0.0.0/32 certificate" ] _grants = { @@ -392,9 +392,9 @@ class preconfigure(Command): * Update these lines in riak.conf * storage_backend = leveldb * search = on - * listener.protobuf.internal = 127.0.0.1:8087 - * listener.http.internal = 127.0.0.1:8098 - * listener.https.internal = 127.0.0.1:18098 + * listener.protobuf.internal = 0.0.0.0:8087 + * listener.http.internal = 0.0.0.0:8098 + * listener.https.internal = 0.0.0.0:18098 * ssl.certfile = $pwd/tests/resources/server.crt * ssl.keyfile = $pwd/tests/resources/server.key * ssl.cacertfile = $pwd/tests/resources/ca.crt @@ -412,7 +412,7 @@ class preconfigure(Command): def initialize_options(self): self.riak_conf = None - self.host = "127.0.0.1" + self.host = "0.0.0.0" self.pb_port = "8087" self.http_port = "8098" self.https_port = "18098" From 270a856d13230a25f37f7dbfaa3eeafa3f3ae557 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 1 Feb 2016 07:19:04 -0800 Subject: [PATCH 098/324] Put / Get TS benchmark complete. --- riak/benchmark.py | 18 -------- riak/benchmarks/multiget.py | 4 +- riak/benchmarks/timeseries.py | 84 +++++++++++++++++++++++------------ riak/tests/test_timeseries.py | 13 +++--- riak/transports/pbc/codec.py | 20 +++------ riak/util.py | 19 +++++++- 6 files changed, 88 insertions(+), 70 deletions(-) diff --git a/riak/benchmark.py b/riak/benchmark.py index 13286100..c26a0a49 100644 --- a/riak/benchmark.py +++ b/riak/benchmark.py @@ -1,21 +1,3 @@ -""" -Copyright 2013 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - from __future__ import print_function import os import gc diff --git a/riak/benchmarks/multiget.py b/riak/benchmarks/multiget.py index 6baaba71..505069a4 100644 --- a/riak/benchmarks/multiget.py +++ b/riak/benchmarks/multiget.py @@ -20,8 +20,8 @@ with b.report('populate'): for _, bucket, key in bkeys: client.bucket(bucket).new(key, encoded_data=data, - content_type='text/plain' - ).store() + content_type='text/plain' + ).store() for b in benchmark.measure_with_rehearsal(): client.protocol = 'http' with b.report('http seq'): diff --git a/riak/benchmarks/timeseries.py b/riak/benchmarks/timeseries.py index ef3d789c..57ed6f5c 100644 --- a/riak/benchmarks/timeseries.py +++ b/riak/benchmarks/timeseries.py @@ -1,39 +1,67 @@ -from riak import RiakClient from multiprocessing import cpu_count -import binascii -import os +from riak import RiakClient import riak.benchmark as benchmark +import datetime +import random -client = RiakClient(protocol='pbc') -bkeys = [('default', 'multiget', str(key)) for key in range(10000)] +epoch = datetime.datetime.utcfromtimestamp(0) +onesec = datetime.timedelta(0, 1) -data = binascii.b2a_hex(os.urandom(1024)) +rowcount = 32768 +batchsz = 32 +if rowcount % batchsz != 0: + raise AssertionError('rowcount must be divisible by batchsz') + +weather = ['typhoon', 'hurricane', 'rain', 'wind', 'snow'] +rows = [] +keys = [] +for i in range(rowcount): + ts = datetime.datetime(2016, 1, 1, 12, 0, 0) + \ + datetime.timedelta(seconds=i) + family_idx = i % 4 + series_idx = i % 4 + family = 'hash{:d}'.format(family_idx) + series = 'user{:d}'.format(series_idx) + w = weather[i % len(weather)] + temp = (i % 100) + random.random() + row = [family, series, ts, w, temp] + key = [family, series, ts] + rows.append(row) + keys.append(key) print("Benchmarking timeseries:") print(" CPUs: {0}".format(cpu_count())) -print(" Keys: {0}".format(len(bkeys))) +print(" Rows: {0}".format(len(rows))) print() +tbl = 'GeoCheckin' +h = 'riak-test' +n = [ + {'host': h, 'pb_port': 10017}, + {'host': h, 'pb_port': 10027}, + {'host': h, 'pb_port': 10037}, + {'host': h, 'pb_port': 10047} +] +client = RiakClient(nodes=n, protocol='pbc') +table = client.table(tbl) + with benchmark.measure() as b: with b.report('populate'): - for _, bucket, key in bkeys: - client.bucket(bucket).new(key, encoded_data=data, - content_type='text/plain' - ).store() - -for b in benchmark.measure_with_rehearsal(): - client.protocol = 'http' - with b.report('http seq'): - for _, bucket, key in bkeys: - client.bucket(bucket).get(key) - - with b.report('http multi'): - mget.multiget(client, bkeys) - - client.protocol = 'pbc' - with b.report('pbc seq'): - for _, bucket, key in bkeys: - client.bucket(bucket).get(key) - - with b.report('pbc multi'): - mget.multiget(client, bkeys) + for i in range(0, rowcount, batchsz): + x = i + y = i + batchsz + r = rows[x:y] + ts_obj = table.new(r) + result = ts_obj.store() + if result is not True: + raise AssertionError("expected success") + with b.report('get'): + for k in keys: + ts_obj = client.ts_get(tbl, k) + if ts_obj is None: + raise AssertionError("expected obj") + if len(ts_obj.rows) != 1: + raise AssertionError("expected one row") + row = ts_obj.rows[0] + if len(row) != 5: + raise AssertionError("expected row to have five items") diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index e36cd95e..d1b64642 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -9,7 +9,7 @@ from riak.table import Table from riak.ts_object import TsObject from 
riak.transports.pbc.codec import RiakPbcCodec -from riak.util import str_to_bytes, bytes_to_str +from riak.util import str_to_bytes, bytes_to_str, unix_time_millis from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase from riak.pb.riak_ts_pb2 import TsColumnType @@ -32,8 +32,8 @@ class TimeseriesUnitTests(unittest.TestCase): def setUp(self): self.c = RiakPbcCodec() - self.ts0ms = self.c._unix_time_millis(ts0) - self.ts1ms = self.c._unix_time_millis(ts1) + self.ts0ms = unix_time_millis(ts0) + self.ts1ms = unix_time_millis(ts1) self.rows = [ [bd0, 0, 1.2, ts0, True], [bd1, 3, 4.5, ts1, False] @@ -193,13 +193,12 @@ def setUpClass(cls): raise AssertionError("expected success") client.close() - codec = RiakPbcCodec() - cls.nowMsec = codec._unix_time_millis(cls.now) + cls.nowMsec = unix_time_millis(cls.now) cls.fiveMinsAgo = fiveMinsAgo cls.twentyMinsAgo = twentyMinsAgo cls.twentyFiveMinsAgo = twentyFiveMinsAgo - cls.tenMinsAgoMsec = codec._unix_time_millis(tenMinsAgo) - cls.twentyMinsAgoMsec = codec._unix_time_millis(twentyMinsAgo) + cls.tenMinsAgoMsec = unix_time_millis(tenMinsAgo) + cls.twentyMinsAgoMsec = unix_time_millis(twentyMinsAgo) cls.numCols = len(rows[0]) cls.rows = rows diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index a7be2a89..e5da11c2 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -8,14 +8,13 @@ from riak import RiakError from riak.content import RiakContent -from riak.util import decode_index_value, str_to_bytes, bytes_to_str +from riak.util import decode_index_value, str_to_bytes, bytes_to_str, \ + unix_time_millis, datetime_from_unix_time_millis from riak.multidict import MultiDict from riak.pb.riak_ts_pb2 import TsColumnType from six import string_types, PY2 -epoch = datetime.datetime.utcfromtimestamp(0) - def _invert(d): out = {} @@ -84,17 +83,10 @@ def __init__(self, **unused_args): super(RiakPbcCodec, self).__init__(**unused_args) def _unix_time_millis(self, dt): - td = dt - epoch - try: - return int(dt.total_seconds() * 1000.0) - except AttributeError: - # NB: python 2.6 must use this method - return int(((td.microseconds + - (td.seconds + td.days * 24 * 3600) * 10**6) / - 10**6) * 1000.0) + return unix_time_millis(dt) def _datetime_from_unix_time_millis(self, ut): - return datetime.datetime.utcfromtimestamp(ut / 1000.0) + return datetime_from_unix_time_millis(ut) def _encode_quorum(self, rw): """ @@ -637,7 +629,7 @@ def _encode_map_update(self, dtype, msg, op): def _encode_to_ts_cell(self, cell, ts_cell): if cell is not None: if isinstance(cell, datetime.datetime): - ts_cell.timestamp_value = self._unix_time_millis(cell) + ts_cell.timestamp_value = unix_time_millis(cell) elif isinstance(cell, bool): ts_cell.boolean_value = cell elif isinstance(cell, string_types): @@ -752,7 +744,7 @@ def _decode_timeseries_row(self, tsrow, tscols=None): if col and col.type != TsColumnType.Value('TIMESTAMP'): raise TypeError('expected TIMESTAMP column') else: - dt = self._datetime_from_unix_time_millis( + dt = datetime_from_unix_time_millis( cell.timestamp_value) row.append(dt) elif cell.HasField('boolean_value'): diff --git a/riak/util.py b/riak/util.py index 5dc3e61a..4ea50389 100644 --- a/riak/util.py +++ b/riak/util.py @@ -2,6 +2,24 @@ import warnings from collections import Mapping from six import string_types, PY2 +import datetime + +epoch = datetime.datetime.utcfromtimestamp(0) + + +def unix_time_millis(dt): + try: + return int(dt.total_seconds() * 1000.0) + except AttributeError: + # NB: 
python 2.6 must use this method + td = dt - epoch + return int(((td.microseconds + + (td.seconds + td.days * 24 * 3600) * 10**6) / + 10**6) * 1000.0) + + +def datetime_from_unix_time_millis(ut): + return datetime.datetime.utcfromtimestamp(ut / 1000.0) def quacks_like_dict(object): @@ -51,7 +69,6 @@ class lazy_property(object): memoization of an object attribute. The property should represent immutable data, as it replaces itself on first access. ''' - def __init__(self, fget): self.fget = fget self.func_name = fget.__name__ From e99c6965ecb6edfdfbd8393a7c1274f8b557147e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 1 Feb 2016 15:42:01 -0800 Subject: [PATCH 099/324] Adding TTB encoding and tests. --- riak/tests/test_timeseries.py | 2 +- riak/tests/test_timeseries_ttb.py | 40 +++++++++++++++++++++++++++ riak/transports/ttb/codec.py | 46 +++++++++++++++++++++++++++++++ setup.py | 4 +-- 4 files changed, 89 insertions(+), 3 deletions(-) create mode 100644 riak/tests/test_timeseries_ttb.py create mode 100644 riak/transports/ttb/codec.py diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index d1b64642..21a53497 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -39,7 +39,7 @@ def setUp(self): [bd1, 3, 4.5, ts1, False] ] self.test_key = ['hash1', 'user2', ts0] - self.table = Table(None, 'test-table') + self.table = Table(None, table_name) def validate_keyreq(self, req): self.assertEqual(self.table.name, bytes_to_str(req.table)) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py new file mode 100644 index 00000000..914758b2 --- /dev/null +++ b/riak/tests/test_timeseries_ttb.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +import datetime +import platform +import random +import string + +from riak.table import Table +from riak.transports.ttb.codec import RiakTtbCodec +from riak.util import str_to_bytes, bytes_to_str, unix_time_millis + +if platform.python_version() < '2.7': + unittest = __import__('unittest2') +else: + import unittest + +table_name = 'GeoCheckin' + +bd0 = '时间序列' +bd1 = 'временные ряды' + +fiveMins = datetime.timedelta(0, 300) +ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) +ts1 = ts0 + fiveMins + + +class TimeseriesTtbUnitTests(unittest.TestCase): + def setUp(self): + self.c = RiakTtbCodec() + self.ts0ms = unix_time_millis(ts0) + self.ts1ms = unix_time_millis(ts1) + self.rows = [ + [bd0, 0, 1.2, ts0, True], + [bd1, 3, 4.5, ts1, False] + ] + self.test_key = ['hash1', 'user2', ts0] + self.table = Table(None, table_name) + + def test_encode_data_for_get(self): + req = self.c._encode_timeseries_keyreq(self.table, self.test_key) + self.assertIsNotNone(req) diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py new file mode 100644 index 00000000..bdeb7513 --- /dev/null +++ b/riak/transports/ttb/codec.py @@ -0,0 +1,46 @@ +import erlastic +import datetime +import logging + +from riak import RiakError +from riak.content import RiakContent +from riak.util import decode_index_value, str_to_bytes, bytes_to_str, \ + unix_time_millis, datetime_from_unix_time_millis +from six import string_types, PY2 + + +class RiakTtbCodec(object): + ''' + Erlang term-to-binary Encoding and decoding methods for RiakTtbTransport + ''' + + def __init__(self, **unused_args): + super(RiakTtbCodec, self).__init__(**unused_args) + + def _encode_to_ts_cell(self, cell, ts_cell): + if cell is not None: + if isinstance(cell, datetime.datetime): + ts_cell.timestamp_value = unix_time_millis(cell) + elif 
isinstance(cell, bool): + ts_cell.boolean_value = cell + elif isinstance(cell, string_types): + logging.debug("cell -> str: '%s'", cell) + ts_cell.varchar_value = str_to_bytes(cell) + elif (isinstance(cell, int) or + (PY2 and isinstance(cell, long))): # noqa + logging.debug("cell -> int/long: '%s'", cell) + ts_cell.sint64_value = cell + elif isinstance(cell, float): + ts_cell.double_value = cell + else: + t = type(cell) + raise RiakError("can't serialize type '{}', value '{}'" + .format(t, cell)) + + def _encode_timeseries_keyreq(self, table, key): + key_vals = None + if isinstance(key, list): + key_vals = key + else: + raise ValueError("key must be a list") + return None diff --git a/setup.py b/setup.py index 5219948d..b2c9d39a 100755 --- a/setup.py +++ b/setup.py @@ -7,8 +7,8 @@ setup_security, enable_security, disable_security, setup_timeseries, \ build_messages -install_requires = ['six >= 1.8.0'] -requires = ['six(>=1.8.0)'] +install_requires = ['six >= 1.8.0', 'erlastic >= 2.0.0'] +requires = ['six(>=1.8.0)', 'erlastic(>= 2.0.0)'] if platform.python_version() < '2.7.9': install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") From 2dd27d6d800de0022fb765b4914483e701f78d72 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 1 Feb 2016 17:56:14 -0800 Subject: [PATCH 100/324] Unit test for tsgetreq ttb encoding complete --- riak/tests/test_timeseries_ttb.py | 18 +++++++++++--- riak/transports/ttb/codec.py | 40 +++++++++++++++++++++---------- 2 files changed, 43 insertions(+), 15 deletions(-) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 914758b2..dfeabcb8 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -4,15 +4,20 @@ import random import string +from erlastic import decode, encode +from erlastic.types import Atom + from riak.table import Table from riak.transports.ttb.codec import RiakTtbCodec -from riak.util import str_to_bytes, bytes_to_str, unix_time_millis +from riak.util import str_to_bytes, unix_time_millis if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest +udef_a = Atom('undefined') +tsc_a = Atom('tscell') table_name = 'GeoCheckin' bd0 = '时间序列' @@ -36,5 +41,12 @@ def setUp(self): self.table = Table(None, table_name) def test_encode_data_for_get(self): - req = self.c._encode_timeseries_keyreq(self.table, self.test_key) - self.assertIsNotNone(req) + keylist = [ + (tsc_a, str_to_bytes('hash1'), udef_a, udef_a, udef_a, udef_a), + (tsc_a, str_to_bytes('user2'), udef_a, udef_a, udef_a, udef_a), + (tsc_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a) + ] + req = Atom('tsgetreq'), str_to_bytes(table_name), keylist, udef_a + req_test = encode(req) + req_encoded = self.c._encode_timeseries_keyreq(self.table, self.test_key) + self.assertEqual(req_test, req_encoded) diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py index bdeb7513..90d82048 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/transports/ttb/codec.py @@ -1,13 +1,20 @@ -import erlastic import datetime import logging -from riak import RiakError -from riak.content import RiakContent -from riak.util import decode_index_value, str_to_bytes, bytes_to_str, \ +from erlastic import decode, encode +from erlastic.types import Atom + +from riak.util import str_to_bytes, bytes_to_str, \ unix_time_millis, datetime_from_unix_time_millis from six import string_types, PY2 +udef_a = Atom('undefined') + +tsgetreq_a = Atom('tsgetreq') +tsputreq_a = 
Atom('tsputreq') +tscell_a = Atom('tscell') + +tscell_empty = (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) class RiakTtbCodec(object): ''' @@ -17,21 +24,28 @@ class RiakTtbCodec(object): def __init__(self, **unused_args): super(RiakTtbCodec, self).__init__(**unused_args) - def _encode_to_ts_cell(self, cell, ts_cell): - if cell is not None: + def _encode_to_ts_cell(self, cell): + if cell is None: + return tscell_empty + else: if isinstance(cell, datetime.datetime): - ts_cell.timestamp_value = unix_time_millis(cell) + ts = unix_time_millis(cell) + logging.debug("cell -> timestamp: '%s'", ts) + return (tscell_a, udef_a, udef_a, ts, udef_a, udef_a) elif isinstance(cell, bool): - ts_cell.boolean_value = cell + logging.debug("cell -> bool: '%s'", cell) + return (tscell_a, udef_a, udef_a, udef_a, cell, udef_a) elif isinstance(cell, string_types): logging.debug("cell -> str: '%s'", cell) - ts_cell.varchar_value = str_to_bytes(cell) + return (tscell_a, str_to_bytes(cell), + udef_a, udef_a, udef_a, udef_a) elif (isinstance(cell, int) or (PY2 and isinstance(cell, long))): # noqa logging.debug("cell -> int/long: '%s'", cell) - ts_cell.sint64_value = cell + return (tscell_a, udef_a, cell, udef_a, udef_a, udef_a) elif isinstance(cell, float): - ts_cell.double_value = cell + logging.debug("cell -> float: '%s'", cell) + return (tscell_a, udef_a, udef_a, udef_a, udef_a, cell) else: t = type(cell) raise RiakError("can't serialize type '{}', value '{}'" @@ -43,4 +57,6 @@ def _encode_timeseries_keyreq(self, table, key): key_vals = key else: raise ValueError("key must be a list") - return None + req = tsgetreq_a, str_to_bytes(table.name), \ + [self._encode_to_ts_cell(k) for k in key_vals], udef_a + return encode(req) From b251fa9bddbbd90d20e2e05d9b805e4afbd6a84c Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 1 Feb 2016 18:35:42 -0800 Subject: [PATCH 101/324] Unit test for tsputreq ttb encoding complete --- riak/tests/test_timeseries_ttb.py | 31 +++++++++++++++++++++++++++++-- riak/transports/ttb/codec.py | 25 +++++++++++++++++++++++++ 2 files changed, 54 insertions(+), 2 deletions(-) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index dfeabcb8..46a5797a 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -8,6 +8,7 @@ from erlastic.types import Atom from riak.table import Table +from riak.ts_object import TsObject from riak.transports.ttb.codec import RiakTtbCodec from riak.util import str_to_bytes, unix_time_millis @@ -34,8 +35,8 @@ def setUp(self): self.ts0ms = unix_time_millis(ts0) self.ts1ms = unix_time_millis(ts1) self.rows = [ - [bd0, 0, 1.2, ts0, True], - [bd1, 3, 4.5, ts1, False] + [bd0, 0, 1.2, ts0, True, None], + [bd1, 3, 4.5, ts1, False, None] ] self.test_key = ['hash1', 'user2', ts0] self.table = Table(None, table_name) @@ -48,5 +49,31 @@ def test_encode_data_for_get(self): ] req = Atom('tsgetreq'), str_to_bytes(table_name), keylist, udef_a req_test = encode(req) + req_encoded = self.c._encode_timeseries_keyreq(self.table, self.test_key) self.assertEqual(req_test, req_encoded) + + def test_encode_data_for_put(self): + r0 = [ + (tsc_a, bd0, udef_a, udef_a, udef_a, udef_a), + (tsc_a, udef_a, 0, udef_a, udef_a, udef_a), + (tsc_a, udef_a, udef_a, udef_a, udef_a, 1.2), + (tsc_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), + (tsc_a, udef_a, udef_a, udef_a, True, udef_a), + (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) + ] + r1 = [ + (tsc_a, bd1, udef_a, udef_a, udef_a, udef_a), + (tsc_a, udef_a, 3, 
udef_a, udef_a, udef_a), + (tsc_a, udef_a, udef_a, udef_a, udef_a, 4.5), + (tsc_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), + (tsc_a, udef_a, udef_a, udef_a, False, udef_a), + (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) + ] + rows = [r0, r1] + req = Atom('tsputreq'), str_to_bytes(table_name), udef_a, rows + req_test = encode(req) + + tsobj = TsObject(None, self.table, self.rows, None) + req_encoded = self.c._encode_timeseries_put(tsobj) + self.assertEqual(req_test, req_encoded) diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py index 90d82048..9eb5240d 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/transports/ttb/codec.py @@ -60,3 +60,28 @@ def _encode_timeseries_keyreq(self, table, key): req = tsgetreq_a, str_to_bytes(table.name), \ [self._encode_to_ts_cell(k) for k in key_vals], udef_a return encode(req) + + def _encode_timeseries_put(self, tsobj): + ''' + Returns an Erlang-TTB encoded tuple with the appropriate data and + metadata from a TsObject. + + :param tsobj: a TsObject + :type tsobj: TsObject + :rtype: term-to-binary encoded object + ''' + if tsobj.columns: + raise NotImplementedError("columns are not implemented yet") + + if tsobj.rows and isinstance(tsobj.rows, list): + req_rows = [] + for row in tsobj.rows: + req_r = [] + for cell in row: + req_r.append(self._encode_to_ts_cell(cell)) + req_rows.append(req_r) + req = tsputreq_a, str_to_bytes(tsobj.table.name), \ + udef_a, req_rows + return encode(req) + else: + raise RiakError("TsObject requires a list of rows") From f1228a7a9394871154ecf37e8f57e5cf34812514 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 2 Feb 2016 06:42:22 -0800 Subject: [PATCH 102/324] Fixes #429. Add target to release a source distribution to pypi --- Makefile | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 5740a000..5b3c059c 100644 --- a/Makefile +++ b/Makefile @@ -11,7 +11,15 @@ pb_compile: pb_clean @protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto @python setup.py build_messages -release: +release_sdist: +ifeq ($(RELEASE_GPG_KEYNAME),) + $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) +else + @echo "==> Python (sdist release)" + @python setup.py sdist upload -s -i $(RELEASE_GPG_KEYNAME) +endif + +release: release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) else From ce5901fdde37efa2d3f576ac895b63457fe9a025 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 2 Feb 2016 07:23:15 -0800 Subject: [PATCH 103/324] Adding milliseconds tests. 
Add microsecond untegration tests --- riak/tests/test_timeseries.py | 32 +++++++++++++++++++++++--------- riak/transports/pbc/codec.py | 12 +++--------- 2 files changed, 26 insertions(+), 18 deletions(-) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index e36cd95e..4649c21b 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -25,21 +25,34 @@ bd1 = 'временные ряды' fiveMins = datetime.timedelta(0, 300) -ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) +# NB: last arg is microseconds, 987ms expressed +ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0, 987000) ts1 = ts0 + fiveMins class TimeseriesUnitTests(unittest.TestCase): - def setUp(self): - self.c = RiakPbcCodec() - self.ts0ms = self.c._unix_time_millis(ts0) - self.ts1ms = self.c._unix_time_millis(ts1) - self.rows = [ + @classmethod + def setUpClass(cls): + cls.c = RiakPbcCodec() + + ex0ms = 1420113600987 + cls.ts0ms = cls.c._unix_time_millis(ts0) + if cls.ts0ms != ex0ms: + raise AssertionError( + 'expected {:d} to equal {:d}'.format(cls.ts0ms, ex0ms)) + + ex1ms = 1420113900987 + cls.ts1ms = cls.c._unix_time_millis(ts1) + if cls.ts1ms != ex1ms: + raise AssertionError( + 'expected {:d} to equal {:d}'.format(cls.ts1ms, ex1ms)) + + cls.rows = [ [bd0, 0, 1.2, ts0, True], [bd1, 3, 4.5, ts1, False] ] - self.test_key = ['hash1', 'user2', ts0] - self.table = Table(None, 'test-table') + cls.test_key = ['hash1', 'user2', ts0] + cls.table = Table(None, 'test-table') def validate_keyreq(self, req): self.assertEqual(self.table.name, bytes_to_str(req.table)) @@ -171,7 +184,7 @@ class TimeseriesTests(IntegrationTestBase, unittest.TestCase): @classmethod def setUpClass(cls): super(TimeseriesTests, cls).setUpClass() - cls.now = datetime.datetime.utcfromtimestamp(144379690) + cls.now = datetime.datetime.utcfromtimestamp(144379690.987000) fiveMinsAgo = cls.now - fiveMins tenMinsAgo = fiveMinsAgo - fiveMins fifteenMinsAgo = tenMinsAgo - fiveMins @@ -211,6 +224,7 @@ def validate_data(self, ts_obj): self.assertEqual(row[0], 'hash1') self.assertEqual(row[1], 'user2') self.assertEqual(row[2], self.fiveMinsAgo) + self.assertEqual(row[2].microsecond, 987000) self.assertEqual(row[3], 'wind') self.assertIsNone(row[4]) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index a7be2a89..97629752 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -1,10 +1,10 @@ +import datetime +import logging import riak.pb import riak.pb.riak_pb2 import riak.pb.riak_dt_pb2 import riak.pb.riak_kv_pb2 import riak.pb.riak_ts_pb2 -import logging -import datetime from riak import RiakError from riak.content import RiakContent @@ -85,13 +85,7 @@ def __init__(self, **unused_args): def _unix_time_millis(self, dt): td = dt - epoch - try: - return int(dt.total_seconds() * 1000.0) - except AttributeError: - # NB: python 2.6 must use this method - return int(((td.microseconds + - (td.seconds + td.days * 24 * 3600) * 10**6) / - 10**6) * 1000.0) + return int(td.total_seconds() * 1000.0) def _datetime_from_unix_time_millis(self, ut): return datetime.datetime.utcfromtimestamp(ut / 1000.0) From eb0be348bcbb2d1a8cdb0d5686adb700ab5babb8 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 2 Feb 2016 11:08:53 -0800 Subject: [PATCH 104/324] Add unit test for timestamp encode / decode with millis --- riak/tests/test_timeseries.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 4649c21b..b55ae554 100644 --- 
a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -61,6 +61,11 @@ def validate_keyreq(self, req): self.assertEqual('user2', bytes_to_str(req.key[1].varchar_value)) self.assertEqual(self.ts0ms, req.key[2].timestamp_value) + def test_encode_decode_timestamp(self): + ts0ms = self.c._unix_time_millis(ts0) + ts0_d = self.c._datetime_from_unix_time_millis(ts0ms) + self.assertEqual(ts0, ts0_d) + def test_encode_data_for_get(self): req = riak.pb.riak_ts_pb2.TsGetReq() self.c._encode_timeseries_keyreq(self.table, self.test_key, req) From 0b704eb6f314bdae6ce12ce83961e605cc9c93a3 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 3 Feb 2016 08:35:22 -0800 Subject: [PATCH 105/324] Add datetime test; Remove Python 3.3 --- buildbot/tox_setup.sh | 4 ++-- riak/tests/test_timeseries.py | 6 ++++-- tox.ini | 7 +------ 3 files changed, 7 insertions(+), 10 deletions(-) diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index df28ea34..2cd9b24d 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -56,7 +56,7 @@ then fi # NB: 2.7.8 is special-cased -for pyver in 2.7 3.3 3.4 3.5 +for pyver in 2.7 3.4 3.5 do if ! pyenv versions | fgrep -v 'riak_2.7.8' | fgrep -q "riak_$pyver" then @@ -78,7 +78,7 @@ then pyenv virtualenv 'riak_2.7.8' 'riak-py278' fi -(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278) +(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py27 riak-py278) pyenv versions diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index b55ae554..771ffe5a 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -27,7 +27,10 @@ fiveMins = datetime.timedelta(0, 300) # NB: last arg is microseconds, 987ms expressed ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0, 987000) +ex0ms = 1420113600987 + ts1 = ts0 + fiveMins +ex1ms = 1420113900987 class TimeseriesUnitTests(unittest.TestCase): @@ -35,13 +38,11 @@ class TimeseriesUnitTests(unittest.TestCase): def setUpClass(cls): cls.c = RiakPbcCodec() - ex0ms = 1420113600987 cls.ts0ms = cls.c._unix_time_millis(ts0) if cls.ts0ms != ex0ms: raise AssertionError( 'expected {:d} to equal {:d}'.format(cls.ts0ms, ex0ms)) - ex1ms = 1420113900987 cls.ts1ms = cls.c._unix_time_millis(ts1) if cls.ts1ms != ex1ms: raise AssertionError( @@ -63,6 +64,7 @@ def validate_keyreq(self, req): def test_encode_decode_timestamp(self): ts0ms = self.c._unix_time_millis(ts0) + self.assertEqual(ts0ms, ex0ms) ts0_d = self.c._datetime_from_unix_time_millis(ts0ms) self.assertEqual(ts0, ts0_d) diff --git a/tox.ini b/tox.ini index 87e74bb1..8c19f5ba 100644 --- a/tox.ini +++ b/tox.ini @@ -1,10 +1,5 @@ -# Tox (http://tox.testrun.org/) is a tool for running tests -# in multiple virtualenvs. This configuration file will run the -# test suite on all supported python versions. To use it, "pip install tox" -# and then run "tox" from this directory. 
- [tox] -envlist = py278, py27, py33, py34, py35 +envlist = py278, py27, py34, py35 [testenv:py278] basepython = {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 From 0ab5f17ab228e4c97b270b503820a710cb7c1136 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 3 Feb 2016 14:59:22 -0800 Subject: [PATCH 106/324] Skip timeseries tests if using unsupported Python version --- buildbot/tox_setup.sh | 4 ++-- riak/tests/test_timeseries.py | 8 +++++++- tox.ini | 2 +- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index 2cd9b24d..df28ea34 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -56,7 +56,7 @@ then fi # NB: 2.7.8 is special-cased -for pyver in 2.7 3.4 3.5 +for pyver in 2.7 3.3 3.4 3.5 do if ! pyenv versions | fgrep -v 'riak_2.7.8' | fgrep -q "riak_$pyver" then @@ -78,7 +78,7 @@ then pyenv virtualenv 'riak_2.7.8' 'riak-py278' fi -(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py27 riak-py278) +(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278) pyenv versions diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 771ffe5a..39e4edec 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -3,6 +3,8 @@ import platform import random import string +import sys + import riak.pb.riak_ts_pb2 from riak import RiakError @@ -32,7 +34,10 @@ ts1 = ts0 + fiveMins ex1ms = 1420113900987 +ts_supported = sys.version_info < (3,) or sys.version_info >= (3,4,4) + +@unittest.skipUnless(ts_supported, "Timeseries not supported") class TimeseriesUnitTests(unittest.TestCase): @classmethod def setUpClass(cls): @@ -186,7 +191,8 @@ def test_decode_data_from_query(self): self.assertEqual(r1[4], self.rows[1][4]) -@unittest.skipUnless(RUN_TIMESERIES, 'RUN_TIMESERIES is 0') +@unittest.skipUnless(ts_supported and RUN_TIMESERIES, + 'Timeseries not supported or RUN_TIMESERIES is 0') class TimeseriesTests(IntegrationTestBase, unittest.TestCase): @classmethod def setUpClass(cls): diff --git a/tox.ini b/tox.ini index 8c19f5ba..5abd96c6 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py278, py27, py34, py35 +envlist = py278, py27, py33, py34, py35 [testenv:py278] basepython = {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 From 958ad8e1bfe4918f0bbd77d6bc85fc88f50a1b6c Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 3 Feb 2016 15:37:15 -0800 Subject: [PATCH 107/324] Timeseries version check and tests --- README.rst | 10 +++++++++- RELEASE_NOTES.md | 5 +++++ riak/tests/test_timeseries.py | 12 +++++------- riak/tests/test_util.py | 20 ++++++++++++++++++++ riak/util.py | 8 ++++++++ 5 files changed, 47 insertions(+), 8 deletions(-) create mode 100644 riak/tests/test_util.py diff --git a/README.rst b/README.rst index c66a4747..6e9b66ba 100644 --- a/README.rst +++ b/README.rst @@ -17,7 +17,15 @@ Install ======= The recommended versions of Python for use with this client are Python -`2.7.x`, `3.3.x`, `3.4.x` and `3.5.x`. +`2.7.x`, `3.3.x`, `3.4.x` and `3.5.x`. The latest version from each series +should be preferred. + +Riak TS (Timeseries) +=================== + +You must use version `2.7.11`, `3.4.4` or `3.5.1` (or greater within a version series). +Otherwise you will be affected by `this Python bug `_. 
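The test suite gates on this version requirement with riak.util.is_timeseries_supported() (added below); a self-contained sketch of the same skip pattern, where MyTimeseriesTests is illustrative only:

    import unittest
    from riak.util import is_timeseries_supported

    @unittest.skipUnless(is_timeseries_supported(),
                         'Timeseries not supported on this Python version')
    class MyTimeseriesTests(unittest.TestCase):
        def test_guard(self):
            self.assertTrue(is_timeseries_supported())

    if __name__ == '__main__':
        unittest.main()
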
+ From Source ----------- diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md index 7bfea4f7..3baf2dfe 100644 --- a/RELEASE_NOTES.md +++ b/RELEASE_NOTES.md @@ -1,5 +1,10 @@ # Riak Python Client Release Notes +## 2.4.1 Patch Release - 2016-02-03 + +* [Riak TS: Millisecond precision](https://github.com/basho/riak-python-client/issues/430) +* [Fix release process](https://github.com/basho/riak-python-client/issues/429) + ## 2.4.0 Feature Release - 2016-01-13 This release enhances Riak Time Series functionality. diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 39e4edec..600c67db 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -3,7 +3,6 @@ import platform import random import string -import sys import riak.pb.riak_ts_pb2 @@ -11,7 +10,8 @@ from riak.table import Table from riak.ts_object import TsObject from riak.transports.pbc.codec import RiakPbcCodec -from riak.util import str_to_bytes, bytes_to_str +from riak.util import str_to_bytes, bytes_to_str, \ + is_timeseries_supported from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase from riak.pb.riak_ts_pb2 import TsColumnType @@ -34,10 +34,8 @@ ts1 = ts0 + fiveMins ex1ms = 1420113900987 -ts_supported = sys.version_info < (3,) or sys.version_info >= (3,4,4) - -@unittest.skipUnless(ts_supported, "Timeseries not supported") +@unittest.skipUnless(is_timeseries_supported(), "Timeseries not supported") class TimeseriesUnitTests(unittest.TestCase): @classmethod def setUpClass(cls): @@ -191,8 +189,8 @@ def test_decode_data_from_query(self): self.assertEqual(r1[4], self.rows[1][4]) -@unittest.skipUnless(ts_supported and RUN_TIMESERIES, - 'Timeseries not supported or RUN_TIMESERIES is 0') +@unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, + 'Timeseries not supported or RUN_TIMESERIES is 0') class TimeseriesTests(IntegrationTestBase, unittest.TestCase): @classmethod def setUpClass(cls): diff --git a/riak/tests/test_util.py b/riak/tests/test_util.py new file mode 100644 index 00000000..3cc69e95 --- /dev/null +++ b/riak/tests/test_util.py @@ -0,0 +1,20 @@ +import platform + +from riak.util import is_timeseries_supported + +if platform.python_version() < '2.7': + unittest = __import__('unittest2') +else: + import unittest + + +class UtilUnitTests(unittest.TestCase): + def test_is_timeseries_supported(self): + v = (2, 7, 11) + self.assertEqual(True, is_timeseries_supported(v)) + v = (2, 7, 12) + self.assertEqual(True, is_timeseries_supported(v)) + v = (3, 3, 6) + self.assertEqual(False, is_timeseries_supported(v)) + v = (3, 4, 3) + self.assertEqual(False, is_timeseries_supported(v)) diff --git a/riak/util.py b/riak/util.py index 5dc3e61a..26d991a6 100644 --- a/riak/util.py +++ b/riak/util.py @@ -1,8 +1,16 @@ from __future__ import print_function + +import sys import warnings + from collections import Mapping from six import string_types, PY2 +def is_timeseries_supported(v=None): + if v is None: + v = sys.version_info + return v < (3,) or v >= (3, 4, 4) + def quacks_like_dict(object): """Check if object is dict-like""" From 0850dd75ca98b129533c2638d831c86d6a99a337 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 4 Feb 2016 06:51:21 -0800 Subject: [PATCH 108/324] Restore tox boilerplate --- tox.ini | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/tox.ini b/tox.ini index 5abd96c6..87e74bb1 100644 --- a/tox.ini +++ b/tox.ini @@ -1,3 +1,8 @@ +# Tox (http://tox.testrun.org/) is a tool for running tests +# in multiple virtualenvs. 
This configuration file will run the +# test suite on all supported python versions. To use it, "pip install tox" +# and then run "tox" from this directory. + [tox] envlist = py278, py27, py33, py34, py35 From 65fa1996102dcc4a84cf5340e4692f499bc1ffbc Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 4 Feb 2016 07:28:15 -0800 Subject: [PATCH 109/324] fix lint --- riak/util.py | 1 + 1 file changed, 1 insertion(+) diff --git a/riak/util.py b/riak/util.py index 26d991a6..3932be99 100644 --- a/riak/util.py +++ b/riak/util.py @@ -6,6 +6,7 @@ from collections import Mapping from six import string_types, PY2 + def is_timeseries_supported(v=None): if v is None: v = sys.version_info From 312310c2a34ed7160e953950954ec461b251fa0f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 11 Feb 2016 14:33:24 -0800 Subject: [PATCH 110/324] Begin migrating to the use of riak-client-tools scripts --- .gitmodules | 3 ++ buildbot/Makefile | 24 ++++++++++----- commands.py | 43 ++++---------------------- riak/tests/__init__.py | 4 +-- riak/tests/test_btypes.py | 15 +++++----- riak/tests/test_datatypes.py | 36 +++++++++++----------- riak/tests/test_mapreduce.py | 58 ++++++++++++++++++------------------ setup.py | 3 +- tools | 1 + 9 files changed, 83 insertions(+), 104 deletions(-) create mode 160000 tools diff --git a/.gitmodules b/.gitmodules index e0cba09c..df75a761 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,6 @@ [submodule "riak_pb"] path = riak_pb url = git://github.com/basho/riak_pb.git +[submodule "tools"] + path = tools + url = git://github.com/basho/riak-client-tools.git diff --git a/buildbot/Makefile b/buildbot/Makefile index fd0ee0ed..87d53af1 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -2,16 +2,25 @@ ifndef RIAK_DIR $(error RIAK_DIR is not set) endif +PROJDIR = $(realpath $(CURDIR)/..) +TOOLS_DIR = $(PROJDIR)/tools/devrel +CA_DIR = $(PROJDIR)/tools/test-ca RIAK_CONF = $(RIAK_DIR)/etc/riak.conf +ADV_CONF = $(RIAK_DIR)/etc/advanced.config RIAK_ADMIN = $(RIAK_DIR)/bin/riak-admin + +export RIAK_HOST = localhost +export RIAK_PORT = 8087 + CERTS_DIR = $(realpath $(CURDIR))/../riak/tests/resources unexport PYENV_VERSION preconfigure: - @../setup.py preconfigure --riak-conf=$(RIAK_CONF) + $(TOOLS_DIR)/gen-riak-conf $(RIAK_CONF) 8098 8087 18098 $(CA_DIR)/certs/cacert.pem $(CA_DIR)/certs/riak-test-cert.pem $(CA_DIR)/private/riak-test-key.pem + $(TOOLS_DIR)/gen-adv-conf $(ADV_CONF) configure: - @../setup.py configure --riak-admin=$(RIAK_ADMIN) + $(TOOLS_DIR)/riak-cluster-config $(RIAK_ADMIN) 8098 true false configure_timeseries: @../setup.py setup_timeseries --riak-admin=$(RIAK_ADMIN) @@ -26,19 +35,20 @@ lint: @openssl verify -CAfile $(CERTS_DIR)/ca.crt $(CERTS_DIR)/client.crt @openssl verify -CAfile $(CERTS_DIR)/ca.crt $(CERTS_DIR)/server.crt -test: setup test_normal test_security +# TODO test: setup test_normal test_security +test: setup test_normal test_normal: @echo "Testing Riak Python Client (without security)" @../setup.py disable_security --riak-admin=$(RIAK_ADMIN) - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. 
test_security: @echo "Testing Riak Python Client (with security)" @../setup.py enable_security --riak-admin=$(RIAK_ADMIN) - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 RIAK_TEST_HTTP_PORT=18098 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=18098 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. test_timeseries: @echo "Testing Riak Python Client (timeseries)" diff --git a/commands.py b/commands.py index c12b9191..20ce95bc 100644 --- a/commands.py +++ b/commands.py @@ -12,7 +12,7 @@ from subprocess import Popen, PIPE -__all__ = ['create_bucket_types', 'build_messages', +__all__ = ['build_messages', 'setup_security', 'enable_security', 'disable_security', 'setup_timeseries', 'preconfigure', 'configure'] @@ -146,36 +146,6 @@ def _btype_command(self, *args): return cmd -class create_bucket_types(bucket_type_commands, Command): - """ - Creates bucket-types appropriate for testing. By default this will create: - - * `pytest-maps` with ``{"datatype":"map"}`` - * `pytest-sets` with ``{"datatype":"set"}`` - * `pytest-counters` with ``{"datatype":"counter"}`` - * `pytest-consistent` with ``{"consistent":true}`` - * `pytest-write-once` with ``{"write_once": true}`` - * `pytest-mr` - * `pytest` with ``{"allow_mult":false}`` - """ - - description = "create bucket-types used in integration tests" - - user_options = [ - ('riak-admin=', None, 'path to the riak-admin script') - ] - - _props = { - 'pytest-maps': {'datatype': 'map'}, - 'pytest-sets': {'datatype': 'set'}, - 'pytest-counters': {'datatype': 'counter'}, - 'pytest-consistent': {'consistent': True}, - 'pytest-write-once': {'write_once': True}, - 'pytest-mr': {}, - 'pytest': {'allow_mult': False} - } - - class setup_timeseries(bucket_type_commands, Command): """ Creates bucket-types appropriate for timeseries. @@ -477,13 +447,12 @@ class configure(Command): """ Sets up security configuration. 
- * Run setup_security and create_bucket_types + * Run setup_security """ - description = "create bucket types and security settings for testing" + description = "security settings for testing" - user_options = create_bucket_types.user_options + \ - setup_security.user_options + user_options = setup_security.user_options def initialize_options(self): self.riak_admin = None @@ -491,8 +460,6 @@ def initialize_options(self): self.password = None def finalize_options(self): - bucket = self.distribution.get_command_obj('create_bucket_types') - bucket.riak_admin = self.riak_admin security = self.distribution.get_command_obj('setup_security') security.riak_admin = self.riak_admin security.username = self.username @@ -503,7 +470,7 @@ def run(self): for cmd_name in self.get_sub_commands(): self.run_command(cmd_name) - sub_commands = [('create_bucket_types', None), ('setup_security', None)] + sub_commands = [('setup_security', None)] class ComparableMixin(object): diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 29c88619..7ee88648 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -22,10 +22,10 @@ PROTOCOL = os.environ.get('RIAK_TEST_PROTOCOL', 'pbc') PB_HOST = os.environ.get('RIAK_TEST_PB_HOST', HOST) -PB_PORT = int(os.environ.get('RIAK_TEST_PB_PORT', '8087')) +PB_PORT = int(os.environ.get('RIAK_TEST_PB_PORT', '10017')) HTTP_HOST = os.environ.get('RIAK_TEST_HTTP_HOST', HOST) -HTTP_PORT = int(os.environ.get('RIAK_TEST_HTTP_PORT', '8098')) +HTTP_PORT = int(os.environ.get('RIAK_TEST_HTTP_PORT', '10018')) # these ports are used to simulate errors, there shouldn't # be anything listening on either port. diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index 3c8b6c1e..30c9d6ac 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -44,7 +44,7 @@ def test_btype_repr(self): def test_btype_get_props(self): defbtype = self.client.bucket_type("default") - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') with self.assertRaises(ValueError): defbtype.get_properties() @@ -55,7 +55,7 @@ def test_btype_get_props(self): def test_btype_set_props(self): defbtype = self.client.bucket_type("default") - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') with self.assertRaises(ValueError): defbtype.set_properties({'allow_mult': True}) @@ -72,12 +72,12 @@ def test_btype_set_props(self): btype.set_properties(oldprops) def test_btype_set_props_immutable(self): - btype = self.client.bucket_type("pytest-maps") + btype = self.client.bucket_type("maps") with self.assertRaises(RiakError): btype.set_property('datatype', 'counter') def test_btype_list_buckets(self): - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) obj = bucket.new(self.key_name) obj.data = [1, 2, 3] @@ -91,7 +91,7 @@ def test_btype_list_buckets(self): self.assertIn(bucket, buckets) def test_btype_list_keys(self): - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) obj = bucket.new(self.key_name) @@ -140,7 +140,7 @@ def test_default_btype_list_keys(self): self.assertItemsEqual(keys, oldapikeys) def test_multiget_bucket_types(self): - btype = self.client.bucket_type('pytest') + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) for i in range(100): @@ -155,8 +155,7 @@ def test_multiget_bucket_types(self): 
self.assertEqual(btype, mobj.bucket.bucket_type) def test_write_once_bucket_type(self): - btype = self.client.bucket_type('pytest-write-once') - btype.set_property('write_once', True) + btype = self.client.bucket_type('write_once') bucket = btype.bucket(self.bucket_name) for i in range(100): diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index c832583b..39166069 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -157,7 +157,7 @@ class DatatypeIntegrationTests(IntegrationTestBase, unittest.TestCase, Comparison): def test_dt_counter(self): - btype = self.client.bucket_type('pytest-counters') + btype = self.client.bucket_type('counters') bucket = btype.bucket(self.bucket_name) mycount = datatypes.Counter(bucket, self.key_name) mycount.increment(5) @@ -173,7 +173,7 @@ def test_dt_counter(self): self.assertEqual(2, mycount.value) def test_dt_set(self): - btype = self.client.bucket_type('pytest-sets') + btype = self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) myset = datatypes.Set(bucket, self.key_name) myset.add('Sean') @@ -195,7 +195,7 @@ def test_dt_set(self): self.assertNotIn('Sean', myset) def test_dt_map(self): - btype = self.client.bucket_type('pytest-maps') + btype = self.client.bucket_type('maps') bucket = btype.bucket(self.bucket_name) mymap = datatypes.Map(bucket, self.key_name) @@ -230,7 +230,7 @@ def test_dt_map(self): self.assertItemsEqual(['thing1', 'thing2'], mymap.sets['f'].value) def test_dt_set_remove_without_context(self): - btype = self.client.bucket_type('pytest-sets') + btype = self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) set = datatypes.Set(bucket, self.key_name) @@ -241,7 +241,7 @@ def test_dt_set_remove_without_context(self): set.discard("Y") def test_dt_set_remove_fetching_context(self): - btype = self.client.bucket_type('pytest-sets') + btype = self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) set = datatypes.Set(bucket, self.key_name) @@ -257,7 +257,7 @@ def test_dt_set_remove_fetching_context(self): self.assertItemsEqual(['X', 'Y'], set2.value) def test_dt_set_add_twice(self): - btype = self.client.bucket_type('pytest-sets') + btype = self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) set = datatypes.Set(bucket, self.key_name) @@ -273,7 +273,7 @@ def test_dt_set_add_twice(self): self.assertItemsEqual(['X', 'Y'], set2.value) def test_dt_set_add_wins_in_same_op(self): - btype = self.client.bucket_type('pytest-sets') + btype = self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) set = datatypes.Set(bucket, self.key_name) @@ -290,7 +290,7 @@ def test_dt_set_add_wins_in_same_op(self): self.assertItemsEqual(['X', 'Y'], set2.value) def test_dt_set_add_wins_in_same_op_reversed(self): - btype = self.client.bucket_type('pytest-sets') + btype = self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) set = datatypes.Set(bucket, self.key_name) @@ -307,7 +307,7 @@ def test_dt_set_add_wins_in_same_op_reversed(self): self.assertItemsEqual(['X', 'Y'], set2.value) def test_dt_set_remove_old_context(self): - btype = self.client.bucket_type('pytest-sets') + btype = self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) set = datatypes.Set(bucket, self.key_name) @@ -328,7 +328,7 @@ def test_dt_set_remove_old_context(self): self.assertItemsEqual(['X', 'Y', 'Z'], set2.value) def test_dt_set_remove_updated_context(self): - btype = self.client.bucket_type('pytest-sets') + btype = 
self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) set = datatypes.Set(bucket, self.key_name) @@ -348,7 +348,7 @@ def test_dt_set_remove_updated_context(self): self.assertItemsEqual(['X', 'Y'], set2.value) def test_dt_map_remove_set_update_same_op(self): - btype = self.client.bucket_type('pytest-maps') + btype = self.client.bucket_type('maps') bucket = btype.bucket(self.bucket_name) map = datatypes.Map(bucket, self.key_name) @@ -365,7 +365,7 @@ def test_dt_map_remove_set_update_same_op(self): self.assertItemsEqual(["Z"], map2.sets['set']) def test_dt_map_remove_counter_increment_same_op(self): - btype = self.client.bucket_type('pytest-maps') + btype = self.client.bucket_type('maps') bucket = btype.bucket(self.bucket_name) map = datatypes.Map(bucket, self.key_name) @@ -382,7 +382,7 @@ def test_dt_map_remove_counter_increment_same_op(self): self.assertEqual(2, map2.counters['counter'].value) def test_dt_map_remove_map_update_same_op(self): - btype = self.client.bucket_type('pytest-maps') + btype = self.client.bucket_type('maps') bucket = btype.bucket(self.bucket_name) map = datatypes.Map(bucket, self.key_name) @@ -399,7 +399,7 @@ def test_dt_map_remove_map_update_same_op(self): self.assertItemsEqual(["Z"], map2.maps['map'].sets['set']) def test_dt_set_return_body_true_default(self): - btype = self.client.bucket_type('pytest-sets') + btype = self.client.bucket_type('sets') bucket = btype.bucket(self.bucket_name) myset = bucket.new(self.key_name) myset.add('X') @@ -416,7 +416,7 @@ def test_dt_set_return_body_true_default(self): self.assertItemsEqual(myset.value, ['Y']) def test_dt_map_return_body_true_default(self): - btype = self.client.bucket_type('pytest-maps') + btype = self.client.bucket_type('maps') bucket = btype.bucket(self.bucket_name) mymap = bucket.new(self.key_name) mymap.sets['a'].add('X') @@ -440,19 +440,19 @@ def test_dt_map_return_body_true_default(self): self.assertEqual(mymap.value, {}) def test_delete_datatype(self): - ctype = self.client.bucket_type('pytest-counters') + ctype = self.client.bucket_type('counters') cbucket = ctype.bucket(self.bucket_name) counter = cbucket.new(self.key_name) counter.increment(5) counter.store() - stype = self.client.bucket_type('pytest-sets') + stype = self.client.bucket_type('sets') sbucket = stype.bucket(self.bucket_name) set_ = sbucket.new(self.key_name) set_.add("Brett") set_.store() - mtype = self.client.bucket_type('pytest-maps') + mtype = self.client.bucket_type('maps') mbucket = mtype.bucket(self.bucket_name) map_ = mbucket.new(self.key_name) map_.sets['people'].add('Sean') diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index b6cd068f..a1827398 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -18,7 +18,7 @@ import unittest -testrun_yz_mr = {'btype': 'pytest-mr', +testrun_yz_mr = {'btype': 'mr', 'bucket': 'mrbucket', 'index': 'mrbucket'} @@ -116,16 +116,16 @@ def test_erlang_map_reduce(self): def test_erlang_map_reduce_bucket_type(self): # Create the object... - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) bucket.new("foo", 2).store() bucket.new("bar", 2).store() bucket.new("baz", 4).store() # Run the map... 
result = self.client \ - .add(self.bucket_name, "foo", bucket_type="pytest") \ - .add(self.bucket_name, "bar", bucket_type="pytest") \ - .add(self.bucket_name, "baz", bucket_type="pytest") \ + .add(self.bucket_name, "foo", bucket_type='no_siblings') \ + .add(self.bucket_name, "bar", bucket_type='no_siblings') \ + .add(self.bucket_name, "baz", bucket_type='no_siblings') \ .map(["riak_kv_mapreduce", "map_object_value"]) \ .reduce(["riak_kv_mapreduce", "reduce_set_union"]) \ .run() @@ -159,7 +159,7 @@ def test_erlang_source_map_reduce(self): def test_erlang_source_map_reduce_bucket_type(self): # Create the object... - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) bucket.new("foo", 2).store() bucket.new("bar", 3).store() @@ -168,9 +168,9 @@ def test_erlang_source_map_reduce_bucket_type(self): # Run the map... try: result = self.client \ - .add(self.bucket_name, "foo", bucket_type="pytest") \ - .add(self.bucket_name, "bar", bucket_type="pytest") \ - .add(self.bucket_name, "baz", bucket_type="pytest") \ + .add(self.bucket_name, "foo", bucket_type='no_siblings') \ + .add(self.bucket_name, "bar", bucket_type='no_siblings') \ + .add(self.bucket_name, "baz", bucket_type='no_siblings') \ .map("""fun(Object, _KD, _A) -> Value = riak_object:get_value(Object), [Value] @@ -255,12 +255,12 @@ def test_javascript_named_map(self): def test_javascript_named_map_bucket_type(self): # Create the object... - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) bucket.new("foo", 2).store() # Run the map... result = self.client \ - .add(self.bucket_name, "foo", bucket_type="pytest") \ + .add(self.bucket_name, "foo", bucket_type='no_siblings') \ .map("Riak.mapValuesJson") \ .run() self.assertEqual(result, [2]) @@ -283,16 +283,16 @@ def test_javascript_source_map_reduce(self): def test_javascript_source_map_reduce_bucket_type(self): # Create the object... - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) bucket.new("foo", 2).store() bucket.new("bar", 3).store() bucket.new("baz", 4).store() # Run the map... result = self.client \ - .add(self.bucket_name, "foo", bucket_type="pytest") \ - .add(self.bucket_name, "bar", bucket_type="pytest") \ - .add(self.bucket_name, "baz", bucket_type="pytest") \ + .add(self.bucket_name, "foo", bucket_type='no_siblings') \ + .add(self.bucket_name, "bar", bucket_type='no_siblings') \ + .add(self.bucket_name, "baz", bucket_type='no_siblings') \ .map("function (v) { return [1]; }") \ .reduce("Riak.reduceSum") \ .run() @@ -316,16 +316,16 @@ def test_javascript_named_map_reduce(self): def test_javascript_named_map_reduce_bucket_type(self): # Create the object... - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) bucket.new("foo", 2).store() bucket.new("bar", 3).store() bucket.new("baz", 4).store() # Run the map... 
result = self.client \ - .add(self.bucket_name, "foo", bucket_type="pytest") \ - .add(self.bucket_name, "bar", bucket_type="pytest") \ - .add(self.bucket_name, "baz", bucket_type="pytest") \ + .add(self.bucket_name, "foo", bucket_type='no_siblings') \ + .add(self.bucket_name, "bar", bucket_type='no_siblings') \ + .add(self.bucket_name, "baz", bucket_type='no_siblings') \ .map("Riak.mapValuesJson") \ .reduce("Riak.reduceSum") \ .run() @@ -347,14 +347,14 @@ def test_javascript_bucket_map_reduce(self): def test_javascript_bucket_map_reduceP_bucket_type(self): # Create the object... - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket("bucket_%s" % self.randint()) bucket.new("foo", 2).store() bucket.new("bar", 3).store() bucket.new("baz", 4).store() # Run the map... result = self.client \ - .add(bucket.name, bucket_type="pytest") \ + .add(bucket.name, bucket_type='no_siblings') \ .map("Riak.mapValuesJson") \ .reduce("Riak.reduceSum") \ .run() @@ -378,16 +378,16 @@ def test_javascript_arg_map_reduce(self): def test_javascript_arg_map_reduce_bucket_type(self): # Create the object... - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket(self.bucket_name) bucket.new("foo", 2).store() # Run the map... result = self.client \ - .add(self.bucket_name, "foo", 5, bucket_type="pytest") \ - .add(self.bucket_name, "foo", 10, bucket_type="pytest") \ - .add(self.bucket_name, "foo", 15, bucket_type="pytest") \ - .add(self.bucket_name, "foo", -15, bucket_type="pytest") \ - .add(self.bucket_name, "foo", -5, bucket_type="pytest") \ + .add(self.bucket_name, "foo", 5, bucket_type='no_siblings') \ + .add(self.bucket_name, "foo", 10, bucket_type='no_siblings') \ + .add(self.bucket_name, "foo", 15, bucket_type='no_siblings') \ + .add(self.bucket_name, "foo", -15, bucket_type='no_siblings') \ + .add(self.bucket_name, "foo", -5, bucket_type='no_siblings') \ .map("function(v, arg) { return [arg]; }") \ .reduce("Riak.reduceSum") \ .run() @@ -409,14 +409,14 @@ def test_key_filters(self): self.assertEqual(result, ["yahoo-20090613"]) def test_key_filters_bucket_type(self): - btype = self.client.bucket_type("pytest") + btype = self.client.bucket_type('no_siblings') bucket = btype.bucket("kftest") bucket.new("basho-20101215", 1).store() bucket.new("google-20110103", 2).store() bucket.new("yahoo-20090613", 3).store() result = self.client \ - .add("kftest", bucket_type="pytest") \ + .add("kftest", bucket_type='no_siblings') \ .add_key_filters([["tokenize", "-", 2]]) \ .add_key_filter("ends_with", "0613") \ .map("function (v, keydata) { return [v.key]; }") \ diff --git a/setup.py b/setup.py index 5219948d..1c40976b 100755 --- a/setup.py +++ b/setup.py @@ -3,7 +3,7 @@ import platform from setuptools import setup, find_packages from version import get_version -from commands import preconfigure, configure, create_bucket_types, \ +from commands import preconfigure, configure, \ setup_security, enable_security, disable_security, setup_timeseries, \ build_messages @@ -46,7 +46,6 @@ cmdclass={ 'build_messages': build_messages, 'setup_timeseries': setup_timeseries, - 'create_bucket_types': create_bucket_types, 'setup_security': setup_security, 'preconfigure': preconfigure, 'configure': configure, diff --git a/tools b/tools new file mode 160000 index 00000000..343c4a48 --- /dev/null +++ b/tools @@ -0,0 +1 @@ +Subproject commit 343c4a485e26bcee971a41faf9234d65a3fb8b53 From bbe7009d5d336692518c66c802d7ac4ebb710d30 Mon 
Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 11 Feb 2016 14:48:10 -0800 Subject: [PATCH 111/324] Convert to md format --- README.md | 245 +++++++++++++++++++++++++++++++++++++++++++ README.rst | 298 ----------------------------------------------------- 2 files changed, 245 insertions(+), 298 deletions(-) create mode 100644 README.md delete mode 100644 README.rst diff --git a/README.md b/README.md new file mode 100644 index 00000000..fc921135 --- /dev/null +++ b/README.md @@ -0,0 +1,245 @@ +Python Client for Riak +====================== + +Documentation +============= + +[Documentation for the Riak Python Client Library](http://basho.github.io/riak-python-client/index.html) is available here. The documentation source is found in [`docs/` subdirectory](https://github.com/basho/riak-python-client/tree/master/docs) and can be built with [Sphinx](http://sphinx.pocoo.org/). + +Documentation for Riak is available [here](http://docs.basho.com/riak/latest). + +Install +======= + +The recommended versions of Python for use with this client are Python `2.7.x`, `3.3.x`, `3.4.x` and `3.5.x`. The latest version from each series should be preferred. + +Riak TS (Timeseries) +=================== + +You must use version `2.7.11`, `3.4.4` or `3.5.1` (or greater within a version series). Otherwise you will be affected by [this Python bug](https://bugs.python.org/issue23517). + +From Source +----------- + +``sh +python setup.py install +`` + +There are additional dependencies on Python packages `setuptools` and `protobuf`. + +From PyPI +--------- + +Official packages are signed and published to [PyPI](https://pypi.python.org/pypi/riak). + +To install from [PyPI](https://pypi.python.org/pypi/riak) directly you can use `pip`. + +``sh +pip install riak +`` + +Testing +======= + +To setup the default test configuration build a test Riak node (from a `riak` directory) + +``sh +make rel +`` + +See [Basic Cluster Setup](http://docs.basho.com/riak/2.0.0/ops/building/basic-cluster-setup/) for more details. + +For all of the simple default values, set the `RIAK_DIR` environment variable to the root of your Riak installation. Then from the `riak-python-client` directory + +``sh +make -C buildbot preconfigure +`` + +Start your Riak node with `riak start` from the the Riak directory, then + +``sh +make -C buildbot configure +make -C buildbot test +`` + +That will run the test suite twice: once with security enabled and once without. + +Testing Options +--------------- + +If you wish to change the default options you can run the setup by hand. First configure the test node by adjusting the `riak.conf` settings, where `RIAK_DIR` is the path to the top your Riak installation + +```sh +python setup.py preconfigure --riak-conf=$RIAK_DIR/etc/riak.conf +``` + +Optionally the hostname and port numbers can be changed, too, via these arguments: + +* `--host=` IP of host running Riak (default is `localhost`) +* `--pb-port=` protocol buffers port number (default is `8087`) +* `--http-port=` http port number (default is `8098`) +* `--https-port=` https port number (default is `8099`) + +You may alternately add these lines to `setup.cfg` + +```ini +[preconfigure] +riak-conf=/Users/sean/dev/riak/rel/riak/etc/riak.conf +host=localhost +pb-port=8087 +http-port=8098 +https-port=8099 +``` + +Next start the test node. 
Once it is running, a test configuration is installed which includes security test users and bucket types + +```sh +python setup.py configure --riak-admin=$RIAK_DIR/bin/riak-admin +``` + +Optionally these configuration settings can be changed, too: + +* `--username=` test user account (default is `testuser`) +* `--password=` password for test user account (default is `testpassword`) +* `--certuser=` secruity test user account (default is `certuser`) +* `--certpass=` password for security test user account (default is `certpass`) + +Similarly `setup.cfg` may be modified instead. To run the tests against a Riak server (with configured TCP port configuration) on localhost, execute + +```sh +python setup.py test +``` + +Connections to Riak in Tests +---------------------------- + +If your Riak server isn't running on localhost or you have built a Riak devrel from source, use the environment variables `RIAK_TEST_HOST`, `RIAK_TEST_HTTP_PORT` and `RIAK_TEST_PB_PORT` to specify where to find the Riak server. `RIAK_TEST_PROTOCOL` to specify which protocol to test. Can be either `pbc` or `http`. + +Some of the connection tests need port numbers that are NOT in use. If ports 1023 and 1022 are in use on your test system, set the environment variables `DUMMY_HTTP_PORT` and `DUMMY_PB_PORT` to unused port numbers. + +Testing Search +-------------- + +If you don't have [Riak Search](http://docs.basho.com/riak/latest/dev/using/search/) enabled, you can set the `RUN_SEARCH` environment variable to 0 skip those tests. + +If you don't have [Search 2.0](https://github.com/basho/yokozuna) enabled, you can set the `RUN_YZ` environment variable to 0 to skip those tests. + +Testing Bucket Types (Riak 2+) +------------------------------ + +To test bucket-types, you must run the `create_bucket_types` setup command, which will create the bucket-types used in testing, or create them manually yourself. It can be run like so (substituting `$RIAK` with the root of your Riak install) + +```sh +./setup.py create_bucket_types --riak-admin=$RIAK/bin/riak-admin +``` + +You may alternately add these lines to `setup.cfg` + +```ini +[create_bucket_types] +riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin +``` + +To skip the bucket-type tests, set the `RUN_BTYPES` environment variable to `0`. + +Testing Data Types (Riak 2+) +---------------------------- + +To test data types, you must set up bucket types (see above.) + +To skip the data type tests, set the `RUN_DATATYPES` environment variable to `0`. + +Testing Timeseries (Riak 2.1+) +------------------------------ + +To test timeseries data, you must run the `setup_timeseries` command, which will create the bucket-types used in testing, or create them manually yourself. It can be run like so (substituting `$RIAK` with the root of your Riak install) + +```sh +./setup.py setup_timeseries --riak-admin=$RIAK/bin/riak-admin +``` + +You may alternately add these lines to `setup.cfg` + +```sh +[setup_timeseries] +riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin +``` + +To enable the timeseries tests, set the `RUN_TIMESERIES` environment variable to `1`. + +Testing Secondary Indexes +------------------------- + +To test [Secondary Indexes](http://docs.basho.com/riak/2.0.0/dev/using/2i/), the `RUN_INDEXES` environment variable must be set to 1 (or 0 to skip them.) + +Testing Security (Riak 2+) +-------------------------- + +By default [Security](http://docs.basho.com/riak/2.0.0beta1/ops/running/authz/) is not enabled on Riak. 
Once `security = on` is configured in the `riak.conf` file it can be enabled with `riak-admin`. + +If you have set up the test environment outlined in the Testing section you can go ahead and use this command to enable security + +```sh +python setup.py enable_security --riak-admin=$RIAK_DIR/bin/riak-admin +``` + +Once you are done testing security you can also + +```sh +python setup.py disable_security --riak-admin=$RIAK_DIR/bin/riak-admin +``` + +To run the tests + +```sh +RUN_SECURITY=1 RIAK_TEST_HTTP_PORT=18098 python setup.py test +``` + +Contributors +-------------------------- + +* Andrew Thompson +* Andy Gross +* Armon Dadgar +* Brett Hazen +* Brett Hoerner +* Brian Roach +* Bryan Fink +* Daniel Lindsley +* Daniel Néri +* Daniel Reverri +* David Koblas +* Dmitry Rozhkov +* Eric Florenzano +* Eric Moritz +* Filip de Waard +* Gilles Devaux +* Greg Nelson +* Gregory Burd +* Greg Stein +* Ian Plosker +* Jayson Baird +* Jeffrey Massung +* Jon Meredith +* Josip Lisec +* Justin Sheehy +* Kevin Smith +* [Luke Bakken](https://github.com/lukebakken) +* Mark Erdmann +* Mark Phillips +* Mathias Meyer +* Matt Heitzenroder +* Mikhail Sobolev +* Reid Draper +* Russell Brown +* Rusty Klophaus +* Rusty Klophaus +* Scott Lystig Fritchie +* Sean Cribbs +* Shuhao Wu +* Silas Sewell +* Socrates Lee +* Soren Hansen +* Sreejith Kesavan +* Timothée Peignier +* William Kral diff --git a/README.rst b/README.rst deleted file mode 100644 index 6e9b66ba..00000000 --- a/README.rst +++ /dev/null @@ -1,298 +0,0 @@ -====================== -Python Client for Riak -====================== - -Documentation -============= - -`Documentation for the Riak Python Client Library -`_ is available -here. The documentation source is found in `docs/ subdirectory -`_ and -can be built with `Sphinx `_. - -Documentation for Riak is available at http://docs.basho.com/riak/latest - -Install -======= - -The recommended versions of Python for use with this client are Python -`2.7.x`, `3.3.x`, `3.4.x` and `3.5.x`. The latest version from each series -should be preferred. - -Riak TS (Timeseries) -=================== - -You must use version `2.7.11`, `3.4.4` or `3.5.1` (or greater within a version series). -Otherwise you will be affected by `this Python bug `_. - - -From Source ------------ - -.. code-block:: console - - python setup.py install - -There are additional dependencies on Python packages `setuptools` and `protobuf`. - -From PyPI ---------- - -Official packages are signed and published to `PyPI -`_. - -To install from `PyPI `_ directly you can use -`pip`. - -.. code-block:: console - - pip install riak - - -Testing -======= - -To setup the default test configuration build a test Riak node (from -a ``riak`` directory) - -.. code-block:: console - - make rel - -See `Basic Cluster Setup -`_ -for more details. - -For all of the simple default values, set the ``RIAK_DIR`` environment -variable to the root of your Riak installation. Then from the -``riak-python-client`` directory - -.. code-block:: console - - make -C buildbot preconfigure - -Start your Riak node with ``riak start`` from the the Riak directory, then - -.. code-block:: console - - make -C buildbot configure - make -C buildbot test - -That will run the test suite twice: once with security enabled and once -without. - -Testing Options ---------------- - -If you wish to change the default options you can run the setup by hand. -First configure the test node by adjusting the ``riak.conf`` -settings, where ``RIAK_DIR`` is the path to the top your -Riak installation - -.. 
code-block:: console - - python setup.py preconfigure --riak-conf=$RIAK_DIR/etc/riak.conf - -Optionally the hostname and port numbers can be changed, too, via these -arguments: - - - ``--host=`` IP of host running Riak (default is ``localhost``) - - ``--pb-port=`` protocol buffers port number (default is ``8087``) - - ``--http-port=`` http port number (default is ``8098``) - - ``--https-port=`` https port number (default is ``8099``) - -You may alternately add these lines to ``setup.cfg`` - -.. code-block:: ini - - [preconfigure] - riak-conf=/Users/sean/dev/riak/rel/riak/etc/riak.conf - host=localhost - pb-port=8087 - http-port=8098 - https-port=8099 - -Next start the test node. Once it is running, a test configuration is -installed which includes security test users and bucket types - -.. code-block:: console - - python setup.py configure --riak-admin=$RIAK_DIR/bin/riak-admin - -Optionally these configuration settings can be changed, too: - - - ``--username=`` test user account (default is ``testuser``) - - ``--password=`` password for test user account (default is - ``testpassword``) - - ``--certuser=`` secruity test user account (default is ``certuser``) - - ``--certpass=`` password for security test user account (default is - ``certpass``) - -Similarly ``setup.cfg`` may be modified instead. To run the tests against a -Riak server (with configured TCP port configuration) on localhost, execute - -.. code-block:: console - - python setup.py test - -Connections to Riak in Tests ----------------------------- - -If your Riak server isn't running on localhost or you have built a -Riak devrel from source, use the environment variables -``RIAK_TEST_HOST``, ``RIAK_TEST_HTTP_PORT`` and -``RIAK_TEST_PB_PORT`` to specify where to find the Riak server. -``RIAK_TEST_PROTOCOL`` to specify which protocol to test. Can be -either ``pbc`` or ``http``. - -Some of the connection tests need port numbers that are NOT in use. If -ports 1023 and 1022 are in use on your test system, set the -environment variables ``DUMMY_HTTP_PORT`` and ``DUMMY_PB_PORT`` to -unused port numbers. - -Testing Search --------------- - -If you don't have `Riak Search -`_ enabled, you -can set the ``RUN_SEARCH`` environment variable to 0 skip those -tests. - -If you don't have `Search 2.0 `_ -enabled, you can set the ``RUN_YZ`` environment variable to 0 to skip -those tests. - -Testing Bucket Types (Riak 2+) ------------------------------- - -To test bucket-types, you must run the ``create_bucket_types`` setup -command, which will create the bucket-types used in testing, or create -them manually yourself. It can be run like so (substituting ``$RIAK`` -with the root of your Riak install) - -.. code-block:: console - - ./setup.py create_bucket_types --riak-admin=$RIAK/bin/riak-admin - -You may alternately add these lines to `setup.cfg` - -.. code-block:: ini - - [create_bucket_types] - riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin - -To skip the bucket-type tests, set the ``RUN_BTYPES`` environment -variable to ``0``. - -Testing Data Types (Riak 2+) ----------------------------- - -To test data types, you must set up bucket types (see above.) - -To skip the data type tests, set the ``RUN_DATATYPES`` environment -variable to ``0``. - -Testing Timeseries (Riak 2.1+) ------------------------------- - -To test timeseries data, you must run the ``setup_timeseries`` command, -which will create the bucket-types used in testing, or create them -manually yourself. 
It can be run like so (substituting ``$RIAK`` with -the root of your Riak install) - -.. code-block:: console - - ./setup.py setup_timeseries --riak-admin=$RIAK/bin/riak-admin - -You may alternately add these lines to `setup.cfg` - -.. code-block:: ini - - [setup_timeseries] - riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin - -To enable the timeseries tests, set the ``RUN_TIMESERIES`` environment -variable to ``1``. - -Testing Secondary Indexes -------------------------- - -To test -`Secondary Indexes `_, -the ``RUN_INDEXES`` environment variable must be set to 1 (or 0 to skip them.) - -Testing Security (Riak 2+) --------------------------- - -By default -`Security `_ is not -enabled on Riak. Once ``security = on`` is configured in the ``riak.conf`` -file it can be enabled with ``riak-admin``. - -If you have set up the test environment outlined in the `Testing`_ section -you can go ahead and use this command to enable security - -.. code-block:: console - - python setup.py enable_security --riak-admin=$RIAK_DIR/bin/riak-admin - -Once you are done testing security you can also - -.. code-block:: console - - python setup.py disable_security --riak-admin=$RIAK_DIR/bin/riak-admin - -To run the tests, then simply - -.. code-block:: console - - RUN_SECURITY=1 RIAK_TEST_HTTP_PORT=18098 python setup.py test - -Contributors --------------------------- - - Andrew Thompson - - Andy Gross - - Armon Dadgar - - Brett Hazen - - Brett Hoerner - - Brian Roach - - Bryan Fink - - Daniel Lindsley - - Daniel Néri - - Daniel Reverri - - David Koblas - - Dmitry Rozhkov - - Eric Florenzano - - Eric Moritz - - Filip de Waard - - Gilles Devaux - - Greg Nelson - - Greg Stein - - Gregory Burd - - Ian Plosker - - Jayson Baird - - Jeffrey Massung - - Jon Meredith - - Josip Lisec - - Justin Sheehy - - Kevin Smith - - `Luke Bakken `_ - - Mark Erdmann - - Mark Phillips - - Mathias Meyer - - Matt Heitzenroder - - Mikhail Sobolev - - Reid Draper - - Russell Brown - - Rusty Klophaus - - Rusty Klophaus - - Scott Lystig Fritchie - - Sean Cribbs - - Shuhao Wu - - Silas Sewell - - Socrates Lee - - Soren Hansen - - Sreejith Kesavan - - Timothée Peignier - - William Kral From d537563abf4479ed03285caac4d61ba1a567d810 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 11 Feb 2016 15:15:31 -0800 Subject: [PATCH 112/324] Fix code blocks --- README.md | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index fc921135..52f41f1e 100644 --- a/README.md +++ b/README.md @@ -21,9 +21,9 @@ You must use version `2.7.11`, `3.4.4` or `3.5.1` (or greater within a version s From Source ----------- -``sh +```sh python setup.py install -`` +``` There are additional dependencies on Python packages `setuptools` and `protobuf`. @@ -34,33 +34,33 @@ Official packages are signed and published to [PyPI](https://pypi.python.org/pyp To install from [PyPI](https://pypi.python.org/pypi/riak) directly you can use `pip`. -``sh +```sh pip install riak -`` +``` Testing ======= To setup the default test configuration build a test Riak node (from a `riak` directory) -``sh +```sh make rel -`` +``` See [Basic Cluster Setup](http://docs.basho.com/riak/2.0.0/ops/building/basic-cluster-setup/) for more details. For all of the simple default values, set the `RIAK_DIR` environment variable to the root of your Riak installation. 
Then from the `riak-python-client` directory -``sh +```sh make -C buildbot preconfigure -`` +``` Start your Riak node with `riak start` from the the Riak directory, then -``sh +```sh make -C buildbot configure make -C buildbot test -`` +``` That will run the test suite twice: once with security enabled and once without. From 00e27cb6c76c026373785b5ba2dc0ce94f618899 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 13 Feb 2016 18:14:22 -0800 Subject: [PATCH 113/324] Migrate more stuff to riak-client-tools --- buildbot/Makefile | 21 +-- commands.py | 304 +----------------------------- riak/tests/__init__.py | 43 +++-- riak/tests/test_kv.py | 2 +- riak/tests/test_security.py | 4 +- riak/transports/http/__init__.py | 3 +- riak/transports/pbc/connection.py | 7 +- riak/transports/security.py | 1 + setup.py | 11 +- tools | 2 +- 10 files changed, 51 insertions(+), 347 deletions(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index 87d53af1..8272fdf1 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -2,6 +2,8 @@ ifndef RIAK_DIR $(error RIAK_DIR is not set) endif +unexport PYENV_VERSION + PROJDIR = $(realpath $(CURDIR)/..) TOOLS_DIR = $(PROJDIR)/tools/devrel CA_DIR = $(PROJDIR)/tools/test-ca @@ -9,18 +11,12 @@ RIAK_CONF = $(RIAK_DIR)/etc/riak.conf ADV_CONF = $(RIAK_DIR)/etc/advanced.config RIAK_ADMIN = $(RIAK_DIR)/bin/riak-admin -export RIAK_HOST = localhost -export RIAK_PORT = 8087 - -CERTS_DIR = $(realpath $(CURDIR))/../riak/tests/resources -unexport PYENV_VERSION - preconfigure: $(TOOLS_DIR)/gen-riak-conf $(RIAK_CONF) 8098 8087 18098 $(CA_DIR)/certs/cacert.pem $(CA_DIR)/certs/riak-test-cert.pem $(CA_DIR)/private/riak-test-key.pem $(TOOLS_DIR)/gen-adv-conf $(ADV_CONF) configure: - $(TOOLS_DIR)/riak-cluster-config $(RIAK_ADMIN) 8098 true false + $(TOOLS_DIR)/riak-cluster-config $(RIAK_ADMIN) 8098 true true configure_timeseries: @../setup.py setup_timeseries --riak-admin=$(RIAK_ADMIN) @@ -32,27 +28,24 @@ lint: @pip install --upgrade pep8 flake8 @cd ..; pep8 --exclude=riak/pb riak *.py @cd ..; flake8 --exclude=riak/pb riak *.py - @openssl verify -CAfile $(CERTS_DIR)/ca.crt $(CERTS_DIR)/client.crt - @openssl verify -CAfile $(CERTS_DIR)/ca.crt $(CERTS_DIR)/server.crt -# TODO test: setup test_normal test_security -test: setup test_normal +test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" - @../setup.py disable_security --riak-admin=$(RIAK_ADMIN) + @$(RIAK_ADMIN) security disable @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. test_security: @echo "Testing Riak Python Client (with security)" - @../setup.py enable_security --riak-admin=$(RIAK_ADMIN) + @$(RIAK_ADMIN) security enable @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=18098 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. test_timeseries: @echo "Testing Riak Python Client (timeseries)" - @../setup.py disable_security --riak-admin=${RIAK_ADMIN} + @$(RIAK_ADMIN) security disable @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=0 RUN_DATATYPES=0 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. 
setup: diff --git a/commands.py b/commands.py index 20ce95bc..ac55b4a8 100644 --- a/commands.py +++ b/commands.py @@ -2,20 +2,15 @@ import os import os.path import re -import shutil from distutils.core import Command from distutils.errors import DistutilsOptionError from distutils.file_util import write_file from distutils import log -from string import Template from subprocess import Popen, PIPE -__all__ = ['build_messages', - 'setup_security', 'enable_security', 'disable_security', - 'setup_timeseries', - 'preconfigure', 'configure'] +__all__ = ['build_messages', 'setup_timeseries'] # Exception classes used by this module. @@ -176,303 +171,6 @@ class setup_timeseries(bucket_type_commands, Command): } -class security_commands(object): - def check_security_command(self, *args): - cmd = self._security_command(*args) - return self.check_output(cmd) - - def run_security_command(self, *args): - self.spawn(self._security_command(*args)) - - def _security_command(self, *args): - cmd = [self.riak_admin, "security"] - if isinstance(args, tuple): - for elem in args: - cmd.extend(elem) - else: - cmd.extend(args) - return cmd - - def check_output(self, *args, **kwargs): - if self.dry_run: - log.info(' '.join(args)) - return bytearray() - else: - return check_output(*args, **kwargs) - - -class setup_security(Command, security_commands): - """ - Sets up security for testing. By default this will create: - - * User `testuser` with password `testpassword` - * User `certuser` with password `certpass` - * Two security sources - * Permissions on - * riak_kv.get - * riak_kv.put - * riak_kv.delete - * riak_kv.index - * riak_kv.list_keys - * riak_kv.list_buckets - * riak_kv.mapreduce - * riak_core.get_bucket - * riak_core.set_bucket - * riak_core.get_bucket_type - * riak_core.set_bucket_type - * search.admin - * search.query - """ - - description = "create security settings used in integration tests" - - user_options = [ - ('riak-admin=', None, 'path to the riak-admin script'), - ('username=', None, 'test user account'), - ('password=', None, 'password for test user account'), - ('certuser=', None, 'certificate test user account'), - ('certpass=', None, 'password for certificate test user account') - ] - - _commands = [ - "add-user $USERNAME password=$PASSWORD", - "add-source $USERNAME 127.0.0.1/32 password", - "add-user $CERTUSER password=$CERTPASS", - "add-source $CERTUSER 127.0.0.1/32 certificate" - ] - - _grants = { - "riak_kv.get": ["any"], - "riak_kv.get_preflist": ["any"], - "riak_kv.put": ["any"], - "riak_kv.delete": ["any"], - "riak_kv.index": ["any"], - "riak_kv.list_keys": ["any"], - "riak_kv.list_buckets": ["any"], - "riak_kv.mapreduce": ["any"], - "riak_core.get_bucket": ["any"], - "riak_core.set_bucket": ["any"], - "riak_core.get_bucket_type": ["any"], - "riak_core.set_bucket_type": ["any"], - "search.admin": ["index", "schema"], - "search.query": ["index", "schema"] - } - - def initialize_options(self): - self.riak_admin = None - self.username = None - self.password = None - self.certuser = None - self.certpass = None - - def finalize_options(self): - if self.riak_admin is None: - raise DistutilsOptionError("riak-admin option not set") - if self.username is None: - self.username = 'testuser' - if self.password is None: - self.password = 'testpassword' - if self.certuser is None: - self.certuser = 'certuser' - if self.certpass is None: - self.certpass = 'certpass' - - def run(self): - if self._check_available(): - for cmd in self._commands: - # Replace the username and password if specified - s = 
Template(cmd) - newcmd = s.substitute(USERNAME=self.username, - PASSWORD=self.password, - CERTUSER=self.certuser, - CERTPASS=self.certpass) - log.info("Security command: {0}".format(repr(newcmd))) - self.run_security_command(tuple(newcmd.split(' '))) - for perm in self._grants: - self._apply_grant(perm, self._grants[perm]) - - def _check_available(self): - try: - self.check_security_command("status") - return True - except CalledProcessError: - log.error("Security is not supported on this Riak node!") - return False - - def _apply_grant(self, perm, targets): - for target in targets: - cmd = ["grant", perm, "on", target, "to", self.username] - log.info("Granting permission {0} on {1} to {2}" - .format(repr(perm), repr(target), repr(self.username))) - self.run_security_command(cmd) - cmd = ["grant", perm, "on", target, "to", self.certuser] - log.info("Granting permission {0} on {1} to {2}" - .format(repr(perm), repr(target), repr(self.certuser))) - self.run_security_command(cmd) - - -class enable_security(Command, security_commands): - """ - Actually turn on security. - """ - description = "turn on security within Riak" - - user_options = [ - ('riak-admin=', None, 'path to the riak-admin script'), - ] - - def initialize_options(self): - self.riak_admin = None - - def finalize_options(self): - if self.riak_admin is None: - raise DistutilsOptionError("riak-admin option not set") - - def run(self): - cmd = "enable" - self.run_security_command(tuple(cmd.split(' '))) - - -class disable_security(Command, security_commands): - """ - Actually turn off security. - """ - description = "turn off security within Riak" - - user_options = [ - ('riak-admin=', None, 'path to the riak-admin script'), - ] - - def initialize_options(self): - self.riak_admin = None - - def finalize_options(self): - if self.riak_admin is None: - raise DistutilsOptionError("riak-admin option not set") - - def run(self): - cmd = "disable" - self.run_security_command(tuple(cmd.split(' '))) - - -class preconfigure(Command): - """ - Sets up security configuration. 
- - * Update these lines in riak.conf - * storage_backend = leveldb - * search = on - * listener.protobuf.internal = 127.0.0.1:8087 - * listener.http.internal = 127.0.0.1:8098 - * listener.https.internal = 127.0.0.1:18098 - * ssl.certfile = $pwd/tests/resources/server.crt - * ssl.keyfile = $pwd/tests/resources/server.key - * ssl.cacertfile = $pwd/tests/resources/ca.crt - * check_crl = off - """ - - description = "preconfigure security settings used in integration tests" - - user_options = [ - ('riak-conf=', None, 'path to the riak.conf file'), - ('host=', None, 'IP of host running Riak'), - ('pb-port=', None, 'protocol buffers port number'), - ('https-port=', None, 'https port number') - ] - - def initialize_options(self): - self.riak_conf = None - self.host = "127.0.0.1" - self.pb_port = "8087" - self.http_port = "8098" - self.https_port = "18098" - - def finalize_options(self): - if self.riak_conf is None: - raise DistutilsOptionError("riak-conf option not set") - - def run(self): - self.cert_dir = os.path.dirname(os.path.realpath(__file__)) + \ - "/riak/tests/resources" - self._update_riak_conf() - - def _update_riak_conf(self): - http_host = self.host + ':' + self.http_port - https_host = self.host + ':' + self.https_port - pb_host = self.host + ':' + self.pb_port - self._backup_file(self.riak_conf) - conf = None - with open(self.riak_conf, 'r', buffering=1) as f: - conf = f.read() - conf = re.sub(r'search\s+=\s+off', r'search = on', conf) - conf = re.sub(r'##[ ]+ssl\.', r'ssl.', conf) - conf = re.sub(r'ssl.certfile\s+=\s+\S+', - r'ssl.certfile = ' + self.cert_dir + '/server.crt', - conf) - conf = re.sub(r'storage_backend\s+=\s+\S+', - r'storage_backend = leveldb', - conf) - conf = re.sub(r'ssl.keyfile\s+=\s+\S+', - r'ssl.keyfile = ' + self.cert_dir + '/server.key', - conf) - conf = re.sub(r'ssl.cacertfile\s+=\s+\S+', - r'ssl.cacertfile = ' + self.cert_dir + - '/ca.crt', - conf) - conf = re.sub(r'#*[ ]*listener.http.internal\s+=\s+\S+', - r'listener.http.internal = ' + http_host, - conf) - conf = re.sub(r'#*[ ]*listener.https.internal\s+=\s+\S+', - r'listener.https.internal = ' + https_host, - conf) - conf = re.sub(r'listener.protobuf.internal\s+=\s+\S+', - r'listener.protobuf.internal = ' + pb_host, - conf) - conf += 'check_crl = off\n' - # Older versions of OpenSSL client library need to match on the server - conf += 'tls_protocols.tlsv1 = on\n' - conf += 'tls_protocols.tlsv1.1 = on\n' - with open(self.riak_conf, 'w', buffering=1) as f: - f.write(conf) - - def _backup_file(self, name): - backup = name + ".bak" - if os.path.isfile(name): - shutil.copyfile(name, backup) - else: - log.info("Cannot backup missing file {0}".format(repr(name))) - - -class configure(Command): - """ - Sets up security configuration. - - * Run setup_security - """ - - description = "security settings for testing" - - user_options = setup_security.user_options - - def initialize_options(self): - self.riak_admin = None - self.username = None - self.password = None - - def finalize_options(self): - security = self.distribution.get_command_obj('setup_security') - security.riak_admin = self.riak_admin - security.username = self.username - security.password = self.password - - def run(self): - # Run all relevant sub-commands. 
- for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - sub_commands = [('setup_security', None)] - - class ComparableMixin(object): def _compare(self, other, method): try: diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 7ee88648..0cf7d1d4 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -1,4 +1,6 @@ import os +import socket + from riak.test_server import TestServer from riak.security import SecurityCreds @@ -17,6 +19,14 @@ except ImportError: HAVE_PROTO = False + +def hostname_resolves(hostname): + try: + socket.gethostbyname(hostname) + return 1 + except socket.error: + return 0 + HOST = os.environ.get('RIAK_TEST_HOST', '127.0.0.1') PROTOCOL = os.environ.get('RIAK_TEST_PROTOCOL', 'pbc') @@ -45,24 +55,33 @@ RUN_DATATYPES = int(os.environ.get('RUN_DATATYPES', '1')) RUN_SECURITY = int(os.environ.get('RUN_SECURITY', '0')) -SECURITY_USER = os.environ.get('RIAK_TEST_SECURITY_USER', 'testuser') -SECURITY_PASSWD = os.environ.get('RIAK_TEST_SECURITY_PASSWD', 'testpassword') +if RUN_SECURITY: + h = 'riak-test' + if hostname_resolves(h): + HOST = PB_HOST = HTTP_HOST = h + else: + raise AssertionError( + 'RUN_SECURITY requires that the host name' + + ' "riak-test" resolves to the IP address of a Riak node' + + ' with security enabled.') + +SECURITY_USER = os.environ.get('RIAK_TEST_SECURITY_USER', 'riakpass') +SECURITY_PASSWD = os.environ.get('RIAK_TEST_SECURITY_PASSWD', 'Test1234') + SECURITY_CACERT = os.environ.get('RIAK_TEST_SECURITY_CACERT', - 'riak/tests/resources/ca.crt') + 'tools/test-ca/certs/cacert.pem') SECURITY_REVOKED = os.environ.get('RIAK_TEST_SECURITY_REVOKED', - 'riak/tests/resources/server.crl') + 'tools/test-ca/crl/crl.pem') SECURITY_BAD_CERT = os.environ.get('RIAK_TEST_SECURITY_BAD_CERT', - 'riak/tests/resources/bad_ca.crt') + 'tools/test-ca/certs/badcert.pem') # Certificate-based Authentication only supported by PBC -# N.B., username and password must both still be supplied -SECURITY_KEY = os.environ.get('RIAK_TEST_SECURITY_KEY', - 'riak/tests/resources/client.key') +SECURITY_KEY = os.environ.get( + 'RIAK_TEST_SECURITY_KEY', + 'tools/test-ca/private/riakuser-client-cert-key.pem') SECURITY_CERT = os.environ.get('RIAK_TEST_SECURITY_CERT', - 'riak/tests/resources/client.crt') + 'tools/test-ca/certs/riakuser-client-cert.pem') SECURITY_CERT_USER = os.environ.get('RIAK_TEST_SECURITY_CERT_USER', - 'certuser') -SECURITY_CERT_PASSWD = os.environ.get('RIAK_TEST_SECURITY_CERT_PASSWD', - 'certpass') + 'riakuser') SECURITY_CIPHERS = 'DHE-RSA-AES128-SHA256:DHE-RSA-AES128-SHA:' + \ 'DHE-RSA-AES256-SHA256:DHE-RSA-AES256-SHA:AES128-SHA256:' + \ diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index ee35fefa..bfa2b888 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -701,7 +701,7 @@ def test_store_binary_object_from_file(self): def test_store_binary_object_from_file_should_use_default_mimetype(self): bucket = self.client.bucket(self.bucket_name) filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)), - os.pardir, os.pardir, 'README.rst') + os.pardir, os.pardir, 'README.md') obj = bucket.new_from_file(self.key_name, filepath) obj.store() obj = bucket.get(self.key_name) diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index 85588ee0..056c1b48 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -4,8 +4,7 @@ from riak.tests import RUN_SECURITY, SECURITY_USER, SECURITY_PASSWD, \ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ - 
SECURITY_CERT_USER, SECURITY_CERT_PASSWD, SECURITY_BAD_CERT, \ - SECURITY_CIPHERS + SECURITY_CERT_USER, SECURITY_BAD_CERT, SECURITY_CIPHERS from riak.security import SecurityCreds from riak.tests.base import IntegrationTestBase @@ -91,7 +90,6 @@ def test_security_password_without_cacert(self): @unittest.skipUnless(RUN_SECURITY, 'RUN_SECURITY is 0') def test_security_cert_authentication(self): creds = SecurityCreds(username=SECURITY_CERT_USER, - password=SECURITY_CERT_PASSWD, ciphers=SECURITY_CIPHERS, cert_file=SECURITY_CERT, pkey_file=SECURITY_KEY, diff --git a/riak/transports/http/__init__.py b/riak/transports/http/__init__.py index 0024589d..d7e69c3d 100644 --- a/riak/transports/http/__init__.py +++ b/riak/transports/http/__init__.py @@ -137,7 +137,6 @@ def connect(self): self.credentials._check_revoked_cert(self.sock) else: ssl_ctx = configure_ssl_context(self.credentials) - host = "riak@" + self.host if self.timeout is not None: sock.settimeout(self.timeout) self.sock = ssl.SSLSocket(sock=sock, @@ -146,7 +145,7 @@ def connect(self): cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.credentials.cacert_file, ciphers=self.credentials.ciphers, - server_hostname=host) + server_hostname=self.host) self.sock.context = ssl_ctx diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 12045323..60f264c1 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -84,7 +84,10 @@ def _auth(self): """ req = riak.pb.riak_pb2.RpbAuthReq() req.user = str_to_bytes(self._client._credentials.username) - req.password = str_to_bytes(self._client._credentials.password) + password = self._client._credentials.password + if not password: + password = '' + req.password = str_to_bytes(password) msg_code, _ = self._non_connect_request( riak.pb.messages.MSG_CODE_AUTH_REQ, req, @@ -130,7 +133,7 @@ def _ssl_handshake(self): if credentials: try: ssl_ctx = configure_ssl_context(credentials) - host = "riak@" + self._address[0] + host = self._address[0] ssl_socket = ssl.SSLSocket(sock=self._socket, keyfile=credentials.pkey_file, certfile=credentials.cert_file, diff --git a/riak/transports/security.py b/riak/transports/security.py index dfd4cdc1..a2ac46d5 100644 --- a/riak/transports/security.py +++ b/riak/transports/security.py @@ -72,6 +72,7 @@ def configure_ssl_context(credentials): pkeyfile = certfile if certfile: ssl_ctx.load_cert_chain(certfile, pkeyfile) + # TODO https://bugs.python.org/issue8813 if credentials.crl_file is not None: ssl_ctx.load_verify_locations(credentials.crl_file) ssl_ctx.verify_flags = ssl.VERIFY_CRL_CHECK_LEAF diff --git a/setup.py b/setup.py index 1c40976b..0d9ef61f 100755 --- a/setup.py +++ b/setup.py @@ -3,9 +3,7 @@ import platform from setuptools import setup, find_packages from version import get_version -from commands import preconfigure, configure, \ - setup_security, enable_security, disable_security, setup_timeseries, \ - build_messages +from commands import setup_timeseries, build_messages install_requires = ['six >= 1.8.0'] requires = ['six(>=1.8.0)'] @@ -45,12 +43,7 @@ url='https://github.com/basho/riak-python-client', cmdclass={ 'build_messages': build_messages, - 'setup_timeseries': setup_timeseries, - 'setup_security': setup_security, - 'preconfigure': preconfigure, - 'configure': configure, - 'enable_security': enable_security, - 'disable_security': disable_security + 'setup_timeseries': setup_timeseries }, classifiers=['License :: OSI Approved :: Apache Software License', 'Intended Audience :: Developers', diff --git 
a/tools b/tools index 343c4a48..c7b46bce 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 343c4a485e26bcee971a41faf9234d65a3fb8b53 +Subproject commit c7b46bce22cd96ef3c4b0c17f7e156f14029264c From 0905534418310197029eb05303e4aab6b0a44dfb Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 13 Feb 2016 18:38:48 -0800 Subject: [PATCH 114/324] Remove sections from README that are no longer relevant --- README.md | 60 ++----------------------------------------------------- 1 file changed, 2 insertions(+), 58 deletions(-) diff --git a/README.md b/README.md index 52f41f1e..5542ed2d 100644 --- a/README.md +++ b/README.md @@ -64,52 +64,6 @@ make -C buildbot test That will run the test suite twice: once with security enabled and once without. -Testing Options ---------------- - -If you wish to change the default options you can run the setup by hand. First configure the test node by adjusting the `riak.conf` settings, where `RIAK_DIR` is the path to the top your Riak installation - -```sh -python setup.py preconfigure --riak-conf=$RIAK_DIR/etc/riak.conf -``` - -Optionally the hostname and port numbers can be changed, too, via these arguments: - -* `--host=` IP of host running Riak (default is `localhost`) -* `--pb-port=` protocol buffers port number (default is `8087`) -* `--http-port=` http port number (default is `8098`) -* `--https-port=` https port number (default is `8099`) - -You may alternately add these lines to `setup.cfg` - -```ini -[preconfigure] -riak-conf=/Users/sean/dev/riak/rel/riak/etc/riak.conf -host=localhost -pb-port=8087 -http-port=8098 -https-port=8099 -``` - -Next start the test node. Once it is running, a test configuration is installed which includes security test users and bucket types - -```sh -python setup.py configure --riak-admin=$RIAK_DIR/bin/riak-admin -``` - -Optionally these configuration settings can be changed, too: - -* `--username=` test user account (default is `testuser`) -* `--password=` password for test user account (default is `testpassword`) -* `--certuser=` secruity test user account (default is `certuser`) -* `--certpass=` password for security test user account (default is `certpass`) - -Similarly `setup.cfg` may be modified instead. To run the tests against a Riak server (with configured TCP port configuration) on localhost, execute - -```sh -python setup.py test -``` - Connections to Riak in Tests ---------------------------- @@ -175,19 +129,9 @@ To test [Secondary Indexes](http://docs.basho.com/riak/2.0.0/dev/using/2i/), the Testing Security (Riak 2+) -------------------------- -By default [Security](http://docs.basho.com/riak/2.0.0beta1/ops/running/authz/) is not enabled on Riak. Once `security = on` is configured in the `riak.conf` file it can be enabled with `riak-admin`. - -If you have set up the test environment outlined in the Testing section you can go ahead and use this command to enable security +Ensure that the hostname `riak-test` resolves to your Riak host (most likely `localhost`). This is so the SSL host verification can succeed. -```sh -python setup.py enable_security --riak-admin=$RIAK_DIR/bin/riak-admin -``` - -Once you are done testing security you can also - -```sh -python setup.py disable_security --riak-admin=$RIAK_DIR/bin/riak-admin -``` +By default [Security](http://docs.basho.com/riak/2.0.0beta1/ops/running/authz/) is not enabled on Riak. Once `security = on` is configured in the `riak.conf` file it can be enabled with `riak-admin`. 
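The suite only checks that the name resolves before pointing its test hosts at it; a minimal sketch of that check, mirroring the `hostname_resolves` helper added to `riak/tests/__init__.py` (stdlib `socket` only, returning booleans instead of 0/1), looks like this:

```python
import socket

def hostname_resolves(hostname):
    # True when DNS or an /etc/hosts entry maps the name to an address.
    try:
        socket.gethostbyname(hostname)
        return True
    except socket.error:
        return False

# The security tests expect this to hold, e.g. via an /etc/hosts entry for riak-test.
assert hostname_resolves('riak-test')
```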
To run the tests From d046ab5daec2d02d2e16d1243a1b4ca92dff85a4 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 13 Feb 2016 18:43:41 -0800 Subject: [PATCH 115/324] Remove last vestiges of that rst file --- MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index db8a14a0..93567e45 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,6 @@ include docs/* include riak/erl_src/* -include README.rst +include README.md include LICENSE include RELEASE_NOTES.md include version.py From 550a2d14fcb282af50a5b16e06c275e441956b4a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 18 Feb 2016 07:07:54 -0800 Subject: [PATCH 116/324] Update to latest riak-client-tools --- tools | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools b/tools index c7b46bce..5ff5850e 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit c7b46bce22cd96ef3c4b0c17f7e156f14029264c +Subproject commit 5ff5850e1d7164f4f64f45a31d9b257e01a19e58 From eabd6943b3b1203be947ac97d9b1795c871deca8 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 20 Feb 2016 13:09:51 -0800 Subject: [PATCH 117/324] 2.4.2 Release Notes --- README.md | 5 + RELEASE_NOTES.md | 300 ----------------------------------------------- 2 files changed, 5 insertions(+), 300 deletions(-) delete mode 100644 RELEASE_NOTES.md diff --git a/README.md b/README.md index 5542ed2d..cc50d68f 100644 --- a/README.md +++ b/README.md @@ -8,6 +8,11 @@ Documentation Documentation for Riak is available [here](http://docs.basho.com/riak/latest). +Repository Cloning +================== + +*NOTE*: please clone this repository using the `--recursive` argument to `git clone` or follow the clone with `git submodule update --init`. This repository uses two submodules. + Install ======= diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md deleted file mode 100644 index 3baf2dfe..00000000 --- a/RELEASE_NOTES.md +++ /dev/null @@ -1,300 +0,0 @@ -# Riak Python Client Release Notes - -## 2.4.1 Patch Release - 2016-02-03 - -* [Riak TS: Millisecond precision](https://github.com/basho/riak-python-client/issues/430) -* [Fix release process](https://github.com/basho/riak-python-client/issues/429) - -## 2.4.0 Feature Release - 2016-01-13 - -This release enhances Riak Time Series functionality. - -* [Encapsulate table description](https://github.com/basho/riak-python-client/pull/422) - -## 2.3.0 Feature Release - 2015-12-14 - -Release 2.3.0 features support for new -[time series](https://github.com/basho/riak-python-client/pull/416) -functionality. - -This is release retires support for Python 2.6.x but adds support for -Python 3.5.x. - -There are also many bugfixes and new enhancements: - -* [Protocol buffers are now integrated into the Python Client] - (https://github.com/basho/riak-python-client/pull/418) -* [Support for Preflists and Write-Once bucket types] - (https://github.com/basho/riak-python-client/pull/414) -* [Support Riak 2.1.1] - (https://github.com/basho/riak-python-client/pull/407) -* [Native SSL support for Python 2.7.9+] - (https://github.com/basho/riak-python-client/pull/397) - - -## 2.2.0 Feature Release - 2014-12-18 - -Release 2.2.0 features support for -[Python 3](https://github.com/basho/riak-python-client/pull/379), -specifically 3.3 and 3.4. This version uses the native SSL security instead -of [pyOpenSSL](http://pypi.python.org/pypi/pyOpenSSL) which is required -for the Python 2 series. 
- -This release also includes many bugfixes and enhancements, most -notably: - -* [Fixed an issue with the implementation of `Mapping.__iter__`] - (https://github.com/basho/riak-python-client/pull/367) -* [Test client certificate generation updated] - (https://github.com/basho/riak-python-client/pull/373) -* [Protocol Buffers had a socket.send issue] - (https://github.com/basho/riak-python-client/pull/382) -* [Support for bucket types in Map/Reduce jobs added] - (https://github.com/basho/riak-python-client/pull/385) -* [Race condition in `RiakBucket` creation fixed] - (https://github.com/basho/riak-python-client/pull/386) -* [Data Types can now be deleted] - (https://github.com/basho/riak-python-client/pull/387) -* [2i Range Queries with a zero end index now work] - (https://github.com/basho/riak-python-client/pull/388) - - -## 2.1.0 Feature Release - 2014-09-03 - -Release 2.1.0 features support for Riak 2.0 capabilities including: - -* Bucket Types -* Riak Data Types (CRDTs) -* Search 2.0 (codename Yokozuna) -* Security: SSL/TLS, Authentication, and Authorization - -As a result of the new security features, the package now depends on -[pyOpenSSL](http://pypi.python.org/pypi/pyOpenSSL) and will warn if -your version of OpenSSL is too old. - -This release also includes many bugfixes and enhancements, most -notably: - -* The default protocol is now 'pbc', not 'http'. -* When used correctly, streaming requests no longer result in leaks - from the connection pool. -* The size of the multiget worker pool can be set when initializing - the client. -* Secondary index queries can now iterate over all pages in a query. -* The number of times a request is retried after network failure is - now configurable. -* The additional request options `basic_quorum` and `notfound_ok` are - now supported. - -## 2.0.3 Patch Release - 2014-03-06 - -Release 2.0.3 includes support for 1.4.4's 2I regexp feature and fixes -a few bugs: - -* Docs generation now uses the version from the top-level package. -* Some internal uses of the deprecated RiakClient.solr were removed. -* More errors will be caught and propagated properly from multiget - requests, preventing deadlocks on the caller side. - -## 2.0.2 Patch release - 2013-11-18 - -Release 2.0.2 includes support for the 1.4.1+ "timeout" option on -secondary index queries. - -## 2.0.1 Patch release - 2013-08-28 - -Release 2.0.1 includes a minor compatibility fix for Python 2.6 and an -updated README. - -## 2.0.0 Feature Release - 2013-07-30 - -Release 2.0 is the culmination of many months of rearchitecting the -client. Highlights: - -* Automatic connection to multiple nodes, with request retries, - through a thread-safe connection pool. -* All Riak 1.3 and 1.4 features, including bucket properties, - paginating and streaming secondary indexes, CRDT counters, - client-specified timeouts, and more. -* Cleaner, more Pythonic access to RiakObject and RiakBucket - attributes, favoring properties over methods where possible. -* Simpler representations of links (3-tuples) and index entries - (2-tuples). -* Streaming requests (keys, buckets, MapReduce, 2i) are now exposed as - iterators. -* Feature detection prevents sending requests to hosts that can't - handle them. -* Better handling of siblings -- you don't have to request them - individually anymore -- and registrable resolver functions. -* A new `multiget` operation that fetches a collection of keys using - a pool background threads. 
-* A more resilient, repeatable test suite that generates buckets and - key names that are essentially random. -* Last but not least, a brand new, more detailed documentation site! - -Other features: - -* Added an encoder/decoder pair to support `text/plain`. -* The Travis CI build will now install the latest Riak to run the - suite against. - -Other bugfixes: - -* The `charset` metadata can now be received via the `Content-Type` - header on HTTP. -* Objects with empty keys and buckets with empty names cannot be - created or accessed, as they are unaddressable over HTTP. -* Performance and compatibility of `TestServer` was improved. -* Non-ASCII request bodies are better supported on HTTP. -* Enabling and disabling search indexing on a bucket now uses the - `search` bucket property. - -## 1.5.2 Patch Release - 2013-01-31 - -Release 1.5.2 fixes some bugs and adds HTTPS/SSL support. - -* Added support for HTTPS. -* Fixed writing of the `app.config` for the `TestServer`. -* Reorganized the tests into multiple files and cases. -* Some methods on `RiakObject` were made private where appropriate. -* The version comparison used in feature detection was loosened to - support pre-release versions of Riak. -* Prevent fetching the `protobuf` package from Google Code. -* Prefer `simplejson` over `json` when present. - -## 1.5.1 Patch Release - 2012-10-24 - -Release 1.5.1 fixes one bug and some documentation errors. - -* Fix bug where `http_status` is used instead of `http_code`. -* Fix documentation of `RiakMapReduce.index` method. -* Fix documentation of `RiakClient.__init__` method. - -## 1.5.0 Feature Release - 2012-08-29 - -Release 1.5.0 is a feature release that supports Riak 1.2. - -Noteworthy features: - -* Riak 1.2 features are now supported, including Search and 2I queries - over Protocol Buffers transport. The Protocol Buffers message - definitions now exist as a separate package, available on - [PyPi](http://pypi.python.org/pypi/riak_pb/1.2.0). - - **NOTE:** The return value of search queries over HTTP and MapReduce - were changed to be compatible with the results returned from the - Protocol Buffers interface. -* The client will use a version-based feature detection scheme to - enable or disable various features, including the new Riak 1.2 - features. This enables compatibility with older nodes during a - rolling upgrade, or usage of the newer client with older clusters. - -Noteworthy bugfixes: - -* The code formatting and style was adjusted to fit PEP8 standards. -* All classes in the package are now "new-style". -* The PW accessor methods on RiakClient now get and set the right - instance variable. -* Various fixes were made to the TestServer and it will throw an - exception when it fails to start. - -## 1.4.1 Patch Release - 2012-06-19 - -Noteworthy features: - -* New Riak objects support Riak-created random keys - -Noteworthy bugfixes: - -* Map Reduce queries now use "application/json" as the Content-Type - -## 1.4.0 Feature Release - 2012-03-30 - -Release 1.4.0 is a feature release comprising over 117 individual -commits. - -Noteworthy features: - -* Python 2.6 and 2.7 are supported. On 2.6, the unittest2 package is - required to run the test suite. -* Google's official protobuf package (2.4.1 or later) is now a - dependency. The package from downloads.basho.com/support is no - longer necessary. -* Travis-CI is enabled on the client. Go to - http://travis-ci.org/basho/riak-python-client for build status. 
-* Riak 1.0+ features, namely secondary indexes and primary quora - (PR/PW), are supported. -* `if_none_match` is a valid request option when storing objects, and - will prevent the write when set to `True` if the key already exists. -* Links can be set wholesale using the `set_links()` method. -* Transport-specific options can be passed through when creating a - `Client` object. -* A connection manager was added that will (when manipulated manually) - allow connections to multiple Riak nodes. This will be fully - integrated in a future release. - -Noteworthy bugfixes: - -* Links now use the proper URL-encoding in HTTP headers, preventing - problems with explosion from multiple encoding passes. -* Many fixes were applied to make the Protocol Buffers transport more - stable. -* `RiakObject.get_content_type()` will behave properly when content - type is not set. -* Deprecated transport classes were removed since their functionality - had folded into the primary transports. -* A temporary fix was made for unicode bucket/key names which raises - an error when they are used and cannot be coerced to ASCII. -* The Erlang sources/beams for the TestServer are now included in the - package. -* MapReduce failures will now produce a more useful error message and - be handled properly when no results are returned. - -There are lots of other great fixes from our wonderful -community. [Check them out!](https://github.com/basho/riak-python-client/compare/1.3.0...1.4.0) - -## 1.3.0 Feature Release - 2011-08-04 - -Release 1.3.0 is a feature release bringing a slew of updates. - -Noteworthy features: - -* #37: Support for the Riak Search HTTP Interface (Mathias Meyer) -* #36: Support to store large files in Luwak (Mathias Meyer) -* #35: Convenience methods to enable, disable and check search indexing - on Riak buckets (Mathias Meyer) -* #34: Port of Ripple's test server to Python, allows faster testing - thanks to an in-memory Riak instance (Mathias Meyer) -* #31: New transports: A Protocol Buffers connection cache - (riak.transports.pbc.RiakPbcCacheTransport), a transport to reuse the - underlying TCP connections by setting SO_REUSEADDR on the socket - (riak.transports.http.RiakHttpReuseTransport), and one that tries to - reuse connections to the same host (riak.transports.http.RiakHttpPoolTransport) - (Gilles Devaux) - -Fixes: - -* #33: Respect maximum link header size when using HTTP. Link header is now - split up into multiple headers when it exceeds the maximum size of 8192 bytes. - (Mathias Meyer) -* #41: Connections potentially not returned to the protocol buffers connection - pool. (Reid Draper) -* #42: Reset protocol buffer connection up on connection error (Brett Hoerner) - -## 1.2.2 Patch Release - 2011-06-22 - -Release 1.2.2 is a minor patch release. 
- -Noteworthy fixes and improvements: - -* #29: Add an nicer API for using key filters with MapReduce (Eric Moritz) -* #13 and #24: Let Riak generate a key when none is specified (Mark Erdmann) -* #28: Function aliases for the Riak built-in MapReduce functions (Eric Moritz) -* #20: Add a convenience method to create Riak object directly from file (Ana Nelson) -* #16: Support return\_body parameter when creating a new object (Stefan Praszalowicz, Andy Gross) -* #17: Storing an object fails when it doesn't exist in Riak (Eric Moritz, Andy Gross) -* #18: Ensure that a default content type is set when none specified (Andy Gross) -* #22: Fix user meta data support (Mathias Meyer) -* #23: Fix links to the wiki (Mikhail Sobolev) -* #25: Enable support for code coverage when running tests (Mikhail Sobolev) -* #26: Debian packaging (Dmitry Rozhkov) From 3d8ca0395aac1513650210f7c5fc52c520549e7a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sun, 21 Feb 2016 07:51:30 -0800 Subject: [PATCH 118/324] Release notes file --- RELNOTES.md | 305 ++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 305 insertions(+) create mode 100644 RELNOTES.md diff --git a/RELNOTES.md b/RELNOTES.md new file mode 100644 index 00000000..b36303ea --- /dev/null +++ b/RELNOTES.md @@ -0,0 +1,305 @@ +# Riak Python Client Release Notes + +## 2.4.2 Patch Release - 2016-02-20 + +* [Fix SSL host name](https://github.com/basho/riak-python-client/pull/436) +* [Use `riak-client-tools`](https://github.com/basho/riak-python-client/issues/434) + +## 2.4.1 Patch Release - 2016-02-03 + +* [Riak TS: Millisecond precision](https://github.com/basho/riak-python-client/issues/430) +* [Fix release process](https://github.com/basho/riak-python-client/issues/429) + +## 2.4.0 Feature Release - 2016-01-13 + +This release enhances Riak Time Series functionality. + +* [Encapsulate table description](https://github.com/basho/riak-python-client/pull/422) + +## 2.3.0 Feature Release - 2015-12-14 + +Release 2.3.0 features support for new +[time series](https://github.com/basho/riak-python-client/pull/416) +functionality. + +This is release retires support for Python 2.6.x but adds support for +Python 3.5.x. + +There are also many bugfixes and new enhancements: + +* [Protocol buffers are now integrated into the Python Client] + (https://github.com/basho/riak-python-client/pull/418) +* [Support for Preflists and Write-Once bucket types] + (https://github.com/basho/riak-python-client/pull/414) +* [Support Riak 2.1.1] + (https://github.com/basho/riak-python-client/pull/407) +* [Native SSL support for Python 2.7.9+] + (https://github.com/basho/riak-python-client/pull/397) + + +## 2.2.0 Feature Release - 2014-12-18 + +Release 2.2.0 features support for +[Python 3](https://github.com/basho/riak-python-client/pull/379), +specifically 3.3 and 3.4. This version uses the native SSL security instead +of [pyOpenSSL](http://pypi.python.org/pypi/pyOpenSSL) which is required +for the Python 2 series. 
+ +This release also includes many bugfixes and enhancements, most +notably: + +* [Fixed an issue with the implementation of `Mapping.__iter__`] + (https://github.com/basho/riak-python-client/pull/367) +* [Test client certificate generation updated] + (https://github.com/basho/riak-python-client/pull/373) +* [Protocol Buffers had a socket.send issue] + (https://github.com/basho/riak-python-client/pull/382) +* [Support for bucket types in Map/Reduce jobs added] + (https://github.com/basho/riak-python-client/pull/385) +* [Race condition in `RiakBucket` creation fixed] + (https://github.com/basho/riak-python-client/pull/386) +* [Data Types can now be deleted] + (https://github.com/basho/riak-python-client/pull/387) +* [2i Range Queries with a zero end index now work] + (https://github.com/basho/riak-python-client/pull/388) + + +## 2.1.0 Feature Release - 2014-09-03 + +Release 2.1.0 features support for Riak 2.0 capabilities including: + +* Bucket Types +* Riak Data Types (CRDTs) +* Search 2.0 (codename Yokozuna) +* Security: SSL/TLS, Authentication, and Authorization + +As a result of the new security features, the package now depends on +[pyOpenSSL](http://pypi.python.org/pypi/pyOpenSSL) and will warn if +your version of OpenSSL is too old. + +This release also includes many bugfixes and enhancements, most +notably: + +* The default protocol is now 'pbc', not 'http'. +* When used correctly, streaming requests no longer result in leaks + from the connection pool. +* The size of the multiget worker pool can be set when initializing + the client. +* Secondary index queries can now iterate over all pages in a query. +* The number of times a request is retried after network failure is + now configurable. +* The additional request options `basic_quorum` and `notfound_ok` are + now supported. + +## 2.0.3 Patch Release - 2014-03-06 + +Release 2.0.3 includes support for 1.4.4's 2I regexp feature and fixes +a few bugs: + +* Docs generation now uses the version from the top-level package. +* Some internal uses of the deprecated RiakClient.solr were removed. +* More errors will be caught and propagated properly from multiget + requests, preventing deadlocks on the caller side. + +## 2.0.2 Patch release - 2013-11-18 + +Release 2.0.2 includes support for the 1.4.1+ "timeout" option on +secondary index queries. + +## 2.0.1 Patch release - 2013-08-28 + +Release 2.0.1 includes a minor compatibility fix for Python 2.6 and an +updated README. + +## 2.0.0 Feature Release - 2013-07-30 + +Release 2.0 is the culmination of many months of rearchitecting the +client. Highlights: + +* Automatic connection to multiple nodes, with request retries, + through a thread-safe connection pool. +* All Riak 1.3 and 1.4 features, including bucket properties, + paginating and streaming secondary indexes, CRDT counters, + client-specified timeouts, and more. +* Cleaner, more Pythonic access to RiakObject and RiakBucket + attributes, favoring properties over methods where possible. +* Simpler representations of links (3-tuples) and index entries + (2-tuples). +* Streaming requests (keys, buckets, MapReduce, 2i) are now exposed as + iterators. +* Feature detection prevents sending requests to hosts that can't + handle them. +* Better handling of siblings -- you don't have to request them + individually anymore -- and registrable resolver functions. +* A new `multiget` operation that fetches a collection of keys using + a pool background threads. 
+* A more resilient, repeatable test suite that generates buckets and + key names that are essentially random. +* Last but not least, a brand new, more detailed documentation site! + +Other features: + +* Added an encoder/decoder pair to support `text/plain`. +* The Travis CI build will now install the latest Riak to run the + suite against. + +Other bugfixes: + +* The `charset` metadata can now be received via the `Content-Type` + header on HTTP. +* Objects with empty keys and buckets with empty names cannot be + created or accessed, as they are unaddressable over HTTP. +* Performance and compatibility of `TestServer` was improved. +* Non-ASCII request bodies are better supported on HTTP. +* Enabling and disabling search indexing on a bucket now uses the + `search` bucket property. + +## 1.5.2 Patch Release - 2013-01-31 + +Release 1.5.2 fixes some bugs and adds HTTPS/SSL support. + +* Added support for HTTPS. +* Fixed writing of the `app.config` for the `TestServer`. +* Reorganized the tests into multiple files and cases. +* Some methods on `RiakObject` were made private where appropriate. +* The version comparison used in feature detection was loosened to + support pre-release versions of Riak. +* Prevent fetching the `protobuf` package from Google Code. +* Prefer `simplejson` over `json` when present. + +## 1.5.1 Patch Release - 2012-10-24 + +Release 1.5.1 fixes one bug and some documentation errors. + +* Fix bug where `http_status` is used instead of `http_code`. +* Fix documentation of `RiakMapReduce.index` method. +* Fix documentation of `RiakClient.__init__` method. + +## 1.5.0 Feature Release - 2012-08-29 + +Release 1.5.0 is a feature release that supports Riak 1.2. + +Noteworthy features: + +* Riak 1.2 features are now supported, including Search and 2I queries + over Protocol Buffers transport. The Protocol Buffers message + definitions now exist as a separate package, available on + [PyPi](http://pypi.python.org/pypi/riak_pb/1.2.0). + + **NOTE:** The return value of search queries over HTTP and MapReduce + were changed to be compatible with the results returned from the + Protocol Buffers interface. +* The client will use a version-based feature detection scheme to + enable or disable various features, including the new Riak 1.2 + features. This enables compatibility with older nodes during a + rolling upgrade, or usage of the newer client with older clusters. + +Noteworthy bugfixes: + +* The code formatting and style was adjusted to fit PEP8 standards. +* All classes in the package are now "new-style". +* The PW accessor methods on RiakClient now get and set the right + instance variable. +* Various fixes were made to the TestServer and it will throw an + exception when it fails to start. + +## 1.4.1 Patch Release - 2012-06-19 + +Noteworthy features: + +* New Riak objects support Riak-created random keys + +Noteworthy bugfixes: + +* Map Reduce queries now use "application/json" as the Content-Type + +## 1.4.0 Feature Release - 2012-03-30 + +Release 1.4.0 is a feature release comprising over 117 individual +commits. + +Noteworthy features: + +* Python 2.6 and 2.7 are supported. On 2.6, the unittest2 package is + required to run the test suite. +* Google's official protobuf package (2.4.1 or later) is now a + dependency. The package from downloads.basho.com/support is no + longer necessary. +* Travis-CI is enabled on the client. Go to + http://travis-ci.org/basho/riak-python-client for build status. 
+* Riak 1.0+ features, namely secondary indexes and primary quora + (PR/PW), are supported. +* `if_none_match` is a valid request option when storing objects, and + will prevent the write when set to `True` if the key already exists. +* Links can be set wholesale using the `set_links()` method. +* Transport-specific options can be passed through when creating a + `Client` object. +* A connection manager was added that will (when manipulated manually) + allow connections to multiple Riak nodes. This will be fully + integrated in a future release. + +Noteworthy bugfixes: + +* Links now use the proper URL-encoding in HTTP headers, preventing + problems with explosion from multiple encoding passes. +* Many fixes were applied to make the Protocol Buffers transport more + stable. +* `RiakObject.get_content_type()` will behave properly when content + type is not set. +* Deprecated transport classes were removed since their functionality + had folded into the primary transports. +* A temporary fix was made for unicode bucket/key names which raises + an error when they are used and cannot be coerced to ASCII. +* The Erlang sources/beams for the TestServer are now included in the + package. +* MapReduce failures will now produce a more useful error message and + be handled properly when no results are returned. + +There are lots of other great fixes from our wonderful +community. [Check them out!](https://github.com/basho/riak-python-client/compare/1.3.0...1.4.0) + +## 1.3.0 Feature Release - 2011-08-04 + +Release 1.3.0 is a feature release bringing a slew of updates. + +Noteworthy features: + +* #37: Support for the Riak Search HTTP Interface (Mathias Meyer) +* #36: Support to store large files in Luwak (Mathias Meyer) +* #35: Convenience methods to enable, disable and check search indexing + on Riak buckets (Mathias Meyer) +* #34: Port of Ripple's test server to Python, allows faster testing + thanks to an in-memory Riak instance (Mathias Meyer) +* #31: New transports: A Protocol Buffers connection cache + (riak.transports.pbc.RiakPbcCacheTransport), a transport to reuse the + underlying TCP connections by setting SO_REUSEADDR on the socket + (riak.transports.http.RiakHttpReuseTransport), and one that tries to + reuse connections to the same host (riak.transports.http.RiakHttpPoolTransport) + (Gilles Devaux) + +Fixes: + +* #33: Respect maximum link header size when using HTTP. Link header is now + split up into multiple headers when it exceeds the maximum size of 8192 bytes. + (Mathias Meyer) +* #41: Connections potentially not returned to the protocol buffers connection + pool. (Reid Draper) +* #42: Reset protocol buffer connection up on connection error (Brett Hoerner) + +## 1.2.2 Patch Release - 2011-06-22 + +Release 1.2.2 is a minor patch release. 
+ +Noteworthy fixes and improvements: + +* #29: Add an nicer API for using key filters with MapReduce (Eric Moritz) +* #13 and #24: Let Riak generate a key when none is specified (Mark Erdmann) +* #28: Function aliases for the Riak built-in MapReduce functions (Eric Moritz) +* #20: Add a convenience method to create Riak object directly from file (Ana Nelson) +* #16: Support return\_body parameter when creating a new object (Stefan Praszalowicz, Andy Gross) +* #17: Storing an object fails when it doesn't exist in Riak (Eric Moritz, Andy Gross) +* #18: Ensure that a default content type is set when none specified (Andy Gross) +* #22: Fix user meta data support (Mathias Meyer) +* #23: Fix links to the wiki (Mikhail Sobolev) +* #25: Enable support for code coverage when running tests (Mikhail Sobolev) +* #26: Debian packaging (Dmitry Rozhkov) From 827787fe57ec5f667f29086eec4bd7f3044f2154 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 22 Feb 2016 08:35:03 -0800 Subject: [PATCH 119/324] Use pandoc to convert md to rst Add pandoc check to Makefile Add pypandoc check to Makefile --- MANIFEST.in | 2 +- Makefile | 14 +++++++++----- README.md | 8 ++++---- setup.py | 8 +++++++- 4 files changed, 21 insertions(+), 11 deletions(-) diff --git a/MANIFEST.in b/MANIFEST.in index 93567e45..d9f9a3fd 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,6 +2,6 @@ include docs/* include riak/erl_src/* include README.md include LICENSE -include RELEASE_NOTES.md +include RELNOTES.md include version.py include commands.py diff --git a/Makefile b/Makefile index 5b3c059c..efda2036 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,6 @@ -.PHONY: pb_clean pb_compile pb_build release +.PHONY: pb_clean pb_compile pb_build release release_sdist + +PANDOC_VERSION := $(shell pandoc --version) clean: pb_clean @@ -12,17 +14,20 @@ pb_compile: pb_clean @python setup.py build_messages release_sdist: +ifeq ($(PANDOC_VERSION),) + $(error The pandoc command is required to correctly convert README.md to rst format) +endif ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) -else +endif + @python -c 'import pypandoc' @echo "==> Python (sdist release)" @python setup.py sdist upload -s -i $(RELEASE_GPG_KEYNAME) -endif release: release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) -else +endif @echo "==> Python 2.7 (release)" @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.3 (release)" @@ -31,4 +36,3 @@ else @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.5 (release)" @python3.5 setup.py build --build-base=py-build/3.5 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) -endif diff --git a/README.md b/README.md index cc50d68f..ba773034 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,7 @@ Python Client for Riak Documentation ============= -[Documentation for the Riak Python Client Library](http://basho.github.io/riak-python-client/index.html) is available here. The documentation source is found in [`docs/` subdirectory](https://github.com/basho/riak-python-client/tree/master/docs) and can be built with [Sphinx](http://sphinx.pocoo.org/). +[Documentation for the Riak Python Client Library](http://basho.github.io/riak-python-client/index.html) is available [here](http://basho.github.io/riak-python-client/index.html). 
Documentation for Riak is available [here](http://docs.basho.com/riak/latest). @@ -52,7 +52,7 @@ To setup the default test configuration build a test Riak node (from a `riak` di make rel ``` -See [Basic Cluster Setup](http://docs.basho.com/riak/2.0.0/ops/building/basic-cluster-setup/) for more details. +See [Basic Cluster Setup](http://docs.basho.com/riak/latest/ops/building/basic-cluster-setup/) for more details. For all of the simple default values, set the `RIAK_DIR` environment variable to the root of your Riak installation. Then from the `riak-python-client` directory @@ -129,14 +129,14 @@ To enable the timeseries tests, set the `RUN_TIMESERIES` environment variable to Testing Secondary Indexes ------------------------- -To test [Secondary Indexes](http://docs.basho.com/riak/2.0.0/dev/using/2i/), the `RUN_INDEXES` environment variable must be set to 1 (or 0 to skip them.) +To test [Secondary Indexes](http://docs.basho.com/riak/latest/dev/using/2i/), the `RUN_INDEXES` environment variable must be set to 1 (or 0 to skip them.) Testing Security (Riak 2+) -------------------------- Ensure that the hostname `riak-test` resolves to your Riak host (most likely `localhost`). This is so the SSL host verification can succeed. -By default [Security](http://docs.basho.com/riak/2.0.0beta1/ops/running/authz/) is not enabled on Riak. Once `security = on` is configured in the `riak.conf` file it can be enabled with `riak-admin`. +By default [Security](http://docs.basho.com/riak/latest/ops/running/authz/) is not enabled on Riak. Once `security = on` is configured in the `riak.conf` file it can be enabled with `riak-admin`. To run the tests diff --git a/setup.py b/setup.py index 0d9ef61f..faa91886 100755 --- a/setup.py +++ b/setup.py @@ -18,11 +18,16 @@ install_requires.append('python3_protobuf >=2.4.1, <2.6.0') requires.append('python3_protobuf(>=2.4.1, <2.6.0)') - tests_require = [] if platform.python_version() < '2.7.0': tests_require.append("unittest2") +try: + import pypandoc + long_description = pypandoc.convert('README.md', 'rst') +except(IOError, ImportError): + long_description = open('README.md').read() + setup( name='riak', version=get_version(), @@ -32,6 +37,7 @@ tests_require=tests_require, package_data={'riak': ['erl_src/*']}, description='Python client for Riak', + long_description=long_description, zip_safe=True, options={'easy_install': {'allow_hosts': 'pypi.python.org'}}, include_package_data=True, From aabd6009d135de9a2f402d32d17747dda66008e2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 22 Feb 2016 10:59:06 -0800 Subject: [PATCH 120/324] Merge fixes. 
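This merge wires the term-to-binary (TTB) codec into the PB transport: the connection can now toggle encodings and frame TTB payloads with erlastic, and RiakPbcTransport reads an opt-in `use_ttb` transport option for timeseries get/put. A minimal sketch of how a caller might opt in, assuming `RiakClient` forwards `transport_options` unchanged to the transport:

```python
from riak import RiakClient

# Assumption: the 'use_ttb' key is consumed by RiakPbcTransport.__init__ below;
# when true, ts_get/ts_put use the TTB codec instead of the protobuf messages.
client = RiakClient(protocol='pbc', transport_options={'use_ttb': True})
```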
--- riak/benchmarks/timeseries.py | 14 ++++-- riak/tests/test_timeseries.py | 16 +++--- riak/tests/test_timeseries_ttb.py | 56 +++++++++++++++++++-- riak/transports/pbc/codec.py | 8 +-- riak/transports/pbc/connection.py | 83 +++++++++++++++++++++---------- riak/transports/pbc/transport.py | 38 ++++++++++---- riak/transports/ttb/codec.py | 70 +++++++++++++++++++++++++- riak/util.py | 10 +--- 8 files changed, 232 insertions(+), 63 deletions(-) diff --git a/riak/benchmarks/timeseries.py b/riak/benchmarks/timeseries.py index 57ed6f5c..1ddbac83 100644 --- a/riak/benchmarks/timeseries.py +++ b/riak/benchmarks/timeseries.py @@ -1,8 +1,16 @@ -from multiprocessing import cpu_count -from riak import RiakClient -import riak.benchmark as benchmark import datetime +import logging import random +import sys + +import riak.benchmark as benchmark + +from multiprocessing import cpu_count +from riak import RiakClient + +# logger = logging.getLogger() +# logger.level = logging.DEBUG +# logger.addHandler(logging.StreamHandler(sys.stdout)) epoch = datetime.datetime.utcfromtimestamp(0) onesec = datetime.timedelta(0, 1) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index c1d44a7d..cad20ad3 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -10,7 +10,8 @@ from riak.table import Table from riak.ts_object import TsObject from riak.transports.pbc.codec import RiakPbcCodec -from riak.util import str_to_bytes, bytes_to_str, unix_time_millis +from riak.util import str_to_bytes, bytes_to_str, \ + unix_time_millis, datetime_from_unix_time_millis, \ is_timeseries_supported from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase @@ -39,14 +40,13 @@ class TimeseriesUnitTests(unittest.TestCase): @classmethod def setUpClass(cls): - self.ts0ms = unix_time_millis(ts0) - self.ts1ms = unix_time_millis(ts1) - cls.ts0ms = cls.c._unix_time_millis(ts0) + cls.c = RiakPbcCodec() + cls.ts0ms = unix_time_millis(ts0) if cls.ts0ms != ex0ms: raise AssertionError( 'expected {:d} to equal {:d}'.format(cls.ts0ms, ex0ms)) - cls.ts1ms = cls.c._unix_time_millis(ts1) + cls.ts1ms = unix_time_millis(ts1) if cls.ts1ms != ex1ms: raise AssertionError( 'expected {:d} to equal {:d}'.format(cls.ts1ms, ex1ms)) @@ -56,7 +56,7 @@ def setUpClass(cls): [bd1, 3, 4.5, ts1, False] ] cls.test_key = ['hash1', 'user2', ts0] - self.table = Table(None, table_name) + cls.table = Table(None, table_name) def validate_keyreq(self, req): self.assertEqual(self.table.name, bytes_to_str(req.table)) @@ -66,9 +66,9 @@ def validate_keyreq(self, req): self.assertEqual(self.ts0ms, req.key[2].timestamp_value) def test_encode_decode_timestamp(self): - ts0ms = self.c._unix_time_millis(ts0) + ts0ms = unix_time_millis(ts0) self.assertEqual(ts0ms, ex0ms) - ts0_d = self.c._datetime_from_unix_time_millis(ts0ms) + ts0_d = datetime_from_unix_time_millis(ts0ms) self.assertEqual(ts0, ts0_d) def test_encode_data_for_get(self): diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 46a5797a..16e2c5fb 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -10,13 +10,19 @@ from riak.table import Table from riak.ts_object import TsObject from riak.transports.ttb.codec import RiakTtbCodec -from riak.util import str_to_bytes, unix_time_millis +from riak.util import str_to_bytes, \ + unix_time_millis, datetime_from_unix_time_millis if platform.python_version() < '2.7': unittest = __import__('unittest2') else: import unittest +rpberrorresp_a = 
Atom('rpberrorresp') +tsgetreq_a = Atom('tsgetreq') +tsgetresp_a = Atom('tsgetresp') +tsputreq_a = Atom('tsputreq') + udef_a = Atom('undefined') tsc_a = Atom('tscell') table_name = 'GeoCheckin' @@ -47,12 +53,54 @@ def test_encode_data_for_get(self): (tsc_a, str_to_bytes('user2'), udef_a, udef_a, udef_a, udef_a), (tsc_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a) ] - req = Atom('tsgetreq'), str_to_bytes(table_name), keylist, udef_a + req = tsgetreq_a, str_to_bytes(table_name), keylist, udef_a req_test = encode(req) - req_encoded = self.c._encode_timeseries_keyreq(self.table, self.test_key) + req_encoded = self.c._encode_timeseries_keyreq_ttb(self.table, self.test_key) self.assertEqual(req_test, req_encoded) + # def test_decode_riak_error(self): + + def test_decode_data_from_get(self): + cols = [] + r0 = [ + (tsc_a, bd0, udef_a, udef_a, udef_a, udef_a), + (tsc_a, udef_a, 0, udef_a, udef_a, udef_a), + (tsc_a, udef_a, udef_a, udef_a, udef_a, 1.2), + (tsc_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), + (tsc_a, udef_a, udef_a, udef_a, True, udef_a), + (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) + ] + r1 = [ + (tsc_a, bd1, udef_a, udef_a, udef_a, udef_a), + (tsc_a, udef_a, 3, udef_a, udef_a, udef_a), + (tsc_a, udef_a, udef_a, udef_a, udef_a, 4.5), + (tsc_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), + (tsc_a, udef_a, udef_a, udef_a, False, udef_a), + (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) + ] + rows = [r0, r1] + # { tsgetresp, [cols], [rows] } + rsp_data = tsgetresp_a, cols, rows # NB: Python tuple notation + rsp_ttb = encode(rsp_data) + + tsobj = TsObject(None, self.table, [], []) + self.c._decode_timeseries_ttb(rsp_ttb, tsobj) + + for i in range(0, 1): + dr = rows[i] + r = tsobj.rows[i] + self.assertEqual(r[0], dr[0][1]) + self.assertEqual(r[1], dr[1][2]) + self.assertEqual(r[2], dr[2][5]) + self.assertEqual(r[3], + datetime_from_unix_time_millis(dr[3][3])) + if i == 0: + self.assertEqual(r[4], True) + else: + self.assertEqual(r[4], False) + self.assertEqual(r[5], None) + def test_encode_data_for_put(self): r0 = [ (tsc_a, bd0, udef_a, udef_a, udef_a, udef_a), @@ -71,7 +119,7 @@ def test_encode_data_for_put(self): (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) ] rows = [r0, r1] - req = Atom('tsputreq'), str_to_bytes(table_name), udef_a, rows + req = tsputreq_a, str_to_bytes(table_name), udef_a, rows req_test = encode(req) tsobj = TsObject(None, self.table, self.rows, None) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 9321356a..e44ab42b 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -633,11 +633,11 @@ def _encode_to_ts_cell(self, cell, ts_cell): elif isinstance(cell, bool): ts_cell.boolean_value = cell elif isinstance(cell, string_types): - logging.debug("cell -> str: '%s'", cell) + # logging.debug("cell -> str: '%s'", cell) ts_cell.varchar_value = str_to_bytes(cell) elif (isinstance(cell, int) or (PY2 and isinstance(cell, long))): # noqa - logging.debug("cell -> int/long: '%s'", cell) + # logging.debug("cell -> int/long: '%s'", cell) ts_cell.sint64_value = cell elif isinstance(cell, float): ts_cell.double_value = cell @@ -692,10 +692,10 @@ def _encode_timeseries_put(self, tsobj, req): def _decode_timeseries(self, resp, tsobj): """ Fills an TsObject with the appropriate data and - metadata from a TsQueryResp. + metadata from a TsGetResp / TsQueryResp. 
:param resp: the protobuf message from which to process data - :type resp: riak.pb.TsQueryRsp or riak.pb.riak_ts_pb2.TsGetResp + :type resp: riak.pb.riak_ts_pb2.TsQueryRsp or riak.pb.riak_ts_pb2.TsGetResp :param tsobj: a TsObject :type tsobj: TsObject """ diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 60f264c1..c268d4f4 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -1,7 +1,9 @@ +import logging import socket import struct import riak.pb.riak_pb2 import riak.pb.messages +import erlastic from riak.security import SecurityError, USE_STDLIB_SSL from riak import RiakError @@ -21,17 +23,25 @@ class RiakPbcConnection(object): Connection-related methods for RiakPbcTransport. """ - def _encode_msg(self, msg_code, msg=None): + def __init__(self): + self._ttb_enabled = False + + def _encode_msg(self, msg_code, msg=None, is_ttb=False): if msg is None: return struct.pack("!iB", 1, msg_code) - msgstr = msg.SerializeToString() - slen = len(msgstr) - hdr = struct.pack("!iB", 1 + slen, msg_code) - return hdr + msgstr - def _request(self, msg_code, msg=None, expect=None): - self._send_msg(msg_code, msg) - return self._recv_msg(expect) + if is_ttb: + data = msg + else: + data = msg.SerializeToString() + + datalen = len(data) + hdr = struct.pack("!iB", 1 + datalen, msg_code) + return hdr + data + + def _request(self, msg_code, msg=None, expect=None, is_ttb=False): + self._send_msg(msg_code, msg, is_ttb) + return self._recv_msg(expect, is_ttb) def _non_connect_request(self, msg_code, msg=None, expect=None): """ @@ -41,16 +51,18 @@ def _non_connect_request(self, msg_code, msg=None, expect=None): self._non_connect_send_msg(msg_code, msg) return self._recv_msg(expect) - def _non_connect_send_msg(self, msg_code, msg): + def _non_connect_send_msg(self, msg_code, msg, is_ttb=False): """ Similar to self._send, but doesn't try to initiate a connection, thus preventing an infinite loop. 
""" - self._socket.sendall(self._encode_msg(msg_code, msg)) + self._socket.sendall(self._encode_msg(msg_code, msg, is_ttb)) - def _send_msg(self, msg_code, msg): + def _send_msg(self, msg_code, msg, is_ttb=False): self._connect() - self._non_connect_send_msg(msg_code, msg) + if is_ttb and not self._enable_ttb(): + raise RiakError('could not switch to TTB encoding!') + self._non_connect_send_msg(msg_code, msg, is_ttb) def _init_security(self): """ @@ -75,6 +87,20 @@ def _starttls(self): else: return False + def _enable_ttb(self): + if self._ttb_enabled: + return True + else: + logging.debug("pbc/connection enabling TTB") + msg_code, _ = self._non_connect_request( + riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_REQ) + if msg_code == riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP: + self._ttb_enabled = True + logging.debug("pbc/connection TTB IS ENABLED") + return True + else: + return False + def _auth(self): """ Perform an authorization request against Riak @@ -154,23 +180,24 @@ def _ssl_handshake(self): # fail if *any* exceptions are thrown during SSL handshake raise SecurityError(e) - def _recv_msg(self, expect=None): + def _recv_msg(self, expect=None, is_ttb=False): self._recv_pkt() msg_code, = struct.unpack("B", self._inbuf[:1]) if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: - err = self._parse_msg(msg_code, self._inbuf[1:]) + err = self._parse_msg(msg_code, self._inbuf[1:], is_ttb) if err is None: raise RiakError('no error provided!') else: raise RiakError(bytes_to_str(err.errmsg)) elif msg_code in riak.pb.messages.MESSAGE_CLASSES: - msg = self._parse_msg(msg_code, self._inbuf[1:]) + msg = self._parse_msg(msg_code, self._inbuf[1:], is_ttb) else: raise Exception("unknown msg code %s" % msg_code) if expect and msg_code != expect: raise RiakError("unexpected protocol buffer message code: %d, %r" % (msg_code, msg)) + logging.debug("pbc/connection received msg_code %d msg %s", msg_code, msg) return msg_code, msg def _recv_pkt(self): @@ -218,18 +245,24 @@ def close(self): self._socket.close() del self._socket - def _parse_msg(self, code, packet): - try: - pbclass = riak.pb.messages.MESSAGE_CLASSES[code] - except KeyError: - pbclass = None + def _parse_msg(self, code, packet, is_ttb=False): + if is_ttb: + if code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ + code != riak.pb.messages.MSG_CODE_TS_PUT_RESP: + raise RiakError("TTB can't parse code: %d" % code) + return erlastic.decode(packet) + else: + try: + pbclass = riak.pb.messages.MESSAGE_CLASSES[code] + except KeyError: + pbclass = None - if pbclass is None: - return None + if pbclass is None: + return None - pbo = pbclass() - pbo.ParseFromString(packet) - return pbo + pbo = pbclass() + pbo.ParseFromString(packet) + return pbo # These are set in the RiakPbcTransport initializer _address = None diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 53df2181..c3e4f749 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -1,3 +1,4 @@ +import logging import riak.pb.messages import riak.pb.riak_pb2 import riak.pb.riak_kv_pb2 @@ -15,10 +16,13 @@ RiakPbcIndexStream, RiakPbcTsKeyStream) from riak.transports.pbc.codec import RiakPbcCodec +from riak.transports.ttb.codec import RiakTtbCodec + from six import PY2, PY3 -class RiakPbcTransport(RiakTransport, RiakPbcConnection, RiakPbcCodec): +class RiakPbcTransport(RiakTransport, RiakPbcConnection, + RiakPbcCodec, RiakTtbCodec): """ The RiakPbcTransport object holds a connection to the protocol buffers interface on the riak server. 
@@ -28,7 +32,7 @@ def __init__(self, node=None, client=None, timeout=None, - *unused_options): + **transport_options): """ Construct a new RiakPbcTransport object. """ @@ -39,6 +43,7 @@ def __init__(self, self._address = (node.host, node.pb_port) self._timeout = timeout self._socket = None + self._use_ttb = transport_options.get('use_ttb', False) # FeatureDetection API def _server_version(self): @@ -178,24 +183,39 @@ def ts_describe(self, table): return self.ts_query(table, query) def ts_get(self, table, key): - req = riak.pb.riak_ts_pb2.TsGetReq() - self._encode_timeseries_keyreq(table, key, req) + ts_get_resp = None + if self._use_ttb: + encoded = self._encode_timeseries_keyreq_ttb(table, key) + else: + req = riak.pb.riak_ts_pb2.TsGetReq() + self._encode_timeseries_keyreq(table, key, req) msg_code, ts_get_resp = self._request( riak.pb.messages.MSG_CODE_TS_GET_REQ, req, - riak.pb.messages.MSG_CODE_TS_GET_RESP) + riak.pb.messages.MSG_CODE_TS_GET_RESP, + self._use_ttb) tsobj = TsObject(self._client, table, [], None) - self._decode_timeseries(ts_get_resp, tsobj) + if self._use_ttb: + self._decode_timeseries_ttb(ts_get_resp, tsobj) + else: + self._decode_timeseries(ts_get_resp, tsobj) return tsobj def ts_put(self, tsobj): - req = riak.pb.riak_ts_pb2.TsPutReq() - self._encode_timeseries_put(tsobj, req) + if self._use_ttb: + req = self._encode_timeseries_put_ttb(tsobj) + else: + req = riak.pb.riak_ts_pb2.TsPutReq() + self._encode_timeseries_put(tsobj, req) + + logging.debug("pbc/transport ts_put _use_ttb: '%s'", + self._use_ttb) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_TS_PUT_REQ, req, - riak.pb.messages.MSG_CODE_TS_PUT_RESP) + riak.pb.messages.MSG_CODE_TS_PUT_RESP, + self._use_ttb) if resp is not None: return True diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py index 9eb5240d..127483c7 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/transports/ttb/codec.py @@ -3,14 +3,17 @@ from erlastic import decode, encode from erlastic.types import Atom +from six import string_types, PY2 +from riak import RiakError from riak.util import str_to_bytes, bytes_to_str, \ unix_time_millis, datetime_from_unix_time_millis -from six import string_types, PY2 udef_a = Atom('undefined') +rpberrorresp_a = Atom('rpberrorresp') tsgetreq_a = Atom('tsgetreq') +tsgetresp_a = Atom('tsgetresp') tsputreq_a = Atom('tsputreq') tscell_a = Atom('tscell') @@ -51,7 +54,7 @@ def _encode_to_ts_cell(self, cell): raise RiakError("can't serialize type '{}', value '{}'" .format(t, cell)) - def _encode_timeseries_keyreq(self, table, key): + def _encode_timeseries_keyreq_ttb(self, table, key): key_vals = None if isinstance(key, list): key_vals = key @@ -85,3 +88,66 @@ def _encode_timeseries_put(self, tsobj): return encode(req) else: raise RiakError("TsObject requires a list of rows") + + def _decode_timeseries_ttb(self, resp_ttb, tsobj): + """ + Fills an TsObject with the appropriate data and + metadata from a TTB-encoded TsGetResp / TsQueryResp. 
+ + :param resp_ttb: the protobuf message from which to process data + :type resp_ttb: TTB-encoded tsqueryrsp or tsgetresp + :param tsobj: a TsObject + :type tsobj: TsObject + """ + # if tsobj.columns is not None: + # for col in resp.columns: + # col_name = bytes_to_str(col.name) + # col_type = col.type + # col = (col_name, col_type) + # tsobj.columns.append(col) + resp = decode(resp_ttb) + resp_a = resp[0] + if resp_a == tsgetresp_a: + resp_cols = resp[1] + resp_rows = resp[2] + for row_ttb in resp_rows: + tsobj.rows.append( + self._decode_timeseries_row(row_ttb, None)) # TODO cols + # elif resp_a == rpberrorresp_a: + else: + raise RiakError("Unknown TTB response type: {}".format(resp_a)) + + def _decode_timeseries_row(self, tsrow_ttb, tscols=None): + """ + Decodes a TTB-encoded TsRow into a list + + :param tsrow: the TTB-encoded TsRow to decode. + :type tsrow: TTB encoded row + :param tscols: the TTB-encoded TsColumn data to help decode. + :type tscols: list + :rtype list + """ + row = [] + for tsc_ttb in tsrow_ttb: + if tsc_ttb == tscell_empty: + row.append(None) + else: + val = None + if tsc_ttb[0] == tscell_a: + if tsc_ttb[1] != udef_a: + row.append(bytes_to_str(tsc_ttb[1])) + elif tsc_ttb[2] != udef_a: + row.append(tsc_ttb[2]) + elif tsc_ttb[3] != udef_a: + row.append( + datetime_from_unix_time_millis(tsc_ttb[3])) + elif tsc_ttb[4] != udef_a: + row.append(tsc_ttb[4]) + elif tsc_ttb[5] != udef_a: + row.append(tsc_ttb[5]) + else: + row.append(None) + else: + raise RiakError( + "Expected tscell atom, got: {}".format(tsc_ttb)) + return row diff --git a/riak/util.py b/riak/util.py index c54e7c4c..4cbe6c0f 100644 --- a/riak/util.py +++ b/riak/util.py @@ -11,14 +11,8 @@ def unix_time_millis(dt): - try: - return int(dt.total_seconds() * 1000.0) - except AttributeError: - # NB: python 2.6 must use this method - td = dt - epoch - return int(((td.microseconds + - (td.seconds + td.days * 24 * 3600) * 10**6) / - 10**6) * 1000.0) + td = dt - epoch + return int(td.total_seconds() * 1000.0) def datetime_from_unix_time_millis(ut): From c8e7341b653b3809ab5a4aaef0564e0bc826a7b6 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 23 Feb 2016 20:56:38 -0800 Subject: [PATCH 121/324] Update riak_pb to origin/riak_ts-develop-1.2 --- riak/pb/messages.py | 10 ++ riak/pb/riak_ts_pb2.py | 338 +++++++++++++++++++++++++++++++++++++---- riak_pb | 2 +- 3 files changed, 319 insertions(+), 31 deletions(-) diff --git a/riak/pb/messages.py b/riak/pb/messages.py index 0fea1f49..9bbf284a 100644 --- a/riak/pb/messages.py +++ b/riak/pb/messages.py @@ -77,6 +77,11 @@ MSG_CODE_TS_GET_RESP = 97 MSG_CODE_TS_LIST_KEYS_REQ = 98 MSG_CODE_TS_LIST_KEYS_RESP = 99 +MSG_CODE_TS_COVERAGE_REQ = 100 +MSG_CODE_TS_COVERAGE_RESP = 101 +MSG_CODE_TS_COVERAGE_ENTRY = 102 +MSG_CODE_TS_RANGE = 103 +MSG_CODE_TS_TTB_PUT_REQ = 104 MSG_CODE_TOGGLE_ENCODING_REQ = 110 MSG_CODE_TOGGLE_ENCODING_RESP = 111 MSG_CODE_AUTH_REQ = 253 @@ -159,6 +164,11 @@ MSG_CODE_TS_GET_RESP: riak.pb.riak_ts_pb2.TsGetResp, MSG_CODE_TS_LIST_KEYS_REQ: riak.pb.riak_ts_pb2.TsListKeysReq, MSG_CODE_TS_LIST_KEYS_RESP: riak.pb.riak_ts_pb2.TsListKeysResp, + MSG_CODE_TS_COVERAGE_REQ: riak.pb.riak_ts_pb2.TsCoverageReq, + MSG_CODE_TS_COVERAGE_RESP: riak.pb.riak_ts_pb2.TsCoverageResp, + MSG_CODE_TS_COVERAGE_ENTRY: riak.pb.riak_ts_pb2.TsCoverageEntry, + MSG_CODE_TS_RANGE: riak.pb.riak_ts_pb2.TsRange, + MSG_CODE_TS_TTB_PUT_REQ: riak.pb.riak_ts_pb2.TsTtbPutReq, MSG_CODE_TOGGLE_ENCODING_REQ: riak.pb.riak_pb2.RpbToggleEncodingReq, MSG_CODE_TOGGLE_ENCODING_RESP: 
riak.pb.riak_pb2.RpbToggleEncodingResp, MSG_CODE_AUTH_REQ: riak.pb.riak_pb2.RpbAuthReq, diff --git a/riak/pb/riak_ts_pb2.py b/riak/pb/riak_ts_pb2.py index b371fdea..ce9b250f 100644 --- a/riak/pb/riak_ts_pb2.py +++ b/riak/pb/riak_ts_pb2.py @@ -16,7 +16,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_ts.proto', package='', - serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"D\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') + serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"Y\n\x0bTsTtbPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 
\x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 \x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 \x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') _TSCOLUMNTYPE = _descriptor.EnumDescriptor( name='TsColumnType', @@ -47,8 +47,8 @@ ], containing_type=None, options=None, - serialized_start=925, - serialized_end=1004, + serialized_start=1450, + serialized_end=1529, ) TsColumnType = enum_type_wrapper.EnumTypeWrapper(_TSCOLUMNTYPE) @@ -81,6 +81,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='cover_context', full_name='TsQueryReq.cover_context', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -91,7 +98,7 @@ is_extendable=False, extension_ranges=[], serialized_start=29, - serialized_end=97, + serialized_end=120, ) @@ -132,8 +139,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=99, - serialized_end=193, + serialized_start=122, + serialized_end=216, ) @@ -174,8 +181,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=195, - serialized_end=259, + serialized_start=218, + serialized_end=282, ) @@ -209,8 +216,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=261, - serialized_end=333, + serialized_start=284, + serialized_end=356, ) @@ -251,8 +258,50 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=335, - serialized_end=421, + serialized_start=358, + serialized_end=444, +) + + +_TSTTBPUTREQ = _descriptor.Descriptor( + name='TsTtbPutReq', + full_name='TsTtbPutReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='table', 
full_name='TsTtbPutReq.table', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='columns', full_name='TsTtbPutReq.columns', index=1, + number=2, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='rows', full_name='TsTtbPutReq.rows', index=2, + number=3, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=446, + serialized_end=535, ) @@ -272,8 +321,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=423, - serialized_end=434, + serialized_start=537, + serialized_end=548, ) @@ -321,8 +370,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=436, - serialized_end=516, + serialized_start=550, + serialized_end=630, ) @@ -342,8 +391,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=518, - serialized_end=529, + serialized_start=632, + serialized_end=643, ) @@ -377,8 +426,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=531, - serialized_end=596, + serialized_start=645, + serialized_end=710, ) @@ -412,8 +461,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=598, - serialized_end=662, + serialized_start=712, + serialized_end=776, ) @@ -440,8 +489,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=664, - serialized_end=695, + serialized_start=778, + serialized_end=809, ) @@ -496,8 +545,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=697, - serialized_end=820, + serialized_start=811, + serialized_end=934, ) @@ -531,8 +580,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=822, - serialized_end=869, + serialized_start=936, + serialized_end=983, ) @@ -566,8 +615,197 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=871, - serialized_end=923, + serialized_start=985, + serialized_end=1037, +) + + +_TSCOVERAGEREQ = _descriptor.Descriptor( + name='TsCoverageReq', + full_name='TsCoverageReq', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='query', full_name='TsCoverageReq.query', index=0, + number=1, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='table', full_name='TsCoverageReq.table', index=1, + number=2, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='replace_cover', full_name='TsCoverageReq.replace_cover', index=2, + number=3, type=12, cpp_type=9, label=1, + has_default_value=False, default_value="", + message_type=None, enum_type=None, 
containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='unavailable_cover', full_name='TsCoverageReq.unavailable_cover', index=3, + number=4, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1039, + serialized_end=1152, +) + + +_TSCOVERAGERESP = _descriptor.Descriptor( + name='TsCoverageResp', + full_name='TsCoverageResp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='entries', full_name='TsCoverageResp.entries', index=0, + number=1, type=11, cpp_type=10, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1154, + serialized_end=1205, +) + + +_TSCOVERAGEENTRY = _descriptor.Descriptor( + name='TsCoverageEntry', + full_name='TsCoverageEntry', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='ip', full_name='TsCoverageEntry.ip', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='port', full_name='TsCoverageEntry.port', index=1, + number=2, type=13, cpp_type=3, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='cover_context', full_name='TsCoverageEntry.cover_context', index=2, + number=3, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='range', full_name='TsCoverageEntry.range', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1207, + serialized_end=1298, +) + + +_TSRANGE = _descriptor.Descriptor( + name='TsRange', + full_name='TsRange', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='field_name', full_name='TsRange.field_name', index=0, + number=1, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='lower_bound', full_name='TsRange.lower_bound', index=1, + number=2, type=18, cpp_type=2, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + 
_descriptor.FieldDescriptor( + name='lower_bound_inclusive', full_name='TsRange.lower_bound_inclusive', index=2, + number=3, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='upper_bound', full_name='TsRange.upper_bound', index=3, + number=4, type=18, cpp_type=2, label=2, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='upper_bound_inclusive', full_name='TsRange.upper_bound_inclusive', index=4, + number=5, type=8, cpp_type=7, label=2, + has_default_value=False, default_value=False, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + _descriptor.FieldDescriptor( + name='desc', full_name='TsRange.desc', index=5, + number=6, type=12, cpp_type=9, label=2, + has_default_value=False, default_value="", + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=1301, + serialized_end=1448, ) _TSQUERYREQ.fields_by_name['query'].message_type = _TSINTERPOLATION @@ -578,16 +816,22 @@ _TSGETRESP.fields_by_name['rows'].message_type = _TSROW _TSPUTREQ.fields_by_name['columns'].message_type = _TSCOLUMNDESCRIPTION _TSPUTREQ.fields_by_name['rows'].message_type = _TSROW +_TSTTBPUTREQ.fields_by_name['columns'].message_type = _TSCOLUMNDESCRIPTION +_TSTTBPUTREQ.fields_by_name['rows'].message_type = _TSROW _TSDELREQ.fields_by_name['key'].message_type = _TSCELL _TSINTERPOLATION.fields_by_name['interpolations'].message_type = riak.pb.riak_pb2._RPBPAIR _TSCOLUMNDESCRIPTION.fields_by_name['type'].enum_type = _TSCOLUMNTYPE _TSROW.fields_by_name['cells'].message_type = _TSCELL _TSLISTKEYSRESP.fields_by_name['keys'].message_type = _TSROW +_TSCOVERAGEREQ.fields_by_name['query'].message_type = _TSINTERPOLATION +_TSCOVERAGERESP.fields_by_name['entries'].message_type = _TSCOVERAGEENTRY +_TSCOVERAGEENTRY.fields_by_name['range'].message_type = _TSRANGE DESCRIPTOR.message_types_by_name['TsQueryReq'] = _TSQUERYREQ DESCRIPTOR.message_types_by_name['TsQueryResp'] = _TSQUERYRESP DESCRIPTOR.message_types_by_name['TsGetReq'] = _TSGETREQ DESCRIPTOR.message_types_by_name['TsGetResp'] = _TSGETRESP DESCRIPTOR.message_types_by_name['TsPutReq'] = _TSPUTREQ +DESCRIPTOR.message_types_by_name['TsTtbPutReq'] = _TSTTBPUTREQ DESCRIPTOR.message_types_by_name['TsPutResp'] = _TSPUTRESP DESCRIPTOR.message_types_by_name['TsDelReq'] = _TSDELREQ DESCRIPTOR.message_types_by_name['TsDelResp'] = _TSDELRESP @@ -597,6 +841,10 @@ DESCRIPTOR.message_types_by_name['TsCell'] = _TSCELL DESCRIPTOR.message_types_by_name['TsListKeysReq'] = _TSLISTKEYSREQ DESCRIPTOR.message_types_by_name['TsListKeysResp'] = _TSLISTKEYSRESP +DESCRIPTOR.message_types_by_name['TsCoverageReq'] = _TSCOVERAGEREQ +DESCRIPTOR.message_types_by_name['TsCoverageResp'] = _TSCOVERAGERESP +DESCRIPTOR.message_types_by_name['TsCoverageEntry'] = _TSCOVERAGEENTRY +DESCRIPTOR.message_types_by_name['TsRange'] = _TSRANGE @add_metaclass(_reflection.GeneratedProtocolMessageType) class TsQueryReq(_message.Message): @@ -628,6 +876,12 @@ class TsPutReq(_message.Message): # 
@@protoc_insertion_point(class_scope:TsPutReq) +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsTtbPutReq(_message.Message): + DESCRIPTOR = _TSTTBPUTREQ + + # @@protoc_insertion_point(class_scope:TsTtbPutReq) + @add_metaclass(_reflection.GeneratedProtocolMessageType) class TsPutResp(_message.Message): DESCRIPTOR = _TSPUTRESP @@ -682,6 +936,30 @@ class TsListKeysResp(_message.Message): # @@protoc_insertion_point(class_scope:TsListKeysResp) +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsCoverageReq(_message.Message): + DESCRIPTOR = _TSCOVERAGEREQ + + # @@protoc_insertion_point(class_scope:TsCoverageReq) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsCoverageResp(_message.Message): + DESCRIPTOR = _TSCOVERAGERESP + + # @@protoc_insertion_point(class_scope:TsCoverageResp) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsCoverageEntry(_message.Message): + DESCRIPTOR = _TSCOVERAGEENTRY + + # @@protoc_insertion_point(class_scope:TsCoverageEntry) + +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsRange(_message.Message): + DESCRIPTOR = _TSRANGE + + # @@protoc_insertion_point(class_scope:TsRange) + DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakTsPB') diff --git a/riak_pb b/riak_pb index 7fffa81b..e0986adb 160000 --- a/riak_pb +++ b/riak_pb @@ -1 +1 @@ -Subproject commit 7fffa81b38804c18fffbec8d1677966c37d49d55 +Subproject commit e0986adb3b0b79765b91c04bfee5b16cfedb1165 From 714e7f4185dfaf789dfd2c695bf493f70abeb4b7 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 24 Feb 2016 10:44:25 -0800 Subject: [PATCH 122/324] TTB fix --- riak/benchmarks/timeseries.py | 2 +- riak/transports/ttb/codec.py | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/riak/benchmarks/timeseries.py b/riak/benchmarks/timeseries.py index 1ddbac83..c222189c 100644 --- a/riak/benchmarks/timeseries.py +++ b/riak/benchmarks/timeseries.py @@ -50,7 +50,7 @@ {'host': h, 'pb_port': 10037}, {'host': h, 'pb_port': 10047} ] -client = RiakClient(nodes=n, protocol='pbc') +client = RiakClient(nodes=n, protocol='pbc', transport_options={'use_ttb': True}) table = client.table(tbl) with benchmark.measure() as b: diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py index 127483c7..ac669863 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/transports/ttb/codec.py @@ -27,7 +27,7 @@ class RiakTtbCodec(object): def __init__(self, **unused_args): super(RiakTtbCodec, self).__init__(**unused_args) - def _encode_to_ts_cell(self, cell): + def _encode_to_ts_cell_ttb(self, cell): if cell is None: return tscell_empty else: @@ -61,10 +61,10 @@ def _encode_timeseries_keyreq_ttb(self, table, key): else: raise ValueError("key must be a list") req = tsgetreq_a, str_to_bytes(table.name), \ - [self._encode_to_ts_cell(k) for k in key_vals], udef_a + [self._encode_to_ts_cell_ttb(k) for k in key_vals], udef_a return encode(req) - def _encode_timeseries_put(self, tsobj): + def _encode_timeseries_put_ttb(self, tsobj): ''' Returns an Erlang-TTB encoded tuple with the appropriate data and metadata from a TsObject. 
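For reference, the put request assembled in this codec is a plain Erlang term handed to erlastic's encode(). A rough sketch with hypothetical cell values (and before a later patch in this series wraps each row in a tsrow record):

    (Atom('tsputreq'), b'GeoCheckin', Atom('undefined'),
     [[(Atom('tscell'), b'hash1', Atom('undefined'), Atom('undefined'),
        Atom('undefined'), Atom('undefined')),
       (Atom('tscell'), Atom('undefined'), 42, Atom('undefined'),
        Atom('undefined'), Atom('undefined'))]])

Each cell is a 6-tuple of (tscell, varchar, sint64, timestamp, boolean, double), with every unused slot set to the undefined atom, so exactly one populated position selects the value's type.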
@@ -81,7 +81,7 @@ def _encode_timeseries_put(self, tsobj): for row in tsobj.rows: req_r = [] for cell in row: - req_r.append(self._encode_to_ts_cell(cell)) + req_r.append(self._encode_to_ts_cell_ttb(cell)) req_rows.append(req_r) req = tsputreq_a, str_to_bytes(tsobj.table.name), \ udef_a, req_rows From 4cddfe5304bd1dbf35c48a768c1905a7d08171ce Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 23 Feb 2016 21:44:40 -0800 Subject: [PATCH 123/324] Fix toggle encoding request --- riak/transports/pbc/connection.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index c268d4f4..a1718e82 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -92,8 +92,12 @@ def _enable_ttb(self): return True else: logging.debug("pbc/connection enabling TTB") + req = riak.pb.riak_pb2.RpbToggleEncodingReq() + req.use_native = True msg_code, _ = self._non_connect_request( - riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_REQ) + riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_REQ, + req, + riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP) if msg_code == riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP: self._ttb_enabled = True logging.debug("pbc/connection TTB IS ENABLED") From 246ad6ba91c3c33a0953f544d9f5abddc13471b6 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 24 Feb 2016 11:43:44 -0800 Subject: [PATCH 124/324] Add ttb integration test. --- riak/tests/test_timeseries_ttb.py | 43 +++++++++++++++++++++++++++++-- riak/transports/pbc/connection.py | 5 +++- riak/transports/pbc/transport.py | 3 +++ 3 files changed, 48 insertions(+), 3 deletions(-) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 16e2c5fb..8e010bec 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -7,11 +7,15 @@ from erlastic import decode, encode from erlastic.types import Atom +from riak.client import RiakClient from riak.table import Table from riak.ts_object import TsObject from riak.transports.ttb.codec import RiakTtbCodec from riak.util import str_to_bytes, \ - unix_time_millis, datetime_from_unix_time_millis + unix_time_millis, datetime_from_unix_time_millis, \ + is_timeseries_supported +from riak.tests import RUN_TIMESERIES +from riak.tests.base import IntegrationTestBase if platform.python_version() < '2.7': unittest = __import__('unittest2') @@ -35,6 +39,7 @@ ts1 = ts0 + fiveMins +@unittest.skipUnless(is_timeseries_supported(), "Timeseries not supported") class TimeseriesTtbUnitTests(unittest.TestCase): def setUp(self): self.c = RiakTtbCodec() @@ -123,5 +128,39 @@ def test_encode_data_for_put(self): req_test = encode(req) tsobj = TsObject(None, self.table, self.rows, None) - req_encoded = self.c._encode_timeseries_put(tsobj) + req_encoded = self.c._encode_timeseries_put_ttb(tsobj) self.assertEqual(req_test, req_encoded) + + +@unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, + 'Timeseries not supported or RUN_TIMESERIES is 0') +class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): + @classmethod + def setUpClass(cls): + super(TimeseriesTtbTests, cls).setUpClass() + + def test_store_data_ttb(self): + now = datetime.datetime.utcfromtimestamp(144379690.987000) + fiveMinsAgo = now - fiveMins + tenMinsAgo = fiveMinsAgo - fiveMins + fifteenMinsAgo = tenMinsAgo - fiveMins + twentyMinsAgo = fifteenMinsAgo - fiveMins + twentyFiveMinsAgo = twentyMinsAgo - fiveMins + + client = RiakClient(protocol='pbc', + host='riak-test', + 
pb_port=10017, + transport_options={'use_ttb': True}) + + table = client.table(table_name) + rows = [ + ['hash1', 'user2', twentyFiveMinsAgo, 'typhoon', 90.3], + ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], + ['hash1', 'user2', fifteenMinsAgo, 'rain', 79.0], + ['hash1', 'user2', fiveMinsAgo, 'wind', None], + ['hash1', 'user2', now, 'snow', 20.1] + ] + ts_obj = table.new(rows) + result = ts_obj.store() + self.assertTrue(result) + client.close() diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index a1718e82..eec9de23 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -254,7 +254,10 @@ def _parse_msg(self, code, packet, is_ttb=False): if code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ code != riak.pb.messages.MSG_CODE_TS_PUT_RESP: raise RiakError("TTB can't parse code: %d" % code) - return erlastic.decode(packet) + if len(packet) > 0: + return erlastic.decode(packet) + else: + return None else: try: pbclass = riak.pb.messages.MESSAGE_CLASSES[code] diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index c3e4f749..595aa7c3 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -217,6 +217,9 @@ def ts_put(self, tsobj): riak.pb.messages.MSG_CODE_TS_PUT_RESP, self._use_ttb) + if self._use_ttb and resp is None: + return True + if resp is not None: return True else: From c89872d2e90c5428c2d1025547bf1ad50cdd9e1c Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 24 Feb 2016 12:44:35 -0800 Subject: [PATCH 125/324] TTB is ALIVE --- riak/benchmark.py | 9 +++++++-- riak/tests/test_timeseries_ttb.py | 2 +- riak/transports/pbc/connection.py | 2 +- riak/transports/pbc/transport.py | 6 +++--- riak/transports/ttb/codec.py | 33 +++++++++++++------------------ 5 files changed, 26 insertions(+), 26 deletions(-) diff --git a/riak/benchmark.py b/riak/benchmark.py index c26a0a49..cfb220c1 100644 --- a/riak/benchmark.py +++ b/riak/benchmark.py @@ -1,6 +1,9 @@ from __future__ import print_function + import os import gc +import sys +import traceback __all__ = ['measure', 'measure_with_rehearsal'] @@ -154,5 +157,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): elif exc_type is KeyboardInterrupt: return False else: - print("EXCEPTION! %r" % ((exc_type, exc_val, exc_tb),)) - return True + msg = "EXCEPTION! 
type: %r val: %r" % (exc_type, exc_val) + print(msg, file=sys.stderr) + traceback.print_tb(exc_tb) + return True if exc_type is None else False diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 8e010bec..6fffd161 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -90,7 +90,7 @@ def test_decode_data_from_get(self): rsp_ttb = encode(rsp_data) tsobj = TsObject(None, self.table, [], []) - self.c._decode_timeseries_ttb(rsp_ttb, tsobj) + self.c._decode_timeseries_ttb(decode(rsp_ttb), tsobj) for i in range(0, 1): dr = rows[i] diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index eec9de23..f13c75a4 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -201,7 +201,7 @@ def _recv_msg(self, expect=None, is_ttb=False): if expect and msg_code != expect: raise RiakError("unexpected protocol buffer message code: %d, %r" % (msg_code, msg)) - logging.debug("pbc/connection received msg_code %d msg %s", msg_code, msg) + # logging.debug("pbc/connection received msg_code %d msg %s", msg_code, msg) return msg_code, msg def _recv_pkt(self): diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 595aa7c3..837508e8 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -185,7 +185,7 @@ def ts_describe(self, table): def ts_get(self, table, key): ts_get_resp = None if self._use_ttb: - encoded = self._encode_timeseries_keyreq_ttb(table, key) + req = self._encode_timeseries_keyreq_ttb(table, key) else: req = riak.pb.riak_ts_pb2.TsGetReq() self._encode_timeseries_keyreq(table, key, req) @@ -209,8 +209,8 @@ def ts_put(self, tsobj): req = riak.pb.riak_ts_pb2.TsPutReq() self._encode_timeseries_put(tsobj, req) - logging.debug("pbc/transport ts_put _use_ttb: '%s'", - self._use_ttb) + # logging.debug("pbc/transport ts_put _use_ttb: '%s'", + # self._use_ttb) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_TS_PUT_REQ, req, diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py index ac669863..e5afa5f2 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/transports/ttb/codec.py @@ -1,5 +1,4 @@ import datetime -import logging from erlastic import decode, encode from erlastic.types import Atom @@ -15,6 +14,7 @@ tsgetreq_a = Atom('tsgetreq') tsgetresp_a = Atom('tsgetresp') tsputreq_a = Atom('tsputreq') +tsrow_a = Atom('tsrow') tscell_a = Atom('tscell') tscell_empty = (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) @@ -33,21 +33,16 @@ def _encode_to_ts_cell_ttb(self, cell): else: if isinstance(cell, datetime.datetime): ts = unix_time_millis(cell) - logging.debug("cell -> timestamp: '%s'", ts) return (tscell_a, udef_a, udef_a, ts, udef_a, udef_a) elif isinstance(cell, bool): - logging.debug("cell -> bool: '%s'", cell) return (tscell_a, udef_a, udef_a, udef_a, cell, udef_a) elif isinstance(cell, string_types): - logging.debug("cell -> str: '%s'", cell) return (tscell_a, str_to_bytes(cell), udef_a, udef_a, udef_a, udef_a) elif (isinstance(cell, int) or (PY2 and isinstance(cell, long))): # noqa - logging.debug("cell -> int/long: '%s'", cell) return (tscell_a, udef_a, cell, udef_a, udef_a, udef_a) elif isinstance(cell, float): - logging.debug("cell -> float: '%s'", cell) return (tscell_a, udef_a, udef_a, udef_a, udef_a, cell) else: t = type(cell) @@ -94,7 +89,7 @@ def _decode_timeseries_ttb(self, resp_ttb, tsobj): Fills an TsObject with the appropriate data and metadata from a TTB-encoded 
TsGetResp / TsQueryResp. - :param resp_ttb: the protobuf message from which to process data + :param resp_ttb: the decoded TTB data :type resp_ttb: TTB-encoded tsqueryrsp or tsgetresp :param tsobj: a TsObject :type tsobj: TsObject @@ -105,19 +100,18 @@ def _decode_timeseries_ttb(self, resp_ttb, tsobj): # col_type = col.type # col = (col_name, col_type) # tsobj.columns.append(col) - resp = decode(resp_ttb) - resp_a = resp[0] + resp_a = resp_ttb[0] if resp_a == tsgetresp_a: - resp_cols = resp[1] - resp_rows = resp[2] + resp_cols = resp_ttb[1] + resp_rows = resp_ttb[2] for row_ttb in resp_rows: tsobj.rows.append( - self._decode_timeseries_row(row_ttb, None)) # TODO cols + self._decode_timeseries_row_ttb(row_ttb, None)) # TODO cols # elif resp_a == rpberrorresp_a: else: raise RiakError("Unknown TTB response type: {}".format(resp_a)) - def _decode_timeseries_row(self, tsrow_ttb, tscols=None): + def _decode_timeseries_row_ttb(self, tsrow_ttb, tscols=None): """ Decodes a TTB-encoded TsRow into a list @@ -127,11 +121,9 @@ def _decode_timeseries_row(self, tsrow_ttb, tscols=None): :type tscols: list :rtype list """ - row = [] - for tsc_ttb in tsrow_ttb: - if tsc_ttb == tscell_empty: - row.append(None) - else: + if tsrow_ttb[0] == tsrow_a: + row = [] + for tsc_ttb in tsrow_ttb[1]: val = None if tsc_ttb[0] == tscell_a: if tsc_ttb[1] != udef_a: @@ -149,5 +141,8 @@ def _decode_timeseries_row(self, tsrow_ttb, tscols=None): row.append(None) else: raise RiakError( - "Expected tscell atom, got: {}".format(tsc_ttb)) + "Expected tscell atom, got: {}".format(tsc_ttb[0])) + else: + raise RiakError( + "Expected tsrow atom, got: {}".format(tsrow_ttb[0])) return row From ac1afcea2a4bff1ff043b6b798a30982a13ab438 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 24 Feb 2016 17:18:07 -0800 Subject: [PATCH 126/324] TTB encode rows correctly --- riak/benchmarks/timeseries.py | 4 ++-- riak/tests/test_timeseries_ttb.py | 29 +++++++++++++++++++---------- riak/transports/pbc/transport.py | 3 ++- riak/transports/ttb/codec.py | 3 ++- 4 files changed, 25 insertions(+), 14 deletions(-) diff --git a/riak/benchmarks/timeseries.py b/riak/benchmarks/timeseries.py index c222189c..12734719 100644 --- a/riak/benchmarks/timeseries.py +++ b/riak/benchmarks/timeseries.py @@ -50,7 +50,7 @@ {'host': h, 'pb_port': 10037}, {'host': h, 'pb_port': 10047} ] -client = RiakClient(nodes=n, protocol='pbc', transport_options={'use_ttb': True}) +client = RiakClient(nodes=n, protocol='pbc', transport_options={'use_ttb': False}) table = client.table(tbl) with benchmark.measure() as b: @@ -69,7 +69,7 @@ if ts_obj is None: raise AssertionError("expected obj") if len(ts_obj.rows) != 1: - raise AssertionError("expected one row") + raise AssertionError("expected one row, got: %d" % len(tsobj.rows)) row = ts_obj.rows[0] if len(row) != 5: raise AssertionError("expected row to have five items") diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 6fffd161..23d2d787 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -28,6 +28,7 @@ tsputreq_a = Atom('tsputreq') udef_a = Atom('undefined') +tsr_a = Atom('tsrow') tsc_a = Atom('tscell') table_name = 'GeoCheckin' @@ -68,22 +69,22 @@ def test_encode_data_for_get(self): def test_decode_data_from_get(self): cols = [] - r0 = [ + r0 = (tsr_a, [ (tsc_a, bd0, udef_a, udef_a, udef_a, udef_a), (tsc_a, udef_a, 0, udef_a, udef_a, udef_a), (tsc_a, udef_a, udef_a, udef_a, udef_a, 1.2), (tsc_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), 
(tsc_a, udef_a, udef_a, udef_a, True, udef_a), (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) - ] - r1 = [ + ]) + r1 = (tsr_a, [ (tsc_a, bd1, udef_a, udef_a, udef_a, udef_a), (tsc_a, udef_a, 3, udef_a, udef_a, udef_a), (tsc_a, udef_a, udef_a, udef_a, udef_a, 4.5), (tsc_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), (tsc_a, udef_a, udef_a, udef_a, False, udef_a), (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) - ] + ]) rows = [r0, r1] # { tsgetresp, [cols], [rows] } rsp_data = tsgetresp_a, cols, rows # NB: Python tuple notation @@ -93,7 +94,7 @@ def test_decode_data_from_get(self): self.c._decode_timeseries_ttb(decode(rsp_ttb), tsobj) for i in range(0, 1): - dr = rows[i] + dr = rows[i][1] r = tsobj.rows[i] self.assertEqual(r[0], dr[0][1]) self.assertEqual(r[1], dr[1][2]) @@ -107,22 +108,22 @@ def test_decode_data_from_get(self): self.assertEqual(r[5], None) def test_encode_data_for_put(self): - r0 = [ + r0 = (tsr_a, [ (tsc_a, bd0, udef_a, udef_a, udef_a, udef_a), (tsc_a, udef_a, 0, udef_a, udef_a, udef_a), (tsc_a, udef_a, udef_a, udef_a, udef_a, 1.2), (tsc_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), (tsc_a, udef_a, udef_a, udef_a, True, udef_a), (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) - ] - r1 = [ + ]) + r1 = (tsr_a, [ (tsc_a, bd1, udef_a, udef_a, udef_a, udef_a), (tsc_a, udef_a, 3, udef_a, udef_a, udef_a), (tsc_a, udef_a, udef_a, udef_a, udef_a, 4.5), (tsc_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), (tsc_a, udef_a, udef_a, udef_a, False, udef_a), (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) - ] + ]) rows = [r0, r1] req = tsputreq_a, str_to_bytes(table_name), udef_a, rows req_test = encode(req) @@ -139,7 +140,7 @@ class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): def setUpClass(cls): super(TimeseriesTtbTests, cls).setUpClass() - def test_store_data_ttb(self): + def test_store_and_fetch_ttb(self): now = datetime.datetime.utcfromtimestamp(144379690.987000) fiveMinsAgo = now - fiveMins tenMinsAgo = fiveMinsAgo - fiveMins @@ -163,4 +164,12 @@ def test_store_data_ttb(self): ts_obj = table.new(rows) result = ts_obj.store() self.assertTrue(result) + + for r in rows: + k = r[0:3] + ts_obj = client.ts_get(table_name, k) + self.assertIsNotNone(ts_obj) + self.assertEqual(len(ts_obj.rows), 1) + self.assertEqual(len(ts_obj.rows[0]), 5) + client.close() diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 837508e8..2e822adf 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -217,7 +217,8 @@ def ts_put(self, tsobj): riak.pb.messages.MSG_CODE_TS_PUT_RESP, self._use_ttb) - if self._use_ttb and resp is None: + if self._use_ttb and resp is None and \ + msg_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: return True if resp is not None: diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py index e5afa5f2..fb7a14ef 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/transports/ttb/codec.py @@ -77,7 +77,8 @@ def _encode_timeseries_put_ttb(self, tsobj): req_r = [] for cell in row: req_r.append(self._encode_to_ts_cell_ttb(cell)) - req_rows.append(req_r) + req_t = (tsrow_a, req_r) + req_rows.append(req_t) req = tsputreq_a, str_to_bytes(tsobj.table.name), \ udef_a, req_rows return encode(req) From 2db41577d31a4f9bb9f28b17674d41723014cbc0 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 25 Feb 2016 12:47:58 -0800 Subject: [PATCH 127/324] Add args to benchmark program. 
--- riak/benchmarks/timeseries.py | 57 +++++++++++++++++------------------ 1 file changed, 27 insertions(+), 30 deletions(-) diff --git a/riak/benchmarks/timeseries.py b/riak/benchmarks/timeseries.py index 12734719..4bd8a676 100644 --- a/riak/benchmarks/timeseries.py +++ b/riak/benchmarks/timeseries.py @@ -12,22 +12,26 @@ # logger.level = logging.DEBUG # logger.addHandler(logging.StreamHandler(sys.stdout)) -epoch = datetime.datetime.utcfromtimestamp(0) -onesec = datetime.timedelta(0, 1) +# batch sizes 8, 16, 32, 64, 128, 256 +if len(sys.argv) != 3: + raise AssertionError('first arg is batch size, second arg is true / false for use_ttb') rowcount = 32768 -batchsz = 32 +batchsz = int(sys.argv[1]) if rowcount % batchsz != 0: raise AssertionError('rowcount must be divisible by batchsz') +use_ttb = sys.argv[2].lower() == 'true' + +epoch = datetime.datetime.utcfromtimestamp(0) +onesec = datetime.timedelta(0, 1) weather = ['typhoon', 'hurricane', 'rain', 'wind', 'snow'] rows = [] -keys = [] for i in range(rowcount): ts = datetime.datetime(2016, 1, 1, 12, 0, 0) + \ datetime.timedelta(seconds=i) - family_idx = i % 4 - series_idx = i % 4 + family_idx = i % batchsz + series_idx = i % batchsz family = 'hash{:d}'.format(family_idx) series = 'user{:d}'.format(series_idx) w = weather[i % len(weather)] @@ -35,11 +39,12 @@ row = [family, series, ts, w, temp] key = [family, series, ts] rows.append(row) - keys.append(key) print("Benchmarking timeseries:") -print(" CPUs: {0}".format(cpu_count())) -print(" Rows: {0}".format(len(rows))) +print(" Use TTB: {}".format(use_ttb)) +print("Batch Size: {}".format(batchsz)) +print(" CPUs: {}".format(cpu_count())) +print(" Rows: {}".format(len(rows))) print() tbl = 'GeoCheckin' @@ -48,28 +53,20 @@ {'host': h, 'pb_port': 10017}, {'host': h, 'pb_port': 10027}, {'host': h, 'pb_port': 10037}, - {'host': h, 'pb_port': 10047} + {'host': h, 'pb_port': 10047}, + {'host': h, 'pb_port': 10057} ] -client = RiakClient(nodes=n, protocol='pbc', transport_options={'use_ttb': False}) +client = RiakClient(nodes=n, protocol='pbc', transport_options={'use_ttb': use_ttb}) table = client.table(tbl) with benchmark.measure() as b: - with b.report('populate'): - for i in range(0, rowcount, batchsz): - x = i - y = i + batchsz - r = rows[x:y] - ts_obj = table.new(r) - result = ts_obj.store() - if result is not True: - raise AssertionError("expected success") - with b.report('get'): - for k in keys: - ts_obj = client.ts_get(tbl, k) - if ts_obj is None: - raise AssertionError("expected obj") - if len(ts_obj.rows) != 1: - raise AssertionError("expected one row, got: %d" % len(tsobj.rows)) - row = ts_obj.rows[0] - if len(row) != 5: - raise AssertionError("expected row to have five items") + for i in (1, 2, 3): + with b.report('populate-%d' % i): + for i in range(0, rowcount, batchsz): + x = i + y = i + batchsz + r = rows[x:y] + ts_obj = table.new(r) + result = ts_obj.store() + if result is not True: + raise AssertionError("expected success") From 324e9f277a90ea0ec987667d19313920c0ff17fb Mon Sep 17 00:00:00 2001 From: Vitaly Shestovskiy Date: Wed, 2 Mar 2016 13:55:58 +0200 Subject: [PATCH 128/324] Fix for riak per bucket replication settings encoding in protocol buffers. 
Codec tried to convert to python notation instead of protobuffers --- riak/transports/pbc/codec.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 97629752..1727bd6b 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -295,7 +295,7 @@ def _encode_bucket_props(self, props, msg): else: setattr(msg.props, prop, value) if 'repl' in props: - msg.props.repl = REPL_TO_PY[props['repl']] + msg.props.repl = REPL_TO_PB[props['repl']] return msg From 518e621d4d7513c830359c4eaaa395a14577d69d Mon Sep 17 00:00:00 2001 From: Patrick Ellul Date: Fri, 11 Mar 2016 11:27:12 +1100 Subject: [PATCH 129/324] In pbc connection, consider a timeout as a BadResource because the fallout is not dealt with. https://github.com/basho/riak-python-client/issues/425 --- riak/transports/pbc/connection.py | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 0bc58232..1b2e7720 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -29,6 +29,7 @@ MSG_CODE_AUTH_RESP ) from riak.util import bytes_to_str, str_to_bytes +from riak.transports.pool import BadResource from six import PY2 if not USE_STDLIB_SSL: from OpenSSL.SSL import Connection @@ -171,7 +172,15 @@ def _ssl_handshake(self): raise SecurityError(e) def _recv_msg(self, expect=None): - self._recv_pkt() + try: + self._recv_pkt() + except socket.timeout, e: + # A timeout can leave the socket in an inconsistent state because + # it might still receive the data later and mix up with a + # subsequent request. + # https://github.com/basho/riak-python-client/issues/425 + raise BadResource(e) + msg_code, = struct.unpack("B", self._inbuf[:1]) if msg_code is MSG_CODE_ERROR_RESP: err = self._parse_msg(msg_code, self._inbuf[1:]) From 4469de4be2ae53c755793fe85fdbb4339bf9437c Mon Sep 17 00:00:00 2001 From: Patrick Ellul Date: Fri, 11 Mar 2016 14:02:27 +1100 Subject: [PATCH 130/324] avoiding merge conflict in import --- riak/transports/pbc/connection.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index 1b2e7720..d20c096d 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -29,7 +29,7 @@ MSG_CODE_AUTH_RESP ) from riak.util import bytes_to_str, str_to_bytes -from riak.transports.pool import BadResource + from six import PY2 if not USE_STDLIB_SSL: from OpenSSL.SSL import Connection @@ -38,6 +38,8 @@ import ssl from riak.transports.security import configure_ssl_context +from riak.transports.pool import BadResource + class RiakPbcConnection(object): """ From c340dd5b8acc31901d0886d689aa2ab451cfa019 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 21 Mar 2016 15:13:27 -0700 Subject: [PATCH 131/324] bump submodules --- riak_pb | 2 +- tools | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/riak_pb b/riak_pb index e0986adb..341269c1 160000 --- a/riak_pb +++ b/riak_pb @@ -1 +1 @@ -Subproject commit e0986adb3b0b79765b91c04bfee5b16cfedb1165 +Subproject commit 341269c19c75fa0557d5aa5fd5ac1f0dfe18cfae diff --git a/tools b/tools index 5ff5850e..4dae68dd 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 5ff5850e1d7164f4f64f45a31d9b257e01a19e58 +Subproject commit 4dae68ddca2d405090d64a97c7e99b4607263892 From fd157fbbf4cf501ed64f6ec5a8cac7c5705817bf Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: 
Mon, 28 Mar 2016 14:41:11 -0700 Subject: [PATCH 132/324] Python 3 + TTB --- riak/tests/suite.py | 7 +- riak/tests/test_2i.py | 7 +- riak/tests/test_btypes.py | 8 +- riak/tests/test_client.py | 8 +- riak/tests/test_comparison.py | 8 +- riak/tests/test_datatypes.py | 10 +-- riak/tests/test_feature_detection.py | 8 +- riak/tests/test_filters.py | 8 +- riak/tests/test_kv.py | 11 +-- riak/tests/test_mapreduce.py | 7 +- riak/tests/test_pool.py | 8 +- riak/tests/test_search.py | 9 +- riak/tests/test_security.py | 7 +- riak/tests/test_timeseries.py | 7 +- riak/tests/test_timeseries_ttb.py | 126 ++++++++++++++++----------- riak/tests/test_util.py | 7 +- riak/tests/test_yokozuna.py | 8 +- riak/transports/ttb/codec.py | 21 +++-- setup.py | 11 +-- 19 files changed, 120 insertions(+), 166 deletions(-) diff --git a/riak/tests/suite.py b/riak/tests/suite.py index 97f3532c..e317213a 100644 --- a/riak/tests/suite.py +++ b/riak/tests/suite.py @@ -1,10 +1,5 @@ import os.path -import platform - -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest +import unittest def additional_tests(): diff --git a/riak/tests/test_2i.py b/riak/tests/test_2i.py index d7b254e3..f6e6d54a 100644 --- a/riak/tests/test_2i.py +++ b/riak/tests/test_2i.py @@ -1,13 +1,10 @@ # -*- coding: utf-8 -*- -import platform +import unittest + from riak import RiakError from riak.tests import RUN_INDEXES from riak.tests.base import IntegrationTestBase -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest class TwoITests(IntegrationTestBase, unittest.TestCase): diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index 30c9d6ac..d0fe728b 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -1,15 +1,11 @@ -import platform +import unittest + from riak import RiakError, RiakObject from riak.bucket import RiakBucket, BucketType from riak.tests import RUN_BTYPES from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - @unittest.skipUnless(RUN_BTYPES, "RUN_BTYPES is 0") class BucketTypeTests(IntegrationTestBase, unittest.TestCase, Comparison): diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index 46700a61..ffcd6ea0 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -1,4 +1,5 @@ -import platform +import unittest + from six import PY2 from threading import Thread from riak.riak_object import RiakObject @@ -10,11 +11,6 @@ else: from queue import Queue -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - class ClientTests(IntegrationTestBase, unittest.TestCase): def test_uses_client_id_if_given(self): diff --git a/riak/tests/test_comparison.py b/riak/tests/test_comparison.py index 446bc031..86fb9f8b 100644 --- a/riak/tests/test_comparison.py +++ b/riak/tests/test_comparison.py @@ -1,14 +1,10 @@ # -*- coding: utf-8 -*- -import platform +import unittest + from riak.riak_object import RiakObject from riak.bucket import RiakBucket, BucketType from riak.tests.base import IntegrationTestBase -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - class BucketTypeRichComparisonTest(unittest.TestCase): def test_btype_eq(self): diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 39166069..3e945920 100644 --- a/riak/tests/test_datatypes.py 
+++ b/riak/tests/test_datatypes.py @@ -1,16 +1,12 @@ # -*- coding: utf-8 -*- -import platform -from riak import RiakBucket, BucketType, RiakObject +import unittest import riak.datatypes as datatypes + +from riak import RiakBucket, BucketType, RiakObject from riak.tests import RUN_DATATYPES from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - class DatatypeUnitTestBase(object): dtype = None diff --git a/riak/tests/test_feature_detection.py b/riak/tests/test_feature_detection.py index d88334aa..894c8b17 100644 --- a/riak/tests/test_feature_detection.py +++ b/riak/tests/test_feature_detection.py @@ -1,11 +1,7 @@ # -*- coding: utf-8 -*- -import platform -from riak.transports.feature_detect import FeatureDetection +import unittest -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest +from riak.transports.feature_detect import FeatureDetection class IncompleteTransport(FeatureDetection): diff --git a/riak/tests/test_filters.py b/riak/tests/test_filters.py index c821ce95..e41eea6c 100644 --- a/riak/tests/test_filters.py +++ b/riak/tests/test_filters.py @@ -1,13 +1,9 @@ # -*- coding: utf-8 -*- -import platform +import unittest + from riak.mapreduce import RiakKeyFilter from riak import key_filter -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - class FilterTests(unittest.TestCase): def test_simple(self): diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index bfa2b888..abcf4e95 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -1,9 +1,9 @@ # -*- coding: utf-8 -*- +import copy import os -import platform -from six import string_types, PY2, PY3 +import unittest -import copy +from six import string_types, PY2, PY3 from time import sleep from riak import ConflictError, RiakBucket, RiakError from riak.resolver import default_resolver, last_written_resolver @@ -16,11 +16,6 @@ except ImportError: import json -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - if PY2: import cPickle test_pickle_dumps = cPickle.dumps diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index a1827398..b22a70ba 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- from __future__ import print_function -import platform +import unittest from six import PY2 from riak.mapreduce import RiakMapReduce @@ -12,11 +12,6 @@ from riak.tests import RUN_SECURITY from riak.tests.yz_setup import yzSetUp, yzTearDown -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - testrun_yz_mr = {'btype': 'mr', 'bucket': 'mrbucket', diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index f1088244..a5f8ffd5 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- +import unittest + from six import PY2 -import platform from threading import Thread, currentThread from riak.transports.pool import Pool, BadResource from random import SystemRandom @@ -8,11 +9,6 @@ from riak.tests import RUN_POOL from riak.tests.comparison import Comparison -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - if PY2: from Queue import Queue else: diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index 
7cc369b6..17e2ea6a 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -1,14 +1,11 @@ # -*- coding: utf-8 -*- from __future__ import print_function -import platform + +import unittest + from riak.tests import RUN_SEARCH, RUN_YZ from riak.tests.base import IntegrationTestBase -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - testrun_search_bucket = 'searchbucket' diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index 056c1b48..8a3db8f7 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -import platform import sys +import unittest from riak.tests import RUN_SECURITY, SECURITY_USER, SECURITY_PASSWD, \ SECURITY_CACERT, SECURITY_KEY, SECURITY_CERT, SECURITY_REVOKED, \ @@ -8,11 +8,6 @@ from riak.security import SecurityCreds from riak.tests.base import IntegrationTestBase -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - class SecurityTests(IntegrationTestBase, unittest.TestCase): @unittest.skipIf(RUN_SECURITY, 'RUN_SECURITY is 1') diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index cad20ad3..bace0543 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -1,8 +1,8 @@ # -*- coding: utf-8 -*- import datetime -import platform import random import string +import unittest import riak.pb.riak_ts_pb2 @@ -17,11 +17,6 @@ from riak.tests.base import IntegrationTestBase from riak.pb.riak_ts_pb2 import TsColumnType -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - table_name = 'GeoCheckin' bd0 = '时间序列' diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 23d2d787..70ae6d88 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -1,8 +1,9 @@ # -*- coding: utf-8 -*- import datetime -import platform import random +import six import string +import unittest from erlastic import decode, encode from erlastic.types import Atom @@ -17,23 +18,26 @@ from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - rpberrorresp_a = Atom('rpberrorresp') tsgetreq_a = Atom('tsgetreq') tsgetresp_a = Atom('tsgetresp') tsputreq_a = Atom('tsputreq') udef_a = Atom('undefined') -tsr_a = Atom('tsrow') -tsc_a = Atom('tscell') +tsrow_a = Atom('tsrow') +tscell_a = Atom('tscell') table_name = 'GeoCheckin' -bd0 = '时间序列' -bd1 = 'временные ряды' +str0 = 'ascii-0' +str1 = 'ascii-1' + +if six.PY2: + # https://docs.python.org/2/library/functions.html#unicode + bd0 = unicode('时间序列', 'utf-8') + bd1 = unicode('временные ряды', 'utf-8') +else: + bd0 = u'时间序列' + bd1 = u'временные ряды' fiveMins = datetime.timedelta(0, 300) ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) @@ -44,46 +48,42 @@ class TimeseriesTtbUnitTests(unittest.TestCase): def setUp(self): self.c = RiakTtbCodec() - self.ts0ms = unix_time_millis(ts0) - self.ts1ms = unix_time_millis(ts1) - self.rows = [ - [bd0, 0, 1.2, ts0, True, None], - [bd1, 3, 4.5, ts1, False, None] - ] - self.test_key = ['hash1', 'user2', ts0] self.table = Table(None, table_name) def test_encode_data_for_get(self): keylist = [ - (tsc_a, str_to_bytes('hash1'), udef_a, udef_a, udef_a, udef_a), - (tsc_a, str_to_bytes('user2'), udef_a, udef_a, udef_a, udef_a), - (tsc_a, udef_a, udef_a, unix_time_millis(ts0), 
udef_a, udef_a) + (tscell_a, str_to_bytes('hash1'), udef_a, udef_a, udef_a, udef_a), + (tscell_a, str_to_bytes('user2'), udef_a, udef_a, udef_a, udef_a), + (tscell_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a) ] req = tsgetreq_a, str_to_bytes(table_name), keylist, udef_a req_test = encode(req) - req_encoded = self.c._encode_timeseries_keyreq_ttb(self.table, self.test_key) + test_key = ['hash1', 'user2', ts0] + req_encoded = self.c._encode_timeseries_keyreq_ttb(self.table, test_key) self.assertEqual(req_test, req_encoded) # def test_decode_riak_error(self): def test_decode_data_from_get(self): cols = [] - r0 = (tsr_a, [ - (tsc_a, bd0, udef_a, udef_a, udef_a, udef_a), - (tsc_a, udef_a, 0, udef_a, udef_a, udef_a), - (tsc_a, udef_a, udef_a, udef_a, udef_a, 1.2), - (tsc_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), - (tsc_a, udef_a, udef_a, udef_a, True, udef_a), - (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) + r0 = (tsrow_a, [ + (tscell_a, bd0, udef_a, udef_a, udef_a, udef_a), + (tscell_a, udef_a, 0, udef_a, udef_a, udef_a), + (tscell_a, udef_a, udef_a, udef_a, udef_a, 1.2), + (tscell_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), + (tscell_a, udef_a, udef_a, udef_a, True, udef_a), + (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a), + (tscell_a, str1, udef_a, udef_a, udef_a, udef_a) ]) - r1 = (tsr_a, [ - (tsc_a, bd1, udef_a, udef_a, udef_a, udef_a), - (tsc_a, udef_a, 3, udef_a, udef_a, udef_a), - (tsc_a, udef_a, udef_a, udef_a, udef_a, 4.5), - (tsc_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), - (tsc_a, udef_a, udef_a, udef_a, False, udef_a), - (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) + r1 = (tsrow_a, [ + (tscell_a, bd1, udef_a, udef_a, udef_a, udef_a), + (tscell_a, udef_a, 3, udef_a, udef_a, udef_a), + (tscell_a, udef_a, udef_a, udef_a, udef_a, 4.5), + (tscell_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), + (tscell_a, udef_a, udef_a, udef_a, False, udef_a), + (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a), + (tscell_a, str1, udef_a, udef_a, udef_a, udef_a) ]) rows = [r0, r1] # { tsgetresp, [cols], [rows] } @@ -94,41 +94,63 @@ def test_decode_data_from_get(self): self.c._decode_timeseries_ttb(decode(rsp_ttb), tsobj) for i in range(0, 1): + self.assertEqual(tsrow_a, rows[i][0]) dr = rows[i][1] - r = tsobj.rows[i] - self.assertEqual(r[0], dr[0][1]) + r = tsobj.rows[i] # encoded + + # cells + self.assertEqual(tscell_a, dr[0][0]) + self.assertEqual(r[0], dr[0][1].encode('utf-8')) + + self.assertEqual(tscell_a, dr[1][0]) self.assertEqual(r[1], dr[1][2]) + + self.assertEqual(tscell_a, dr[2][0]) self.assertEqual(r[2], dr[2][5]) + + self.assertEqual(tscell_a, dr[3][0]) self.assertEqual(r[3], datetime_from_unix_time_millis(dr[3][3])) + + self.assertEqual(tscell_a, dr[4][0]) if i == 0: self.assertEqual(r[4], True) else: self.assertEqual(r[4], False) + + self.assertEqual(tscell_a, dr[5][0]) self.assertEqual(r[5], None) + self.assertEqual(tscell_a, dr[6][0]) + self.assertEqual(r[6], dr[6][1].encode('ascii')) + def test_encode_data_for_put(self): - r0 = (tsr_a, [ - (tsc_a, bd0, udef_a, udef_a, udef_a, udef_a), - (tsc_a, udef_a, 0, udef_a, udef_a, udef_a), - (tsc_a, udef_a, udef_a, udef_a, udef_a, 1.2), - (tsc_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), - (tsc_a, udef_a, udef_a, udef_a, True, udef_a), - (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) + r0 = (tsrow_a, [ + (tscell_a, bd0, udef_a, udef_a, udef_a, udef_a), + (tscell_a, udef_a, 0, udef_a, udef_a, udef_a), + (tscell_a, udef_a, udef_a, udef_a, udef_a, 
1.2), + (tscell_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), + (tscell_a, udef_a, udef_a, udef_a, True, udef_a), + (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) ]) - r1 = (tsr_a, [ - (tsc_a, bd1, udef_a, udef_a, udef_a, udef_a), - (tsc_a, udef_a, 3, udef_a, udef_a, udef_a), - (tsc_a, udef_a, udef_a, udef_a, udef_a, 4.5), - (tsc_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), - (tsc_a, udef_a, udef_a, udef_a, False, udef_a), - (tsc_a, udef_a, udef_a, udef_a, udef_a, udef_a) + r1 = (tsrow_a, [ + (tscell_a, bd1, udef_a, udef_a, udef_a, udef_a), + (tscell_a, udef_a, 3, udef_a, udef_a, udef_a), + (tscell_a, udef_a, udef_a, udef_a, udef_a, 4.5), + (tscell_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), + (tscell_a, udef_a, udef_a, udef_a, False, udef_a), + (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) ]) rows = [r0, r1] req = tsputreq_a, str_to_bytes(table_name), udef_a, rows req_test = encode(req) - tsobj = TsObject(None, self.table, self.rows, None) + rows_to_encode = [ + [bd0, 0, 1.2, ts0, True, None], + [bd1, 3, 4.5, ts1, False, None] + ] + + tsobj = TsObject(None, self.table, rows_to_encode, None) req_encoded = self.c._encode_timeseries_put_ttb(tsobj) self.assertEqual(req_test, req_encoded) diff --git a/riak/tests/test_util.py b/riak/tests/test_util.py index 3cc69e95..af704516 100644 --- a/riak/tests/test_util.py +++ b/riak/tests/test_util.py @@ -1,12 +1,7 @@ -import platform +import unittest from riak.util import is_timeseries_supported -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - class UtilUnitTests(unittest.TestCase): def test_is_timeseries_supported(self): diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index 52f9af88..a4f325f1 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -1,15 +1,11 @@ # -*- coding: utf-8 -*- -import platform +import unittest + from riak.tests import RUN_YZ from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison from riak.tests.yz_setup import yzSetUp, yzTearDown -if platform.python_version() < '2.7': - unittest = __import__('unittest2') -else: - import unittest - def wait_for_yz_index(bucket, key, index=None): """ diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py index fb7a14ef..93995902 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/transports/ttb/codec.py @@ -2,11 +2,12 @@ from erlastic import decode, encode from erlastic.types import Atom -from six import string_types, PY2 +from six import text_type, binary_type, \ + string_types, PY2 from riak import RiakError -from riak.util import str_to_bytes, bytes_to_str, \ - unix_time_millis, datetime_from_unix_time_millis +from riak.util import unix_time_millis, \ + datetime_from_unix_time_millis udef_a = Atom('undefined') @@ -36,8 +37,10 @@ def _encode_to_ts_cell_ttb(self, cell): return (tscell_a, udef_a, udef_a, ts, udef_a, udef_a) elif isinstance(cell, bool): return (tscell_a, udef_a, udef_a, udef_a, cell, udef_a) - elif isinstance(cell, string_types): - return (tscell_a, str_to_bytes(cell), + elif isinstance(cell, text_type) or \ + isinstance(cell, binary_type) or \ + isinstance(cell, string_types): + return (tscell_a, cell, udef_a, udef_a, udef_a, udef_a) elif (isinstance(cell, int) or (PY2 and isinstance(cell, long))): # noqa @@ -55,7 +58,7 @@ def _encode_timeseries_keyreq_ttb(self, table, key): key_vals = key else: raise ValueError("key must be a list") - req = tsgetreq_a, 
str_to_bytes(table.name), \ + req = tsgetreq_a, table.name, \ [self._encode_to_ts_cell_ttb(k) for k in key_vals], udef_a return encode(req) @@ -79,7 +82,7 @@ def _encode_timeseries_put_ttb(self, tsobj): req_r.append(self._encode_to_ts_cell_ttb(cell)) req_t = (tsrow_a, req_r) req_rows.append(req_t) - req = tsputreq_a, str_to_bytes(tsobj.table.name), \ + req = tsputreq_a, tsobj.table.name, \ udef_a, req_rows return encode(req) else: @@ -95,6 +98,7 @@ def _decode_timeseries_ttb(self, resp_ttb, tsobj): :param tsobj: a TsObject :type tsobj: TsObject """ + # TODO TODO RTS-842 CLIENTS-814 GH-445 # if tsobj.columns is not None: # for col in resp.columns: # col_name = bytes_to_str(col.name) @@ -125,10 +129,9 @@ def _decode_timeseries_row_ttb(self, tsrow_ttb, tscols=None): if tsrow_ttb[0] == tsrow_a: row = [] for tsc_ttb in tsrow_ttb[1]: - val = None if tsc_ttb[0] == tscell_a: if tsc_ttb[1] != udef_a: - row.append(bytes_to_str(tsc_ttb[1])) + row.append(tsc_ttb[1]) elif tsc_ttb[2] != udef_a: row.append(tsc_ttb[2]) elif tsc_ttb[3] != udef_a: diff --git a/setup.py b/setup.py index 9fd8f156..e7109118 100755 --- a/setup.py +++ b/setup.py @@ -1,17 +1,19 @@ #!/usr/bin/env python import platform +import six + from setuptools import setup, find_packages from version import get_version from commands import setup_timeseries, build_messages -install_requires = ['six >= 1.8.0', 'erlastic >= 2.0.0'] +install_requires = ['six >= 1.8.0', 'erlastic >= 2.1.0'] requires = ['six(>=1.8.0)', 'erlastic(>= 2.0.0)'] if platform.python_version() < '2.7.9': install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") -if platform.python_version() < '3.0': +if six.PY2: install_requires.append('protobuf >=2.4.1, <2.7.0') requires.append('protobuf(>=2.4.1, <2.7.0)') else: @@ -19,17 +21,12 @@ requires.append('python3_protobuf(>=2.4.1, <2.6.0)') -tests_require = [] -if platform.python_version() < '2.7.0': - tests_require.append("unittest2") - setup( name='riak', version=get_version(), packages=find_packages(), requires=requires, install_requires=install_requires, - tests_require=tests_require, package_data={'riak': ['erl_src/*']}, description='Python client for Riak', zip_safe=True, From f9176969894eaa872a5f5377809e82f9b5fe2a03 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 28 Mar 2016 14:51:32 -0700 Subject: [PATCH 133/324] Fix setup version check, TTB for Python 2 --- riak/transports/ttb/__init__.py | 0 setup.py | 3 ++- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 riak/transports/ttb/__init__.py diff --git a/riak/transports/ttb/__init__.py b/riak/transports/ttb/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/setup.py b/setup.py index e7109118..f20bf524 100755 --- a/setup.py +++ b/setup.py @@ -9,7 +9,8 @@ install_requires = ['six >= 1.8.0', 'erlastic >= 2.1.0'] requires = ['six(>=1.8.0)', 'erlastic(>= 2.0.0)'] -if platform.python_version() < '2.7.9': + +if platform.python_version_tuple() <= (2, 7, 9): install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") From 77f09de7fa9da117f48f41ebd74f56c2c8a46027 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 28 Mar 2016 15:21:03 -0700 Subject: [PATCH 134/324] make lint happy --- riak/benchmarks/timeseries.py | 10 ++++++---- riak/tests/test_2i.py | 1 - riak/tests/test_timeseries_ttb.py | 20 ++++++++++---------- riak/transports/pbc/codec.py | 4 ++-- riak/transports/pbc/connection.py | 3 ++- riak/transports/pbc/transport.py | 8 ++++---- riak/transports/ttb/codec.py | 17 ++++++++++------- 7 
files changed, 34 insertions(+), 29 deletions(-) diff --git a/riak/benchmarks/timeseries.py b/riak/benchmarks/timeseries.py index 4bd8a676..84960962 100644 --- a/riak/benchmarks/timeseries.py +++ b/riak/benchmarks/timeseries.py @@ -1,5 +1,4 @@ import datetime -import logging import random import sys @@ -14,13 +13,15 @@ # batch sizes 8, 16, 32, 64, 128, 256 if len(sys.argv) != 3: - raise AssertionError('first arg is batch size, second arg is true / false for use_ttb') + raise AssertionError( + 'first arg is batch size, second arg is true / false' + 'for use_ttb') rowcount = 32768 batchsz = int(sys.argv[1]) if rowcount % batchsz != 0: raise AssertionError('rowcount must be divisible by batchsz') -use_ttb = sys.argv[2].lower() == 'true' +use_ttb = sys.argv[2].lower() == 'true' epoch = datetime.datetime.utcfromtimestamp(0) onesec = datetime.timedelta(0, 1) @@ -56,7 +57,8 @@ {'host': h, 'pb_port': 10047}, {'host': h, 'pb_port': 10057} ] -client = RiakClient(nodes=n, protocol='pbc', transport_options={'use_ttb': use_ttb}) +client = RiakClient(nodes=n, protocol='pbc', + transport_options={'use_ttb': use_ttb}) table = client.table(tbl) with benchmark.measure() as b: diff --git a/riak/tests/test_2i.py b/riak/tests/test_2i.py index f6e6d54a..6db10602 100644 --- a/riak/tests/test_2i.py +++ b/riak/tests/test_2i.py @@ -6,7 +6,6 @@ from riak.tests.base import IntegrationTestBase - class TwoITests(IntegrationTestBase, unittest.TestCase): def is_2i_supported(self): # Immediate test to see if 2i is even supported w/ the backend diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 70ae6d88..47c5c39a 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -1,8 +1,6 @@ # -*- coding: utf-8 -*- import datetime -import random import six -import string import unittest from erlastic import decode, encode @@ -60,7 +58,8 @@ def test_encode_data_for_get(self): req_test = encode(req) test_key = ['hash1', 'user2', ts0] - req_encoded = self.c._encode_timeseries_keyreq_ttb(self.table, test_key) + req_encoded = self.c._encode_timeseries_keyreq_ttb( + self.table, test_key) self.assertEqual(req_test, req_encoded) # def test_decode_riak_error(self): @@ -87,7 +86,7 @@ def test_decode_data_from_get(self): ]) rows = [r0, r1] # { tsgetresp, [cols], [rows] } - rsp_data = tsgetresp_a, cols, rows # NB: Python tuple notation + rsp_data = tsgetresp_a, cols, rows # NB: Python tuple notation rsp_ttb = encode(rsp_data) tsobj = TsObject(None, self.table, [], []) @@ -96,7 +95,7 @@ def test_decode_data_from_get(self): for i in range(0, 1): self.assertEqual(tsrow_a, rows[i][0]) dr = rows[i][1] - r = tsobj.rows[i] # encoded + r = tsobj.rows[i] # encoded # cells self.assertEqual(tscell_a, dr[0][0]) @@ -109,8 +108,8 @@ def test_decode_data_from_get(self): self.assertEqual(r[2], dr[2][5]) self.assertEqual(tscell_a, dr[3][0]) - self.assertEqual(r[3], - datetime_from_unix_time_millis(dr[3][3])) + dt = datetime_from_unix_time_millis(dr[3][3]) + self.assertEqual(r[3], dt) self.assertEqual(tscell_a, dr[4][0]) if i == 0: @@ -170,10 +169,11 @@ def test_store_and_fetch_ttb(self): twentyMinsAgo = fifteenMinsAgo - fiveMins twentyFiveMinsAgo = twentyMinsAgo - fiveMins + opts = {'use_ttb': True} client = RiakClient(protocol='pbc', - host='riak-test', - pb_port=10017, - transport_options={'use_ttb': True}) + host='riak-test', + pb_port=10017, + transport_options=opts) table = client.table(table_name) rows = [ diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py index 
e44ab42b..47932512 100644 --- a/riak/transports/pbc/codec.py +++ b/riak/transports/pbc/codec.py @@ -1,5 +1,4 @@ import datetime -import logging import riak.pb import riak.pb.riak_pb2 import riak.pb.riak_dt_pb2 @@ -695,7 +694,8 @@ def _decode_timeseries(self, resp, tsobj): metadata from a TsGetResp / TsQueryResp. :param resp: the protobuf message from which to process data - :type resp: riak.pb.riak_ts_pb2.TsQueryRsp or riak.pb.riak_ts_pb2.TsGetResp + :type resp: riak.pb.riak_ts_pb2.TsQueryRsp or + riak.pb.riak_ts_pb2.TsGetResp :param tsobj: a TsObject :type tsobj: TsObject """ diff --git a/riak/transports/pbc/connection.py b/riak/transports/pbc/connection.py index f13c75a4..dfaee06b 100644 --- a/riak/transports/pbc/connection.py +++ b/riak/transports/pbc/connection.py @@ -201,7 +201,8 @@ def _recv_msg(self, expect=None, is_ttb=False): if expect and msg_code != expect: raise RiakError("unexpected protocol buffer message code: %d, %r" % (msg_code, msg)) - # logging.debug("pbc/connection received msg_code %d msg %s", msg_code, msg) + # logging.debug("pbc/connection received msg_code %d msg %s", + # msg_code, msg) return msg_code, msg def _recv_pkt(self): diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py index 2e822adf..c7c4c387 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/pbc/transport.py @@ -1,4 +1,3 @@ -import logging import riak.pb.messages import riak.pb.riak_pb2 import riak.pb.riak_kv_pb2 @@ -22,7 +21,7 @@ class RiakPbcTransport(RiakTransport, RiakPbcConnection, - RiakPbcCodec, RiakTtbCodec): + RiakPbcCodec, RiakTtbCodec): """ The RiakPbcTransport object holds a connection to the protocol buffers interface on the riak server. @@ -217,8 +216,9 @@ def ts_put(self, tsobj): riak.pb.messages.MSG_CODE_TS_PUT_RESP, self._use_ttb) - if self._use_ttb and resp is None and \ - msg_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: + if self._use_ttb and \ + resp is None and \ + msg_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: return True if resp is not None: diff --git a/riak/transports/ttb/codec.py b/riak/transports/ttb/codec.py index 93995902..e0e7afdd 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/transports/ttb/codec.py @@ -1,6 +1,6 @@ import datetime -from erlastic import decode, encode +from erlastic import encode from erlastic.types import Atom from six import text_type, binary_type, \ string_types, PY2 @@ -20,6 +20,7 @@ tscell_empty = (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) + class RiakTtbCodec(object): ''' Erlang term-to-binary Encoding and decoding methods for RiakTtbTransport @@ -38,8 +39,8 @@ def _encode_to_ts_cell_ttb(self, cell): elif isinstance(cell, bool): return (tscell_a, udef_a, udef_a, udef_a, cell, udef_a) elif isinstance(cell, text_type) or \ - isinstance(cell, binary_type) or \ - isinstance(cell, string_types): + isinstance(cell, binary_type) or \ + isinstance(cell, string_types): return (tscell_a, cell, udef_a, udef_a, udef_a, udef_a) elif (isinstance(cell, int) or @@ -82,8 +83,7 @@ def _encode_timeseries_put_ttb(self, tsobj): req_r.append(self._encode_to_ts_cell_ttb(cell)) req_t = (tsrow_a, req_r) req_rows.append(req_t) - req = tsputreq_a, tsobj.table.name, \ - udef_a, req_rows + req = tsputreq_a, tsobj.table.name, udef_a, req_rows return encode(req) else: raise RiakError("TsObject requires a list of rows") @@ -99,6 +99,8 @@ def _decode_timeseries_ttb(self, resp_ttb, tsobj): :type tsobj: TsObject """ # TODO TODO RTS-842 CLIENTS-814 GH-445 + # TODO COLUMNS + # TODO TODO RTS-842 CLIENTS-814 GH-445 # if 
tsobj.columns is not None: # for col in resp.columns: # col_name = bytes_to_str(col.name) @@ -107,11 +109,12 @@ def _decode_timeseries_ttb(self, resp_ttb, tsobj): # tsobj.columns.append(col) resp_a = resp_ttb[0] if resp_a == tsgetresp_a: - resp_cols = resp_ttb[1] + # TODO resp_cols = resp_ttb[1] resp_rows = resp_ttb[2] for row_ttb in resp_rows: tsobj.rows.append( - self._decode_timeseries_row_ttb(row_ttb, None)) # TODO cols + self._decode_timeseries_row_ttb(row_ttb, None)) + # TODO # elif resp_a == rpberrorresp_a: else: raise RiakError("Unknown TTB response type: {}".format(resp_a)) From 80aac21258af45ecb5405254f55a419d6ff6922a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 28 Mar 2016 15:52:13 -0700 Subject: [PATCH 135/324] Add basho-erlastic as a dependency --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index f20bf524..bdd0ff1d 100755 --- a/setup.py +++ b/setup.py @@ -7,8 +7,8 @@ from version import get_version from commands import setup_timeseries, build_messages -install_requires = ['six >= 1.8.0', 'erlastic >= 2.1.0'] -requires = ['six(>=1.8.0)', 'erlastic(>= 2.0.0)'] +install_requires = ['six >= 1.8.0', 'basho_erlastic >= 2.1.0'] +requires = ['six(>=1.8.0)', 'basho_erlastic(>= 2.1.0)'] if platform.python_version_tuple() <= (2, 7, 9): install_requires.append("pyOpenSSL >= 0.14") From 5cf97fa97179dba046febe9ddbad453e132cea1a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 28 Mar 2016 16:10:31 -0700 Subject: [PATCH 136/324] Use Python version comparison that uses ints instead of strings --- setup.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/setup.py b/setup.py index bdd0ff1d..a408c1b7 100755 --- a/setup.py +++ b/setup.py @@ -1,7 +1,7 @@ #!/usr/bin/env python -import platform import six +import sys from setuptools import setup, find_packages from version import get_version @@ -10,7 +10,7 @@ install_requires = ['six >= 1.8.0', 'basho_erlastic >= 2.1.0'] requires = ['six(>=1.8.0)', 'basho_erlastic(>= 2.1.0)'] -if platform.python_version_tuple() <= (2, 7, 9): +if sys.version_info[0:3] <= (2, 7, 9): install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") From 432513040c293b2154ac5197967516ecfc65dbad Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 29 Mar 2016 10:03:00 -0700 Subject: [PATCH 137/324] Improve README generation --- .gitignore | 3 +++ MANIFEST.in | 1 + setup.py | 6 +++++- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 81fd6c28..68a831bf 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,5 @@ +README.rst + *.pyc .python-version __pycache__/ @@ -11,6 +13,7 @@ docs/_build .*.swp .coverage +riak-*/ py-build/ dist/ diff --git a/MANIFEST.in b/MANIFEST.in index d9f9a3fd..ddf59c00 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,7 @@ include docs/* include riak/erl_src/* include README.md +include README.rst include LICENSE include RELNOTES.md include version.py diff --git a/setup.py b/setup.py index 65dd9995..6338efd8 100755 --- a/setup.py +++ b/setup.py @@ -1,5 +1,6 @@ #!/usr/bin/env python +import codecs import six import sys @@ -24,8 +25,11 @@ try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') + with codecs.open('README.rst', 'w', 'utf-8') as f: + f.write(long_description) except(IOError, ImportError): - long_description = open('README.md').read() + with open('README.md') as f: + long_description = f.read() setup( name='riak', From 85158938c832372e63d367773b43170a940645d0 Mon Sep 17 
00:00:00 2001 From: Luke Bakken Date: Tue, 29 Mar 2016 10:49:13 -0700 Subject: [PATCH 138/324] Move transports/pbc dir to transports/tcp --- docs/advanced.rst | 4 ++-- riak/client/__init__.py | 2 +- riak/client/transport.py | 19 +---------------- riak/tests/test_timeseries.py | 2 +- riak/transports/{pbc => tcp}/__init__.py | 24 ++-------------------- riak/transports/{pbc => tcp}/codec.py | 0 riak/transports/{pbc => tcp}/connection.py | 0 riak/transports/{pbc => tcp}/stream.py | 2 +- riak/transports/{pbc => tcp}/transport.py | 6 +++--- 9 files changed, 11 insertions(+), 48 deletions(-) rename riak/transports/{pbc => tcp}/__init__.py (63%) rename riak/transports/{pbc => tcp}/codec.py (100%) rename riak/transports/{pbc => tcp}/connection.py (100%) rename riak/transports/{pbc => tcp}/stream.py (98%) rename riak/transports/{pbc => tcp}/transport.py (99%) diff --git a/docs/advanced.rst b/docs/advanced.rst index 523b465d..b9e209c9 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -132,10 +132,10 @@ HTTP Transport :members: ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Protocol Buffers Transport +TCP Transport ^^^^^^^^^^^^^^^^^^^^^^^^^^ -.. currentmodule:: riak.transports.pbc +.. currentmodule:: riak.transports.tcp .. autoclass:: RiakPbcTransport :members: diff --git a/riak/client/__init__.py b/riak/client/__init__.py index d623f970..548275da 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -12,7 +12,7 @@ from riak.resolver import default_resolver from riak.table import Table from riak.transports.http import RiakHttpPool -from riak.transports.pbc import RiakPbcPool +from riak.transports.tcp import RiakPbcPool from riak.security import SecurityCreds from riak.util import lazy_property, bytes_to_str, str_to_bytes from six import string_types, PY2 diff --git a/riak/client/transport.py b/riak/client/transport.py index 027951d6..8c3eb92e 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -1,23 +1,6 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" from contextlib import contextmanager from riak.transports.pool import BadResource -from riak.transports.pbc import is_retryable as is_pbc_retryable +from riak.transports.tcp import is_retryable as is_pbc_retryable from riak.transports.http import is_retryable as is_http_retryable import threading from six import PY2 diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index bace0543..d2f76a9a 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -9,7 +9,7 @@ from riak import RiakError from riak.table import Table from riak.ts_object import TsObject -from riak.transports.pbc.codec import RiakPbcCodec +from riak.transports.tcp.codec import RiakPbcCodec from riak.util import str_to_bytes, bytes_to_str, \ unix_time_millis, datetime_from_unix_time_millis, \ is_timeseries_supported diff --git a/riak/transports/pbc/__init__.py b/riak/transports/tcp/__init__.py similarity index 63% rename from riak/transports/pbc/__init__.py rename to riak/transports/tcp/__init__.py index fc8914b6..8ccdaa10 100644 --- a/riak/transports/pbc/__init__.py +++ b/riak/transports/tcp/__init__.py @@ -1,28 +1,8 @@ -""" -Copyright 2012 Basho Technologies, Inc. -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - import errno import socket + from riak.transports.pool import Pool -from riak.transports.pbc.transport import RiakPbcTransport +from riak.transports.tcp.transport import RiakPbcTransport class RiakPbcPool(Pool): diff --git a/riak/transports/pbc/codec.py b/riak/transports/tcp/codec.py similarity index 100% rename from riak/transports/pbc/codec.py rename to riak/transports/tcp/codec.py diff --git a/riak/transports/pbc/connection.py b/riak/transports/tcp/connection.py similarity index 100% rename from riak/transports/pbc/connection.py rename to riak/transports/tcp/connection.py diff --git a/riak/transports/pbc/stream.py b/riak/transports/tcp/stream.py similarity index 98% rename from riak/transports/pbc/stream.py rename to riak/transports/tcp/stream.py index ed649279..38357bbd 100644 --- a/riak/transports/pbc/stream.py +++ b/riak/transports/tcp/stream.py @@ -3,7 +3,7 @@ from riak.util import decode_index_value, bytes_to_str from riak.client.index_page import CONTINUATION -from riak.transports.pbc.codec import RiakPbcCodec +from riak.transports.tcp.codec import RiakPbcCodec from six import PY2 diff --git a/riak/transports/pbc/transport.py b/riak/transports/tcp/transport.py similarity index 99% rename from riak/transports/pbc/transport.py rename to riak/transports/tcp/transport.py index c7c4c387..01eddfa8 100644 --- a/riak/transports/pbc/transport.py +++ b/riak/transports/tcp/transport.py @@ -8,13 +8,13 @@ from riak.riak_object import VClock from riak.ts_object import TsObject from riak.util import decode_index_value, str_to_bytes, bytes_to_str -from riak.transports.pbc.connection import RiakPbcConnection -from riak.transports.pbc.stream import (RiakPbcKeyStream, +from riak.transports.tcp.connection import RiakPbcConnection +from riak.transports.tcp.stream import (RiakPbcKeyStream, RiakPbcMapredStream, RiakPbcBucketStream, RiakPbcIndexStream, RiakPbcTsKeyStream) -from riak.transports.pbc.codec import RiakPbcCodec +from riak.transports.tcp.codec import RiakPbcCodec from riak.transports.ttb.codec import RiakTtbCodec from six import PY2, PY3 From cb1cffd40b4ac27cd1a0bd43feaa08d6860c35b4 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 29 Mar 2016 11:27:32 -0700 Subject: [PATCH 139/324] Test suite fixes for Windows --- riak/tests/test_kv.py | 3 +++ riak/tests/test_server_test.py | 5 ++++- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index abcf4e95..5513c603 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -1,6 +1,7 @@ # -*- coding: utf-8 -*- import copy import os +import sys import unittest from six import string_types, PY2, PY3 @@ -690,7 +691,9 @@ def test_store_binary_object_from_file(self): obj.store() obj = bucket.get(self.key_name) self.assertNotEqual(obj.encoded_data, None) + is_win32 = sys.platform == 'win32' self.assertTrue(obj.content_type == 'text/x-python' or + (is_win32 and obj.content_type == 'text/plain') or obj.content_type == 'application/x-python-code') def test_store_binary_object_from_file_should_use_default_mimetype(self): diff --git a/riak/tests/test_server_test.py b/riak/tests/test_server_test.py index d02debe6..45dcbd55 100644 --- a/riak/tests/test_server_test.py +++ b/riak/tests/test_server_test.py @@ -1,7 +1,10 @@ -from riak.test_server import TestServer +import sys import unittest +from riak.test_server import TestServer + +@unittest.skipIf(sys.platform == 'win32', 'Windows is not supported') class TestServerTestCase(unittest.TestCase): def setUp(self): self.test_server = 
TestServer() From ac43d1be947d554dd9590f55014d8408b396711e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 29 Mar 2016 13:04:24 -0700 Subject: [PATCH 140/324] Moving code around to separate codecs from transports. Make lint happy. --- docs/advanced.rst | 16 ++++-- riak/client/__init__.py | 14 ++--- riak/client/transport.py | 6 +- riak/{transports/ttb => codecs}/__init__.py | 0 .../http/codec.py => codecs/http.py} | 25 +------- .../tcp/codec.py => codecs/pbuf.py} | 8 +-- .../ttb/codec.py => codecs/ttb.py} | 6 +- riak/node.py | 18 +----- riak/tests/test_client.py | 4 +- riak/tests/test_timeseries.py | 12 ++-- riak/tests/test_timeseries_ttb.py | 13 ++--- riak/transports/feature_detect.py | 2 +- riak/transports/http/__init__.py | 35 ++++-------- riak/transports/http/connection.py | 26 ++------- riak/transports/http/resources.py | 26 ++------- riak/transports/http/stream.py | 41 ++++--------- riak/transports/http/transport.py | 57 +++++++------------ riak/transports/pool.py | 20 +------ riak/transports/tcp/__init__.py | 18 +++--- riak/transports/tcp/connection.py | 12 ++-- riak/transports/tcp/stream.py | 38 ++++++------- riak/transports/tcp/transport.py | 41 +++++++------ riak/transports/transport.py | 22 +------ 23 files changed, 152 insertions(+), 308 deletions(-) rename riak/{transports/ttb => codecs}/__init__.py (100%) rename riak/{transports/http/codec.py => codecs/http.py} (93%) rename riak/{transports/tcp/codec.py => codecs/pbuf.py} (99%) rename riak/{transports/ttb/codec.py => codecs/ttb.py} (97%) diff --git a/docs/advanced.rst b/docs/advanced.rst index b9e209c9..45475dd8 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -93,7 +93,7 @@ Transports .. currentmodule:: riak.transports.transport -.. autoclass:: RiakTransport +.. autoclass:: Transport :members: :private-members: @@ -124,20 +124,24 @@ HTTP Transport .. currentmodule:: riak.transports.http -.. autoclass:: RiakHttpPool +.. autoclass:: HttpPool .. autofunction:: is_retryable -.. autoclass:: RiakHttpTransport +.. autoclass:: HttpTransport :members: -^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^ TCP Transport -^^^^^^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^ .. currentmodule:: riak.transports.tcp -.. autoclass:: RiakPbcTransport +.. autoclass:: TcpPool + +.. autofunction:: is_retryable + +.. 
autoclass:: TcpTransport :members: --------- diff --git a/riak/client/__init__.py b/riak/client/__init__.py index 548275da..3a3caad5 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -11,8 +11,8 @@ from riak.mapreduce import RiakMapReduceChain from riak.resolver import default_resolver from riak.table import Table -from riak.transports.http import RiakHttpPool -from riak.transports.tcp import RiakPbcPool +from riak.transports.http import HttpPool +from riak.transports.tcp import TcpPool from riak.security import SecurityCreds from riak.util import lazy_property, bytes_to_str, str_to_bytes from six import string_types, PY2 @@ -99,8 +99,8 @@ def __init__(self, protocol='pbc', transport_options={}, nodes=None, self.protocol = protocol or 'pbc' self._resolver = None self._credentials = self._create_credentials(credentials) - self._http_pool = RiakHttpPool(self, **transport_options) - self._pb_pool = RiakPbcPool(self, **transport_options) + self._http_pool = HttpPool(self, **transport_options) + self._tcp_pool = TcpPool(self, **transport_options) if PY2: self._encoders = {'application/json': default_encoder, @@ -167,7 +167,7 @@ def _get_client_id(self): def _set_client_id(self, client_id): for http in self._http_pool: http.client_id = client_id - for pb in self._pb_pool: + for pb in self._tcp_pool: pb.client_id = client_id client_id = property(_get_client_id, _set_client_id, @@ -298,8 +298,8 @@ def close(self): """ if self._http_pool is not None: self._http_pool.clear() - if self._pb_pool is not None: - self._pb_pool.clear() + if self._tcp_pool is not None: + self._tcp_pool.clear() def _create_node(self, n): if isinstance(n, RiakNode): diff --git a/riak/client/transport.py b/riak/client/transport.py index 8c3eb92e..6aca7f24 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -32,7 +32,7 @@ class RiakClientTransport(object): # These will be set or redefined by the RiakClient initializer protocol = 'pbc' _http_pool = None - _pb_pool = None + _tcp_pool = None _locals = _client_locals() def _get_retry_count(self): @@ -146,8 +146,8 @@ def _choose_pool(self, protocol=None): protocol = self.protocol if protocol == 'http': pool = self._http_pool - elif protocol == 'pbc': - pool = self._pb_pool + elif protocol == 'tcp' or protocol == 'pbc': + pool = self._tcp_pool else: raise ValueError("invalid protocol %s" % protocol) return pool diff --git a/riak/transports/ttb/__init__.py b/riak/codecs/__init__.py similarity index 100% rename from riak/transports/ttb/__init__.py rename to riak/codecs/__init__.py diff --git a/riak/transports/http/codec.py b/riak/codecs/http.py similarity index 93% rename from riak/transports/http/codec.py rename to riak/codecs/http.py index 9f040220..1078dd43 100644 --- a/riak/transports/http/codec.py +++ b/riak/codecs/http.py @@ -1,26 +1,6 @@ -""" -Copyright 2012 Basho Technologies, Inc. -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - import re import csv + from six import PY2, PY3 from cgi import parse_header from email import message_from_string @@ -32,6 +12,7 @@ from riak.multidict import MultiDict from riak.transports.http.search import XMLSearchResult from riak.util import decode_index_value, bytes_to_str + if PY2: from urllib import unquote_plus else: @@ -42,7 +23,7 @@ MAX_LINK_HEADER_SIZE = 8192 - 8 -class RiakHttpCodec(object): +class HttpCodec(object): """ Methods for HTTP transport that marshals and unmarshals HTTP messages. diff --git a/riak/transports/tcp/codec.py b/riak/codecs/pbuf.py similarity index 99% rename from riak/transports/tcp/codec.py rename to riak/codecs/pbuf.py index 47932512..2d5231d8 100644 --- a/riak/transports/tcp/codec.py +++ b/riak/codecs/pbuf.py @@ -71,15 +71,15 @@ def _invert(d): } -class RiakPbcCodec(object): +class PbufCodec(object): """ - Protobuffs Encoding and decoding methods for RiakPbcTransport. + Protobuffs Encoding and decoding methods for TcpTransport. """ def __init__(self, **unused_args): if riak.pb is None: - raise NotImplementedError("this transport is not available") - super(RiakPbcCodec, self).__init__(**unused_args) + raise NotImplementedError("this codec is not available") + super(PbufCodec, self).__init__(**unused_args) def _unix_time_millis(self, dt): return unix_time_millis(dt) diff --git a/riak/transports/ttb/codec.py b/riak/codecs/ttb.py similarity index 97% rename from riak/transports/ttb/codec.py rename to riak/codecs/ttb.py index e0e7afdd..2ecb846d 100644 --- a/riak/transports/ttb/codec.py +++ b/riak/codecs/ttb.py @@ -21,13 +21,13 @@ tscell_empty = (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) -class RiakTtbCodec(object): +class TtbCodec(object): ''' - Erlang term-to-binary Encoding and decoding methods for RiakTtbTransport + Erlang term-to-binary Encoding and decoding methods for TcpTransport ''' def __init__(self, **unused_args): - super(RiakTtbCodec, self).__init__(**unused_args) + super(TtbCodec, self).__init__(**unused_args) def _encode_to_ts_cell_ttb(self, cell): if cell is None: diff --git a/riak/node.py b/riak/node.py index 332dc654..9a999ece 100644 --- a/riak/node.py +++ b/riak/node.py @@ -1,22 +1,6 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" import math import time + from threading import RLock diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index ffcd6ea0..19379d06 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -197,10 +197,10 @@ def test_pool_close(self): # Do something to add to the connection pool self.test_multiget_bucket() if self.client.protocol == 'pbc': - self.assertGreater(len(self.client._pb_pool.resources), 1) + self.assertGreater(len(self.client._tcp_pool.resources), 1) else: self.assertGreater(len(self.client._http_pool.resources), 1) # Now close them all up self.client.close() self.assertEqual(len(self.client._http_pool.resources), 0) - self.assertEqual(len(self.client._pb_pool.resources), 0) + self.assertEqual(len(self.client._tcp_pool.resources), 0) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index d2f76a9a..6f906726 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -5,17 +5,17 @@ import unittest import riak.pb.riak_ts_pb2 +from riak.pb.riak_ts_pb2 import TsColumnType from riak import RiakError +from riak.codecs.pbuf import PbufCodec from riak.table import Table +from riak.tests import RUN_TIMESERIES +from riak.tests.base import IntegrationTestBase from riak.ts_object import TsObject -from riak.transports.tcp.codec import RiakPbcCodec from riak.util import str_to_bytes, bytes_to_str, \ unix_time_millis, datetime_from_unix_time_millis, \ is_timeseries_supported -from riak.tests import RUN_TIMESERIES -from riak.tests.base import IntegrationTestBase -from riak.pb.riak_ts_pb2 import TsColumnType table_name = 'GeoCheckin' @@ -35,7 +35,7 @@ class TimeseriesUnitTests(unittest.TestCase): @classmethod def setUpClass(cls): - cls.c = RiakPbcCodec() + cls.c = PbufCodec() cls.ts0ms = unix_time_millis(ts0) if cls.ts0ms != ex0ms: raise AssertionError( @@ -151,7 +151,7 @@ def test_decode_data_from_query(self): r1c4.boolean_value = self.rows[1][4] tsobj = TsObject(None, self.table, [], []) - c = RiakPbcCodec() + c = PbufCodec() c._decode_timeseries(tqr, tsobj) self.assertEqual(len(self.rows), len(tsobj.rows)) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 47c5c39a..aae5f73e 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -9,7 +9,7 @@ from riak.client import RiakClient from riak.table import Table from riak.ts_object import TsObject -from riak.transports.ttb.codec import RiakTtbCodec +from riak.codecs.ttb import TtbCodec from riak.util import str_to_bytes, \ unix_time_millis, datetime_from_unix_time_millis, \ is_timeseries_supported @@ -29,13 +29,8 @@ str0 = 'ascii-0' str1 = 'ascii-1' -if six.PY2: - # https://docs.python.org/2/library/functions.html#unicode - bd0 = unicode('时间序列', 'utf-8') - bd1 = unicode('временные ряды', 'utf-8') -else: - bd0 = u'时间序列' - bd1 = u'временные ряды' +bd0 = six.text_type('时间序列') +bd1 = six.text_type('временные ряды') fiveMins = datetime.timedelta(0, 300) ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) @@ -45,7 +40,7 @@ @unittest.skipUnless(is_timeseries_supported(), "Timeseries not supported") class TimeseriesTtbUnitTests(unittest.TestCase): def setUp(self): - self.c = RiakTtbCodec() + self.c = TtbCodec() self.table = Table(None, table_name) def test_encode_data_for_get(self): diff --git a/riak/transports/feature_detect.py b/riak/transports/feature_detect.py index c73ba37d..8f5808ac 100644 --- a/riak/transports/feature_detect.py +++ b/riak/transports/feature_detect.py @@ -40,7 +40,7 @@ class 
FeatureDetection(object): should return the server's version as a string. :class:`FeatureDetection` is a parent class of - :class:`RiakTransport `. + :class:`Transport `. """ def _server_version(self): diff --git a/riak/transports/http/__init__.py b/riak/transports/http/__init__.py index d7e69c3d..69c7de8c 100644 --- a/riak/transports/http/__init__.py +++ b/riak/transports/http/__init__.py @@ -1,27 +1,11 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import socket import select + from six import PY2 from riak.security import SecurityError, USE_STDLIB_SSL from riak.transports.pool import Pool -from riak.transports.http.transport import RiakHttpTransport +from riak.transports.http.transport import HttpTransport + if USE_STDLIB_SSL: import ssl from riak.transports.security import configure_ssl_context @@ -29,6 +13,7 @@ import OpenSSL.SSL from riak.transports.security import RiakWrappedSocket,\ configure_pyopenssl_context + if PY2: from httplib import HTTPConnection, \ NotConnected, \ @@ -149,7 +134,7 @@ def connect(self): self.sock.context = ssl_ctx -class RiakHttpPool(Pool): +class HttpPool(Pool): """ A pool of HTTP(S) transport connections. """ @@ -160,14 +145,14 @@ def __init__(self, client, **options): if self.client._credentials: self.connection_class = RiakHTTPSConnection - super(RiakHttpPool, self).__init__() + super(HttpPool, self).__init__() def create_resource(self): node = self.client._choose_node() - return RiakHttpTransport(node=node, - client=self.client, - connection_class=self.connection_class, - **self.options) + return HttpTransport(node=node, + client=self.client, + connection_class=self.connection_class, + **self.options) def destroy_resource(self, transport): transport.close() diff --git a/riak/transports/http/connection.py b/riak/transports/http/connection.py index e1f570e0..87a5716d 100644 --- a/riak/transports/http/connection.py +++ b/riak/transports/http/connection.py @@ -1,33 +1,17 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" +import base64 from six import PY2 -import base64 from riak.util import str_to_bytes + if PY2: from httplib import NotConnected, HTTPConnection else: from http.client import NotConnected, HTTPConnection -class RiakHttpConnection(object): +class HttpConnection(object): """ - Connection and low-level request methods for RiakHttpTransport. + Connection and low-level request methods for HttpTransport. 
""" def _request(self, method, uri, headers={}, body='', stream=False): @@ -93,7 +77,7 @@ def close(self): except NotConnected: pass - # These are set by the RiakHttpTransport initializer + # These are set by the HttpTransport initializer _connection_class = HTTPConnection _node = None diff --git a/riak/transports/http/resources.py b/riak/transports/http/resources.py index c13925bc..2017f420 100644 --- a/riak/transports/http/resources.py +++ b/riak/transports/http/resources.py @@ -1,34 +1,18 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - -http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import re + from six import PY2 from riak import RiakError from riak.util import lazy_property, bytes_to_str + if PY2: from urllib import quote_plus, urlencode else: from urllib.parse import quote_plus, urlencode -class RiakHttpResources(object): +class HttpResources(object): """ - Methods for RiakHttpTransport related to URL generation, i.e. + Methods for HttpTransport related to URL generation, i.e. creating the proper paths. """ @@ -204,7 +188,7 @@ def index_term_regex(self): if self.riak_kv_wm_bucket_type is not None: return True else: - return super(RiakHttpResources, self).index_term_regex() + return super(HttpResources, self).index_term_regex() # Resource root paths @lazy_property diff --git a/riak/transports/http/stream.py b/riak/transports/http/stream.py index edb1c818..b5ec00d7 100644 --- a/riak/transports/http/stream.py +++ b/riak/transports/http/stream.py @@ -1,23 +1,6 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import json import re + from cgi import parse_header from email import message_from_string from riak.util import decode_index_value @@ -26,7 +9,7 @@ from six import PY2 -class RiakHttpStream(object): +class HttpStream(object): """ Base class for HTTP streaming iterators. 
""" @@ -66,7 +49,7 @@ def close(self): self.resource.release() -class RiakHttpJsonStream(RiakHttpStream): +class HttpJsonStream(HttpStream): _json_field = None def next(self): @@ -92,26 +75,26 @@ def __next__(self): return self.next() -class RiakHttpKeyStream(RiakHttpJsonStream): +class HttpKeyStream(HttpJsonStream): """ Streaming iterator for list-keys over HTTP """ _json_field = u'keys' -class RiakHttpBucketStream(RiakHttpJsonStream): +class HttpBucketStream(HttpJsonStream): """ Streaming iterator for list-buckets over HTTP """ _json_field = u'buckets' -class RiakHttpMultipartStream(RiakHttpStream): +class HttpMultipartStream(HttpStream): """ Streaming iterator for multipart messages over HTTP """ def __init__(self, response): - super(RiakHttpMultipartStream, self).__init__(response) + super(HttpMultipartStream, self).__init__(response) ctypehdr = response.getheader('content-type') _, params = parse_header(ctypehdr) self.boundary_re = re.compile('\r?\n--%s(?:--)?\r?\n' % @@ -154,13 +137,13 @@ def read_until_boundary(self): self._read() -class RiakHttpMapReduceStream(RiakHttpMultipartStream): +class HttpMapReduceStream(HttpMultipartStream): """ Streaming iterator for MapReduce over HTTP """ def next(self): - message = super(RiakHttpMapReduceStream, self).next() + message = super(HttpMapReduceStream, self).next() payload = json.loads(message.get_payload()) return payload['phase'], payload['data'] @@ -169,18 +152,18 @@ def __next__(self): return self.next() -class RiakHttpIndexStream(RiakHttpMultipartStream): +class HttpIndexStream(HttpMultipartStream): """ Streaming iterator for secondary indexes over HTTP """ def __init__(self, response, index, return_terms): - super(RiakHttpIndexStream, self).__init__(response) + super(HttpIndexStream, self).__init__(response) self.index = index self.return_terms = return_terms def next(self): - message = super(RiakHttpIndexStream, self).next() + message = super(HttpIndexStream, self).next() payload = json.loads(message.get_payload()) if u'error' in payload: raise RiakError(payload[u'error']) diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index c139b3ea..10238d32 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -1,24 +1,3 @@ -""" -Copyright 2015 Basho Technologies, Inc. -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - try: import simplejson as json except ImportError: @@ -26,28 +5,30 @@ from six import PY2 from xml.dom.minidom import Document -from riak.transports.transport import RiakTransport -from riak.transports.http.resources import RiakHttpResources -from riak.transports.http.connection import RiakHttpConnection -from riak.transports.http.codec import RiakHttpCodec -from riak.transports.http.stream import ( - RiakHttpKeyStream, - RiakHttpMapReduceStream, - RiakHttpBucketStream, - RiakHttpIndexStream) + from riak import RiakError +from riak.codecs.http import HttpCodec +from riak.transports.transport import Transport +from riak.transports.http.resources import HttpResources +from riak.transports.http.connection import HttpConnection +from riak.transports.http.stream import ( + HttpKeyStream, + HttpMapReduceStream, + HttpBucketStream, + HttpIndexStream) from riak.security import SecurityError from riak.util import decode_index_value, bytes_to_str, str_to_long + if PY2: from httplib import HTTPConnection else: from http.client import HTTPConnection -class RiakHttpTransport(RiakHttpConnection, RiakHttpResources, RiakHttpCodec, - RiakTransport): +class HttpTransport(Transport, + HttpConnection, HttpResources, HttpCodec): """ - The RiakHttpTransport object holds information necessary to + The HttpTransport object holds information necessary to connect to Riak via HTTP. """ @@ -59,7 +40,7 @@ def __init__(self, node=None, """ Construct a new HTTP connection to Riak. """ - super(RiakHttpTransport, self).__init__() + super(HttpTransport, self).__init__() self._client = client self._node = node @@ -219,7 +200,7 @@ def stream_keys(self, bucket, timeout=None): status, headers, response = self._request('GET', url, stream=True) if status == 200: - return RiakHttpKeyStream(response) + return HttpKeyStream(response) else: raise RiakError('Error listing keys.') @@ -252,7 +233,7 @@ def stream_buckets(self, bucket_type=None, timeout=None): status, headers, response = self._request('GET', url, stream=True) if status == 200: - return RiakHttpBucketStream(response) + return HttpBucketStream(response) else: raise RiakError('Error listing buckets.') @@ -371,7 +352,7 @@ def stream_mapred(self, inputs, query, timeout=None): content, stream=True) if status == 200: - return RiakHttpMapReduceStream(response) + return HttpMapReduceStream(response) else: raise RiakError( 'Error running MapReduce operation. Headers: %s Body: %s' % @@ -441,7 +422,7 @@ def stream_index(self, bucket, index, startkey, endkey=None, status, headers, response = self._request('GET', url, stream=True) if status == 200: - return RiakHttpIndexStream(response, index, return_terms) + return HttpIndexStream(response, index, return_terms) else: raise RiakError('Error streaming secondary index.') diff --git a/riak/transports/pool.py b/riak/transports/pool.py index 4b21fd8e..d0a9ee7f 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -1,24 +1,8 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 +from __future__ import print_function -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations -under the License. -""" +import threading -from __future__ import print_function from contextlib import contextmanager -import threading # This file is a rough port of the Innertube Ruby library diff --git a/riak/transports/tcp/__init__.py b/riak/transports/tcp/__init__.py index 8ccdaa10..312f9194 100644 --- a/riak/transports/tcp/__init__.py +++ b/riak/transports/tcp/__init__.py @@ -2,26 +2,26 @@ import socket from riak.transports.pool import Pool -from riak.transports.tcp.transport import RiakPbcTransport +from riak.transports.tcp.transport import TcpTransport -class RiakPbcPool(Pool): +class TcpPool(Pool): """ - A resource pool of PBC transports. + A resource pool of TCP transports. """ def __init__(self, client, **options): - super(RiakPbcPool, self).__init__() + super(TcpPool, self).__init__() self._client = client self._options = options def create_resource(self): node = self._client._choose_node() - return RiakPbcTransport(node=node, - client=self._client, - **self._options) + return TcpTransport(node=node, + client=self._client, + **self._options) - def destroy_resource(self, pbc): - pbc.close() + def destroy_resource(self, tcp): + tcp.close() # These are a specific set of socket errors # that could be raised on send/recv that indicate diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index dfaee06b..8468a72d 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -18,9 +18,9 @@ from riak.transports.security import configure_ssl_context -class RiakPbcConnection(object): +class TcpConnection(object): """ - Connection-related methods for RiakPbcTransport. + Connection-related methods for TcpTransport. 
""" def __init__(self): @@ -91,7 +91,7 @@ def _enable_ttb(self): if self._ttb_enabled: return True else: - logging.debug("pbc/connection enabling TTB") + logging.debug("tcp/connection enabling TTB") req = riak.pb.riak_pb2.RpbToggleEncodingReq() req.use_native = True msg_code, _ = self._non_connect_request( @@ -100,7 +100,7 @@ def _enable_ttb(self): riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP) if msg_code == riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP: self._ttb_enabled = True - logging.debug("pbc/connection TTB IS ENABLED") + logging.debug("tcp/connection TTB IS ENABLED") return True else: return False @@ -201,7 +201,7 @@ def _recv_msg(self, expect=None, is_ttb=False): if expect and msg_code != expect: raise RiakError("unexpected protocol buffer message code: %d, %r" % (msg_code, msg)) - # logging.debug("pbc/connection received msg_code %d msg %s", + # logging.debug("tcp/connection received msg_code %d msg %s", # msg_code, msg) return msg_code, msg @@ -272,6 +272,6 @@ def _parse_msg(self, code, packet, is_ttb=False): pbo.ParseFromString(packet) return pbo - # These are set in the RiakPbcTransport initializer + # These are set in the TcpTransport initializer _address = None _timeout = None diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index 38357bbd..986059d8 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -3,13 +3,13 @@ from riak.util import decode_index_value, bytes_to_str from riak.client.index_page import CONTINUATION -from riak.transports.tcp.codec import RiakPbcCodec +from riak.codecs.ttb import TtbCodec from six import PY2 -class RiakPbcStream(object): +class PbufStream(object): """ - Used internally by RiakPbcTransport to implement streaming + Used internally by TcpTransport to implement streaming operations. Implements the iterator interface. """ @@ -62,15 +62,15 @@ def close(self): self.resource.release() -class RiakPbcKeyStream(RiakPbcStream): +class PbufKeyStream(PbufStream): """ - Used internally by RiakPbcTransport to implement key-list streams. + Used internally by TcpTransport to implement key-list streams. """ _expect = riak.pb.messages.MSG_CODE_LIST_KEYS_RESP def next(self): - response = super(RiakPbcKeyStream, self).next() + response = super(PbufKeyStream, self).next() if response.done and len(response.keys) is 0: raise StopIteration @@ -82,16 +82,16 @@ def __next__(self): return self.next() -class RiakPbcMapredStream(RiakPbcStream): +class PbufMapredStream(PbufStream): """ - Used internally by RiakPbcTransport to implement MapReduce + Used internally by TcpTransport to implement MapReduce streams. """ _expect = riak.pb.messages.MSG_CODE_MAP_RED_RESP def next(self): - response = super(RiakPbcMapredStream, self).next() + response = super(PbufMapredStream, self).next() if response.done and not response.HasField('response'): raise StopIteration @@ -103,15 +103,15 @@ def __next__(self): return self.next() -class RiakPbcBucketStream(RiakPbcStream): +class PbufBucketStream(PbufStream): """ - Used internally by RiakPbcTransport to implement key-list streams. + Used internally by TcpTransport to implement key-list streams. 
""" _expect = riak.pb.messages.MSG_CODE_LIST_BUCKETS_RESP def next(self): - response = super(RiakPbcBucketStream, self).next() + response = super(PbufBucketStream, self).next() if response.done and len(response.buckets) is 0: raise StopIteration @@ -123,21 +123,21 @@ def __next__(self): return self.next() -class RiakPbcIndexStream(RiakPbcStream): +class PbufIndexStream(PbufStream): """ - Used internally by RiakPbcTransport to implement Secondary Index + Used internally by TcpTransport to implement Secondary Index streams. """ _expect = riak.pb.messages.MSG_CODE_INDEX_RESP def __init__(self, transport, index, return_terms=False): - super(RiakPbcIndexStream, self).__init__(transport) + super(PbufIndexStream, self).__init__(transport) self.index = index self.return_terms = return_terms def next(self): - response = super(RiakPbcIndexStream, self).next() + response = super(PbufIndexStream, self).next() if response.done and not (response.keys or response.results or @@ -161,15 +161,15 @@ def __next__(self): return self.next() -class RiakPbcTsKeyStream(RiakPbcStream, RiakPbcCodec): +class PbufTsKeyStream(PbufStream, TtbCodec): """ - Used internally by RiakPbcTransport to implement key-list streams. + Used internally by TcpTransport to implement TS key-list streams. """ _expect = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_RESP def next(self): - response = super(RiakPbcTsKeyStream, self).next() + response = super(PbufTsKeyStream, self).next() if response.done and len(response.keys) is 0: raise StopIteration diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 01eddfa8..f07ed0b2 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -4,27 +4,27 @@ import riak.pb.riak_ts_pb2 from riak import RiakError -from riak.transports.transport import RiakTransport +from riak.codecs.pbuf import PbufCodec +from riak.codecs.ttb import TtbCodec +from riak.transports.transport import Transport from riak.riak_object import VClock from riak.ts_object import TsObject from riak.util import decode_index_value, str_to_bytes, bytes_to_str -from riak.transports.tcp.connection import RiakPbcConnection -from riak.transports.tcp.stream import (RiakPbcKeyStream, - RiakPbcMapredStream, - RiakPbcBucketStream, - RiakPbcIndexStream, - RiakPbcTsKeyStream) -from riak.transports.tcp.codec import RiakPbcCodec -from riak.transports.ttb.codec import RiakTtbCodec +from riak.transports.tcp.connection import TcpConnection +from riak.transports.tcp.stream import (PbufKeyStream, + PbufMapredStream, + PbufBucketStream, + PbufIndexStream, + PbufTsKeyStream) from six import PY2, PY3 -class RiakPbcTransport(RiakTransport, RiakPbcConnection, - RiakPbcCodec, RiakTtbCodec): +class TcpTransport(Transport, TcpConnection, + PbufCodec, TtbCodec): """ - The RiakPbcTransport object holds a connection to the protocol - buffers interface on the riak server. + The TcpTransport object holds a connection to the TCP + socket on the Riak server. """ def __init__(self, @@ -32,10 +32,7 @@ def __init__(self, client=None, timeout=None, **transport_options): - """ - Construct a new RiakPbcTransport object. 
- """ - super(RiakPbcTransport, self).__init__() + super(TcpTransport, self).__init__() self._client = client self._node = node @@ -269,7 +266,7 @@ def ts_stream_keys(self, table, timeout=None): self._send_msg(riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ, req) - return RiakPbcTsKeyStream(self) + return PbufTsKeyStream(self) def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): @@ -331,7 +328,7 @@ def stream_keys(self, bucket, timeout=None): self._send_msg(riak.pb.messages.MSG_CODE_LIST_KEYS_REQ, req) - return RiakPbcKeyStream(self) + return PbufKeyStream(self) def get_buckets(self, bucket_type=None, timeout=None): """ @@ -367,7 +364,7 @@ def stream_buckets(self, bucket_type=None, timeout=None): self._send_msg(riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req) - return RiakPbcBucketStream(self) + return PbufBucketStream(self) def get_bucket_props(self, bucket): """ @@ -480,7 +477,7 @@ def stream_mapred(self, inputs, query, timeout=None): self._send_msg(riak.pb.messages.MSG_CODE_MAP_RED_REQ, req) - return RiakPbcMapredStream(self) + return PbufMapredStream(self) def get_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, @@ -532,7 +529,7 @@ def stream_index(self, bucket, index, startkey, endkey=None, self._send_msg(riak.pb.messages.MSG_CODE_INDEX_REQ, req) - return RiakPbcIndexStream(self, index, return_terms) + return PbufIndexStream(self, index, return_terms) def create_search_index(self, index, schema=None, n_val=None, timeout=None): diff --git a/riak/transports/transport.py b/riak/transports/transport.py index 4f33168c..e30f2d5e 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -1,33 +1,15 @@ -""" -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" import base64 import random import threading import os import json import platform + from six import PY2 from riak.transports.feature_detect import FeatureDetection -class RiakTransport(FeatureDetection): +class Transport(FeatureDetection): """ Class to encapsulate transport details and methods. All protocol transports are subclasses of this class. 
From 0bcea331160d9ffb8eafcd63c1cde4ce86d7faff Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 29 Mar 2016 13:06:47 -0700 Subject: [PATCH 141/324] fix test on PY2 --- riak/tests/test_timeseries_ttb.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index aae5f73e..64ff0888 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -29,8 +29,8 @@ str0 = 'ascii-0' str1 = 'ascii-1' -bd0 = six.text_type('时间序列') -bd1 = six.text_type('временные ряды') +bd0 = six.u('时间序列') +bd1 = six.u('временные ряды') fiveMins = datetime.timedelta(0, 300) ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) From dd52c945094b3f98d990468538efd4411812d40f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Mar 2016 08:29:32 -0700 Subject: [PATCH 142/324] Rewrite TCP connection class to be much more efficient. --- .gitignore | 10 +----- riak/benchmarks/multiget.py | 39 ++++++++++++++--------- riak/transports/tcp/connection.py | 52 +++++++++++++++---------------- 3 files changed, 52 insertions(+), 49 deletions(-) diff --git a/.gitignore b/.gitignore index 68a831bf..e20b3037 100644 --- a/.gitignore +++ b/.gitignore @@ -1,25 +1,17 @@ README.rst - *.pyc .python-version __pycache__/ - .tox/ - -.tox/ - docs/_build - .*.swp .coverage - riak-*/ py-build/ dist/ - riak.egg-info/ *.egg .eggs/ - #*# *~ +*.ps1 diff --git a/riak/benchmarks/multiget.py b/riak/benchmarks/multiget.py index 505069a4..22c01c8b 100644 --- a/riak/benchmarks/multiget.py +++ b/riak/benchmarks/multiget.py @@ -1,18 +1,31 @@ -from riak import RiakClient -from multiprocessing import cpu_count import binascii import os + import riak.benchmark as benchmark -import riak.client.multiget as mget -client = RiakClient(protocol='pbc') +from riak import RiakClient +from multiprocessing import cpu_count + +nodes = [ + ('riak-test', 8098, 8087), + # ('riak-test', 10018, 10017), + # ('riak-test', 10028, 10027), + # ('riak-test', 10038, 10037), + # ('riak-test', 10048, 10047), + # ('riak-test', 10058, 10057), +] +client = RiakClient( + nodes=nodes, + protocol='pbc', + multiget_pool_size=128) + bkeys = [('default', 'multiget', str(key)) for key in range(10000)] data = binascii.b2a_hex(os.urandom(1024)) print("Benchmarking multiget:") print(" CPUs: {0}".format(cpu_count())) -print(" Threads: {0}".format(mget.POOL_SIZE)) +print(" Threads: {0}".format(client._multiget_pool._size)) print(" Keys: {0}".format(len(bkeys))) print() @@ -23,18 +36,16 @@ content_type='text/plain' ).store() for b in benchmark.measure_with_rehearsal(): - client.protocol = 'http' - with b.report('http seq'): - for _, bucket, key in bkeys: - client.bucket(bucket).get(key) - - with b.report('http multi'): - mget.multiget(client, bkeys) + # client.protocol = 'http' + # with b.report('http seq'): + # for _, bucket, key in bkeys: + # client.bucket(bucket).get(key) + # with b.report('http multi'): + # client.multiget(bkeys) client.protocol = 'pbc' with b.report('pbc seq'): for _, bucket, key in bkeys: client.bucket(bucket).get(key) - with b.report('pbc multi'): - mget.multiget(client, bkeys) + client.multiget(bkeys) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 8468a72d..dfaf3e55 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -185,16 +185,17 @@ def _ssl_handshake(self): raise SecurityError(e) def _recv_msg(self, expect=None, is_ttb=False): - self._recv_pkt() - msg_code, = struct.unpack("B", 
self._inbuf[:1]) + msgbuf = self._recv_pkt() + mv = memoryview(msgbuf) + msg_code, = struct.unpack("B", mv[0:1]) if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: - err = self._parse_msg(msg_code, self._inbuf[1:], is_ttb) + err = self._parse_msg(msg_code, mv[1:].tobytes(), is_ttb) if err is None: raise RiakError('no error provided!') else: raise RiakError(bytes_to_str(err.errmsg)) elif msg_code in riak.pb.messages.MESSAGE_CLASSES: - msg = self._parse_msg(msg_code, self._inbuf[1:], is_ttb) + msg = self._parse_msg(msg_code, mv[1:].tobytes(), is_ttb) else: raise Exception("unknown msg code %s" % msg_code) @@ -206,31 +207,30 @@ def _recv_msg(self, expect=None, is_ttb=False): return msg_code, msg def _recv_pkt(self): - nmsglen = self._socket.recv(4) - while len(nmsglen) < 4: - x = self._socket.recv(4 - len(nmsglen)) - if not x: - break - nmsglen += x - if len(nmsglen) != 4: + # TODO FUTURE re-use buffer + msglen_buf = bytearray(4) + recv_len = self._socket.recv_into(msglen_buf) + if recv_len != 4: raise RiakError( "Socket returned short packet length %d - expected 4" - % len(nmsglen)) - msglen, = struct.unpack('!i', nmsglen) - self._inbuf_len = msglen - if PY2: - self._inbuf = '' - else: - self._inbuf = bytes() - while len(self._inbuf) < msglen: - want_len = min(8192, msglen - len(self._inbuf)) - recv_buf = self._socket.recv(want_len) - if not recv_buf: - break - self._inbuf += recv_buf - if len(self._inbuf) != self._inbuf_len: + % recv_len) + # NB: msg length is an unsigned int + msglen, = struct.unpack('!I', msglen_buf) + # TODO FUTURE re-use buffer + # http://stackoverflow.com/a/15964489 + msgbuf = bytearray(msglen) + view = memoryview(msgbuf) + nread = 0 + toread = msglen + while toread: + nbytes = self._socket.recv_into(view, toread) + view = view[nbytes:] # slicing views is cheap + toread -= nbytes + nread += nbytes + if nread != msglen: raise RiakError("Socket returned short packet %d - expected %d" - % (len(self._inbuf), self._inbuf_len)) + % (nread, msglen)) + return msgbuf def _connect(self): if not self._socket: From 50b9eb7f7d5b69b7b09662c188568cb0d8a58a71 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Mar 2016 08:36:33 -0700 Subject: [PATCH 143/324] multiget benchmark improvement --- riak/benchmarks/multiget.py | 51 +++++++++++++++++++++++++++++++++++++ riak/client/multiget.py | 40 ----------------------------- 2 files changed, 51 insertions(+), 40 deletions(-) create mode 100644 riak/benchmarks/multiget.py diff --git a/riak/benchmarks/multiget.py b/riak/benchmarks/multiget.py new file mode 100644 index 00000000..22c01c8b --- /dev/null +++ b/riak/benchmarks/multiget.py @@ -0,0 +1,51 @@ +import binascii +import os + +import riak.benchmark as benchmark + +from riak import RiakClient +from multiprocessing import cpu_count + +nodes = [ + ('riak-test', 8098, 8087), + # ('riak-test', 10018, 10017), + # ('riak-test', 10028, 10027), + # ('riak-test', 10038, 10037), + # ('riak-test', 10048, 10047), + # ('riak-test', 10058, 10057), +] +client = RiakClient( + nodes=nodes, + protocol='pbc', + multiget_pool_size=128) + +bkeys = [('default', 'multiget', str(key)) for key in range(10000)] + +data = binascii.b2a_hex(os.urandom(1024)) + +print("Benchmarking multiget:") +print(" CPUs: {0}".format(cpu_count())) +print(" Threads: {0}".format(client._multiget_pool._size)) +print(" Keys: {0}".format(len(bkeys))) +print() + +with benchmark.measure() as b: + with b.report('populate'): + for _, bucket, key in bkeys: + client.bucket(bucket).new(key, encoded_data=data, + 
content_type='text/plain' + ).store() +for b in benchmark.measure_with_rehearsal(): + # client.protocol = 'http' + # with b.report('http seq'): + # for _, bucket, key in bkeys: + # client.bucket(bucket).get(key) + # with b.report('http multi'): + # client.multiget(bkeys) + + client.protocol = 'pbc' + with b.report('pbc seq'): + for _, bucket, key in bkeys: + client.bucket(bucket).get(key) + with b.report('pbc multi'): + client.multiget(bkeys) diff --git a/riak/client/multiget.py b/riak/client/multiget.py index 20d02801..64a0e3b4 100644 --- a/riak/client/multiget.py +++ b/riak/client/multiget.py @@ -201,43 +201,3 @@ def multiget(client, keys, **options): outq.task_done() return results - -if __name__ == '__main__': - # Run a benchmark! - from riak import RiakClient - import riak.benchmark as benchmark - client = RiakClient(protocol='pbc') - bkeys = [('default', 'multiget', str(key)) for key in range(10000)] - - data = None - with open(__file__) as f: - data = f.read() - - print("Benchmarking multiget:") - print(" CPUs: {0}".format(cpu_count())) - print(" Threads: {0}".format(POOL_SIZE)) - print(" Keys: {0}".format(len(bkeys))) - print() - - with benchmark.measure() as b: - with b.report('populate'): - for _, bucket, key in bkeys: - client.bucket(bucket).new(key, encoded_data=data, - content_type='text/plain' - ).store() - for b in benchmark.measure_with_rehearsal(): - client.protocol = 'http' - with b.report('http seq'): - for _, bucket, key in bkeys: - client.bucket(bucket).get(key) - - with b.report('http multi'): - multiget(client, bkeys) - - client.protocol = 'pbc' - with b.report('pbc seq'): - for _, bucket, key in bkeys: - client.bucket(bucket).get(key) - - with b.report('pbc multi'): - multiget(client, bkeys) From 460b0151ffea6625510ed43baacae0952f9ebd00 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Mar 2016 12:49:58 -0700 Subject: [PATCH 144/324] Moving code to PbufCodec --- riak/codecs/pbuf.py | 354 ++++++++++++++++- riak/codecs/ttb.py | 16 +- riak/transports/tcp/connection.py | 101 ++--- riak/transports/tcp/stream.py | 6 +- riak/transports/tcp/transport.py | 611 ++++++++++-------------------- riak/transports/transport.py | 3 +- 6 files changed, 595 insertions(+), 496 deletions(-) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 2d5231d8..4c746f08 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -7,6 +7,7 @@ from riak import RiakError from riak.content import RiakContent +from riak.riak_object import VClock from riak.util import decode_index_value, str_to_bytes, bytes_to_str, \ unix_time_millis, datetime_from_unix_time_millis from riak.multidict import MultiDict @@ -76,10 +77,15 @@ class PbufCodec(object): Protobuffs Encoding and decoding methods for TcpTransport. 
""" - def __init__(self, **unused_args): + def __init__(self, + client_timeouts=False, quorum_controls=False, + tombstone_vclocks=False, bucket_types=False): if riak.pb is None: raise NotImplementedError("this codec is not available") - super(PbufCodec, self).__init__(**unused_args) + self._client_timeouts = client_timeouts + self._quorum_controls = quorum_controls + self._tombstone_vclocks = tombstone_vclocks + self._bucket_types = bucket_types def _unix_time_millis(self, dt): return unix_time_millis(dt) @@ -305,7 +311,6 @@ def _decode_bucket_props(self, msg): :rtype dict """ props = {} - for prop in NORMAL_PROPS: if msg.HasField(prop): props[prop] = getattr(msg, prop) @@ -322,7 +327,6 @@ def _decode_bucket_props(self, msg): props[prop] = self._decode_quorum(getattr(msg, prop)) if msg.HasField('repl'): props['repl'] = REPL_TO_PY[msg.repl] - return props def _decode_modfun(self, modfun): @@ -411,7 +415,8 @@ def _encode_hook(self, hook, msg): def _encode_index_req(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, - continuation=None, timeout=None, term_regex=None): + continuation=None, timeout=None, term_regex=None, + streaming=False): """ Encodes a secondary index request into the protobuf message. @@ -434,6 +439,8 @@ def _encode_index_req(self, bucket, index, startkey, endkey=None, :type timeout: int :param term_regex: a regular expression used to filter index terms :type term_regex: string + :param streaming: encode as streaming request + :type streaming: bool :rtype riak.pb.riak_kv_pb2.RpbIndexReq """ req = riak.pb.riak_kv_pb2.RpbIndexReq( @@ -460,7 +467,8 @@ def _encode_index_req(self, bucket, index, startkey, endkey=None, req.timeout = timeout if term_regex: req.term_regex = str_to_bytes(term_regex) - return req + req.stream = streaming + return req.SerializeToString() def _decode_search_index(self, index): """ @@ -480,7 +488,7 @@ def _decode_search_index(self, index): def _add_bucket_type(self, req, bucket_type): if bucket_type and not bucket_type.is_default(): - if not self.bucket_types(): + if not self._bucket_types: raise NotImplementedError( 'Server does not support bucket-types') req.type = str_to_bytes(bucket_type.name) @@ -645,22 +653,30 @@ def _encode_to_ts_cell(self, cell, ts_cell): raise RiakError("can't serialize type '{}', value '{}'" .format(t, cell)) - def _encode_timeseries_keyreq(self, table, key, req): + def _encode_timeseries_keyreq(self, table, key, is_delete=False): key_vals = None if isinstance(key, list): key_vals = key else: raise ValueError("key must be a list") + if is_delete: + req = riak.pb.riak_ts_pb2.TsDelReq() + else: + req = riak.pb.riak_ts_pb2.TsGetReq() + req.table = str_to_bytes(table.name) for cell in key_vals: ts_cell = req.key.add() self._encode_to_ts_cell(cell, ts_cell) + return req.SerializeToString() - def _encode_timeseries_listkeysreq(self, table, req, timeout=None): + def _encode_timeseries_listkeysreq(self, table, timeout=None): + req = riak.pb.riak_ts_pb2.TsListKeysReq() req.table = str_to_bytes(table.name) - if timeout: + if self._client_timeouts and timeout: req.timeout = timeout + return req.SerializeToString() def _encode_timeseries_put(self, tsobj, req): """ @@ -672,6 +688,7 @@ def _encode_timeseries_put(self, tsobj, req): :param req: the protobuf message to fill :type req: riak.pb.riak_ts_pb2.TsPutReq """ + req = riak.pb.riak_ts_pb2.TsPutReq() req.table = str_to_bytes(tsobj.table.name) if tsobj.columns: @@ -687,6 +704,15 @@ def _encode_timeseries_put(self, tsobj, req): self._encode_to_ts_cell(cell, tsc) 
else: raise RiakError("TsObject requires a list of rows") + return req.SerializeToString() + + def _encode_timeseries_query(self, table, query, interpolations=None): + req = riak.pb.riak_ts_pb2.TsQueryReq() + q = query + if '{table}' in q: + q = q.format(table=table.name) + req.query.base = str_to_bytes(q) + return req.SerializeToString() def _decode_timeseries(self, resp, tsobj): """ @@ -769,3 +795,311 @@ def _decode_preflist(self, item): 'node': bytes_to_str(item.node), 'primary': item. primary} return result + + def _encode_get(self, robj, r=None, pr=None, timeout=None, + basic_quorum=None, notfound_ok=None): + bucket = robj.bucket + req = riak.pb.riak_kv_pb2.RpbGetReq() + if r: + req.r = self._encode_quorum(r) + if self._quorum_controls: + if pr: + req.pr = self._encode_quorum(pr) + if basic_quorum is not None: + req.basic_quorum = basic_quorum + if notfound_ok is not None: + req.notfound_ok = notfound_ok + if self._client_timeouts and timeout: + req.timeout = timeout + if self._tombstone_vclocks: + req.deletedvclock = True + req.bucket = str_to_bytes(bucket.name) + self._add_bucket_type(req, bucket.bucket_type) + req.key = str_to_bytes(robj.key) + return req.SerializeToString() + + def _encode_put(self, robj, w=None, dw=None, pw=None, + return_body=True, if_none_match=False, timeout=None): + bucket = robj.bucket + req = riak.pb.riak_kv_pb2.RpbPutReq() + if w: + req.w = self._encode_quorum(w) + if dw: + req.dw = self._encode_quorum(dw) + if self._quorum_controls and pw: + req.pw = self._encode_quorum(pw) + if return_body: + req.return_body = 1 + if if_none_match: + req.if_none_match = 1 + if self._client_timeouts and timeout: + req.timeout = timeout + req.bucket = str_to_bytes(bucket.name) + self._add_bucket_type(req, bucket.bucket_type) + if robj.key: + req.key = str_to_bytes(robj.key) + if robj.vclock: + req.vclock = robj.vclock.encode('binary') + self._encode_content(robj, req.content) + return req.SerializeToString() + + def _decode_get(self, robj, resp): + if resp is not None: + if resp.HasField('vclock'): + robj.vclock = VClock(resp.vclock, 'binary') + # We should do this even if there are no contents, i.e. 
+ # the object is tombstoned + self._decode_contents(resp.content, robj) + else: + # "not found" returns an empty message, + # so let's make sure to clear the siblings + robj.siblings = [] + return robj + + def _decode_put(self, robj, resp): + if resp is not None: + if resp.HasField('key'): + robj.key = bytes_to_str(resp.key) + if resp.HasField("vclock"): + robj.vclock = VClock(resp.vclock, 'binary') + if resp.content: + self._decode_contents(resp.content, robj) + elif not robj.key: + raise RiakError("missing response object") + return robj + + def _encode_delete(self, robj, rw=None, r=None, + w=None, dw=None, pr=None, pw=None, + timeout=None): + req = riak.pb.riak_kv_pb2.RpbDelReq() + if rw: + req.rw = self._encode_quorum(rw) + if r: + req.r = self._encode_quorum(r) + if w: + req.w = self._encode_quorum(w) + if dw: + req.dw = self._encode_quorum(dw) + + if self._quorum_controls: + if pr: + req.pr = self._encode_quorum(pr) + if pw: + req.pw = self._encode_quorum(pw) + + if self._client_timeouts and timeout: + req.timeout = timeout + + use_vclocks = (self._tombstone_vclocks and + hasattr(robj, 'vclock') and robj.vclock) + if use_vclocks: + req.vclock = robj.vclock.encode('binary') + + bucket = robj.bucket + req.bucket = str_to_bytes(bucket.name) + self._add_bucket_type(req, bucket.bucket_type) + req.key = str_to_bytes(robj.key) + return req.SerializeToString() + + def _encode_stream_keys(self, bucket, timeout=None): + req = riak.pb.riak_kv_pb2.RpbListKeysReq() + req.bucket = str_to_bytes(bucket.name) + if self._client_timeouts and timeout: + req.timeout = timeout + self._add_bucket_type(req, bucket.bucket_type) + return req.SerializeToString() + + def _decode_get_keys(self, stream): + keys = [] + for keylist in stream: + for key in keylist: + keys.append(bytes_to_str(key)) + return keys + + def _decode_get_server_info(self, resp): + return {'node': bytes_to_str(resp.node), + 'server_version': bytes_to_str(resp.server_version)} + + def _decode_get_client_id(self, resp): + return bytes_to_str(resp.client_id) + + def _encode_set_client_id(self, client_id): + req = riak.pb.riak_kv_pb2.RpbSetClientIdReq() + req.client_id = str_to_bytes(client_id) + return req.SerializeToString() + + def _encode_get_buckets(self, bucket_type, timeout): + req = riak.pb.riak_kv_pb2.RpbListBucketsReq() + self._add_bucket_type(req, bucket_type) + if self._client_timeouts and timeout: + req.timeout = timeout + return req.SerializeToString() + + def _encode_stream_buckets(self, bucket_type, timeout): + req = riak.pb.riak_kv_pb2.RpbListBucketsReq() + req.stream = True + self._add_bucket_type(req, bucket_type) + # Bucket streaming landed in the same release as timeouts, so + # we don't need to check the capability. 
+ if timeout: + req.timeout = timeout + return req.SerializeToString() + + def _encode_get_bucket_props(self, bucket): + req = riak.pb.riak_pb2.RpbGetBucketReq() + req.bucket = str_to_bytes(bucket.name) + self._add_bucket_type(req, bucket.bucket_type) + return req.SerializeToString() + + def _encode_set_bucket_props(self, bucket, props): + req = riak.pb.riak_pb2.RpbSetBucketReq() + req.bucket = str_to_bytes(bucket.name) + self._add_bucket_type(req, bucket.bucket_type) + self._encode_bucket_props(props, req) + return req.SerializeToString() + + def _encode_clear_bucket_props(self, bucket): + req = riak.pb.riak_pb2.RpbResetBucketReq() + req.bucket = str_to_bytes(bucket.name) + self._add_bucket_type(req, bucket.bucket_type) + return req.SerializeToString() + + def _encode_get_bucket_type_props(self, bucket_type): + req = riak.pb.riak_pb2.RpbGetBucketTypeReq() + req.type = str_to_bytes(bucket_type.name) + return req.SerializeToString() + + def _encode_set_bucket_type_props(self, bucket_type, props): + req = riak.pb.riak_pb2.RpbSetBucketTypeReq() + req.type = str_to_bytes(bucket_type.name) + self._encode_bucket_props(props, req) + return req.SerializeToString() + + def _encode_stream_mapred(content): + req = riak.pb.riak_kv_pb2.RpbMapRedReq() + req.request = str_to_bytes(content) + req.content_type = str_to_bytes("application/json") + return req.SerializeToString() + + def _encode_create_search_index(self, index, schema=None, + n_val=None, timeout=None): + index = str_to_bytes(index) + idx = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index) + if schema: + idx.schema = str_to_bytes(schema) + if n_val: + idx.n_val = n_val + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx) + if timeout is not None: + req.timeout = timeout + return req.SerializeToString() + + def _encode_get_search_index(self, index): + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq( + name=str_to_bytes(index)) + return req.SerializeToString() + + def _encode_list_search_indexes(self): + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() + return req.SerializeToString() + + def _encode_delete_search_indexes(self): + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( + name=str_to_bytes(index)) + return req.SerializeToString() + + def _encode_create_search_schema(self, schema, content): + scma = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema( + name=str_to_bytes(schema), + content=str_to_bytes(content)) + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq( + schema=scma) + return req.SerializeToString() + + def _encode_get_search_schema(self, schema): + req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq( + name=str_to_bytes(schema)) + return req.SerializeToString() + + def _decode_get_search_schema(self, resp): + result = {} + result['name'] = bytes_to_str(resp.schema.name) + result['content'] = bytes_to_str(resp.schema.content) + return result + + def _encode_search(self, index, query, **params): + req = riak.pb.riak_search_pb2.RpbSearchQueryReq( + index=str_to_bytes(index), + q=str_to_bytes(query)) + self._encode_search_query(req, params) + return req.SerializeToString() + + def _decode_search(resp): + result = {} + if resp.HasField('max_score'): + result['max_score'] = resp.max_score + if resp.HasField('num_found'): + result['num_found'] = resp.num_found + result['docs'] = [self._decode_search_doc(doc) for doc in resp.docs] + return result + + def _encode_get_counter(self, bucket, key, **params): + req = riak.pb.riak_kv_pb2.RpbCounterGetReq() + req.bucket = 
str_to_bytes(bucket.name) + req.key = str_to_bytes(key) + if params.get('r') is not None: + req.r = self._encode_quorum(params['r']) + if params.get('pr') is not None: + req.pr = self._encode_quorum(params['pr']) + if params.get('basic_quorum') is not None: + req.basic_quorum = params['basic_quorum'] + if params.get('notfound_ok') is not None: + req.notfound_ok = params['notfound_ok'] + return req.SerializeToString() + + def _encode_update_counter(self, bucket, key, value, **params): + req = riak.pb.riak_kv_pb2.RpbCounterUpdateReq() + req.bucket = str_to_bytes(bucket.name) + req.key = str_to_bytes(key) + req.amount = value + if params.get('w') is not None: + req.w = self._encode_quorum(params['w']) + if params.get('dw') is not None: + req.dw = self._encode_quorum(params['dw']) + if params.get('pw') is not None: + req.pw = self._encode_quorum(params['pw']) + if params.get('returnvalue') is not None: + req.returnvalue = params['returnvalue'] + return req.SerializeToString() + + def _encode_fetch_datatype(self, bucket, key, **options): + req = riak.pb.riak_dt_pb2.DtFetchReq() + req.type = str_to_bytes(bucket.bucket_type.name) + req.bucket = str_to_bytes(bucket.name) + req.key = str_to_bytes(key) + self._encode_dt_options(req, options) + return req.SerializeToString() + + def _encode_update_datatype(self, datatype, **options): + op = datatype.to_op() + type_name = datatype.type_name + if not op: + raise ValueError("No operation to send on datatype {!r}". + format(datatype)) + req = riak.pb.riak_dt_pb2.DtUpdateReq() + req.bucket = str_to_bytes(datatype.bucket.name) + req.type = str_to_bytes(datatype.bucket.bucket_type.name) + if datatype.key: + req.key = str_to_bytes(datatype.key) + if datatype._context: + req.context = datatype._context + self._encode_dt_options(req, options) + self._encode_dt_op(type_name, req, op) + return req.SerializeToString() + + def _encode_get_preflist(self, bucket, key): + req = riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq() + req.bucket = str_to_bytes(bucket.name) + req.key = str_to_bytes(key) + req.type = str_to_bytes(bucket.bucket_type.name) + return req.SerializeToString() diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 2ecb846d..8d795675 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -29,7 +29,7 @@ class TtbCodec(object): def __init__(self, **unused_args): super(TtbCodec, self).__init__(**unused_args) - def _encode_to_ts_cell_ttb(self, cell): + def _encode_to_ts_cell(self, cell): if cell is None: return tscell_empty else: @@ -53,17 +53,17 @@ def _encode_to_ts_cell_ttb(self, cell): raise RiakError("can't serialize type '{}', value '{}'" .format(t, cell)) - def _encode_timeseries_keyreq_ttb(self, table, key): + def _encode_timeseries_keyreq(self, table, key): key_vals = None if isinstance(key, list): key_vals = key else: raise ValueError("key must be a list") req = tsgetreq_a, table.name, \ - [self._encode_to_ts_cell_ttb(k) for k in key_vals], udef_a + [self._encode_to_ts_cell(k) for k in key_vals], udef_a return encode(req) - def _encode_timeseries_put_ttb(self, tsobj): + def _encode_timeseries_put(self, tsobj): ''' Returns an Erlang-TTB encoded tuple with the appropriate data and metadata from a TsObject. 
@@ -80,7 +80,7 @@ def _encode_timeseries_put_ttb(self, tsobj): for row in tsobj.rows: req_r = [] for cell in row: - req_r.append(self._encode_to_ts_cell_ttb(cell)) + req_r.append(self._encode_to_ts_cell(cell)) req_t = (tsrow_a, req_r) req_rows.append(req_t) req = tsputreq_a, tsobj.table.name, udef_a, req_rows @@ -88,7 +88,7 @@ def _encode_timeseries_put_ttb(self, tsobj): else: raise RiakError("TsObject requires a list of rows") - def _decode_timeseries_ttb(self, resp_ttb, tsobj): + def _decode_timeseries(self, resp_ttb, tsobj): """ Fills an TsObject with the appropriate data and metadata from a TTB-encoded TsGetResp / TsQueryResp. @@ -113,13 +113,13 @@ def _decode_timeseries_ttb(self, resp_ttb, tsobj): resp_rows = resp_ttb[2] for row_ttb in resp_rows: tsobj.rows.append( - self._decode_timeseries_row_ttb(row_ttb, None)) + self._decode_timeseries_row(row_ttb, None)) # TODO # elif resp_a == rpberrorresp_a: else: raise RiakError("Unknown TTB response type: {}".format(resp_a)) - def _decode_timeseries_row_ttb(self, tsrow_ttb, tscols=None): + def _decode_timeseries_row(self, tsrow_ttb, tscols=None): """ Decodes a TTB-encoded TsRow into a list diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index dfaf3e55..6ba885a1 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -5,11 +5,12 @@ import riak.pb.messages import erlastic -from riak.security import SecurityError, USE_STDLIB_SSL from riak import RiakError -from riak.util import bytes_to_str, str_to_bytes +from riak.security import SecurityError, USE_STDLIB_SSL +from riak.util import str_to_bytes from six import PY2 + if not USE_STDLIB_SSL: from OpenSSL.SSL import Connection from riak.transports.security import configure_pyopenssl_context @@ -22,47 +23,37 @@ class TcpConnection(object): """ Connection-related methods for TcpTransport. """ - def __init__(self): self._ttb_enabled = False - def _encode_msg(self, msg_code, msg=None, is_ttb=False): - if msg is None: + def _encode_msg(self, msg_code, data=None): + if data is None: return struct.pack("!iB", 1, msg_code) - - if is_ttb: - data = msg - else: - data = msg.SerializeToString() - - datalen = len(data) - hdr = struct.pack("!iB", 1 + datalen, msg_code) + hdr = struct.pack("!iB", 1 + len(data), msg_code) return hdr + data - def _request(self, msg_code, msg=None, expect=None, is_ttb=False): - self._send_msg(msg_code, msg, is_ttb) - return self._recv_msg(expect, is_ttb) + def _send_recv(self, msg_code, data=None, expect=None): + self._send_msg(msg_code, data) + return self._recv_msg(expect) - def _non_connect_request(self, msg_code, msg=None, expect=None): + def _non_connect_send_recv(self, msg_code, data=None): """ - Similar to self._request, but doesn't try to initiate a connection, + Similar to self._send_recv, but doesn't try to initiate a connection, thus preventing an infinite loop. """ - self._non_connect_send_msg(msg_code, msg) + self._non_connect_send_msg(msg_code, data) return self._recv_msg(expect) - def _non_connect_send_msg(self, msg_code, msg, is_ttb=False): + def _non_connect_send_msg(self, msg_code, data): """ Similar to self._send, but doesn't try to initiate a connection, thus preventing an infinite loop. 
""" - self._socket.sendall(self._encode_msg(msg_code, msg, is_ttb)) + self._socket.sendall(self._encode_msg(msg_code, data)) - def _send_msg(self, msg_code, msg, is_ttb=False): + def _send_msg(self, msg_code, data): self._connect() - if is_ttb and not self._enable_ttb(): - raise RiakError('could not switch to TTB encoding!') - self._non_connect_send_msg(msg_code, msg, is_ttb) + self._non_connect_send_msg(msg_code, data) def _init_security(self): """ @@ -80,7 +71,7 @@ def _starttls(self): Exchange a STARTTLS message with Riak to initiate secure communications return True is Riak responds with a STARTTLS response, False otherwise """ - msg_code, _ = self._non_connect_request( + msg_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_START_TLS) if msg_code == riak.pb.messages.MSG_CODE_START_TLS: return True @@ -94,7 +85,7 @@ def _enable_ttb(self): logging.debug("tcp/connection enabling TTB") req = riak.pb.riak_pb2.RpbToggleEncodingReq() req.use_native = True - msg_code, _ = self._non_connect_request( + msg_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_REQ, req, riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP) @@ -118,7 +109,7 @@ def _auth(self): if not password: password = '' req.password = str_to_bytes(password) - msg_code, _ = self._non_connect_request( + msg_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_AUTH_REQ, req, riak.pb.messages.MSG_CODE_AUTH_RESP) @@ -176,7 +167,6 @@ def _ssl_handshake(self): # ssl handshake successful ssl_socket.do_handshake() self._socket = ssl_socket - return True except ssl.SSLError as e: raise SecurityError(e) @@ -184,38 +174,21 @@ def _ssl_handshake(self): # fail if *any* exceptions are thrown during SSL handshake raise SecurityError(e) - def _recv_msg(self, expect=None, is_ttb=False): + def _recv_msg(self, expect=None): msgbuf = self._recv_pkt() mv = memoryview(msgbuf) msg_code, = struct.unpack("B", mv[0:1]) - if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: - err = self._parse_msg(msg_code, mv[1:].tobytes(), is_ttb) - if err is None: - raise RiakError('no error provided!') - else: - raise RiakError(bytes_to_str(err.errmsg)) - elif msg_code in riak.pb.messages.MESSAGE_CLASSES: - msg = self._parse_msg(msg_code, mv[1:].tobytes(), is_ttb) - else: - raise Exception("unknown msg code %s" % msg_code) - - if expect and msg_code != expect: - raise RiakError("unexpected protocol buffer message code: %d, %r" - % (msg_code, msg)) - # logging.debug("tcp/connection received msg_code %d msg %s", - # msg_code, msg) - return msg_code, msg + data = mv[1:].tobytes() + return (msg_code, data) def _recv_pkt(self): # TODO FUTURE re-use buffer - msglen_buf = bytearray(4) - recv_len = self._socket.recv_into(msglen_buf) - if recv_len != 4: - raise RiakError( - "Socket returned short packet length %d - expected 4" - % recv_len) + msglen_buf = self._recv(4) # NB: msg length is an unsigned int msglen, = struct.unpack('!I', msglen_buf) + return self._recv(msglen) + + def _recv(self, msglen): # TODO FUTURE re-use buffer # http://stackoverflow.com/a/15964489 msgbuf = bytearray(msglen) @@ -250,28 +223,6 @@ def close(self): self._socket.close() del self._socket - def _parse_msg(self, code, packet, is_ttb=False): - if is_ttb: - if code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ - code != riak.pb.messages.MSG_CODE_TS_PUT_RESP: - raise RiakError("TTB can't parse code: %d" % code) - if len(packet) > 0: - return erlastic.decode(packet) - else: - return None - else: - try: - pbclass = riak.pb.messages.MESSAGE_CLASSES[code] 
- except KeyError: - pbclass = None - - if pbclass is None: - return None - - pbo = pbclass() - pbo.ParseFromString(packet) - return pbo - # These are set in the TcpTransport initializer _address = None _timeout = None diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index 986059d8..2288c607 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -1,4 +1,5 @@ import json + import riak.pb.messages from riak.util import decode_index_value, bytes_to_str @@ -28,7 +29,10 @@ def next(self): raise StopIteration try: - msg_code, resp = self.transport._recv_msg(expect=self._expect) + expected_code = self._expect + msg_code, data = self.transport._recv_msg(expect=expected_code) + self.transport._maybe_riak_error(msg_code, data) + resp = self.transport._parse_msg(expected_code, data, is_ttb=False) except: self.finished = True raise diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index f07ed0b2..e73e82ac 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -1,15 +1,16 @@ +# TODO RTS-842 codecs should return msg codes too +import six import riak.pb.messages -import riak.pb.riak_pb2 -import riak.pb.riak_kv_pb2 -import riak.pb.riak_ts_pb2 from riak import RiakError from riak.codecs.pbuf import PbufCodec from riak.codecs.ttb import TtbCodec from riak.transports.transport import Transport -from riak.riak_object import VClock from riak.ts_object import TsObject -from riak.util import decode_index_value, str_to_bytes, bytes_to_str + +# TODO RTS-842 ideally these would not be needed +from riak.util import decode_index_value, bytes_to_str + from riak.transports.tcp.connection import TcpConnection from riak.transports.tcp.stream import (PbufKeyStream, PbufMapredStream, @@ -17,16 +18,12 @@ PbufIndexStream, PbufTsKeyStream) -from six import PY2, PY3 - -class TcpTransport(Transport, TcpConnection, - PbufCodec, TtbCodec): +class TcpTransport(Transport, TcpConnection): """ The TcpTransport object holds a connection to the TCP socket on the Riak server. 
""" - def __init__(self, node=None, client=None, @@ -39,18 +36,41 @@ def __init__(self, self._address = (node.host, node.pb_port) self._timeout = timeout self._socket = None - self._use_ttb = transport_options.get('use_ttb', False) + self._pbuf_c = None + self._ttb_c = None + self._use_ttb = transport_options.get('use_ttb', True) + + def _get_pbuf_codec(self): + if not self._pbuf_c: + self._pbuf_c = PbufCodec( + self.client_timeouts(), self.quorum_controls(), + self.tombstone_vclocks(), self.bucket_types()) + return self._pbuf_c + + def _get_codec(self, ttb_supported=False): + if ttb_supported: + if self._use_ttb: + if not self._enable_ttb(): + raise RiakError('could not switch to TTB encoding!') + if not self._ttb_c: + self._ttb_c = TtbCodec() + codec = self._ttb_c + else: + codec = self._get_pbuf_codec() + else: + codec = self._get_pbuf_codec() + return codec # FeatureDetection API def _server_version(self): - return bytes_to_str(self.get_server_info()['server_version']) + server_info = self.get_server_info() + return server_info['server_version'] def ping(self): """ Ping the remote server """ - - msg_code, msg = self._request(riak.pb.messages.MSG_CODE_PING_REQ) + msg_code, _ = self._request(riak.pb.messages.MSG_CODE_PING_REQ) if msg_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: @@ -60,26 +80,27 @@ def get_server_info(self): """ Get information about the server """ + # NB: can't do it this way due to recursion + # codec = self._get_codec(ttb_supported=False) + codec = PbufCodec() msg_code, resp = self._request( riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, expect=riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) - return {'node': bytes_to_str(resp.node), - 'server_version': bytes_to_str(resp.server_version)} + return codec._decode_get_server_info(resp) def _get_client_id(self): + codec = self._get_codec(ttb_supported=False) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_GET_CLIENT_ID_REQ, expect=riak.pb.messages.MSG_CODE_GET_CLIENT_ID_RESP) - return bytes_to_str(resp.client_id) + return codec._decode_get_client_id(resp) def _set_client_id(self, client_id): - req = riak.pb.riak_kv_pb2.RpbSetClientIdReq() - req.client_id = str_to_bytes(client_id) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_set_client_id(client_id) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ, req, + riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ, data, riak.pb.messages.MSG_CODE_SET_CLIENT_ID_RESP) - self._client_id = client_id client_id = property(_get_client_id, _set_client_id, @@ -90,165 +111,74 @@ def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, """ Serialize get request and deserialize response """ - bucket = robj.bucket - - req = riak.pb.riak_kv_pb2.RpbGetReq() - if r: - req.r = self._encode_quorum(r) - if self.quorum_controls(): - if pr: - req.pr = self._encode_quorum(pr) - if basic_quorum is not None: - req.basic_quorum = basic_quorum - if notfound_ok is not None: - req.notfound_ok = notfound_ok - if self.client_timeouts() and timeout: - req.timeout = timeout - if self.tombstone_vclocks(): - req.deletedvclock = True - - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - - req.key = str_to_bytes(robj.key) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_get(robj, r, pr, + timeout, basic_quorum, notfound_ok) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_REQ, req, + riak.pb.messages.MSG_CODE_GET_REQ, data, 
riak.pb.messages.MSG_CODE_GET_RESP) - - if resp is not None: - if resp.HasField('vclock'): - robj.vclock = VClock(resp.vclock, 'binary') - # We should do this even if there are no contents, i.e. - # the object is tombstoned - self._decode_contents(resp.content, robj) - else: - # "not found" returns an empty message, - # so let's make sure to clear the siblings - robj.siblings = [] - - return robj + return codec._decode_get(robj, resp) def put(self, robj, w=None, dw=None, pw=None, return_body=True, if_none_match=False, timeout=None): - bucket = robj.bucket - - req = riak.pb.riak_kv_pb2.RpbPutReq() - if w: - req.w = self._encode_quorum(w) - if dw: - req.dw = self._encode_quorum(dw) - if self.quorum_controls() and pw: - req.pw = self._encode_quorum(pw) - - if return_body: - req.return_body = 1 - if if_none_match: - req.if_none_match = 1 - if self.client_timeouts() and timeout: - req.timeout = timeout - - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - - if robj.key: - req.key = str_to_bytes(robj.key) - if robj.vclock: - req.vclock = robj.vclock.encode('binary') - - self._encode_content(robj, req.content) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_put(robj, w, dw, pw, return_body, + if_none_match, timeout) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_PUT_REQ, req, + riak.pb.messages.MSG_CODE_PUT_REQ, data, riak.pb.messages.MSG_CODE_PUT_RESP) - - if resp is not None: - if resp.HasField('key'): - robj.key = bytes_to_str(resp.key) - if resp.HasField("vclock"): - robj.vclock = VClock(resp.vclock, 'binary') - if resp.content: - self._decode_contents(resp.content, robj) - elif not robj.key: - raise RiakError("missing response object") - - return robj + return codec._decode_put(robj, resp) def ts_describe(self, table): query = 'DESCRIBE {table}'.format(table=table.name) return self.ts_query(table, query) def ts_get(self, table, key): - ts_get_resp = None - if self._use_ttb: - req = self._encode_timeseries_keyreq_ttb(table, key) - else: - req = riak.pb.riak_ts_pb2.TsGetReq() - self._encode_timeseries_keyreq(table, key, req) - + codec = self._get_codec(ttb_supported=True) + data = codec._encode_timeseries_keyreq(table, key) msg_code, ts_get_resp = self._request( - riak.pb.messages.MSG_CODE_TS_GET_REQ, req, - riak.pb.messages.MSG_CODE_TS_GET_RESP, - self._use_ttb) - + riak.pb.messages.MSG_CODE_TS_GET_REQ, data, + riak.pb.messages.MSG_CODE_TS_GET_RESP) tsobj = TsObject(self._client, table, [], None) - if self._use_ttb: - self._decode_timeseries_ttb(ts_get_resp, tsobj) - else: - self._decode_timeseries(ts_get_resp, tsobj) + codec._decode_timeseries(ts_get_resp, tsobj) return tsobj def ts_put(self, tsobj): - if self._use_ttb: - req = self._encode_timeseries_put_ttb(tsobj) - else: - req = riak.pb.riak_ts_pb2.TsPutReq() - self._encode_timeseries_put(tsobj, req) - + codec = self._get_codec(ttb_supported=True) + # TODO RTS-842 codecs should return msg codes too + data = codec._encode_timeseries_put(tsobj) # logging.debug("pbc/transport ts_put _use_ttb: '%s'", # self._use_ttb) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_TS_PUT_REQ, req, + riak.pb.messages.MSG_CODE_TS_PUT_REQ, data, riak.pb.messages.MSG_CODE_TS_PUT_RESP, self._use_ttb) - if self._use_ttb and \ resp is None and \ msg_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: return True - if resp is not None: return True else: raise RiakError("missing response object") def ts_delete(self, table, key): - req = riak.pb.riak_ts_pb2.TsDelReq() - 
self._encode_timeseries_keyreq(table, key, req) - + codec = self._get_codec(ttb_supported=True) + data = codec._encode_timeseries_keyreq(table, key, is_delete=True) msg_code, ts_del_resp = self._request( - riak.pb.messages.MSG_CODE_TS_DEL_REQ, req, + riak.pb.messages.MSG_CODE_TS_DEL_REQ, data, riak.pb.messages.MSG_CODE_TS_DEL_RESP) - if ts_del_resp is not None: return True else: raise RiakError("missing response object") def ts_query(self, table, query, interpolations=None): - req = riak.pb.riak_ts_pb2.TsQueryReq() - - q = query - if '{table}' in q: - q = q.format(table=table.name) - - req.query.base = str_to_bytes(q) - + codec = self._get_codec(ttb_supported=True) + data = codec._encode_timeseries_query(table, query, interpolations) msg_code, ts_query_resp = self._request( - riak.pb.messages.MSG_CODE_TS_QUERY_REQ, req, + riak.pb.messages.MSG_CODE_TS_QUERY_REQ, data, riak.pb.messages.MSG_CODE_TS_QUERY_RESP) - tsobj = TsObject(self._client, table, [], []) self._decode_timeseries(ts_query_resp, tsobj) return tsobj @@ -258,49 +188,17 @@ def ts_stream_keys(self, table, timeout=None): Streams keys from a timeseries table, returning an iterator that yields lists of keys. """ - req = riak.pb.riak_ts_pb2.TsListKeysReq() - t = None - if self.client_timeouts() and timeout: - t = timeout - self._encode_timeseries_listkeysreq(table, req, t) - - self._send_msg(riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ, req) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_timeseries_listkeysreq(table, t) + self._send_msg(riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ, data) return PbufTsKeyStream(self) def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): - req = riak.pb.riak_kv_pb2.RpbDelReq() - if rw: - req.rw = self._encode_quorum(rw) - if r: - req.r = self._encode_quorum(r) - if w: - req.w = self._encode_quorum(w) - if dw: - req.dw = self._encode_quorum(dw) - - if self.quorum_controls(): - if pr: - req.pr = self._encode_quorum(pr) - if pw: - req.pw = self._encode_quorum(pw) - - if self.client_timeouts() and timeout: - req.timeout = timeout - - use_vclocks = (self.tombstone_vclocks() and - hasattr(robj, 'vclock') and robj.vclock) - if use_vclocks: - req.vclock = robj.vclock.encode('binary') - - bucket = robj.bucket - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - req.key = str_to_bytes(robj.key) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_delete(robj, rw, r, w, dw, pr, pw, timeout) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DEL_REQ, req, + riak.pb.messages.MSG_CODE_DEL_REQ, data, riak.pb.messages.MSG_CODE_DEL_RESP) return self @@ -308,40 +206,28 @@ def get_keys(self, bucket, timeout=None): """ Lists all keys within a bucket. """ - keys = [] - for keylist in self.stream_keys(bucket, timeout=timeout): - for key in keylist: - keys.append(bytes_to_str(key)) - - return keys + codec = self._get_codec(ttb_supported=False) + stream = self.stream_keys(bucket, timeout=timeout) + return codec._decode_get_keys(stream) def stream_keys(self, bucket, timeout=None): """ Streams keys from a bucket, returning an iterator that yields lists of keys. 
""" - req = riak.pb.riak_kv_pb2.RpbListKeysReq() - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - if self.client_timeouts() and timeout: - req.timeout = timeout - - self._send_msg(riak.pb.messages.MSG_CODE_LIST_KEYS_REQ, req) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_stream_keys(bucket, timeout) + self._send_msg(riak.pb.messages.MSG_CODE_LIST_KEYS_REQ, data) return PbufKeyStream(self) def get_buckets(self, bucket_type=None, timeout=None): """ Serialize bucket listing request and deserialize response """ - req = riak.pb.riak_kv_pb2.RpbListBucketsReq() - self._add_bucket_type(req, bucket_type) - - if self.client_timeouts() and timeout: - req.timeout = timeout - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_get_buckets(bucket_type, timeout) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req, + riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, data, riak.pb.messages.MSG_CODE_LIST_BUCKETS_RESP) return resp.buckets @@ -349,55 +235,38 @@ def stream_buckets(self, bucket_type=None, timeout=None): """ Stream list of buckets through an iterator """ - if not self.bucket_stream(): raise NotImplementedError('Streaming list-buckets is not ' 'supported') - - req = riak.pb.riak_kv_pb2.RpbListBucketsReq() - req.stream = True - self._add_bucket_type(req, bucket_type) - # Bucket streaming landed in the same release as timeouts, so - # we don't need to check the capability. - if timeout: - req.timeout = timeout - - self._send_msg(riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_stream_buckets(bucket_type, timeout) + self._send_msg(riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, data) return PbufBucketStream(self) def get_bucket_props(self, bucket): """ Serialize bucket property request and deserialize response """ - req = riak.pb.riak_pb2.RpbGetBucketReq() - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_get_bucket_props(bucket) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_REQ, req, + riak.pb.messages.MSG_CODE_GET_BUCKET_REQ, data, riak.pb.messages.MSG_CODE_GET_BUCKET_RESP) - - return self._decode_bucket_props(resp.props) + return codec._decode_bucket_props(resp.props) def set_bucket_props(self, bucket, props): """ Serialize set bucket property request and deserialize response """ - req = riak.pb.riak_pb2.RpbSetBucketReq() - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - if not self.pb_all_bucket_props(): for key in props: if key not in ('n_val', 'allow_mult'): raise NotImplementedError('Server only supports n_val and ' 'allow_mult properties over PBC') - - self._encode_bucket_props(props, req) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_set_bucket_props(bucket, props) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_BUCKET_REQ, req, + riak.pb.messages.MSG_CODE_SET_BUCKET_REQ, data, riak.pb.messages.MSG_CODE_SET_BUCKET_RESP) return True @@ -407,12 +276,10 @@ def clear_bucket_props(self, bucket): """ if not self.pb_clear_bucket_props(): return False - - req = riak.pb.riak_pb2.RpbResetBucketReq() - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) + codec = self._get_codec(ttb_supported=False) + data = codec._encode_clear_bucket_props(bucket) self._request( - 
riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ, req, + riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ, data, riak.pb.messages.MSG_CODE_RESET_BUCKET_RESP) return True @@ -421,31 +288,23 @@ def get_bucket_type_props(self, bucket_type): Fetch bucket-type properties """ self._check_bucket_types(bucket_type) - - req = riak.pb.riak_pb2.RpbGetBucketTypeReq() - req.type = str_to_bytes(bucket_type.name) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_get_bucket_type_props(bucket_type) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ, req, + riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ, data, riak.pb.messages.MSG_CODE_GET_BUCKET_RESP) - - return self._decode_bucket_props(resp.props) + return codec._decode_bucket_props(resp.props) def set_bucket_type_props(self, bucket_type, props): """ Set bucket-type properties """ self._check_bucket_types(bucket_type) - - req = riak.pb.riak_pb2.RpbSetBucketTypeReq() - req.type = str_to_bytes(bucket_type.name) - - self._encode_bucket_props(props, req) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_set_bucket_type_props(bucket_type, props) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ, req, + riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ, data, riak.pb.messages.MSG_CODE_SET_BUCKET_RESP) - return True def mapred(self, inputs, query, timeout=None): @@ -457,7 +316,6 @@ def mapred(self, inputs, query, timeout=None): result[phase] += content else: result[phase] = content - # If a single result - return the same as the HTTP interface does # otherwise return all the phase information if not len(result): @@ -470,18 +328,15 @@ def mapred(self, inputs, query, timeout=None): def stream_mapred(self, inputs, query, timeout=None): # Construct the job, optionally set the timeout... 
content = self._construct_mapred_json(inputs, query, timeout) - - req = riak.pb.riak_kv_pb2.RpbMapRedReq() - req.request = str_to_bytes(content) - req.content_type = str_to_bytes("application/json") - - self._send_msg(riak.pb.messages.MSG_CODE_MAP_RED_REQ, req) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_stream_mapred(content) + self._send_msg(riak.pb.messages.MSG_CODE_MAP_RED_REQ, data) return PbufMapredStream(self) def get_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, timeout=None, term_regex=None): + # TODO RTS-842 NUKE THIS if not self.pb_indexes(): return self._get_index_mapred_emu(bucket, index, startkey, endkey) @@ -489,12 +344,13 @@ def get_index(self, bucket, index, startkey, endkey=None, raise NotImplementedError("Secondary index term_regex is not " "supported") - req = self._encode_index_req(bucket, index, startkey, endkey, + codec = self._get_codec(ttb_supported=False) + data = codec._encode_index_req(bucket, index, startkey, endkey, return_terms, max_results, continuation, - timeout, term_regex) + timeout, term_regex, streaming=False) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_INDEX_REQ, req, + riak.pb.messages.MSG_CODE_INDEX_REQ, data, riak.pb.messages.MSG_CODE_INDEX_RESP) if return_terms and resp.results: @@ -503,7 +359,7 @@ def get_index(self, bucket, index, startkey, endkey=None, for pair in resp.results] else: results = resp.keys[:] - if PY3: + if six.PY3: results = [bytes_to_str(key) for key in resp.keys] if max_results is not None and resp.HasField('continuation'): @@ -517,18 +373,14 @@ def stream_index(self, bucket, index, startkey, endkey=None, if not self.stream_indexes(): raise NotImplementedError("Secondary index streaming is not " "supported") - if term_regex and not self.index_term_regex(): raise NotImplementedError("Secondary index term_regex is not " "supported") - - req = self._encode_index_req(bucket, index, startkey, endkey, + codec = self._get_codec(ttb_supported=False) + data = codec._encode_index_req(bucket, index, startkey, endkey, return_terms, max_results, continuation, - timeout, term_regex) - req.stream = True - - self._send_msg(riak.pb.messages.MSG_CODE_INDEX_REQ, req) - + timeout, term_regex, streaming=True) + self._send_msg(riak.pb.messages.MSG_CODE_INDEX_REQ, data) return PbufIndexStream(self, index, return_terms) def create_search_index(self, index, schema=None, n_val=None, @@ -536,34 +388,24 @@ def create_search_index(self, index, schema=None, n_val=None, if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - index = str_to_bytes(index) - idx = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index) - if schema: - idx.schema = str_to_bytes(schema) - if n_val: - idx.n_val = n_val - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx) - if timeout is not None: - req.timeout = timeout - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_create_search_index(index, schema, n_val, timeout) self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, data, riak.pb.messages.MSG_CODE_PUT_RESP) - return True def get_search_index(self, index): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq( - name=str_to_bytes(index)) - + codec = 
self._get_codec(ttb_supported=False) + data = codec._encode_get_search_index(index) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, data, riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) if len(resp.index) > 0: - return self._decode_search_index(resp.index[0]) + return codec._decode_search_index(resp.index[0]) else: raise RiakError('notfound') @@ -571,106 +413,71 @@ def list_search_indexes(self): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_list_search_indexes() msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, data, riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) - - return [self._decode_search_index(index) for index in resp.index] + return [codec._decode_search_index(index) for index in resp.index] def delete_search_index(self, index): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( - name=str_to_bytes(index)) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_delete_search_index(index) self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, data, riak.pb.messages.MSG_CODE_DEL_RESP) - return True def create_search_schema(self, schema, content): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - scma = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema( - name=str_to_bytes(schema), - content=str_to_bytes(content)) - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq( - schema=scma) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_create_search_schema(schema, content) self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, data, riak.pb.messages.MSG_CODE_PUT_RESP) - return True def get_search_schema(self, schema): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq( - name=str_to_bytes(schema)) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_get_search_schema(schema) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, req, + riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, data, riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP) - - result = {} - result['name'] = bytes_to_str(resp.schema.name) - result['content'] = bytes_to_str(resp.schema.content) - return result + return codec._decode_get_search_schema(resp) def search(self, index, query, **params): + # TODO RTS-842 NUKE THIS if not self.pb_search(): return self._search_mapred_emu(index, query) - - if PY2 and isinstance(query, unicode): # noqa + # TODO RTS-842 six.u() instead? 
+ if six.PY2 and isinstance(query, unicode): # noqa query = query.encode('utf8') - - req = riak.pb.riak_search_pb2.RpbSearchQueryReq( - index=str_to_bytes(index), - q=str_to_bytes(query)) - self._encode_search_query(req, params) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_search(index, query, params) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ, req, + riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ, data, riak.pb.messages.MSG_CODE_SEARCH_QUERY_RESP) - - result = {} - if resp.HasField('max_score'): - result['max_score'] = resp.max_score - if resp.HasField('num_found'): - result['num_found'] = resp.num_found - result['docs'] = [self._decode_search_doc(doc) for doc in resp.docs] - return result + return codec._decode_search(resp) def get_counter(self, bucket, key, **params): if not bucket.bucket_type.is_default(): raise NotImplementedError("Counters are not " "supported with bucket-types, " "use datatypes instead.") - if not self.counters(): raise NotImplementedError("Counters are not supported") - - req = riak.pb.riak_kv_pb2.RpbCounterGetReq() - req.bucket = str_to_bytes(bucket.name) - req.key = str_to_bytes(key) - if params.get('r') is not None: - req.r = self._encode_quorum(params['r']) - if params.get('pr') is not None: - req.pr = self._encode_quorum(params['pr']) - if params.get('basic_quorum') is not None: - req.basic_quorum = params['basic_quorum'] - if params.get('notfound_ok') is not None: - req.notfound_ok = params['notfound_ok'] - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_get_counter(bucket, key, params) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_COUNTER_GET_REQ, req, + riak.pb.messages.MSG_CODE_COUNTER_GET_REQ, data, riak.pb.messages.MSG_CODE_COUNTER_GET_RESP) if resp.HasField('value'): return resp.value @@ -682,25 +489,12 @@ def update_counter(self, bucket, key, value, **params): raise NotImplementedError("Counters are not " "supported with bucket-types, " "use datatypes instead.") - if not self.counters(): raise NotImplementedError("Counters are not supported") - - req = riak.pb.riak_kv_pb2.RpbCounterUpdateReq() - req.bucket = str_to_bytes(bucket.name) - req.key = str_to_bytes(key) - req.amount = value - if params.get('w') is not None: - req.w = self._encode_quorum(params['w']) - if params.get('dw') is not None: - req.dw = self._encode_quorum(params['dw']) - if params.get('pw') is not None: - req.pw = self._encode_quorum(params['pw']) - if params.get('returnvalue') is not None: - req.returnvalue = params['returnvalue'] - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_update_counter(bucket, key, value, params) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ, req, + riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ, data, riak.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP) if resp.HasField('value'): return resp.value @@ -708,65 +502,35 @@ def update_counter(self, bucket, key, value, **params): return True def fetch_datatype(self, bucket, key, **options): - if bucket.bucket_type.is_default(): raise NotImplementedError("Datatypes cannot be used in the default" " bucket-type.") - if not self.datatypes(): raise NotImplementedError("Datatypes are not supported.") - - req = riak.pb.riak_dt_pb2.DtFetchReq() - req.type = str_to_bytes(bucket.bucket_type.name) - req.bucket = str_to_bytes(bucket.name) - req.key = str_to_bytes(key) - self._encode_dt_options(req, options) - + codec = self._get_codec(ttb_supported=False) + data = 
codec._encode_fetch_datatype(bucket, key, options) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DT_FETCH_REQ, req, + riak.pb.messages.MSG_CODE_DT_FETCH_REQ, data, riak.pb.messages.MSG_CODE_DT_FETCH_RESP) - - return self._decode_dt_fetch(resp) + return codec._decode_dt_fetch(resp) def update_datatype(self, datatype, **options): - if datatype.bucket.bucket_type.is_default(): raise NotImplementedError("Datatypes cannot be used in the default" " bucket-type.") - if not self.datatypes(): raise NotImplementedError("Datatypes are not supported.") - - op = datatype.to_op() - type_name = datatype.type_name - if not op: - raise ValueError("No operation to send on datatype {!r}". - format(datatype)) - - req = riak.pb.riak_dt_pb2.DtUpdateReq() - req.bucket = str_to_bytes(datatype.bucket.name) - req.type = str_to_bytes(datatype.bucket.bucket_type.name) - - if datatype.key: - req.key = str_to_bytes(datatype.key) - if datatype._context: - req.context = datatype._context - - self._encode_dt_options(req, options) - - self._encode_dt_op(type_name, req, op) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_update_datatype(datatype, options) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DT_UPDATE_REQ, req, + riak.pb.messages.MSG_CODE_DT_UPDATE_REQ, data, riak.pb.messages.MSG_CODE_DT_UPDATE_RESP) if resp.HasField('key'): datatype.key = resp.key[:] if resp.HasField('context'): datatype._context = resp.context[:] - if options.get('return_body'): datatype._set_value(self._decode_dt_value(type_name, resp)) - return True def get_preflist(self, bucket, key): @@ -779,13 +543,58 @@ def get_preflist(self, bucket, key): :type key: string :rtype: list of dicts """ - req = riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq() - req.bucket = str_to_bytes(bucket.name) - req.key = str_to_bytes(key) - req.type = str_to_bytes(bucket.bucket_type.name) - + codec = self._get_codec(ttb_supported=False) + data = codec._encode_get_preflist(bucket, key) msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, req, + riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, data, riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP) - - return [self._decode_preflist(item) for item in resp.preflist] + return [codec._decode_preflist(item) for item in resp.preflist] + + # TODO RTS-842 is_ttb + def _parse_msg(self, code, packet, is_ttb=False): + if is_ttb: + if code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ + code != riak.pb.messages.MSG_CODE_TS_PUT_RESP: + raise RiakError("TTB can't parse code: %d" % code) + if len(packet) > 0: + return erlastic.decode(packet) + else: + return None + else: + try: + pbclass = riak.pb.messages.MESSAGE_CLASSES[code] + except KeyError: + pbclass = None + + if pbclass is None: + return None + + pbo = pbclass() + pbo.ParseFromString(packet) + return pbo + + def _maybe_riak_error(self, msg_code, data=None, is_ttb=False): + if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: + if data is None: + raise RiakError('no error provided!') + err = self._parse_msg(msg_code, data, is_ttb) + if err is None: + raise RiakError('no error provided!') + else: + raise RiakError(bytes_to_str(err.errmsg)) + + # TODO RTS-842 is_ttb + def _request(self, msg_code, data=None, expect=None, is_ttb=False): + msg_code, data = self._send_recv(msg_code, data, expect) + self._maybe_riak_error(msg_code, data, is_ttb) + if msg_code in riak.pb.messages.MESSAGE_CLASSES: + msg = self._parse_msg(msg_code, data, is_ttb) + else: + raise Exception("unknown msg code 
%s" % msg_code) + + if expect and msg_code != expect: + raise RiakError("unexpected protocol buffer message code: %d, %r" + % (msg_code, msg)) + # logging.debug("tcp/connection received msg_code %d msg %s", + # msg_code, msg) + return msg_code, msg diff --git a/riak/transports/transport.py b/riak/transports/transport.py index e30f2d5e..6e5fee2c 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -295,6 +295,7 @@ def get_preflist(self, bucket, key): """ raise NotImplementedError + # TODO RTS-842 NUKE THIS def _search_mapred_emu(self, index, query): """ Emulates a search request via MapReduce. Used in the case @@ -320,6 +321,7 @@ def _search_mapred_emu(self, index, query): result['docs'].append({u'id': key}) return result + # TODO RTS-842 NUKE THIS def _get_index_mapred_emu(self, bucket, index, startkey, endkey=None): """ Emulates a secondary index request via MapReduce. Used in the @@ -360,6 +362,5 @@ def _construct_mapred_json(self, inputs, query, timeout=None): def _check_bucket_types(self, bucket_type): if not self.bucket_types(): raise NotImplementedError('Server does not support bucket-types') - if bucket_type.is_default(): raise ValueError('Cannot manipulate the default bucket-type') From 22ec2a46156ba5218e6a1f4c21df4f0312278c15 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Mar 2016 15:15:38 -0700 Subject: [PATCH 145/324] Standardize on kwargs for double-star parameter name. --- riak/codecs/pbuf.py | 117 ++++++++++++++++-------------- riak/tests/test_timeseries.py | 29 +++++--- riak/tests/test_timeseries_ttb.py | 11 +-- riak/transports/tcp/transport.py | 31 ++++---- 4 files changed, 101 insertions(+), 87 deletions(-) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 4c746f08..eb59317e 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -493,28 +493,28 @@ def _add_bucket_type(self, req, bucket_type): 'Server does not support bucket-types') req.type = str_to_bytes(bucket_type.name) - def _encode_search_query(self, req, params): - if 'rows' in params: - req.rows = params['rows'] - if 'start' in params: - req.start = params['start'] - if 'sort' in params: - req.sort = str_to_bytes(params['sort']) - if 'filter' in params: - req.filter = str_to_bytes(params['filter']) - if 'df' in params: - req.df = str_to_bytes(params['df']) - if 'op' in params: - req.op = str_to_bytes(params['op']) - if 'q.op' in params: - req.op = params['q.op'] - if 'fl' in params: - if isinstance(params['fl'], list): - req.fl.extend(params['fl']) + def _encode_search_query(self, req, **kwargs): + if 'rows' in kwargs: + req.rows = kwargs['rows'] + if 'start' in kwargs: + req.start = kwargs['start'] + if 'sort' in kwargs: + req.sort = str_to_bytes(kwargs['sort']) + if 'filter' in kwargs: + req.filter = str_to_bytes(kwargs['filter']) + if 'df' in kwargs: + req.df = str_to_bytes(kwargs['df']) + if 'op' in kwargs: + req.op = str_to_bytes(kwargs['op']) + if 'q.op' in kwargs: + req.op = kwargs['q.op'] + if 'fl' in kwargs: + if isinstance(kwargs['fl'], list): + req.fl.extend(kwargs['fl']) else: - req.fl.append(params['fl']) - if 'presort' in params: - req.presort = params['presort'] + req.fl.append(kwargs['fl']) + if 'presort' in kwargs: + req.presort = kwargs['presort'] def _decode_search_doc(self, doc): resultdoc = MultiDict() @@ -550,15 +550,15 @@ def _decode_dt_value(self, dtype, msg): elif dtype == 'map': return self._decode_map_value(msg.map_value) - def _encode_dt_options(self, req, params): + def _encode_dt_options(self, req, **kwargs): for q in ['r', 'pr', 
'w', 'dw', 'pw']: - if q in params and params[q] is not None: - setattr(req, q, self._encode_quorum(params[q])) + if q in kwargs and kwargs[q] is not None: + setattr(req, q, self._encode_quorum(kwargs[q])) for o in ['basic_quorum', 'notfound_ok', 'timeout', 'return_body', 'include_context']: - if o in params and params[o] is not None: - setattr(req, o, params[o]) + if o in kwargs and kwargs[o] is not None: + setattr(req, o, kwargs[o]) def _decode_map_value(self, entries): out = {} @@ -678,7 +678,7 @@ def _encode_timeseries_listkeysreq(self, table, timeout=None): req.timeout = timeout return req.SerializeToString() - def _encode_timeseries_put(self, tsobj, req): + def _encode_timeseries_put(self, tsobj): """ Fills an TsPutReq message with the appropriate data and metadata from a TsObject. @@ -975,7 +975,7 @@ def _encode_set_bucket_type_props(self, bucket_type, props): self._encode_bucket_props(props, req) return req.SerializeToString() - def _encode_stream_mapred(content): + def _encode_stream_mapred(self, content): req = riak.pb.riak_kv_pb2.RpbMapRedReq() req.request = str_to_bytes(content) req.content_type = str_to_bytes("application/json") @@ -1003,7 +1003,7 @@ def _encode_list_search_indexes(self): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() return req.SerializeToString() - def _encode_delete_search_indexes(self): + def _encode_delete_search_index(self, index): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( name=str_to_bytes(index)) return req.SerializeToString() @@ -1027,14 +1027,14 @@ def _decode_get_search_schema(self, resp): result['content'] = bytes_to_str(resp.schema.content) return result - def _encode_search(self, index, query, **params): + def _encode_search(self, index, query, **kwargs): req = riak.pb.riak_search_pb2.RpbSearchQueryReq( index=str_to_bytes(index), q=str_to_bytes(query)) - self._encode_search_query(req, params) + self._encode_search_query(req, **kwargs) return req.SerializeToString() - def _decode_search(resp): + def _decode_search(self, resp): result = {} if resp.HasField('max_score'): result['max_score'] = resp.max_score @@ -1043,44 +1043,44 @@ def _decode_search(resp): result['docs'] = [self._decode_search_doc(doc) for doc in resp.docs] return result - def _encode_get_counter(self, bucket, key, **params): + def _encode_get_counter(self, bucket, key, **kwargs): req = riak.pb.riak_kv_pb2.RpbCounterGetReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) - if params.get('r') is not None: - req.r = self._encode_quorum(params['r']) - if params.get('pr') is not None: - req.pr = self._encode_quorum(params['pr']) - if params.get('basic_quorum') is not None: - req.basic_quorum = params['basic_quorum'] - if params.get('notfound_ok') is not None: - req.notfound_ok = params['notfound_ok'] + if kwargs.get('r') is not None: + req.r = self._encode_quorum(kwargs['r']) + if kwargs.get('pr') is not None: + req.pr = self._encode_quorum(kwargs['pr']) + if kwargs.get('basic_quorum') is not None: + req.basic_quorum = kwargs['basic_quorum'] + if kwargs.get('notfound_ok') is not None: + req.notfound_ok = kwargs['notfound_ok'] return req.SerializeToString() - def _encode_update_counter(self, bucket, key, value, **params): + def _encode_update_counter(self, bucket, key, value, **kwargs): req = riak.pb.riak_kv_pb2.RpbCounterUpdateReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) req.amount = value - if params.get('w') is not None: - req.w = self._encode_quorum(params['w']) - if params.get('dw') is not None: - req.dw = 
self._encode_quorum(params['dw']) - if params.get('pw') is not None: - req.pw = self._encode_quorum(params['pw']) - if params.get('returnvalue') is not None: - req.returnvalue = params['returnvalue'] + if kwargs.get('w') is not None: + req.w = self._encode_quorum(kwargs['w']) + if kwargs.get('dw') is not None: + req.dw = self._encode_quorum(kwargs['dw']) + if kwargs.get('pw') is not None: + req.pw = self._encode_quorum(kwargs['pw']) + if kwargs.get('returnvalue') is not None: + req.returnvalue = kwargs['returnvalue'] return req.SerializeToString() - def _encode_fetch_datatype(self, bucket, key, **options): + def _encode_fetch_datatype(self, bucket, key, **kwargs): req = riak.pb.riak_dt_pb2.DtFetchReq() req.type = str_to_bytes(bucket.bucket_type.name) req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) - self._encode_dt_options(req, options) + self._encode_dt_options(req, **kwargs) return req.SerializeToString() - def _encode_update_datatype(self, datatype, **options): + def _encode_update_datatype(self, datatype, **kwargs): op = datatype.to_op() type_name = datatype.type_name if not op: @@ -1093,10 +1093,19 @@ def _encode_update_datatype(self, datatype, **options): req.key = str_to_bytes(datatype.key) if datatype._context: req.context = datatype._context - self._encode_dt_options(req, options) + self._encode_dt_options(req, **kwargs) self._encode_dt_op(type_name, req, op) return req.SerializeToString() + def _decode_update_datatype(self, datatype, resp, **kwargs): + type_name = datatype.type_name + if resp.HasField('key'): + datatype.key = resp.key[:] + if resp.HasField('context'): + datatype._context = resp.context[:] + if kwargs.get('return_body'): + datatype._set_value(self._decode_dt_value(type_name, resp)) + def _encode_get_preflist(self, bucket, key): req = riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq() req.bucket = str_to_bytes(bucket.name) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 6f906726..30210099 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -35,7 +35,6 @@ class TimeseriesUnitTests(unittest.TestCase): @classmethod def setUpClass(cls): - cls.c = PbufCodec() cls.ts0ms = unix_time_millis(ts0) if cls.ts0ms != ex0ms: raise AssertionError( @@ -67,25 +66,33 @@ def test_encode_decode_timestamp(self): self.assertEqual(ts0, ts0_d) def test_encode_data_for_get(self): + c = PbufCodec() + data = c._encode_timeseries_keyreq( + self.table, self.test_key, is_delete=False) req = riak.pb.riak_ts_pb2.TsGetReq() - self.c._encode_timeseries_keyreq(self.table, self.test_key, req) + req.ParseFromString(data) self.validate_keyreq(req) def test_encode_data_for_delete(self): + c = PbufCodec() + data = c._encode_timeseries_keyreq( + self.table, self.test_key, is_delete=True) req = riak.pb.riak_ts_pb2.TsDelReq() - self.c._encode_timeseries_keyreq(self.table, self.test_key, req) + req.ParseFromString(data) self.validate_keyreq(req) def test_encode_data_for_put(self): + c = PbufCodec() tsobj = TsObject(None, self.table, self.rows, None) - ts_put_req = riak.pb.riak_ts_pb2.TsPutReq() - self.c._encode_timeseries_put(tsobj, ts_put_req) + data = c._encode_timeseries_put(tsobj) + req = riak.pb.riak_ts_pb2.TsPutReq() + req.ParseFromString(data) # NB: expected, actual - self.assertEqual(self.table.name, bytes_to_str(ts_put_req.table)) - self.assertEqual(len(self.rows), len(ts_put_req.rows)) + self.assertEqual(self.table.name, bytes_to_str(req.table)) + self.assertEqual(len(self.rows), len(req.rows)) - r0 = ts_put_req.rows[0] 
+ r0 = req.rows[0] self.assertEqual(bytes_to_str(r0.cells[0].varchar_value), self.rows[0][0]) self.assertEqual(r0.cells[1].sint64_value, self.rows[0][1]) @@ -93,7 +100,7 @@ def test_encode_data_for_put(self): self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) - r1 = ts_put_req.rows[1] + r1 = req.rows[1] self.assertEqual(bytes_to_str(r1.cells[0].varchar_value), self.rows[1][0]) self.assertEqual(r1.cells[1].sint64_value, self.rows[1][1]) @@ -102,8 +109,10 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) def test_encode_data_for_listkeys(self): + c = PbufCodec(client_timeouts=True) + data = c._encode_timeseries_listkeysreq(self.table, 1234) req = riak.pb.riak_ts_pb2.TsListKeysReq() - self.c._encode_timeseries_listkeysreq(self.table, req, 1234) + req.ParseFromString(data) self.assertEqual(self.table.name, bytes_to_str(req.table)) self.assertEqual(1234, req.timeout) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 64ff0888..959b0467 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -40,7 +40,6 @@ @unittest.skipUnless(is_timeseries_supported(), "Timeseries not supported") class TimeseriesTtbUnitTests(unittest.TestCase): def setUp(self): - self.c = TtbCodec() self.table = Table(None, table_name) def test_encode_data_for_get(self): @@ -53,8 +52,8 @@ def test_encode_data_for_get(self): req_test = encode(req) test_key = ['hash1', 'user2', ts0] - req_encoded = self.c._encode_timeseries_keyreq_ttb( - self.table, test_key) + c = TtbCodec() + req_encoded = c._encode_timeseries_keyreq(self.table, test_key) self.assertEqual(req_test, req_encoded) # def test_decode_riak_error(self): @@ -85,7 +84,8 @@ def test_decode_data_from_get(self): rsp_ttb = encode(rsp_data) tsobj = TsObject(None, self.table, [], []) - self.c._decode_timeseries_ttb(decode(rsp_ttb), tsobj) + c = TtbCodec() + c._decode_timeseries(decode(rsp_ttb), tsobj) for i in range(0, 1): self.assertEqual(tsrow_a, rows[i][0]) @@ -145,7 +145,8 @@ def test_encode_data_for_put(self): ] tsobj = TsObject(None, self.table, rows_to_encode, None) - req_encoded = self.c._encode_timeseries_put_ttb(tsobj) + c = TtbCodec() + req_encoded = c._encode_timeseries_put(tsobj) self.assertEqual(req_test, req_encoded) diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index e73e82ac..9679cdd7 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -28,7 +28,7 @@ def __init__(self, node=None, client=None, timeout=None, - **transport_options): + **kwargs): super(TcpTransport, self).__init__() self._client = client @@ -38,7 +38,7 @@ def __init__(self, self._socket = None self._pbuf_c = None self._ttb_c = None - self._use_ttb = transport_options.get('use_ttb', True) + self._use_ttb = kwargs.get('use_ttb', True) def _get_pbuf_codec(self): if not self._pbuf_c: @@ -453,7 +453,7 @@ def get_search_schema(self, schema): riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP) return codec._decode_get_search_schema(resp) - def search(self, index, query, **params): + def search(self, index, query, **kwargs): # TODO RTS-842 NUKE THIS if not self.pb_search(): return self._search_mapred_emu(index, query) @@ -461,13 +461,13 @@ def search(self, index, query, **params): if six.PY2 and isinstance(query, unicode): # noqa query = query.encode('utf8') codec = self._get_codec(ttb_supported=False) - data = codec._encode_search(index, query, 
params) + data = codec._encode_search(index, query, **kwargs) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ, data, riak.pb.messages.MSG_CODE_SEARCH_QUERY_RESP) return codec._decode_search(resp) - def get_counter(self, bucket, key, **params): + def get_counter(self, bucket, key, **kwargs): if not bucket.bucket_type.is_default(): raise NotImplementedError("Counters are not " "supported with bucket-types, " @@ -475,7 +475,7 @@ def get_counter(self, bucket, key, **params): if not self.counters(): raise NotImplementedError("Counters are not supported") codec = self._get_codec(ttb_supported=False) - data = codec._encode_get_counter(bucket, key, params) + data = codec._encode_get_counter(bucket, key, **kwargs) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_COUNTER_GET_REQ, data, riak.pb.messages.MSG_CODE_COUNTER_GET_RESP) @@ -484,7 +484,7 @@ def get_counter(self, bucket, key, **params): else: return None - def update_counter(self, bucket, key, value, **params): + def update_counter(self, bucket, key, value, **kwargs): if not bucket.bucket_type.is_default(): raise NotImplementedError("Counters are not " "supported with bucket-types, " @@ -492,7 +492,7 @@ def update_counter(self, bucket, key, value, **params): if not self.counters(): raise NotImplementedError("Counters are not supported") codec = self._get_codec(ttb_supported=False) - data = codec._encode_update_counter(bucket, key, value, params) + data = codec._encode_update_counter(bucket, key, value, **kwargs) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ, data, riak.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP) @@ -501,36 +501,31 @@ def update_counter(self, bucket, key, value, **params): else: return True - def fetch_datatype(self, bucket, key, **options): + def fetch_datatype(self, bucket, key, **kwargs): if bucket.bucket_type.is_default(): raise NotImplementedError("Datatypes cannot be used in the default" " bucket-type.") if not self.datatypes(): raise NotImplementedError("Datatypes are not supported.") codec = self._get_codec(ttb_supported=False) - data = codec._encode_fetch_datatype(bucket, key, options) + data = codec._encode_fetch_datatype(bucket, key, **kwargs) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_DT_FETCH_REQ, data, riak.pb.messages.MSG_CODE_DT_FETCH_RESP) return codec._decode_dt_fetch(resp) - def update_datatype(self, datatype, **options): + def update_datatype(self, datatype, **kwargs): if datatype.bucket.bucket_type.is_default(): raise NotImplementedError("Datatypes cannot be used in the default" " bucket-type.") if not self.datatypes(): raise NotImplementedError("Datatypes are not supported.") codec = self._get_codec(ttb_supported=False) - data = codec._encode_update_datatype(datatype, options) + data = codec._encode_update_datatype(datatype, **kwargs) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_DT_UPDATE_REQ, data, riak.pb.messages.MSG_CODE_DT_UPDATE_RESP) - if resp.HasField('key'): - datatype.key = resp.key[:] - if resp.HasField('context'): - datatype._context = resp.context[:] - if options.get('return_body'): - datatype._set_value(self._decode_dt_value(type_name, resp)) + codec._decode_update_datatype(datatype, resp, **kwargs) return True def get_preflist(self, bucket, key): From 38bd2862e0014bc0a96a774fc69621557475a1cb Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Mar 2016 15:22:55 -0700 Subject: [PATCH 146/324] Make linters happy. 
--- riak/codecs/pbuf.py | 17 +++++++++-------- riak/transports/tcp/connection.py | 8 +++----- riak/transports/tcp/transport.py | 17 ++++++++++------- 3 files changed, 22 insertions(+), 20 deletions(-) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index eb59317e..44f5889b 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -78,8 +78,8 @@ class PbufCodec(object): """ def __init__(self, - client_timeouts=False, quorum_controls=False, - tombstone_vclocks=False, bucket_types=False): + client_timeouts=False, quorum_controls=False, + tombstone_vclocks=False, bucket_types=False): if riak.pb is None: raise NotImplementedError("this codec is not available") self._client_timeouts = client_timeouts @@ -797,7 +797,7 @@ def _decode_preflist(self, item): return result def _encode_get(self, robj, r=None, pr=None, timeout=None, - basic_quorum=None, notfound_ok=None): + basic_quorum=None, notfound_ok=None): bucket = robj.bucket req = riak.pb.riak_kv_pb2.RpbGetReq() if r: @@ -819,7 +819,8 @@ def _encode_get(self, robj, r=None, pr=None, timeout=None, return req.SerializeToString() def _encode_put(self, robj, w=None, dw=None, pw=None, - return_body=True, if_none_match=False, timeout=None): + return_body=True, if_none_match=False, + timeout=None): bucket = robj.bucket req = riak.pb.riak_kv_pb2.RpbPutReq() if w: @@ -869,8 +870,8 @@ def _decode_put(self, robj, resp): return robj def _encode_delete(self, robj, rw=None, r=None, - w=None, dw=None, pr=None, pw=None, - timeout=None): + w=None, dw=None, pr=None, pw=None, + timeout=None): req = riak.pb.riak_kv_pb2.RpbDelReq() if rw: req.rw = self._encode_quorum(rw) @@ -982,7 +983,7 @@ def _encode_stream_mapred(self, content): return req.SerializeToString() def _encode_create_search_index(self, index, schema=None, - n_val=None, timeout=None): + n_val=None, timeout=None): index = str_to_bytes(index) idx = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index) if schema: @@ -1007,7 +1008,7 @@ def _encode_delete_search_index(self, index): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( name=str_to_bytes(index)) return req.SerializeToString() - + def _encode_create_search_schema(self, schema, content): scma = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema( name=str_to_bytes(schema), diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 6ba885a1..776ef4a6 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,16 +1,14 @@ import logging import socket import struct + import riak.pb.riak_pb2 import riak.pb.messages -import erlastic from riak import RiakError from riak.security import SecurityError, USE_STDLIB_SSL from riak.util import str_to_bytes -from six import PY2 - if not USE_STDLIB_SSL: from OpenSSL.SSL import Connection from riak.transports.security import configure_pyopenssl_context @@ -36,7 +34,7 @@ def _send_recv(self, msg_code, data=None, expect=None): self._send_msg(msg_code, data) return self._recv_msg(expect) - def _non_connect_send_recv(self, msg_code, data=None): + def _non_connect_send_recv(self, msg_code, data=None, expect=None): """ Similar to self._send_recv, but doesn't try to initiate a connection, thus preventing an infinite loop. 
@@ -197,7 +195,7 @@ def _recv(self, msglen): toread = msglen while toread: nbytes = self._socket.recv_into(view, toread) - view = view[nbytes:] # slicing views is cheap + view = view[nbytes:] # slicing views is cheap toread -= nbytes nread += nbytes if nread != msglen: diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 9679cdd7..c6072f82 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -1,4 +1,5 @@ # TODO RTS-842 codecs should return msg codes too +import erlastic import six import riak.pb.messages @@ -113,7 +114,7 @@ def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, """ codec = self._get_codec(ttb_supported=False) data = codec._encode_get(robj, r, pr, - timeout, basic_quorum, notfound_ok) + timeout, basic_quorum, notfound_ok) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_GET_REQ, data, riak.pb.messages.MSG_CODE_GET_RESP) @@ -123,7 +124,7 @@ def put(self, robj, w=None, dw=None, pw=None, return_body=True, if_none_match=False, timeout=None): codec = self._get_codec(ttb_supported=False) data = codec._encode_put(robj, w, dw, pw, return_body, - if_none_match, timeout) + if_none_match, timeout) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_PUT_REQ, data, riak.pb.messages.MSG_CODE_PUT_RESP) @@ -189,7 +190,7 @@ def ts_stream_keys(self, table, timeout=None): yields lists of keys. """ codec = self._get_codec(ttb_supported=False) - data = codec._encode_timeseries_listkeysreq(table, t) + data = codec._encode_timeseries_listkeysreq(table, timeout) self._send_msg(riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ, data) return PbufTsKeyStream(self) @@ -346,8 +347,9 @@ def get_index(self, bucket, index, startkey, endkey=None, codec = self._get_codec(ttb_supported=False) data = codec._encode_index_req(bucket, index, startkey, endkey, - return_terms, max_results, continuation, - timeout, term_regex, streaming=False) + return_terms, max_results, + continuation, timeout, term_regex, + streaming=False) msg_code, resp = self._request( riak.pb.messages.MSG_CODE_INDEX_REQ, data, @@ -378,8 +380,9 @@ def stream_index(self, bucket, index, startkey, endkey=None, "supported") codec = self._get_codec(ttb_supported=False) data = codec._encode_index_req(bucket, index, startkey, endkey, - return_terms, max_results, continuation, - timeout, term_regex, streaming=True) + return_terms, max_results, + continuation, timeout, + term_regex, streaming=True) self._send_msg(riak.pb.messages.MSG_CODE_INDEX_REQ, data) return PbufIndexStream(self, index, return_terms) From 5dff949754424a85e6fbc646b99534ef9a286482 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Mar 2016 15:52:09 -0700 Subject: [PATCH 147/324] Fix message encoding for Auth and ToggleEncoding PB requests --- riak/transports/tcp/connection.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 776ef4a6..0f414bee 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -83,9 +83,10 @@ def _enable_ttb(self): logging.debug("tcp/connection enabling TTB") req = riak.pb.riak_pb2.RpbToggleEncodingReq() req.use_native = True + data = req.SerializeToString() msg_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_REQ, - req, + data, riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP) if msg_code == riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP: self._ttb_enabled = True @@ -107,9 +108,10 @@ def _auth(self): if 
not password: password = '' req.password = str_to_bytes(password) + data = req.SerializeToString() msg_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_AUTH_REQ, - req, + data, riak.pb.messages.MSG_CODE_AUTH_RESP) if msg_code == riak.pb.messages.MSG_CODE_AUTH_RESP: return True From 3ac26e86db859d70d9e97a24332794798ee5f6f2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Mar 2016 16:29:00 -0700 Subject: [PATCH 148/324] Added Msg named tuple to contain data between codecs and transport layer. --- riak/codecs/__init__.py | 4 ++++ riak/codecs/pbuf.py | 11 ++++++++- riak/transports/tcp/connection.py | 40 +++++++++++++++---------------- riak/transports/tcp/stream.py | 10 ++++---- riak/transports/tcp/transport.py | 34 +++++++++++++++----------- 5 files changed, 60 insertions(+), 39 deletions(-) diff --git a/riak/codecs/__init__.py b/riak/codecs/__init__.py index e69de29b..479675d4 100644 --- a/riak/codecs/__init__.py +++ b/riak/codecs/__init__.py @@ -0,0 +1,4 @@ +import collections + +Msg = collections.namedtuple('Msg', + ['msg_code', 'data', 'resp_code'], verbose=False) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 44f5889b..7e860e28 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -1,11 +1,12 @@ import datetime -import riak.pb +import riak.pb.messages import riak.pb.riak_pb2 import riak.pb.riak_dt_pb2 import riak.pb.riak_kv_pb2 import riak.pb.riak_ts_pb2 from riak import RiakError +from riak.codecs import Msg from riak.content import RiakContent from riak.riak_object import VClock from riak.util import decode_index_value, str_to_bytes, bytes_to_str, \ @@ -93,6 +94,14 @@ def _unix_time_millis(self, dt): def _datetime_from_unix_time_millis(self, ut): return datetime_from_unix_time_millis(ut) + def _encode_auth(self, username, password): + req = riak.pb.riak_pb2.RpbAuthReq() + req.user = str_to_bytes(username) + req.password = str_to_bytes(password) + return Msg(riak.pb.messages.MSG_CODE_AUTH_REQ, + req.SerializeToString(), + riak.pb.messages.MSG_CODE_AUTH_RESP) + def _encode_quorum(self, rw): """ Converts a symbolic quorum value into its on-the-wire diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 0f414bee..d888cd6a 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -6,6 +6,7 @@ import riak.pb.messages from riak import RiakError +from riak.codecs.pbuf import PbufCodec from riak.security import SecurityError, USE_STDLIB_SSL from riak.util import str_to_bytes @@ -30,17 +31,21 @@ def _encode_msg(self, msg_code, data=None): hdr = struct.pack("!iB", 1 + len(data), msg_code) return hdr + data - def _send_recv(self, msg_code, data=None, expect=None): + def _send_recv(self, msg_code, data=None): self._send_msg(msg_code, data) - return self._recv_msg(expect) + return self._recv_msg() - def _non_connect_send_recv(self, msg_code, data=None, expect=None): + def _non_connect_send_recv(self, msg_code, data=None): """ Similar to self._send_recv, but doesn't try to initiate a connection, thus preventing an infinite loop. 
""" self._non_connect_send_msg(msg_code, data) - return self._recv_msg(expect) + return self._recv_msg() + + def _non_connect_send_recv_msg(self, msg): + self._non_connect_send_msg(msg.msg_code, msg.data) + return self._recv_msg() def _non_connect_send_msg(self, msg_code, data): """ @@ -69,9 +74,9 @@ def _starttls(self): Exchange a STARTTLS message with Riak to initiate secure communications return True is Riak responds with a STARTTLS response, False otherwise """ - msg_code, _ = self._non_connect_send_recv( + resp_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_START_TLS) - if msg_code == riak.pb.messages.MSG_CODE_START_TLS: + if resp_code == riak.pb.messages.MSG_CODE_START_TLS: return True else: return False @@ -84,11 +89,10 @@ def _enable_ttb(self): req = riak.pb.riak_pb2.RpbToggleEncodingReq() req.use_native = True data = req.SerializeToString() - msg_code, _ = self._non_connect_send_recv( + resp_code, _ = self._non_connect_send_recv( riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_REQ, - data, - riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP) - if msg_code == riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP: + data) + if resp_code == riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP: self._ttb_enabled = True logging.debug("tcp/connection TTB IS ENABLED") return True @@ -102,18 +106,14 @@ def _auth(self): Note: Riak will sleep for a short period of time upon a failed auth request/response to prevent denial of service attacks """ - req = riak.pb.riak_pb2.RpbAuthReq() - req.user = str_to_bytes(self._client._credentials.username) + c = PbufCodec() + username = self._client._credentials.username password = self._client._credentials.password if not password: password = '' - req.password = str_to_bytes(password) - data = req.SerializeToString() - msg_code, _ = self._non_connect_send_recv( - riak.pb.messages.MSG_CODE_AUTH_REQ, - data, - riak.pb.messages.MSG_CODE_AUTH_RESP) - if msg_code == riak.pb.messages.MSG_CODE_AUTH_RESP: + msg = c._encode_auth(username, password) + resp_code, _ = self._non_connect_send_recv_msg(msg) + if resp_code == riak.pb.messages.MSG_CODE_AUTH_RESP: return True else: return False @@ -174,7 +174,7 @@ def _ssl_handshake(self): # fail if *any* exceptions are thrown during SSL handshake raise SecurityError(e) - def _recv_msg(self, expect=None): + def _recv_msg(self): msgbuf = self._recv_pkt() mv = memoryview(msgbuf) msg_code, = struct.unpack("B", mv[0:1]) diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index 2288c607..8c290e7c 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -29,10 +29,12 @@ def next(self): raise StopIteration try: - expected_code = self._expect - msg_code, data = self.transport._recv_msg(expect=expected_code) - self.transport._maybe_riak_error(msg_code, data) - resp = self.transport._parse_msg(expected_code, data, is_ttb=False) + # TODO RTS-842 - should be part of passed-in codec + resp_code, data = self.transport._recv_msg() + self.transport._maybe_riak_error(resp_code, data) + expect = self._expect + self.transport._maybe_incorrect_code(resp_code, expect) + resp = self.transport._parse_msg(expect, data, is_ttb=False) except: self.finished = True raise diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index c6072f82..e688da41 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -71,8 +71,11 @@ def ping(self): """ Ping the remote server """ - msg_code, _ = self._request(riak.pb.messages.MSG_CODE_PING_REQ) - if msg_code == 
riak.pb.messages.MSG_CODE_PING_RESP: + resp_code, _ = self._request( + riak.pb.messages.MSG_CODE_PING_REQ, + None, + riak.pb.messages.MSG_CODE_PING_RESP) + if resp_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: return False @@ -575,24 +578,27 @@ def _maybe_riak_error(self, msg_code, data=None, is_ttb=False): if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: if data is None: raise RiakError('no error provided!') + # TODO RTS-842 TTB-specific version err = self._parse_msg(msg_code, data, is_ttb) if err is None: raise RiakError('no error provided!') else: raise RiakError(bytes_to_str(err.errmsg)) + def _maybe_incorrect_code(self, resp_code, expect=None): + if expect and resp_code != expect: + raise RiakError("unexpected message code: %d, expected %d" + % (resp_code, expect)) + # TODO RTS-842 is_ttb def _request(self, msg_code, data=None, expect=None, is_ttb=False): - msg_code, data = self._send_recv(msg_code, data, expect) - self._maybe_riak_error(msg_code, data, is_ttb) - if msg_code in riak.pb.messages.MESSAGE_CLASSES: - msg = self._parse_msg(msg_code, data, is_ttb) + resp_code, data = self._send_recv(msg_code, data) + self._maybe_riak_error(resp_code, data, is_ttb) + self._maybe_incorrect_code(resp_code, expect) + if resp_code in riak.pb.messages.MESSAGE_CLASSES: + msg = self._parse_msg(resp_code, data, is_ttb) else: - raise Exception("unknown msg code %s" % msg_code) - - if expect and msg_code != expect: - raise RiakError("unexpected protocol buffer message code: %d, %r" - % (msg_code, msg)) - # logging.debug("tcp/connection received msg_code %d msg %s", - # msg_code, msg) - return msg_code, msg + raise Exception("unknown msg code %s" % resp_code) + # logging.debug("tcp/connection received resp_code %d msg %s", + # resp_code, msg) + return resp_code, msg From 86511b442ab4064a80a6077adeb17fb65b5d0605 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Mar 2016 16:32:25 -0700 Subject: [PATCH 149/324] make linters happy --- riak/codecs/__init__.py | 3 ++- riak/codecs/pbuf.py | 4 ++-- riak/transports/tcp/connection.py | 1 - 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/riak/codecs/__init__.py b/riak/codecs/__init__.py index 479675d4..8aab82f0 100644 --- a/riak/codecs/__init__.py +++ b/riak/codecs/__init__.py @@ -1,4 +1,5 @@ import collections Msg = collections.namedtuple('Msg', - ['msg_code', 'data', 'resp_code'], verbose=False) + ['msg_code', 'data', 'resp_code'], + verbose=False) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 7e860e28..1ecb35d1 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -99,8 +99,8 @@ def _encode_auth(self, username, password): req.user = str_to_bytes(username) req.password = str_to_bytes(password) return Msg(riak.pb.messages.MSG_CODE_AUTH_REQ, - req.SerializeToString(), - riak.pb.messages.MSG_CODE_AUTH_RESP) + req.SerializeToString(), + riak.pb.messages.MSG_CODE_AUTH_RESP) def _encode_quorum(self, rw): """ diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index d888cd6a..92802009 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -8,7 +8,6 @@ from riak import RiakError from riak.codecs.pbuf import PbufCodec from riak.security import SecurityError, USE_STDLIB_SSL -from riak.util import str_to_bytes if not USE_STDLIB_SSL: from OpenSSL.SSL import Connection From 47eaee13766dea9fd7b0b5d6b81b39b95093c625 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 31 Mar 2016 09:52:41 -0700 Subject: [PATCH 150/324] Move more code into the 
codecs. --- buildbot/Makefile | 1 - riak/codecs/pbuf.py | 185 ++++++++++----- riak/tests/test_timeseries.py | 16 +- riak/transports/tcp/transport.py | 388 +++++++++++++++---------------- 4 files changed, 321 insertions(+), 269 deletions(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index 8272fdf1..abf4863f 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -26,7 +26,6 @@ compile: lint: @pip install --upgrade pep8 flake8 - @cd ..; pep8 --exclude=riak/pb riak *.py @cd ..; flake8 --exclude=riak/pb riak *.py test: setup test_normal test_security diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 1ecb35d1..9432d3c9 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -1,4 +1,6 @@ import datetime +import six + import riak.pb.messages import riak.pb.riak_pb2 import riak.pb.riak_dt_pb2 @@ -14,8 +16,6 @@ from riak.multidict import MultiDict from riak.pb.riak_ts_pb2 import TsColumnType -from six import string_types, PY2 - def _invert(d): out = {} @@ -88,19 +88,13 @@ def __init__(self, self._tombstone_vclocks = tombstone_vclocks self._bucket_types = bucket_types - def _unix_time_millis(self, dt): - return unix_time_millis(dt) - - def _datetime_from_unix_time_millis(self, ut): - return datetime_from_unix_time_millis(ut) - def _encode_auth(self, username, password): req = riak.pb.riak_pb2.RpbAuthReq() req.user = str_to_bytes(username) req.password = str_to_bytes(password) - return Msg(riak.pb.messages.MSG_CODE_AUTH_REQ, - req.SerializeToString(), - riak.pb.messages.MSG_CODE_AUTH_RESP) + mc = riak.pb.messages.MSG_CODE_AUTH_REQ + rc = riak.pb.messages.MSG_CODE_AUTH_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_quorum(self, rw): """ @@ -233,7 +227,7 @@ def _encode_content(self, robj, rpb_content): pair.value = str_to_bytes(str(value)) # Python 2.x data is stored in a string - if PY2: + if six.PY2: rpb_content.value = str(robj.encoded_data) else: rpb_content.value = robj.encoded_data @@ -287,7 +281,7 @@ def _encode_bucket_props(self, props, msg): """ for prop in NORMAL_PROPS: if prop in props and props[prop] is not None: - if isinstance(props[prop], string_types): + if isinstance(props[prop], six.string_types): setattr(msg.props, prop, str_to_bytes(props[prop])) else: setattr(msg.props, prop, props[prop]) @@ -302,7 +296,7 @@ def _encode_bucket_props(self, props, msg): if prop in props and props[prop] not in (None, 'default'): value = self._encode_quorum(props[prop]) if value is not None: - if isinstance(value, string_types): + if isinstance(value, six.string_types): setattr(msg.props, prop, str_to_bytes(value)) else: setattr(msg.props, prop, value) @@ -477,7 +471,25 @@ def _encode_index_req(self, bucket, index, startkey, endkey=None, if term_regex: req.term_regex = str_to_bytes(term_regex) req.stream = streaming - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_INDEX_REQ + rc = riak.pb.messages.MSG_CODE_INDEX_RESP + return Msg(mc, req.SerializeToString(), rc) + + def _decode_index_req(self, resp, index, + return_terms=None, max_results=None): + if return_terms and resp.results: + results = [(decode_index_value(index, pair.key), + bytes_to_str(pair.value)) + for pair in resp.results] + else: + results = resp.keys[:] + if six.PY3: + results = [bytes_to_str(key) for key in resp.keys] + + if max_results is not None and resp.HasField('continuation'): + return (results, bytes_to_str(resp.continuation)) + else: + return (results, None) def _decode_search_index(self, index): """ @@ -528,7 +540,7 @@ def _encode_search_query(self, req, **kwargs): def 
_decode_search_doc(self, doc): resultdoc = MultiDict() for pair in doc.fields: - if PY2: + if six.PY2: ukey = unicode(pair.key, 'utf-8') # noqa uval = unicode(pair.value, 'utf-8') # noqa else: @@ -648,11 +660,11 @@ def _encode_to_ts_cell(self, cell, ts_cell): ts_cell.timestamp_value = unix_time_millis(cell) elif isinstance(cell, bool): ts_cell.boolean_value = cell - elif isinstance(cell, string_types): + elif isinstance(cell, six.string_types): # logging.debug("cell -> str: '%s'", cell) ts_cell.varchar_value = str_to_bytes(cell) elif (isinstance(cell, int) or - (PY2 and isinstance(cell, long))): # noqa + (six.PY2 and isinstance(cell, long))): # noqa # logging.debug("cell -> int/long: '%s'", cell) ts_cell.sint64_value = cell elif isinstance(cell, float): @@ -678,14 +690,21 @@ def _encode_timeseries_keyreq(self, table, key, is_delete=False): for cell in key_vals: ts_cell = req.key.add() self._encode_to_ts_cell(cell, ts_cell) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_TS_GET_REQ + rc = riak.pb.messages.MSG_CODE_TS_GET_RESP + if is_delete: + mc = riak.pb.messages.MSG_CODE_TS_DEL_REQ + rc = riak.pb.messages.MSG_CODE_TS_DEL_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_timeseries_listkeysreq(self, table, timeout=None): req = riak.pb.riak_ts_pb2.TsListKeysReq() req.table = str_to_bytes(table.name) if self._client_timeouts and timeout: req.timeout = timeout - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ + rc = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_timeseries_put(self, tsobj): """ @@ -713,7 +732,10 @@ def _encode_timeseries_put(self, tsobj): self._encode_to_ts_cell(cell, tsc) else: raise RiakError("TsObject requires a list of rows") - return req.SerializeToString() + + mc = riak.pb.messages.MSG_CODE_TS_PUT_REQ + rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_timeseries_query(self, table, query, interpolations=None): req = riak.pb.riak_ts_pb2.TsQueryReq() @@ -721,7 +743,9 @@ def _encode_timeseries_query(self, table, query, interpolations=None): if '{table}' in q: q = q.format(table=table.name) req.query.base = str_to_bytes(q) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_TS_QUERY_REQ + rc = riak.pb.messages.MSG_CODE_TS_QUERY_RESP + return Msg(mc, req.SerializeToString(), rc) def _decode_timeseries(self, resp, tsobj): """ @@ -825,7 +849,9 @@ def _encode_get(self, robj, r=None, pr=None, timeout=None, req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) req.key = str_to_bytes(robj.key) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_GET_REQ + rc = riak.pb.messages.MSG_CODE_GET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_put(self, robj, w=None, dw=None, pw=None, return_body=True, if_none_match=False, @@ -851,7 +877,9 @@ def _encode_put(self, robj, w=None, dw=None, pw=None, if robj.vclock: req.vclock = robj.vclock.encode('binary') self._encode_content(robj, req.content) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_PUT_REQ + rc = riak.pb.messages.MSG_CODE_PUT_RESP + return Msg(mc, req.SerializeToString(), rc) def _decode_get(self, robj, resp): if resp is not None: @@ -909,7 +937,9 @@ def _encode_delete(self, robj, rw=None, r=None, req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) req.key = str_to_bytes(robj.key) - return req.SerializeToString() + mc = 
riak.pb.messages.MSG_CODE_DEL_REQ + rc = riak.pb.messages.MSG_CODE_DEL_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_stream_keys(self, bucket, timeout=None): req = riak.pb.riak_kv_pb2.RpbListKeysReq() @@ -917,7 +947,9 @@ def _encode_stream_keys(self, bucket, timeout=None): if self._client_timeouts and timeout: req.timeout = timeout self._add_bucket_type(req, bucket.bucket_type) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ + rc = riak.pb.messages.MSG_CODE_LIST_KEYS_RESP + return Msg(mc, req.SerializeToString(), rc) def _decode_get_keys(self, stream): keys = [] @@ -930,66 +962,81 @@ def _decode_get_server_info(self, resp): return {'node': bytes_to_str(resp.node), 'server_version': bytes_to_str(resp.server_version)} + def _encode_get_client_id(self): + mc = riak.pb.messages.MSG_CODE_GET_CLIENT_ID_REQ + rc = riak.pb.messages.MSG_CODE_GET_CLIENT_ID_RESP + return Msg(mc, None, rc) + def _decode_get_client_id(self, resp): return bytes_to_str(resp.client_id) def _encode_set_client_id(self, client_id): req = riak.pb.riak_kv_pb2.RpbSetClientIdReq() req.client_id = str_to_bytes(client_id) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ + rc = riak.pb.messages.MSG_CODE_SET_CLIENT_ID_RESP + return Msg(mc, req.SerializeToString(), rc) - def _encode_get_buckets(self, bucket_type, timeout): + def _encode_get_buckets(self, bucket_type, + timeout=None, streaming=False): + # Bucket streaming landed in the same release as timeouts, so + # we don't need to check the capability. req = riak.pb.riak_kv_pb2.RpbListBucketsReq() + req.stream = streaming self._add_bucket_type(req, bucket_type) if self._client_timeouts and timeout: req.timeout = timeout - return req.SerializeToString() - - def _encode_stream_buckets(self, bucket_type, timeout): - req = riak.pb.riak_kv_pb2.RpbListBucketsReq() - req.stream = True - self._add_bucket_type(req, bucket_type) - # Bucket streaming landed in the same release as timeouts, so - # we don't need to check the capability. 
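# --- Editor's sketch, not part of this patch (illustrative only) ------------
# Every encoder touched by this commit now returns the Msg namedtuple
# (msg_code, data, resp_code) introduced two patches earlier, so the transport
# no longer hard-codes protocol buffer message codes. Using only names that
# appear in these hunks, the calling pattern is roughly:
#
#     codec = self._get_codec(riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ)
#     msg = codec._encode_get_preflist(bucket, key)      # Msg(mc, data, rc)
#     resp_code, resp = self._request(msg)
#     preflist = [codec._decode_preflist(item) for item in resp.preflist]
#
# The _request(msg) call shape is taken from the transport hunks later in this
# commit; the pairing of encode/decode calls here is a sketch, not a verbatim
# excerpt.
# -----------------------------------------------------------------------------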
- if timeout: - req.timeout = timeout - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ + rc = riak.pb.messages.MSG_CODE_LIST_BUCKETS_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_get_bucket_props(self, bucket): req = riak.pb.riak_pb2.RpbGetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_GET_BUCKET_REQ + rc = riak.pb.messages.MSG_CODE_GET_BUCKET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_set_bucket_props(self, bucket, props): req = riak.pb.riak_pb2.RpbSetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) self._encode_bucket_props(props, req) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_SET_BUCKET_REQ + rc = riak.pb.messages.MSG_CODE_SET_BUCKET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_clear_bucket_props(self, bucket): req = riak.pb.riak_pb2.RpbResetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ + rc = riak.pb.messages.MSG_CODE_RESET_BUCKET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_get_bucket_type_props(self, bucket_type): req = riak.pb.riak_pb2.RpbGetBucketTypeReq() req.type = str_to_bytes(bucket_type.name) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ + rc = riak.pb.messages.MSG_CODE_GET_BUCKET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_set_bucket_type_props(self, bucket_type, props): req = riak.pb.riak_pb2.RpbSetBucketTypeReq() req.type = str_to_bytes(bucket_type.name) self._encode_bucket_props(props, req) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ + rc = riak.pb.messages.MSG_CODE_SET_BUCKET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_stream_mapred(self, content): req = riak.pb.riak_kv_pb2.RpbMapRedReq() req.request = str_to_bytes(content) req.content_type = str_to_bytes("application/json") - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_MAP_RED_REQ + rc = riak.pb.messages.MSG_CODE_MAP_RED_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_create_search_index(self, index, schema=None, n_val=None, timeout=None): @@ -1002,21 +1049,29 @@ def _encode_create_search_index(self, index, schema=None, req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx) if timeout is not None: req.timeout = timeout - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ + rc = riak.pb.messages.MSG_CODE_PUT_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_get_search_index(self, index): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq( name=str_to_bytes(index)) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ + rc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_list_search_indexes(self): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ + rc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_delete_search_index(self, index): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( name=str_to_bytes(index)) - return 
req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ + rc = riak.pb.messages.MSG_CODE_DEL_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_create_search_schema(self, schema, content): scma = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema( @@ -1024,12 +1079,16 @@ def _encode_create_search_schema(self, schema, content): content=str_to_bytes(content)) req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq( schema=scma) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ + rc = riak.pb.messages.MSG_CODE_PUT_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_get_search_schema(self, schema): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq( name=str_to_bytes(schema)) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ + rc = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP + return Msg(mc, req.SerializeToString(), rc) def _decode_get_search_schema(self, resp): result = {} @@ -1042,7 +1101,9 @@ def _encode_search(self, index, query, **kwargs): index=str_to_bytes(index), q=str_to_bytes(query)) self._encode_search_query(req, **kwargs) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ + rc = riak.pb.messages.MSG_CODE_SEARCH_QUERY_RESP + return Msg(mc, req.SerializeToString(), rc) def _decode_search(self, resp): result = {} @@ -1065,7 +1126,9 @@ def _encode_get_counter(self, bucket, key, **kwargs): req.basic_quorum = kwargs['basic_quorum'] if kwargs.get('notfound_ok') is not None: req.notfound_ok = kwargs['notfound_ok'] - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_COUNTER_GET_REQ + rc = riak.pb.messages.MSG_CODE_COUNTER_GET_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_update_counter(self, bucket, key, value, **kwargs): req = riak.pb.riak_kv_pb2.RpbCounterUpdateReq() @@ -1080,7 +1143,9 @@ def _encode_update_counter(self, bucket, key, value, **kwargs): req.pw = self._encode_quorum(kwargs['pw']) if kwargs.get('returnvalue') is not None: req.returnvalue = kwargs['returnvalue'] - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ + rc = riak.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_fetch_datatype(self, bucket, key, **kwargs): req = riak.pb.riak_dt_pb2.DtFetchReq() @@ -1088,7 +1153,9 @@ def _encode_fetch_datatype(self, bucket, key, **kwargs): req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) self._encode_dt_options(req, **kwargs) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_DT_FETCH_REQ + rc = riak.pb.messages.MSG_CODE_DT_FETCH_RESP + return Msg(mc, req.SerializeToString(), rc) def _encode_update_datatype(self, datatype, **kwargs): op = datatype.to_op() @@ -1105,7 +1172,9 @@ def _encode_update_datatype(self, datatype, **kwargs): req.context = datatype._context self._encode_dt_options(req, **kwargs) self._encode_dt_op(type_name, req, op) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_DT_UPDATE_REQ + rc = riak.pb.messages.MSG_CODE_DT_UPDATE_RESP + return Msg(mc, req.SerializeToString(), rc) def _decode_update_datatype(self, datatype, resp, **kwargs): type_name = datatype.type_name @@ -1121,4 +1190,6 @@ def _encode_get_preflist(self, bucket, key): req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) req.type = str_to_bytes(bucket.bucket_type.name) - return req.SerializeToString() + mc = riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ + rc 
= riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP + return Msg(mc, req.SerializeToString(), rc) diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 30210099..7c5d68fb 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -67,26 +67,26 @@ def test_encode_decode_timestamp(self): def test_encode_data_for_get(self): c = PbufCodec() - data = c._encode_timeseries_keyreq( + msg = c._encode_timeseries_keyreq( self.table, self.test_key, is_delete=False) req = riak.pb.riak_ts_pb2.TsGetReq() - req.ParseFromString(data) + req.ParseFromString(msg.data) self.validate_keyreq(req) def test_encode_data_for_delete(self): c = PbufCodec() - data = c._encode_timeseries_keyreq( + msg = c._encode_timeseries_keyreq( self.table, self.test_key, is_delete=True) req = riak.pb.riak_ts_pb2.TsDelReq() - req.ParseFromString(data) + req.ParseFromString(msg.data) self.validate_keyreq(req) def test_encode_data_for_put(self): c = PbufCodec() tsobj = TsObject(None, self.table, self.rows, None) - data = c._encode_timeseries_put(tsobj) + msg = c._encode_timeseries_put(tsobj) req = riak.pb.riak_ts_pb2.TsPutReq() - req.ParseFromString(data) + req.ParseFromString(msg.data) # NB: expected, actual self.assertEqual(self.table.name, bytes_to_str(req.table)) @@ -110,9 +110,9 @@ def test_encode_data_for_put(self): def test_encode_data_for_listkeys(self): c = PbufCodec(client_timeouts=True) - data = c._encode_timeseries_listkeysreq(self.table, 1234) + msg = c._encode_timeseries_listkeysreq(self.table, 1234) req = riak.pb.riak_ts_pb2.TsListKeysReq() - req.ParseFromString(data) + req.ParseFromString(msg.data) self.assertEqual(self.table.name, bytes_to_str(req.table)) self.assertEqual(1234, req.timeout) diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index e688da41..c2a9c053 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -4,13 +4,14 @@ import riak.pb.messages from riak import RiakError +from riak.codecs import Msg from riak.codecs.pbuf import PbufCodec from riak.codecs.ttb import TtbCodec from riak.transports.transport import Transport from riak.ts_object import TsObject # TODO RTS-842 ideally these would not be needed -from riak.util import decode_index_value, bytes_to_str +from riak.util import bytes_to_str from riak.transports.tcp.connection import TcpConnection from riak.transports.tcp.stream import (PbufKeyStream, @@ -48,16 +49,24 @@ def _get_pbuf_codec(self): self.tombstone_vclocks(), self.bucket_types()) return self._pbuf_c - def _get_codec(self, ttb_supported=False): - if ttb_supported: - if self._use_ttb: - if not self._enable_ttb(): - raise RiakError('could not switch to TTB encoding!') - if not self._ttb_c: - self._ttb_c = TtbCodec() - codec = self._ttb_c - else: - codec = self._get_pbuf_codec() + def _get_ttb_codec(self): + if self._use_ttb: + if not self._enable_ttb(): + raise RiakError('could not switch to TTB encoding!') + if not self._ttb_c: + self._ttb_c = TtbCodec() + codec = self._ttb_c + else: + codec = self._get_pbuf_codec() + return codec + + def _get_codec(self, msg_code): + if msg_code == riak.pb.messages.MSG_CODE_TS_GET_REQ: + codec = self._get_ttb_codec() + elif msg_code == riak.pb.messages.MSG_CODE_TS_PUT_REQ: + codec = self._get_ttb_codec() + elif msg_code == riak.pb.messages.MSG_CODE_TS_DEL_REQ: + codec = self._get_ttb_codec() else: codec = self._get_pbuf_codec() return codec @@ -71,10 +80,9 @@ def ping(self): """ Ping the remote server """ - resp_code, _ = self._request( - 
riak.pb.messages.MSG_CODE_PING_REQ, - None, - riak.pb.messages.MSG_CODE_PING_RESP) + msg = Msg(riak.pb.messages.MSG_CODE_PING_REQ, None, + riak.pb.messages.MSG_CODE_PING_RESP) + resp_code, _ = self._request(msg) if resp_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: @@ -87,24 +95,23 @@ def get_server_info(self): # NB: can't do it this way due to recursion # codec = self._get_codec(ttb_supported=False) codec = PbufCodec() - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, - expect=riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) + msg = Msg(riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, None, + riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) + resp_code, resp = self._request(msg) return codec._decode_get_server_info(resp) def _get_client_id(self): - codec = self._get_codec(ttb_supported=False) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_CLIENT_ID_REQ, - expect=riak.pb.messages.MSG_CODE_GET_CLIENT_ID_RESP) + msg_code = riak.pb.messages.MSG_CODE_GET_CLIENT_ID_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_client_id() + resp_code, resp = self._request(msg) return codec._decode_get_client_id(resp) def _set_client_id(self, client_id): - codec = self._get_codec(ttb_supported=False) - data = codec._encode_set_client_id(client_id) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ, data, - riak.pb.messages.MSG_CODE_SET_CLIENT_ID_RESP) + msg_code = riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_set_client_id(client_id) + resp_code, resp = self._request(msg) self._client_id = client_id client_id = property(_get_client_id, _set_client_id, @@ -115,22 +122,21 @@ def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, """ Serialize get request and deserialize response """ - codec = self._get_codec(ttb_supported=False) - data = codec._encode_get(robj, r, pr, - timeout, basic_quorum, notfound_ok) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_REQ, data, - riak.pb.messages.MSG_CODE_GET_RESP) + msg_code = riak.pb.messages.MSG_CODE_GET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get(robj, r, pr, + timeout, basic_quorum, + notfound_ok) + resp_code, resp = self._request(msg) return codec._decode_get(robj, resp) def put(self, robj, w=None, dw=None, pw=None, return_body=True, if_none_match=False, timeout=None): - codec = self._get_codec(ttb_supported=False) - data = codec._encode_put(robj, w, dw, pw, return_body, - if_none_match, timeout) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_PUT_REQ, data, - riak.pb.messages.MSG_CODE_PUT_RESP) + msg_code = riak.pb.messages.MSG_CODE_PUT_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_put(robj, w, dw, pw, return_body, + if_none_match, timeout) + resp_code, resp = self._request(msg) return codec._decode_put(robj, resp) def ts_describe(self, table): @@ -138,28 +144,24 @@ def ts_describe(self, table): return self.ts_query(table, query) def ts_get(self, table, key): - codec = self._get_codec(ttb_supported=True) - data = codec._encode_timeseries_keyreq(table, key) - msg_code, ts_get_resp = self._request( - riak.pb.messages.MSG_CODE_TS_GET_REQ, data, - riak.pb.messages.MSG_CODE_TS_GET_RESP) + msg_code = riak.pb.messages.MSG_CODE_TS_GET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_timeseries_keyreq(table, key) + resp_code, resp = self._request(msg) tsobj = TsObject(self._client, table, [], None) - 
codec._decode_timeseries(ts_get_resp, tsobj) + codec._decode_timeseries(resp, tsobj) return tsobj def ts_put(self, tsobj): - codec = self._get_codec(ttb_supported=True) - # TODO RTS-842 codecs should return msg codes too - data = codec._encode_timeseries_put(tsobj) + msg_code = riak.pb.messages.MSG_CODE_TS_PUT_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_timeseries_put(tsobj) # logging.debug("pbc/transport ts_put _use_ttb: '%s'", # self._use_ttb) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_TS_PUT_REQ, data, - riak.pb.messages.MSG_CODE_TS_PUT_RESP, - self._use_ttb) + resp_code, resp = self._request(msg, self._use_ttb) if self._use_ttb and \ resp is None and \ - msg_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: + resp_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: return True if resp is not None: return True @@ -167,24 +169,22 @@ def ts_put(self, tsobj): raise RiakError("missing response object") def ts_delete(self, table, key): - codec = self._get_codec(ttb_supported=True) - data = codec._encode_timeseries_keyreq(table, key, is_delete=True) - msg_code, ts_del_resp = self._request( - riak.pb.messages.MSG_CODE_TS_DEL_REQ, data, - riak.pb.messages.MSG_CODE_TS_DEL_RESP) - if ts_del_resp is not None: + msg_code = riak.pb.messages.MSG_CODE_TS_DEL_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_timeseries_keyreq(table, key, is_delete=True) + resp_code, resp = self._request(msg) + if resp is not None: return True else: raise RiakError("missing response object") def ts_query(self, table, query, interpolations=None): - codec = self._get_codec(ttb_supported=True) - data = codec._encode_timeseries_query(table, query, interpolations) - msg_code, ts_query_resp = self._request( - riak.pb.messages.MSG_CODE_TS_QUERY_REQ, data, - riak.pb.messages.MSG_CODE_TS_QUERY_RESP) + msg_code = riak.pb.messages.MSG_CODE_TS_QUERY_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_timeseries_query(table, query, interpolations) + resp_code, resp = self._request(msg) tsobj = TsObject(self._client, table, [], []) - self._decode_timeseries(ts_query_resp, tsobj) + self._decode_timeseries(resp, tsobj) return tsobj def ts_stream_keys(self, table, timeout=None): @@ -192,25 +192,26 @@ def ts_stream_keys(self, table, timeout=None): Streams keys from a timeseries table, returning an iterator that yields lists of keys. """ - codec = self._get_codec(ttb_supported=False) - data = codec._encode_timeseries_listkeysreq(table, timeout) - self._send_msg(riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ, data) + msg_code = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_timeseries_listkeysreq(table, timeout) + self._send_msg(msg.msg_code, msg.data) return PbufTsKeyStream(self) def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): - codec = self._get_codec(ttb_supported=False) - data = codec._encode_delete(robj, rw, r, w, dw, pr, pw, timeout) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DEL_REQ, data, - riak.pb.messages.MSG_CODE_DEL_RESP) + msg_code = riak.pb.messages.MSG_CODE_DEL_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_delete(robj, rw, r, w, dw, pr, pw, timeout) + resp_code, resp = self._request(msg) return self def get_keys(self, bucket, timeout=None): """ Lists all keys within a bucket. 
""" - codec = self._get_codec(ttb_supported=False) + msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ + codec = self._get_codec(msg_code) stream = self.stream_keys(bucket, timeout=timeout) return codec._decode_get_keys(stream) @@ -219,20 +220,21 @@ def stream_keys(self, bucket, timeout=None): Streams keys from a bucket, returning an iterator that yields lists of keys. """ - codec = self._get_codec(ttb_supported=False) - data = codec._encode_stream_keys(bucket, timeout) - self._send_msg(riak.pb.messages.MSG_CODE_LIST_KEYS_REQ, data) + msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_stream_keys(bucket, timeout) + self._send_msg(msg.msg_code, msg.data) return PbufKeyStream(self) def get_buckets(self, bucket_type=None, timeout=None): """ Serialize bucket listing request and deserialize response """ - codec = self._get_codec(ttb_supported=False) - data = codec._encode_get_buckets(bucket_type, timeout) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, data, - riak.pb.messages.MSG_CODE_LIST_BUCKETS_RESP) + msg_code = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_buckets(bucket_type, + timeout, streaming=False) + resp_code, resp = self._request(msg) return resp.buckets def stream_buckets(self, bucket_type=None, timeout=None): @@ -242,20 +244,21 @@ def stream_buckets(self, bucket_type=None, timeout=None): if not self.bucket_stream(): raise NotImplementedError('Streaming list-buckets is not ' 'supported') - codec = self._get_codec(ttb_supported=False) - data = codec._encode_stream_buckets(bucket_type, timeout) - self._send_msg(riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, data) + msg_code = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_buckets(bucket_type, + timeout, streaming=True) + self._send_msg(msg.msg_code, msg.data) return PbufBucketStream(self) def get_bucket_props(self, bucket): """ Serialize bucket property request and deserialize response """ - codec = self._get_codec(ttb_supported=False) - data = codec._encode_get_bucket_props(bucket) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_REQ, data, - riak.pb.messages.MSG_CODE_GET_BUCKET_RESP) + msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_bucket_props(bucket) + resp_code, resp = self._request(msg) return codec._decode_bucket_props(resp.props) def set_bucket_props(self, bucket, props): @@ -267,11 +270,10 @@ def set_bucket_props(self, bucket, props): if key not in ('n_val', 'allow_mult'): raise NotImplementedError('Server only supports n_val and ' 'allow_mult properties over PBC') - codec = self._get_codec(ttb_supported=False) - data = codec._encode_set_bucket_props(bucket, props) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_BUCKET_REQ, data, - riak.pb.messages.MSG_CODE_SET_BUCKET_RESP) + msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_set_bucket_props(bucket, props) + resp_code, resp = self._request(msg) return True def clear_bucket_props(self, bucket): @@ -280,11 +282,10 @@ def clear_bucket_props(self, bucket): """ if not self.pb_clear_bucket_props(): return False - codec = self._get_codec(ttb_supported=False) - data = codec._encode_clear_bucket_props(bucket) - self._request( - riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ, data, - 
riak.pb.messages.MSG_CODE_RESET_BUCKET_RESP) + msg_code = riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_clear_bucket_props(bucket) + self._request(msg) return True def get_bucket_type_props(self, bucket_type): @@ -292,11 +293,10 @@ def get_bucket_type_props(self, bucket_type): Fetch bucket-type properties """ self._check_bucket_types(bucket_type) - codec = self._get_codec(ttb_supported=False) - data = codec._encode_get_bucket_type_props(bucket_type) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ, data, - riak.pb.messages.MSG_CODE_GET_BUCKET_RESP) + msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_bucket_type_props(bucket_type) + resp_code, resp = self._request(msg) return codec._decode_bucket_props(resp.props) def set_bucket_type_props(self, bucket_type, props): @@ -304,11 +304,10 @@ def set_bucket_type_props(self, bucket_type, props): Set bucket-type properties """ self._check_bucket_types(bucket_type) - codec = self._get_codec(ttb_supported=False) - data = codec._encode_set_bucket_type_props(bucket_type, props) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ, data, - riak.pb.messages.MSG_CODE_SET_BUCKET_RESP) + msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_set_bucket_type_props(bucket_type, props) + resp_code, resp = self._request(msg) return True def mapred(self, inputs, query, timeout=None): @@ -331,10 +330,11 @@ def mapred(self, inputs, query, timeout=None): def stream_mapred(self, inputs, query, timeout=None): # Construct the job, optionally set the timeout... + msg_code = riak.pb.messages.MSG_CODE_MAP_RED_REQ + codec = self._get_codec(msg_code) content = self._construct_mapred_json(inputs, query, timeout) - codec = self._get_codec(ttb_supported=False) - data = codec._encode_stream_mapred(content) - self._send_msg(riak.pb.messages.MSG_CODE_MAP_RED_REQ, data) + msg = codec._encode_stream_mapred(content) + self._send_msg(msg.msg_code, msg.data) return PbufMapredStream(self) def get_index(self, bucket, index, startkey, endkey=None, @@ -348,29 +348,15 @@ def get_index(self, bucket, index, startkey, endkey=None, raise NotImplementedError("Secondary index term_regex is not " "supported") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_index_req(bucket, index, startkey, endkey, - return_terms, max_results, - continuation, timeout, term_regex, - streaming=False) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_INDEX_REQ, data, - riak.pb.messages.MSG_CODE_INDEX_RESP) - - if return_terms and resp.results: - results = [(decode_index_value(index, pair.key), - bytes_to_str(pair.value)) - for pair in resp.results] - else: - results = resp.keys[:] - if six.PY3: - results = [bytes_to_str(key) for key in resp.keys] - - if max_results is not None and resp.HasField('continuation'): - return (results, bytes_to_str(resp.continuation)) - else: - return (results, None) + msg_code = riak.pb.messages.MSG_CODE_INDEX_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_index_req(bucket, index, startkey, endkey, + return_terms, max_results, + continuation, timeout, + term_regex, streaming=False) + resp_code, resp = self._request(msg) + return codec._decode_index_req(resp, index, + return_terms, max_results) def stream_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, 
continuation=None, @@ -381,12 +367,13 @@ def stream_index(self, bucket, index, startkey, endkey=None, if term_regex and not self.index_term_regex(): raise NotImplementedError("Secondary index term_regex is not " "supported") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_index_req(bucket, index, startkey, endkey, - return_terms, max_results, - continuation, timeout, - term_regex, streaming=True) - self._send_msg(riak.pb.messages.MSG_CODE_INDEX_REQ, data) + msg_code = riak.pb.messages.MSG_CODE_INDEX_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_index_req(bucket, index, startkey, endkey, + return_terms, max_results, + continuation, timeout, + term_regex, streaming=True) + self._send_msg(msg.msg_code, msg.data) return PbufIndexStream(self, index, return_terms) def create_search_index(self, index, schema=None, n_val=None, @@ -394,22 +381,20 @@ def create_search_index(self, index, schema=None, n_val=None, if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_create_search_index(index, schema, n_val, timeout) - self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, data, - riak.pb.messages.MSG_CODE_PUT_RESP) + msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_create_search_index(index, schema, n_val, timeout) + self._request(msg) return True def get_search_index(self, index): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_get_search_index(index) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, data, - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) + msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_search_index(index) + resp_code, resp = self._request(msg) if len(resp.index) > 0: return codec._decode_search_index(resp.index[0]) else: @@ -419,44 +404,40 @@ def list_search_indexes(self): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_list_search_indexes() - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, data, - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) + msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_list_search_indexes() + resp_code, resp = self._request(msg) return [codec._decode_search_index(index) for index in resp.index] def delete_search_index(self, index): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_delete_search_index(index) - self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, data, - riak.pb.messages.MSG_CODE_DEL_RESP) + msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_delete_search_index(index) + self._request(msg) return True def create_search_schema(self, schema, content): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this 
version") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_create_search_schema(schema, content) - self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, data, - riak.pb.messages.MSG_CODE_PUT_RESP) + msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_create_search_schema(schema, content) + self._request(msg) return True def get_search_schema(self, schema): if not self.pb_search_admin(): raise NotImplementedError("Search 2.0 administration is not " "supported for this version") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_get_search_schema(schema) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, data, - riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP) + msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_search_schema(schema) + resp_code, resp = self._request(msg) return codec._decode_get_search_schema(resp) def search(self, index, query, **kwargs): @@ -466,11 +447,10 @@ def search(self, index, query, **kwargs): # TODO RTS-842 six.u() instead? if six.PY2 and isinstance(query, unicode): # noqa query = query.encode('utf8') - codec = self._get_codec(ttb_supported=False) - data = codec._encode_search(index, query, **kwargs) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ, data, - riak.pb.messages.MSG_CODE_SEARCH_QUERY_RESP) + msg_code = riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_search(index, query, **kwargs) + resp_code, resp = self._request(msg) return codec._decode_search(resp) def get_counter(self, bucket, key, **kwargs): @@ -480,11 +460,10 @@ def get_counter(self, bucket, key, **kwargs): "use datatypes instead.") if not self.counters(): raise NotImplementedError("Counters are not supported") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_get_counter(bucket, key, **kwargs) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_COUNTER_GET_REQ, data, - riak.pb.messages.MSG_CODE_COUNTER_GET_RESP) + msg_code = riak.pb.messages.MSG_CODE_COUNTER_GET_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_counter(bucket, key, **kwargs) + resp_code, resp = self._request(msg) if resp.HasField('value'): return resp.value else: @@ -497,11 +476,10 @@ def update_counter(self, bucket, key, value, **kwargs): "use datatypes instead.") if not self.counters(): raise NotImplementedError("Counters are not supported") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_update_counter(bucket, key, value, **kwargs) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ, data, - riak.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP) + msg_code = riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_update_counter(bucket, key, value, **kwargs) + resp_code, resp = self._request(msg) if resp.HasField('value'): return resp.value else: @@ -513,11 +491,10 @@ def fetch_datatype(self, bucket, key, **kwargs): " bucket-type.") if not self.datatypes(): raise NotImplementedError("Datatypes are not supported.") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_fetch_datatype(bucket, key, **kwargs) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DT_FETCH_REQ, data, - riak.pb.messages.MSG_CODE_DT_FETCH_RESP) + msg_code = 
riak.pb.messages.MSG_CODE_DT_FETCH_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_fetch_datatype(bucket, key, **kwargs) + resp_code, resp = self._request(msg) return codec._decode_dt_fetch(resp) def update_datatype(self, datatype, **kwargs): @@ -526,11 +503,10 @@ def update_datatype(self, datatype, **kwargs): " bucket-type.") if not self.datatypes(): raise NotImplementedError("Datatypes are not supported.") - codec = self._get_codec(ttb_supported=False) - data = codec._encode_update_datatype(datatype, **kwargs) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DT_UPDATE_REQ, data, - riak.pb.messages.MSG_CODE_DT_UPDATE_RESP) + msg_code = riak.pb.messages.MSG_CODE_DT_UPDATE_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_update_datatype(datatype, **kwargs) + resp_code, resp = self._request(msg) codec._decode_update_datatype(datatype, resp, **kwargs) return True @@ -544,11 +520,10 @@ def get_preflist(self, bucket, key): :type key: string :rtype: list of dicts """ - codec = self._get_codec(ttb_supported=False) - data = codec._encode_get_preflist(bucket, key) - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, data, - riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP) + msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ + codec = self._get_codec(msg_code) + msg = codec._encode_get_preflist(bucket, key) + resp_code, resp = self._request(msg) return [codec._decode_preflist(item) for item in resp.preflist] # TODO RTS-842 is_ttb @@ -574,6 +549,7 @@ def _parse_msg(self, code, packet, is_ttb=False): pbo.ParseFromString(packet) return pbo + # TODO RTS-842 move to base Codec object def _maybe_riak_error(self, msg_code, data=None, is_ttb=False): if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: if data is None: @@ -591,7 +567,13 @@ def _maybe_incorrect_code(self, resp_code, expect=None): % (resp_code, expect)) # TODO RTS-842 is_ttb - def _request(self, msg_code, data=None, expect=None, is_ttb=False): + def _request(self, msg, is_ttb=False): + if isinstance(msg, Msg): + msg_code = msg.msg_code + data = msg.data + expect = msg.resp_code + else: + raise ValueError('expected a Msg argument') resp_code, data = self._send_recv(msg_code, data) self._maybe_riak_error(resp_code, data, is_ttb) self._maybe_incorrect_code(resp_code, expect) From da5c13a30f6e2c5b6c1ce118896737e6cb6b5250 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 31 Mar 2016 16:50:03 -0700 Subject: [PATCH 151/324] make timeseries tests happy --- buildbot/Makefile | 2 +- riak/codecs/pbuf.py | 24 +++++++++---------- riak/codecs/ttb.py | 40 ++++++++++++++++++++++--------- riak/tests/test_timeseries.py | 29 +++++++++++++++------- riak/tests/test_timeseries_ttb.py | 14 ++++------- riak/transports/tcp/connection.py | 2 +- riak/transports/tcp/stream.py | 6 ++++- riak/transports/tcp/transport.py | 10 ++++---- 8 files changed, 79 insertions(+), 48 deletions(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index abf4863f..ecc933b8 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -45,7 +45,7 @@ test_security: test_timeseries: @echo "Testing Riak Python Client (timeseries)" @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RUN_YZ=0 RUN_DATATYPES=0 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_YZ=0 RUN_DATATYPES=0 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. 
setup: ./tox_setup.sh diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 9432d3c9..b55a4253 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -660,12 +660,13 @@ def _encode_to_ts_cell(self, cell, ts_cell): ts_cell.timestamp_value = unix_time_millis(cell) elif isinstance(cell, bool): ts_cell.boolean_value = cell + elif isinstance(cell, six.binary_type): + ts_cell.varchar_value = cell + elif isinstance(cell, six.text_type): + ts_cell.varchar_value = str_to_bytes(cell) elif isinstance(cell, six.string_types): - # logging.debug("cell -> str: '%s'", cell) ts_cell.varchar_value = str_to_bytes(cell) - elif (isinstance(cell, int) or - (six.PY2 and isinstance(cell, long))): # noqa - # logging.debug("cell -> int/long: '%s'", cell) + elif (isinstance(cell, six.integer_types)): ts_cell.sint64_value = cell elif isinstance(cell, float): ts_cell.double_value = cell @@ -681,20 +682,18 @@ def _encode_timeseries_keyreq(self, table, key, is_delete=False): else: raise ValueError("key must be a list") + req = riak.pb.riak_ts_pb2.TsGetReq() + mc = riak.pb.messages.MSG_CODE_TS_GET_REQ + rc = riak.pb.messages.MSG_CODE_TS_GET_RESP if is_delete: req = riak.pb.riak_ts_pb2.TsDelReq() - else: - req = riak.pb.riak_ts_pb2.TsGetReq() + mc = riak.pb.messages.MSG_CODE_TS_DEL_REQ + rc = riak.pb.messages.MSG_CODE_TS_DEL_RESP req.table = str_to_bytes(table.name) for cell in key_vals: ts_cell = req.key.add() self._encode_to_ts_cell(cell, ts_cell) - mc = riak.pb.messages.MSG_CODE_TS_GET_REQ - rc = riak.pb.messages.MSG_CODE_TS_GET_RESP - if is_delete: - mc = riak.pb.messages.MSG_CODE_TS_DEL_REQ - rc = riak.pb.messages.MSG_CODE_TS_DEL_RESP return Msg(mc, req.SerializeToString(), rc) def _encode_timeseries_listkeysreq(self, table, timeout=None): @@ -788,7 +787,8 @@ def _decode_timeseries_row(self, tsrow, tscols=None): if col and col.type != TsColumnType.Value('VARCHAR'): raise TypeError('expected VARCHAR column') else: - row.append(bytes_to_str(cell.varchar_value)) + # TODO RTS-842 - keep as bytes? 
+ row.append(cell.varchar_value) elif cell.HasField('sint64_value'): if col and col.type != TsColumnType.Value('SINT64'): raise TypeError('expected SINT64 column') diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 8d795675..eef65fdc 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -1,11 +1,13 @@ import datetime +import six + +import riak.pb.messages from erlastic import encode from erlastic.types import Atom -from six import text_type, binary_type, \ - string_types, PY2 from riak import RiakError +from riak.codecs import Msg from riak.util import unix_time_millis, \ datetime_from_unix_time_millis @@ -15,6 +17,7 @@ tsgetreq_a = Atom('tsgetreq') tsgetresp_a = Atom('tsgetresp') tsputreq_a = Atom('tsputreq') +tsdelreq_a = Atom('tsdelreq') tsrow_a = Atom('tsrow') tscell_a = Atom('tscell') @@ -38,13 +41,12 @@ def _encode_to_ts_cell(self, cell): return (tscell_a, udef_a, udef_a, ts, udef_a, udef_a) elif isinstance(cell, bool): return (tscell_a, udef_a, udef_a, udef_a, cell, udef_a) - elif isinstance(cell, text_type) or \ - isinstance(cell, binary_type) or \ - isinstance(cell, string_types): + elif isinstance(cell, six.text_type) or \ + isinstance(cell, six.binary_type) or \ + isinstance(cell, six.string_types): return (tscell_a, cell, udef_a, udef_a, udef_a, udef_a) - elif (isinstance(cell, int) or - (PY2 and isinstance(cell, long))): # noqa + elif (isinstance(cell, six.integer_types)): return (tscell_a, udef_a, cell, udef_a, udef_a, udef_a) elif isinstance(cell, float): return (tscell_a, udef_a, udef_a, udef_a, udef_a, cell) @@ -53,15 +55,24 @@ def _encode_to_ts_cell(self, cell): raise RiakError("can't serialize type '{}', value '{}'" .format(t, cell)) - def _encode_timeseries_keyreq(self, table, key): + def _encode_timeseries_keyreq(self, table, key, is_delete=False): key_vals = None if isinstance(key, list): key_vals = key else: raise ValueError("key must be a list") - req = tsgetreq_a, table.name, \ + + mc = riak.pb.messages.MSG_CODE_TS_GET_REQ + rc = riak.pb.messages.MSG_CODE_TS_GET_RESP + req_atom = tsgetreq_a + if is_delete: + mc = riak.pb.messages.MSG_CODE_TS_DEL_REQ + rc = riak.pb.messages.MSG_CODE_TS_DEL_RESP + req_atom = tsdelreq_a + + req = req_atom, table.name, \ [self._encode_to_ts_cell(k) for k in key_vals], udef_a - return encode(req) + return Msg(mc, encode(req), rc) def _encode_timeseries_put(self, tsobj): ''' @@ -84,7 +95,9 @@ def _encode_timeseries_put(self, tsobj): req_t = (tsrow_a, req_r) req_rows.append(req_t) req = tsputreq_a, tsobj.table.name, udef_a, req_rows - return encode(req) + mc = riak.pb.messages.MSG_CODE_TS_PUT_REQ + rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP + return Msg(mc, encode(req), rc) else: raise RiakError("TsObject requires a list of rows") @@ -107,6 +120,11 @@ def _decode_timeseries(self, resp_ttb, tsobj): # col_type = col.type # col = (col_name, col_type) # tsobj.columns.append(col) + # + # TODO RTS-842 is this correct? 
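
# A minimal sketch (not part of the patch) of the Erlang term that the
# _encode_timeseries_keyreq change above now wraps in a Msg: the leading atom
# switches between tsgetreq and tsdelreq, and each key value rides in a
# six-element tscell tuple. The 'undefined' atom name and the table/key
# literals here are assumptions made purely for illustration.
from erlastic import decode, encode
from erlastic.types import Atom

tsgetreq, tscell, undef = Atom('tsgetreq'), Atom('tscell'), Atom('undefined')
key = [(tscell, b'hash1', undef, undef, undef, undef),
       (tscell, b'user2', undef, undef, undef, undef)]
payload = encode((tsgetreq, b'GeoCheckin', key, undef))
assert decode(payload)[0] == tsgetreq  # round-trips through the external term format
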
+ if resp_ttb is None: + return tsobj + resp_a = resp_ttb[0] if resp_a == tsgetresp_a: # TODO resp_cols = resp_ttb[1] diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 7c5d68fb..e50d2613 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -179,14 +179,14 @@ def test_decode_data_from_query(self): self.assertEqual(c[4][1], TsColumnType.Value('BOOLEAN')) r0 = tsobj.rows[0] - self.assertEqual(r0[0], self.rows[0][0]) + self.assertEqual(bytes_to_str(r0[0]), self.rows[0][0]) self.assertEqual(r0[1], self.rows[0][1]) self.assertEqual(r0[2], self.rows[0][2]) self.assertEqual(r0[3], ts0) self.assertEqual(r0[4], self.rows[0][4]) r1 = tsobj.rows[1] - self.assertEqual(r1[0], self.rows[1][0]) + self.assertEqual(bytes_to_str(r1[0]), self.rows[1][0]) self.assertEqual(r1[1], self.rows[1][1]) self.assertEqual(r1[2], self.rows[1][2]) self.assertEqual(r1[3], ts1) @@ -229,17 +229,30 @@ def setUpClass(cls): cls.twentyMinsAgoMsec = unix_time_millis(twentyMinsAgo) cls.numCols = len(rows[0]) cls.rows = rows + encoded_rows = [ + [str_to_bytes('hash1'), str_to_bytes('user2'), + twentyFiveMinsAgo, str_to_bytes('typhoon'), 90.3], + [str_to_bytes('hash1'), str_to_bytes('user2'), + twentyMinsAgo, str_to_bytes('hurricane'), 82.3], + [str_to_bytes('hash1'), str_to_bytes('user2'), + fifteenMinsAgo, str_to_bytes('rain'), 79.0], + [str_to_bytes('hash1'), str_to_bytes('user2'), + fiveMinsAgo, str_to_bytes('wind'), None], + [str_to_bytes('hash1'), str_to_bytes('user2'), + cls.now, str_to_bytes('snow'), 20.1] + ] + cls.encoded_rows = encoded_rows def validate_data(self, ts_obj): if ts_obj.columns is not None: self.assertEqual(len(ts_obj.columns), self.numCols) self.assertEqual(len(ts_obj.rows), 1) row = ts_obj.rows[0] - self.assertEqual(row[0], 'hash1') - self.assertEqual(row[1], 'user2') + self.assertEqual(bytes_to_str(row[0]), 'hash1') + self.assertEqual(bytes_to_str(row[1]), 'user2') self.assertEqual(row[2], self.fiveMinsAgo) self.assertEqual(row[2].microsecond, 987000) - self.assertEqual(row[3], 'wind') + self.assertEqual(bytes_to_str(row[3]), 'wind') self.assertIsNone(row[4]) def test_query_that_creates_table_using_interpolation(self): @@ -359,7 +372,7 @@ def test_query_that_matches_more_data(self): t2=self.nowMsec) ts_obj = self.client.ts_query('GeoCheckin', query) j = 0 - for i, want in enumerate(self.rows): + for i, want in enumerate(self.encoded_rows): if want[2] == self.twentyFiveMinsAgo: continue got = ts_obj.rows[j] @@ -393,8 +406,8 @@ def test_stream_keys(self): for key in keylist: self.assertIsInstance(key, list) self.assertEqual(len(key), 3) - self.assertEqual('hash1', key[0]) - self.assertEqual('user2', key[1]) + self.assertEqual(bytes_to_str(key[0]), 'hash1') + self.assertEqual(bytes_to_str(key[1]), 'user2') self.assertIsInstance(key[2], datetime.datetime) self.assertGreater(len(streamed_keys), 0) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 959b0467..81c0554c 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -6,7 +6,6 @@ from erlastic import decode, encode from erlastic.types import Atom -from riak.client import RiakClient from riak.table import Table from riak.ts_object import TsObject from riak.codecs.ttb import TtbCodec @@ -53,8 +52,8 @@ def test_encode_data_for_get(self): test_key = ['hash1', 'user2', ts0] c = TtbCodec() - req_encoded = c._encode_timeseries_keyreq(self.table, test_key) - self.assertEqual(req_test, req_encoded) + msg = 
c._encode_timeseries_keyreq(self.table, test_key) + self.assertEqual(req_test, msg.data) # def test_decode_riak_error(self): @@ -146,8 +145,8 @@ def test_encode_data_for_put(self): tsobj = TsObject(None, self.table, rows_to_encode, None) c = TtbCodec() - req_encoded = c._encode_timeseries_put(tsobj) - self.assertEqual(req_test, req_encoded) + msg = c._encode_timeseries_put(tsobj) + self.assertEqual(req_test, msg.data) @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, @@ -166,10 +165,7 @@ def test_store_and_fetch_ttb(self): twentyFiveMinsAgo = twentyMinsAgo - fiveMins opts = {'use_ttb': True} - client = RiakClient(protocol='pbc', - host='riak-test', - pb_port=10017, - transport_options=opts) + client = self.create_client(transport_options=opts) table = client.table(table_name) rows = [ diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 92802009..8972e480 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -88,7 +88,7 @@ def _enable_ttb(self): req = riak.pb.riak_pb2.RpbToggleEncodingReq() req.use_native = True data = req.SerializeToString() - resp_code, _ = self._non_connect_send_recv( + resp_code, _ = self._send_recv( riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_REQ, data) if resp_code == riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP: diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index 8c290e7c..46fb5941 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -174,6 +174,10 @@ class PbufTsKeyStream(PbufStream, TtbCodec): _expect = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_RESP + def __init__(self, transport, codec): + super(PbufTsKeyStream, self).__init__(transport) + self._codec = codec + def next(self): response = super(PbufTsKeyStream, self).next() @@ -182,7 +186,7 @@ def next(self): keys = [] for tsrow in response.keys: - keys.append(self._decode_timeseries_row(tsrow)) + keys.append(self._codec._decode_timeseries_row(tsrow)) return keys diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index c2a9c053..ee8898f2 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -65,8 +65,6 @@ def _get_codec(self, msg_code): codec = self._get_ttb_codec() elif msg_code == riak.pb.messages.MSG_CODE_TS_PUT_REQ: codec = self._get_ttb_codec() - elif msg_code == riak.pb.messages.MSG_CODE_TS_DEL_REQ: - codec = self._get_ttb_codec() else: codec = self._get_pbuf_codec() return codec @@ -147,7 +145,8 @@ def ts_get(self, table, key): msg_code = riak.pb.messages.MSG_CODE_TS_GET_REQ codec = self._get_codec(msg_code) msg = codec._encode_timeseries_keyreq(table, key) - resp_code, resp = self._request(msg) + # TODO RTS-842 is_ttb + resp_code, resp = self._request(msg, self._use_ttb) tsobj = TsObject(self._client, table, [], None) codec._decode_timeseries(resp, tsobj) return tsobj @@ -158,6 +157,7 @@ def ts_put(self, tsobj): msg = codec._encode_timeseries_put(tsobj) # logging.debug("pbc/transport ts_put _use_ttb: '%s'", # self._use_ttb) + # TODO RTS-842 use_ttb resp_code, resp = self._request(msg, self._use_ttb) if self._use_ttb and \ resp is None and \ @@ -184,7 +184,7 @@ def ts_query(self, table, query, interpolations=None): msg = codec._encode_timeseries_query(table, query, interpolations) resp_code, resp = self._request(msg) tsobj = TsObject(self._client, table, [], []) - self._decode_timeseries(resp, tsobj) + codec._decode_timeseries(resp, tsobj) return tsobj def ts_stream_keys(self, table, timeout=None): @@ 
-196,7 +196,7 @@ def ts_stream_keys(self, table, timeout=None): codec = self._get_codec(msg_code) msg = codec._encode_timeseries_listkeysreq(table, timeout) self._send_msg(msg.msg_code, msg.data) - return PbufTsKeyStream(self) + return PbufTsKeyStream(self, codec) def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): From 642e4b77e783902c334b5ecb7d1a4dcbce3f1f8e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 1 Apr 2016 12:09:21 -0700 Subject: [PATCH 152/324] All code for PB and TTB encoding / decoding now in dedicated classes. --- riak/client/__init__.py | 6 +- riak/codecs/__init__.py | 20 ++ riak/codecs/pbuf.py | 302 ++++++++++++++++-------------- riak/codecs/ttb.py | 64 +++++-- riak/tests/base.py | 13 +- riak/tests/test_timeseries.py | 50 ++--- riak/tests/test_timeseries_ttb.py | 26 ++- riak/transports/tcp/connection.py | 4 +- riak/transports/tcp/stream.py | 20 +- riak/transports/tcp/transport.py | 273 +++++++++++---------------- 10 files changed, 410 insertions(+), 368 deletions(-) diff --git a/riak/client/__init__.py b/riak/client/__init__.py index 3a3caad5..ea9abaca 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -68,7 +68,7 @@ class RiakClient(RiakMapReduceChain, RiakClientOperations): PROTOCOLS = ['http', 'pbc'] def __init__(self, protocol='pbc', transport_options={}, nodes=None, - credentials=None, multiget_pool_size=None, **unused_args): + credentials=None, multiget_pool_size=None, **kwargs): """ Construct a new ``RiakClient`` object. @@ -88,10 +88,10 @@ def __init__(self, protocol='pbc', transport_options={}, nodes=None, CPUs in the system :type multiget_pool_size: int """ - unused_args = unused_args.copy() + kwargs = kwargs.copy() if nodes is None: - self.nodes = [self._create_node(unused_args), ] + self.nodes = [self._create_node(kwargs), ] else: self.nodes = [self._create_node(n) for n in nodes] diff --git a/riak/codecs/__init__.py b/riak/codecs/__init__.py index 8aab82f0..0e221d3b 100644 --- a/riak/codecs/__init__.py +++ b/riak/codecs/__init__.py @@ -1,5 +1,25 @@ import collections +import riak.pb.messages +from riak import RiakError Msg = collections.namedtuple('Msg', ['msg_code', 'data', 'resp_code'], verbose=False) + + +class Codec(object): + def parse_msg(self): + raise NotImplementedError('parse_msg not implemented') + + def maybe_incorrect_code(self, resp_code, expect=None): + if expect and resp_code != expect: + raise RiakError("unexpected message code: %d, expected %d" + % (resp_code, expect)) + + def maybe_riak_error(self, msg_code, data=None): + if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: + if data is None: + raise RiakError('no error provided!') + return data + else: + return None diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index b55a4253..b4da1e16 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -8,7 +8,7 @@ import riak.pb.riak_ts_pb2 from riak import RiakError -from riak.codecs import Msg +from riak.codecs import Codec, Msg from riak.content import RiakContent from riak.riak_object import VClock from riak.util import decode_index_value, str_to_bytes, bytes_to_str, \ @@ -73,10 +73,10 @@ def _invert(d): } -class PbufCodec(object): - """ +class PbufCodec(Codec): + ''' Protobuffs Encoding and decoding methods for TcpTransport. 
- """ + ''' def __init__(self, client_timeouts=False, quorum_controls=False, @@ -88,7 +88,21 @@ def __init__(self, self._tombstone_vclocks = tombstone_vclocks self._bucket_types = bucket_types - def _encode_auth(self, username, password): + def parse_msg(self, msg_code, data): + pbclass = riak.pb.messages.MESSAGE_CLASSES.get(msg_code, None) + if pbclass is None: + return None + pbo = pbclass() + pbo.ParseFromString(data) + return pbo + + def maybe_riak_error(self, msg_code, data=None): + err_data = super(PbufCodec, self).maybe_riak_error(msg_code, data) + if err_data: + err = self.parse_msg(msg_code, err_data) + raise RiakError(bytes_to_str(err.errmsg)) + + def encode_auth(self, username, password): req = riak.pb.riak_pb2.RpbAuthReq() req.user = str_to_bytes(username) req.password = str_to_bytes(password) @@ -96,7 +110,11 @@ def _encode_auth(self, username, password): rc = riak.pb.messages.MSG_CODE_AUTH_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_quorum(self, rw): + def encode_ping(self): + return Msg(riak.pb.messages.MSG_CODE_PING_REQ, None, + riak.pb.messages.MSG_CODE_PING_RESP) + + def encode_quorum(self, rw): """ Converts a symbolic quorum value into its on-the-wire equivalent. @@ -112,7 +130,7 @@ def _encode_quorum(self, rw): else: return None - def _decode_quorum(self, rw): + def decode_quorum(self, rw): """ Converts a protobuf quorum value to a symbolic value if necessary. @@ -126,7 +144,7 @@ def _decode_quorum(self, rw): else: return rw - def _decode_contents(self, contents, obj): + def decode_contents(self, contents, obj): """ Decodes the list of siblings from the protobuf representation into the object. @@ -137,14 +155,14 @@ def _decode_contents(self, contents, obj): :type obj: RiakObject :rtype RiakObject """ - obj.siblings = [self._decode_content(c, RiakContent(obj)) + obj.siblings = [self.decode_content(c, RiakContent(obj)) for c in contents] # Invoke sibling-resolution logic if len(obj.siblings) > 1 and obj.resolver is not None: obj.resolver(obj) return obj - def _decode_content(self, rpb_content, sibling): + def decode_content(self, rpb_content, sibling): """ Decodes a single sibling from the protobuf representation into a RiakObject. @@ -170,7 +188,7 @@ def _decode_content(self, rpb_content, sibling): if rpb_content.HasField("vtag"): sibling.etag = bytes_to_str(rpb_content.vtag) - sibling.links = [self._decode_link(link) + sibling.links = [self.decode_link(link) for link in rpb_content.links] if rpb_content.HasField("last_mod"): sibling.last_modified = float(rpb_content.last_mod) @@ -187,7 +205,7 @@ def _decode_content(self, rpb_content, sibling): return sibling - def _encode_content(self, robj, rpb_content): + def encode_content(self, robj, rpb_content): """ Fills an RpbContent message with the appropriate data and metadata from a RiakObject. @@ -232,7 +250,7 @@ def _encode_content(self, robj, rpb_content): else: rpb_content.value = robj.encoded_data - def _decode_link(self, link): + def decode_link(self, link): """ Decodes an RpbLink message into a tuple @@ -256,7 +274,7 @@ def _decode_link(self, link): return (bucket, key, tag) - def _decode_index_value(self, index, value): + def decode_index_value(self, index, value): """ Decodes a secondary index value into the correct Python type. 
:param index: the name of the index @@ -270,7 +288,7 @@ def _decode_index_value(self, index, value): else: return bytes_to_str(value) - def _encode_bucket_props(self, props, msg): + def encode_bucket_props(self, props, msg): """ Encodes a dict of bucket properties into the protobuf message. @@ -288,13 +306,13 @@ def _encode_bucket_props(self, props, msg): for prop in COMMIT_HOOK_PROPS: if prop in props: setattr(msg.props, 'has_' + prop, True) - self._encode_hooklist(props[prop], getattr(msg.props, prop)) + self.encode_hooklist(props[prop], getattr(msg.props, prop)) for prop in MODFUN_PROPS: if prop in props and props[prop] is not None: - self._encode_modfun(props[prop], getattr(msg.props, prop)) + self.encode_modfun(props[prop], getattr(msg.props, prop)) for prop in QUORUM_PROPS: if prop in props and props[prop] not in (None, 'default'): - value = self._encode_quorum(props[prop]) + value = self.encode_quorum(props[prop]) if value is not None: if isinstance(value, six.string_types): setattr(msg.props, prop, str_to_bytes(value)) @@ -305,7 +323,7 @@ def _encode_bucket_props(self, props, msg): return msg - def _decode_bucket_props(self, msg): + def decode_bucket_props(self, msg): """ Decodes the protobuf bucket properties message into a dict. @@ -321,18 +339,18 @@ def _decode_bucket_props(self, msg): props[prop] = bytes_to_str(props[prop]) for prop in COMMIT_HOOK_PROPS: if getattr(msg, 'has_' + prop): - props[prop] = self._decode_hooklist(getattr(msg, prop)) + props[prop] = self.decode_hooklist(getattr(msg, prop)) for prop in MODFUN_PROPS: if msg.HasField(prop): - props[prop] = self._decode_modfun(getattr(msg, prop)) + props[prop] = self.decode_modfun(getattr(msg, prop)) for prop in QUORUM_PROPS: if msg.HasField(prop): - props[prop] = self._decode_quorum(getattr(msg, prop)) + props[prop] = self.decode_quorum(getattr(msg, prop)) if msg.HasField('repl'): props['repl'] = REPL_TO_PY[msg.repl] return props - def _decode_modfun(self, modfun): + def decode_modfun(self, modfun): """ Decodes a protobuf modfun pair into a dict with 'mod' and 'fun' keys. Used in bucket properties. @@ -344,7 +362,7 @@ def _decode_modfun(self, modfun): return {'mod': bytes_to_str(modfun.module), 'fun': bytes_to_str(modfun.function)} - def _encode_modfun(self, props, msg=None): + def encode_modfun(self, props, msg=None): """ Encodes a dict with 'mod' and 'fun' keys into a protobuf modfun pair. Used in bucket properties. @@ -361,7 +379,7 @@ def _encode_modfun(self, props, msg=None): msg.function = str_to_bytes(props['fun']) return msg - def _decode_hooklist(self, hooklist): + def decode_hooklist(self, hooklist): """ Decodes a list of protobuf commit hooks into their python equivalents. Used in bucket properties. @@ -370,9 +388,9 @@ def _decode_hooklist(self, hooklist): :type hooklist: list :rtype list """ - return [self._decode_hook(hook) for hook in hooklist] + return [self.decode_hook(hook) for hook in hooklist] - def _encode_hooklist(self, hooklist, msg): + def encode_hooklist(self, hooklist, msg): """ Encodes a list of commit hooks into their protobuf equivalent. Used in bucket properties. @@ -383,9 +401,9 @@ def _encode_hooklist(self, hooklist, msg): """ for hook in hooklist: pbhook = msg.add() - self._encode_hook(hook, pbhook) + self.encode_hook(hook, pbhook) - def _decode_hook(self, hook): + def decode_hook(self, hook): """ Decodes a protobuf commit hook message into a dict. Used in bucket properties. 
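
# A minimal sketch (not part of the patch) of the two dict shapes that the
# commit-hook codec methods in these hunks round-trip for bucket properties;
# the hook and module names below are made up for illustration.
precommit = [
    {'name': 'riak_search_kv_hook'},        # built-in hook, carried in msg.name
    {'mod': 'my_hooks', 'fun': 'log_put'},  # module/function pair, via encode_modfun
]
# encode_hooklist(precommit, msg.props.precommit) emits one RpbCommitHook per
# entry, and decode_hooklist rebuilds the same dicts on the way back.
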
@@ -395,11 +413,11 @@ def _decode_hook(self, hook): :rtype dict """ if hook.HasField('modfun'): - return self._decode_modfun(hook.modfun) + return self.decode_modfun(hook.modfun) else: return {'name': bytes_to_str(hook.name)} - def _encode_hook(self, hook, msg): + def encode_hook(self, hook, msg): """ Encodes a commit hook dict into the protobuf message. Used in bucket properties. @@ -413,13 +431,13 @@ def _encode_hook(self, hook, msg): if 'name' in hook: msg.name = str_to_bytes(hook['name']) else: - self._encode_modfun(hook, msg.modfun) + self.encode_modfun(hook, msg.modfun) return msg - def _encode_index_req(self, bucket, index, startkey, endkey=None, - return_terms=None, max_results=None, - continuation=None, timeout=None, term_regex=None, - streaming=False): + def encode_index_req(self, bucket, index, startkey, endkey=None, + return_terms=None, max_results=None, + continuation=None, timeout=None, term_regex=None, + streaming=False): """ Encodes a secondary index request into the protobuf message. @@ -475,8 +493,8 @@ def _encode_index_req(self, bucket, index, startkey, endkey=None, rc = riak.pb.messages.MSG_CODE_INDEX_RESP return Msg(mc, req.SerializeToString(), rc) - def _decode_index_req(self, resp, index, - return_terms=None, max_results=None): + def decode_index_req(self, resp, index, + return_terms=None, max_results=None): if return_terms and resp.results: results = [(decode_index_value(index, pair.key), bytes_to_str(pair.value)) @@ -491,7 +509,7 @@ def _decode_index_req(self, resp, index, else: return (results, None) - def _decode_search_index(self, index): + def decode_search_index(self, index): """ Fills an RpbYokozunaIndex message with the appropriate data. @@ -514,7 +532,7 @@ def _add_bucket_type(self, req, bucket_type): 'Server does not support bucket-types') req.type = str_to_bytes(bucket_type.name) - def _encode_search_query(self, req, **kwargs): + def encode_search_query(self, req, **kwargs): if 'rows' in kwargs: req.rows = kwargs['rows'] if 'start' in kwargs: @@ -537,7 +555,7 @@ def _encode_search_query(self, req, **kwargs): if 'presort' in kwargs: req.presort = kwargs['presort'] - def _decode_search_doc(self, doc): + def decode_search_doc(self, doc): resultdoc = MultiDict() for pair in doc.fields: if six.PY2: @@ -549,12 +567,12 @@ def _decode_search_doc(self, doc): resultdoc.add(ukey, uval) return resultdoc.mixed() - def _decode_dt_fetch(self, resp): + def decode_dt_fetch(self, resp): dtype = DT_FETCH_TYPES.get(resp.type) if dtype is None: raise ValueError("Unknown datatype on wire: {}".format(resp.type)) - value = self._decode_dt_value(dtype, resp.value) + value = self.decode_dt_value(dtype, resp.value) if resp.HasField('context'): context = resp.context[:] @@ -563,25 +581,25 @@ def _decode_dt_fetch(self, resp): return dtype, value, context - def _decode_dt_value(self, dtype, msg): + def decode_dt_value(self, dtype, msg): if dtype == 'counter': return msg.counter_value elif dtype == 'set': - return self._decode_set_value(msg.set_value) + return self.decode_set_value(msg.set_value) elif dtype == 'map': - return self._decode_map_value(msg.map_value) + return self.decode_map_value(msg.map_value) - def _encode_dt_options(self, req, **kwargs): + def encode_dt_options(self, req, **kwargs): for q in ['r', 'pr', 'w', 'dw', 'pw']: if q in kwargs and kwargs[q] is not None: - setattr(req, q, self._encode_quorum(kwargs[q])) + setattr(req, q, self.encode_quorum(kwargs[q])) for o in ['basic_quorum', 'notfound_ok', 'timeout', 'return_body', 'include_context']: if o in kwargs and 
kwargs[o] is not None: setattr(req, o, kwargs[o]) - def _decode_map_value(self, entries): + def decode_map_value(self, entries): out = {} for entry in entries: name = bytes_to_str(entry.field.name[:]) @@ -589,37 +607,37 @@ def _decode_map_value(self, entries): if dtype == 'counter': value = entry.counter_value elif dtype == 'set': - value = self._decode_set_value(entry.set_value) + value = self.decode_set_value(entry.set_value) elif dtype == 'register': value = bytes_to_str(entry.register_value[:]) elif dtype == 'flag': value = entry.flag_value elif dtype == 'map': - value = self._decode_map_value(entry.map_value) + value = self.decode_map_value(entry.map_value) out[(name, dtype)] = value return out - def _decode_set_value(self, set_value): + def decode_set_value(self, set_value): return [bytes_to_str(string[:]) for string in set_value] - def _encode_dt_op(self, dtype, req, op): + def encode_dt_op(self, dtype, req, op): if dtype == 'counter': req.op.counter_op.increment = op[1] elif dtype == 'set': - self._encode_set_op(req.op, op) + self.encode_set_op(req.op, op) elif dtype == 'map': - self._encode_map_op(req.op.map_op, op) + self.encode_map_op(req.op.map_op, op) else: raise TypeError("Cannot send operation on datatype {!r}". format(dtype)) - def _encode_set_op(self, msg, op): + def encode_set_op(self, msg, op): if 'adds' in op: msg.set_op.adds.extend(str_to_bytes(op['adds'])) if 'removes' in op: msg.set_op.removes.extend(str_to_bytes(op['removes'])) - def _encode_map_op(self, msg, ops): + def encode_map_op(self, msg, ops): for op in ops: name, dtype = op[1] ftype = MAP_FIELD_TYPES[dtype] @@ -635,16 +653,16 @@ def _encode_map_op(self, msg, ops): update = msg.updates.add() update.field.name = str_to_bytes(name) update.field.type = ftype - self._encode_map_update(dtype, update, op[2]) + self.encode_map_update(dtype, update, op[2]) - def _encode_map_update(self, dtype, msg, op): + def encode_map_update(self, dtype, msg, op): if dtype == 'counter': # ('increment', some_int) msg.counter_op.increment = op[1] elif dtype == 'set': - self._encode_set_op(msg, op) + self.encode_set_op(msg, op) elif dtype == 'map': - self._encode_map_op(msg.map_op, op) + self.encode_map_op(msg.map_op, op) elif dtype == 'register': # ('assign', some_str) msg.register_op = str_to_bytes(op[1]) @@ -654,7 +672,7 @@ def _encode_map_update(self, dtype, msg, op): else: msg.flag_op = riak.pb.riak_dt_pb2.MapUpdate.DISABLE - def _encode_to_ts_cell(self, cell, ts_cell): + def encode_to_ts_cell(self, cell, ts_cell): if cell is not None: if isinstance(cell, datetime.datetime): ts_cell.timestamp_value = unix_time_millis(cell) @@ -675,7 +693,7 @@ def _encode_to_ts_cell(self, cell, ts_cell): raise RiakError("can't serialize type '{}', value '{}'" .format(t, cell)) - def _encode_timeseries_keyreq(self, table, key, is_delete=False): + def encode_timeseries_keyreq(self, table, key, is_delete=False): key_vals = None if isinstance(key, list): key_vals = key @@ -693,10 +711,10 @@ def _encode_timeseries_keyreq(self, table, key, is_delete=False): req.table = str_to_bytes(table.name) for cell in key_vals: ts_cell = req.key.add() - self._encode_to_ts_cell(cell, ts_cell) + self.encode_to_ts_cell(cell, ts_cell) return Msg(mc, req.SerializeToString(), rc) - def _encode_timeseries_listkeysreq(self, table, timeout=None): + def encode_timeseries_listkeysreq(self, table, timeout=None): req = riak.pb.riak_ts_pb2.TsListKeysReq() req.table = str_to_bytes(table.name) if self._client_timeouts and timeout: @@ -705,7 +723,13 @@ def 
_encode_timeseries_listkeysreq(self, table, timeout=None): rc = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_timeseries_put(self, tsobj): + def validate_timeseries_put_resp(self, resp_code, resp): + if resp is not None: + return True + else: + raise RiakError("missing response object") + + def encode_timeseries_put(self, tsobj): """ Fills an TsPutReq message with the appropriate data and metadata from a TsObject. @@ -728,7 +752,7 @@ def _encode_timeseries_put(self, tsobj): raise ValueError("TsObject row must be a list of values") for cell in row: tsc = tsr.cells.add() # NB: type TsCell - self._encode_to_ts_cell(cell, tsc) + self.encode_to_ts_cell(cell, tsc) else: raise RiakError("TsObject requires a list of rows") @@ -736,7 +760,7 @@ def _encode_timeseries_put(self, tsobj): rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_timeseries_query(self, table, query, interpolations=None): + def encode_timeseries_query(self, table, query, interpolations=None): req = riak.pb.riak_ts_pb2.TsQueryReq() q = query if '{table}' in q: @@ -746,7 +770,7 @@ def _encode_timeseries_query(self, table, query, interpolations=None): rc = riak.pb.messages.MSG_CODE_TS_QUERY_RESP return Msg(mc, req.SerializeToString(), rc) - def _decode_timeseries(self, resp, tsobj): + def decode_timeseries(self, resp, tsobj): """ Fills an TsObject with the appropriate data and metadata from a TsGetResp / TsQueryResp. @@ -766,9 +790,9 @@ def _decode_timeseries(self, resp, tsobj): for row in resp.rows: tsobj.rows.append( - self._decode_timeseries_row(row, resp.columns)) + self.decode_timeseries_row(row, resp.columns)) - def _decode_timeseries_row(self, tsrow, tscols=None): + def decode_timeseries_row(self, tsrow, tscols=None): """ Decodes a TsRow into a list @@ -815,7 +839,7 @@ def _decode_timeseries_row(self, tsrow, tscols=None): row.append(None) return row - def _decode_preflist(self, item): + def decode_preflist(self, item): """ Decodes a preflist response @@ -829,15 +853,15 @@ def _decode_preflist(self, item): 'primary': item. 
primary} return result - def _encode_get(self, robj, r=None, pr=None, timeout=None, - basic_quorum=None, notfound_ok=None): + def encode_get(self, robj, r=None, pr=None, timeout=None, + basic_quorum=None, notfound_ok=None): bucket = robj.bucket req = riak.pb.riak_kv_pb2.RpbGetReq() if r: - req.r = self._encode_quorum(r) + req.r = self.encode_quorum(r) if self._quorum_controls: if pr: - req.pr = self._encode_quorum(pr) + req.pr = self.encode_quorum(pr) if basic_quorum is not None: req.basic_quorum = basic_quorum if notfound_ok is not None: @@ -853,17 +877,17 @@ def _encode_get(self, robj, r=None, pr=None, timeout=None, rc = riak.pb.messages.MSG_CODE_GET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_put(self, robj, w=None, dw=None, pw=None, - return_body=True, if_none_match=False, - timeout=None): + def encode_put(self, robj, w=None, dw=None, pw=None, + return_body=True, if_none_match=False, + timeout=None): bucket = robj.bucket req = riak.pb.riak_kv_pb2.RpbPutReq() if w: - req.w = self._encode_quorum(w) + req.w = self.encode_quorum(w) if dw: - req.dw = self._encode_quorum(dw) + req.dw = self.encode_quorum(dw) if self._quorum_controls and pw: - req.pw = self._encode_quorum(pw) + req.pw = self.encode_quorum(pw) if return_body: req.return_body = 1 if if_none_match: @@ -876,54 +900,54 @@ def _encode_put(self, robj, w=None, dw=None, pw=None, req.key = str_to_bytes(robj.key) if robj.vclock: req.vclock = robj.vclock.encode('binary') - self._encode_content(robj, req.content) + self.encode_content(robj, req.content) mc = riak.pb.messages.MSG_CODE_PUT_REQ rc = riak.pb.messages.MSG_CODE_PUT_RESP return Msg(mc, req.SerializeToString(), rc) - def _decode_get(self, robj, resp): + def decode_get(self, robj, resp): if resp is not None: if resp.HasField('vclock'): robj.vclock = VClock(resp.vclock, 'binary') # We should do this even if there are no contents, i.e. 
# the object is tombstoned - self._decode_contents(resp.content, robj) + self.decode_contents(resp.content, robj) else: # "not found" returns an empty message, # so let's make sure to clear the siblings robj.siblings = [] return robj - def _decode_put(self, robj, resp): + def decode_put(self, robj, resp): if resp is not None: if resp.HasField('key'): robj.key = bytes_to_str(resp.key) if resp.HasField("vclock"): robj.vclock = VClock(resp.vclock, 'binary') if resp.content: - self._decode_contents(resp.content, robj) + self.decode_contents(resp.content, robj) elif not robj.key: raise RiakError("missing response object") return robj - def _encode_delete(self, robj, rw=None, r=None, - w=None, dw=None, pr=None, pw=None, - timeout=None): + def encode_delete(self, robj, rw=None, r=None, + w=None, dw=None, pr=None, pw=None, + timeout=None): req = riak.pb.riak_kv_pb2.RpbDelReq() if rw: - req.rw = self._encode_quorum(rw) + req.rw = self.encode_quorum(rw) if r: - req.r = self._encode_quorum(r) + req.r = self.encode_quorum(r) if w: - req.w = self._encode_quorum(w) + req.w = self.encode_quorum(w) if dw: - req.dw = self._encode_quorum(dw) + req.dw = self.encode_quorum(dw) if self._quorum_controls: if pr: - req.pr = self._encode_quorum(pr) + req.pr = self.encode_quorum(pr) if pw: - req.pw = self._encode_quorum(pw) + req.pw = self.encode_quorum(pw) if self._client_timeouts and timeout: req.timeout = timeout @@ -941,7 +965,7 @@ def _encode_delete(self, robj, rw=None, r=None, rc = riak.pb.messages.MSG_CODE_DEL_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_stream_keys(self, bucket, timeout=None): + def encode_stream_keys(self, bucket, timeout=None): req = riak.pb.riak_kv_pb2.RpbListKeysReq() req.bucket = str_to_bytes(bucket.name) if self._client_timeouts and timeout: @@ -951,34 +975,34 @@ def _encode_stream_keys(self, bucket, timeout=None): rc = riak.pb.messages.MSG_CODE_LIST_KEYS_RESP return Msg(mc, req.SerializeToString(), rc) - def _decode_get_keys(self, stream): + def decode_get_keys(self, stream): keys = [] for keylist in stream: for key in keylist: keys.append(bytes_to_str(key)) return keys - def _decode_get_server_info(self, resp): + def decode_get_server_info(self, resp): return {'node': bytes_to_str(resp.node), 'server_version': bytes_to_str(resp.server_version)} - def _encode_get_client_id(self): + def encode_get_client_id(self): mc = riak.pb.messages.MSG_CODE_GET_CLIENT_ID_REQ rc = riak.pb.messages.MSG_CODE_GET_CLIENT_ID_RESP return Msg(mc, None, rc) - def _decode_get_client_id(self, resp): + def decode_get_client_id(self, resp): return bytes_to_str(resp.client_id) - def _encode_set_client_id(self, client_id): + def encode_set_client_id(self, client_id): req = riak.pb.riak_kv_pb2.RpbSetClientIdReq() req.client_id = str_to_bytes(client_id) mc = riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ rc = riak.pb.messages.MSG_CODE_SET_CLIENT_ID_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_get_buckets(self, bucket_type, - timeout=None, streaming=False): + def encode_get_buckets(self, bucket_type, + timeout=None, streaming=False): # Bucket streaming landed in the same release as timeouts, so # we don't need to check the capability. 
req = riak.pb.riak_kv_pb2.RpbListBucketsReq() @@ -990,7 +1014,7 @@ def _encode_get_buckets(self, bucket_type, rc = riak.pb.messages.MSG_CODE_LIST_BUCKETS_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_get_bucket_props(self, bucket): + def encode_get_bucket_props(self, bucket): req = riak.pb.riak_pb2.RpbGetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) @@ -998,16 +1022,16 @@ def _encode_get_bucket_props(self, bucket): rc = riak.pb.messages.MSG_CODE_GET_BUCKET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_set_bucket_props(self, bucket, props): + def encode_set_bucket_props(self, bucket, props): req = riak.pb.riak_pb2.RpbSetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) - self._encode_bucket_props(props, req) + self.encode_bucket_props(props, req) mc = riak.pb.messages.MSG_CODE_SET_BUCKET_REQ rc = riak.pb.messages.MSG_CODE_SET_BUCKET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_clear_bucket_props(self, bucket): + def encode_clear_bucket_props(self, bucket): req = riak.pb.riak_pb2.RpbResetBucketReq() req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) @@ -1015,22 +1039,22 @@ def _encode_clear_bucket_props(self, bucket): rc = riak.pb.messages.MSG_CODE_RESET_BUCKET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_get_bucket_type_props(self, bucket_type): + def encode_get_bucket_type_props(self, bucket_type): req = riak.pb.riak_pb2.RpbGetBucketTypeReq() req.type = str_to_bytes(bucket_type.name) mc = riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ rc = riak.pb.messages.MSG_CODE_GET_BUCKET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_set_bucket_type_props(self, bucket_type, props): + def encode_set_bucket_type_props(self, bucket_type, props): req = riak.pb.riak_pb2.RpbSetBucketTypeReq() req.type = str_to_bytes(bucket_type.name) - self._encode_bucket_props(props, req) + self.encode_bucket_props(props, req) mc = riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ rc = riak.pb.messages.MSG_CODE_SET_BUCKET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_stream_mapred(self, content): + def encode_stream_mapred(self, content): req = riak.pb.riak_kv_pb2.RpbMapRedReq() req.request = str_to_bytes(content) req.content_type = str_to_bytes("application/json") @@ -1038,8 +1062,8 @@ def _encode_stream_mapred(self, content): rc = riak.pb.messages.MSG_CODE_MAP_RED_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_create_search_index(self, index, schema=None, - n_val=None, timeout=None): + def encode_create_search_index(self, index, schema=None, + n_val=None, timeout=None): index = str_to_bytes(index) idx = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index) if schema: @@ -1053,27 +1077,27 @@ def _encode_create_search_index(self, index, schema=None, rc = riak.pb.messages.MSG_CODE_PUT_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_get_search_index(self, index): + def encode_get_search_index(self, index): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq( name=str_to_bytes(index)) mc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ rc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_list_search_indexes(self): + def encode_list_search_indexes(self): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() mc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ rc = 
riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_delete_search_index(self, index): + def encode_delete_search_index(self, index): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( name=str_to_bytes(index)) mc = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ rc = riak.pb.messages.MSG_CODE_DEL_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_create_search_schema(self, schema, content): + def encode_create_search_schema(self, schema, content): scma = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema( name=str_to_bytes(schema), content=str_to_bytes(content)) @@ -1083,45 +1107,45 @@ def _encode_create_search_schema(self, schema, content): rc = riak.pb.messages.MSG_CODE_PUT_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_get_search_schema(self, schema): + def encode_get_search_schema(self, schema): req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq( name=str_to_bytes(schema)) mc = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ rc = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP return Msg(mc, req.SerializeToString(), rc) - def _decode_get_search_schema(self, resp): + def decode_get_search_schema(self, resp): result = {} result['name'] = bytes_to_str(resp.schema.name) result['content'] = bytes_to_str(resp.schema.content) return result - def _encode_search(self, index, query, **kwargs): + def encode_search(self, index, query, **kwargs): req = riak.pb.riak_search_pb2.RpbSearchQueryReq( index=str_to_bytes(index), q=str_to_bytes(query)) - self._encode_search_query(req, **kwargs) + self.encode_search_query(req, **kwargs) mc = riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ rc = riak.pb.messages.MSG_CODE_SEARCH_QUERY_RESP return Msg(mc, req.SerializeToString(), rc) - def _decode_search(self, resp): + def decode_search(self, resp): result = {} if resp.HasField('max_score'): result['max_score'] = resp.max_score if resp.HasField('num_found'): result['num_found'] = resp.num_found - result['docs'] = [self._decode_search_doc(doc) for doc in resp.docs] + result['docs'] = [self.decode_search_doc(doc) for doc in resp.docs] return result - def _encode_get_counter(self, bucket, key, **kwargs): + def encode_get_counter(self, bucket, key, **kwargs): req = riak.pb.riak_kv_pb2.RpbCounterGetReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) if kwargs.get('r') is not None: - req.r = self._encode_quorum(kwargs['r']) + req.r = self.encode_quorum(kwargs['r']) if kwargs.get('pr') is not None: - req.pr = self._encode_quorum(kwargs['pr']) + req.pr = self.encode_quorum(kwargs['pr']) if kwargs.get('basic_quorum') is not None: req.basic_quorum = kwargs['basic_quorum'] if kwargs.get('notfound_ok') is not None: @@ -1130,34 +1154,34 @@ def _encode_get_counter(self, bucket, key, **kwargs): rc = riak.pb.messages.MSG_CODE_COUNTER_GET_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_update_counter(self, bucket, key, value, **kwargs): + def encode_update_counter(self, bucket, key, value, **kwargs): req = riak.pb.riak_kv_pb2.RpbCounterUpdateReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) req.amount = value if kwargs.get('w') is not None: - req.w = self._encode_quorum(kwargs['w']) + req.w = self.encode_quorum(kwargs['w']) if kwargs.get('dw') is not None: - req.dw = self._encode_quorum(kwargs['dw']) + req.dw = self.encode_quorum(kwargs['dw']) if kwargs.get('pw') is not None: - req.pw = self._encode_quorum(kwargs['pw']) + req.pw = self.encode_quorum(kwargs['pw']) if 
kwargs.get('returnvalue') is not None: req.returnvalue = kwargs['returnvalue'] mc = riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ rc = riak.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_fetch_datatype(self, bucket, key, **kwargs): + def encode_fetch_datatype(self, bucket, key, **kwargs): req = riak.pb.riak_dt_pb2.DtFetchReq() req.type = str_to_bytes(bucket.bucket_type.name) req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) - self._encode_dt_options(req, **kwargs) + self.encode_dt_options(req, **kwargs) mc = riak.pb.messages.MSG_CODE_DT_FETCH_REQ rc = riak.pb.messages.MSG_CODE_DT_FETCH_RESP return Msg(mc, req.SerializeToString(), rc) - def _encode_update_datatype(self, datatype, **kwargs): + def encode_update_datatype(self, datatype, **kwargs): op = datatype.to_op() type_name = datatype.type_name if not op: @@ -1170,22 +1194,22 @@ def _encode_update_datatype(self, datatype, **kwargs): req.key = str_to_bytes(datatype.key) if datatype._context: req.context = datatype._context - self._encode_dt_options(req, **kwargs) - self._encode_dt_op(type_name, req, op) + self.encode_dt_options(req, **kwargs) + self.encode_dt_op(type_name, req, op) mc = riak.pb.messages.MSG_CODE_DT_UPDATE_REQ rc = riak.pb.messages.MSG_CODE_DT_UPDATE_RESP return Msg(mc, req.SerializeToString(), rc) - def _decode_update_datatype(self, datatype, resp, **kwargs): + def decode_update_datatype(self, datatype, resp, **kwargs): type_name = datatype.type_name if resp.HasField('key'): datatype.key = resp.key[:] if resp.HasField('context'): datatype._context = resp.context[:] if kwargs.get('return_body'): - datatype._set_value(self._decode_dt_value(type_name, resp)) + datatype._set_value(self.decode_dt_value(type_name, resp)) - def _encode_get_preflist(self, bucket, key): + def encode_get_preflist(self, bucket, key): req = riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq() req.bucket = str_to_bytes(bucket.name) req.key = str_to_bytes(key) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index eef65fdc..3ac7651a 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -3,12 +3,12 @@ import riak.pb.messages -from erlastic import encode +from erlastic import encode, decode from erlastic.types import Atom from riak import RiakError -from riak.codecs import Msg -from riak.util import unix_time_millis, \ +from riak.codecs import Codec, Msg +from riak.util import bytes_to_str, unix_time_millis, \ datetime_from_unix_time_millis udef_a = Atom('undefined') @@ -24,7 +24,7 @@ tscell_empty = (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) -class TtbCodec(object): +class TtbCodec(Codec): ''' Erlang term-to-binary Encoding and decoding methods for TcpTransport ''' @@ -32,7 +32,32 @@ class TtbCodec(object): def __init__(self, **unused_args): super(TtbCodec, self).__init__(**unused_args) - def _encode_to_ts_cell(self, cell): + def parse_msg(self, msg_code, data): + if msg_code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ + msg_code != riak.pb.messages.MSG_CODE_TS_PUT_RESP and \ + msg_code != riak.pb.messages.MSG_CODE_ERROR_RESP: + raise RiakError("TTB can't parse code: {}".format(msg_code)) + if len(data) > 0: + return decode(data) + else: + return None + + def process_err_ttb(self, err_ttb): + resp_a = err_ttb[0] + if resp_a == rpberrorresp_a: + errmsg = err_ttb[1] + raise RiakError(bytes_to_str(errmsg)) + else: + raise RiakError( + "Unknown TTB error type: {}".format(resp_a)) + + def maybe_riak_error(self, msg_code, data=None): + err_data = super(TtbCodec, 
self).maybe_riak_error(msg_code, data) + if err_data: + err_ttb = decode(err_data) + self.process_err_ttb(err_ttb) + + def encode_to_ts_cell(self, cell): if cell is None: return tscell_empty else: @@ -55,7 +80,7 @@ def _encode_to_ts_cell(self, cell): raise RiakError("can't serialize type '{}', value '{}'" .format(t, cell)) - def _encode_timeseries_keyreq(self, table, key, is_delete=False): + def encode_timeseries_keyreq(self, table, key, is_delete=False): key_vals = None if isinstance(key, list): key_vals = key @@ -71,10 +96,19 @@ def _encode_timeseries_keyreq(self, table, key, is_delete=False): req_atom = tsdelreq_a req = req_atom, table.name, \ - [self._encode_to_ts_cell(k) for k in key_vals], udef_a + [self.encode_to_ts_cell(k) for k in key_vals], udef_a return Msg(mc, encode(req), rc) - def _encode_timeseries_put(self, tsobj): + def validate_timeseries_put_resp(self, resp_code, resp): + if resp is None and \ + resp_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: + return True + if resp is not None: + return True + else: + raise RiakError("missing response object") + + def encode_timeseries_put(self, tsobj): ''' Returns an Erlang-TTB encoded tuple with the appropriate data and metadata from a TsObject. @@ -91,7 +125,7 @@ def _encode_timeseries_put(self, tsobj): for row in tsobj.rows: req_r = [] for cell in row: - req_r.append(self._encode_to_ts_cell(cell)) + req_r.append(self.encode_to_ts_cell(cell)) req_t = (tsrow_a, req_r) req_rows.append(req_t) req = tsputreq_a, tsobj.table.name, udef_a, req_rows @@ -101,7 +135,7 @@ def _encode_timeseries_put(self, tsobj): else: raise RiakError("TsObject requires a list of rows") - def _decode_timeseries(self, resp_ttb, tsobj): + def decode_timeseries(self, resp_ttb, tsobj): """ Fills an TsObject with the appropriate data and metadata from a TTB-encoded TsGetResp / TsQueryResp. 
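    # Aside (not part of this patch): a minimal sketch of the term-to-binary
    # payload that encode_timeseries_keyreq produces at this point in the
    # series, mirroring test_encode_data_for_get in
    # riak/tests/test_timeseries_ttb.py further below.  Assumes erlastic is
    # installed; the table name and key values are the ones used by those
    # tests, and the millisecond timestamp is illustrative.
    from erlastic import encode
    from erlastic.types import Atom

    tsgetreq_a = Atom('tsgetreq')
    tscell_a = Atom('tscell')
    udef_a = Atom('undefined')

    # Keys are still lists of 6-field tscell tuples here; PATCH 154 below
    # flattens them to bare values.
    keylist = [
        (tscell_a, b'hash1', udef_a, udef_a, udef_a, udef_a),      # varchar
        (tscell_a, b'user2', udef_a, udef_a, udef_a, udef_a),      # varchar
        (tscell_a, udef_a, udef_a, 144378190987, udef_a, udef_a),  # timestamp
    ]
    payload = encode((tsgetreq_a, b'GeoCheckin', keylist, udef_a))
    # 'payload' is what Msg.data carries for a TTB timeseries get request.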
@@ -126,18 +160,18 @@ def _decode_timeseries(self, resp_ttb, tsobj): return tsobj resp_a = resp_ttb[0] - if resp_a == tsgetresp_a: + if resp_a == rpberrorresp_a: + self.process_err_ttb(resp_ttb) + elif resp_a == tsgetresp_a: # TODO resp_cols = resp_ttb[1] resp_rows = resp_ttb[2] for row_ttb in resp_rows: tsobj.rows.append( - self._decode_timeseries_row(row_ttb, None)) - # TODO - # elif resp_a == rpberrorresp_a: + self.decode_timeseries_row(row_ttb, None)) else: raise RiakError("Unknown TTB response type: {}".format(resp_a)) - def _decode_timeseries_row(self, tsrow_ttb, tscols=None): + def decode_timeseries_row(self, tsrow_ttb, tscols=None): """ Decodes a TTB-encoded TsRow into a list diff --git a/riak/tests/base.py b/riak/tests/base.py index ec0f397c..97b9607c 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -9,7 +9,6 @@ class IntegrationTestBase(object): - host = None pb_port = None http_port = None @@ -28,7 +27,7 @@ def randname(length=12): @classmethod def create_client(cls, host=None, http_port=None, pb_port=None, - protocol=None, credentials=None, **client_args): + protocol=None, credentials=None, **kwargs): host = host or HOST http_port = http_port or HTTP_PORT pb_port = pb_port or PB_PORT @@ -43,22 +42,26 @@ def create_client(cls, host=None, http_port=None, pb_port=None, credentials = credentials or SECURITY_CREDS + if hasattr(cls, 'client_options'): + kwargs.update(cls.client_options) + if hasattr(cls, 'logging_enabled') and cls.logging_enabled: cls.logger.debug("RiakClient(protocol='%s', host='%s', " + "pb_port='%d', http_port='%d', " + - "credentials='%s', client_args='%s')", + "credentials='%s', kwargs='%s')", protocol, host, pb_port, http_port, credentials, - client_args) + kwargs) return RiakClient(protocol=protocol, host=host, http_port=http_port, credentials=credentials, - pb_port=pb_port, **client_args) + pb_port=pb_port, + **kwargs) @classmethod def setUpClass(cls): diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index e50d2613..0ebbe552 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -67,7 +67,7 @@ def test_encode_decode_timestamp(self): def test_encode_data_for_get(self): c = PbufCodec() - msg = c._encode_timeseries_keyreq( + msg = c.encode_timeseries_keyreq( self.table, self.test_key, is_delete=False) req = riak.pb.riak_ts_pb2.TsGetReq() req.ParseFromString(msg.data) @@ -75,7 +75,7 @@ def test_encode_data_for_get(self): def test_encode_data_for_delete(self): c = PbufCodec() - msg = c._encode_timeseries_keyreq( + msg = c.encode_timeseries_keyreq( self.table, self.test_key, is_delete=True) req = riak.pb.riak_ts_pb2.TsDelReq() req.ParseFromString(msg.data) @@ -84,7 +84,7 @@ def test_encode_data_for_delete(self): def test_encode_data_for_put(self): c = PbufCodec() tsobj = TsObject(None, self.table, self.rows, None) - msg = c._encode_timeseries_put(tsobj) + msg = c.encode_timeseries_put(tsobj) req = riak.pb.riak_ts_pb2.TsPutReq() req.ParseFromString(msg.data) @@ -110,7 +110,7 @@ def test_encode_data_for_put(self): def test_encode_data_for_listkeys(self): c = PbufCodec(client_timeouts=True) - msg = c._encode_timeseries_listkeysreq(self.table, 1234) + msg = c.encode_timeseries_listkeysreq(self.table, 1234) req = riak.pb.riak_ts_pb2.TsListKeysReq() req.ParseFromString(msg.data) self.assertEqual(self.table.name, bytes_to_str(req.table)) @@ -161,7 +161,7 @@ def test_decode_data_from_query(self): tsobj = TsObject(None, self.table, [], []) c = PbufCodec() - c._decode_timeseries(tqr, tsobj) + 
c.decode_timeseries(tqr, tsobj) self.assertEqual(len(self.rows), len(tsobj.rows)) self.assertEqual(len(tqr.columns), len(tsobj.columns)) @@ -196,6 +196,8 @@ def test_decode_data_from_query(self): @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, 'Timeseries not supported or RUN_TIMESERIES is 0') class TimeseriesTests(IntegrationTestBase, unittest.TestCase): + client_options = {'transport_options': {'use_ttb': False}} + @classmethod def setUpClass(cls): super(TimeseriesTests, cls).setUpClass() @@ -276,34 +278,34 @@ def test_query_that_creates_table_using_interpolation(self): def test_query_that_returns_table_description(self): fmt = 'DESCRIBE {table}' query = fmt.format(table=table_name) - ts_obj = self.client.ts_query('GeoCheckin', query) + ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) self.assertEqual(len(ts_obj.columns), 5) self.assertEqual(len(ts_obj.rows), 5) def test_query_that_returns_table_description_using_interpolation(self): query = 'Describe {table}' - ts_obj = self.client.ts_query('GeoCheckin', query) + ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) self.assertEqual(len(ts_obj.columns), 5) self.assertEqual(len(ts_obj.rows), 5) def test_query_description_via_table(self): query = 'describe {table}' - table = Table(self.client, 'GeoCheckin') + table = Table(self.client, table_name) ts_obj = table.query(query) self.assertIsNotNone(ts_obj) self.assertEqual(len(ts_obj.columns), 5) self.assertEqual(len(ts_obj.rows), 5) def test_get_description(self): - ts_obj = self.client.ts_describe('GeoCheckin') + ts_obj = self.client.ts_describe(table_name) self.assertIsNotNone(ts_obj) self.assertEqual(len(ts_obj.columns), 5) self.assertEqual(len(ts_obj.rows), 5) def test_get_description_via_table(self): - table = Table(self.client, 'GeoCheckin') + table = Table(self.client, table_name) ts_obj = table.describe() self.assertIsNotNone(ts_obj) self.assertEqual(len(ts_obj.columns), 5) @@ -317,7 +319,7 @@ def test_query_that_returns_no_data(self): user = 'user1' """ query = fmt.format(table=table_name) - ts_obj = self.client.ts_query('GeoCheckin', query) + ts_obj = self.client.ts_query(table_name, query) self.assertEqual(len(ts_obj.columns), 0) self.assertEqual(len(ts_obj.rows), 0) @@ -328,7 +330,7 @@ def test_query_that_returns_no_data_using_interpolation(self): geohash = 'hash1' and user = 'user1' """ - ts_obj = self.client.ts_query('GeoCheckin', query) + ts_obj = self.client.ts_query(table_name, query) self.assertEqual(len(ts_obj.columns), 0) self.assertEqual(len(ts_obj.rows), 0) @@ -343,7 +345,7 @@ def test_query_that_matches_some_data(self): table=table_name, t1=self.tenMinsAgoMsec, t2=self.nowMsec) - ts_obj = self.client.ts_query('GeoCheckin', query) + ts_obj = self.client.ts_query(table_name, query) self.validate_data(ts_obj) def test_query_that_matches_some_data_using_interpolation(self): @@ -356,7 +358,7 @@ def test_query_that_matches_some_data_using_interpolation(self): query = fmt.format( t1=self.tenMinsAgoMsec, t2=self.nowMsec) - ts_obj = self.client.ts_query('GeoCheckin', query) + ts_obj = self.client.ts_query(table_name, query) self.validate_data(ts_obj) def test_query_that_matches_more_data(self): @@ -370,7 +372,7 @@ def test_query_that_matches_more_data(self): table=table_name, t1=self.twentyMinsAgoMsec, t2=self.nowMsec) - ts_obj = self.client.ts_query('GeoCheckin', query) + ts_obj = self.client.ts_query(table_name, query) j = 0 for i, want in enumerate(self.encoded_rows): if want[2] == 
self.twentyFiveMinsAgo: @@ -382,23 +384,23 @@ def test_query_that_matches_more_data(self): def test_get_with_invalid_key(self): key = ['hash1', 'user2'] with self.assertRaises(RiakError): - self.client.ts_get('GeoCheckin', key) + self.client.ts_get(table_name, key) def test_get_single_value(self): key = ['hash1', 'user2', self.fiveMinsAgo] - ts_obj = self.client.ts_get('GeoCheckin', key) + ts_obj = self.client.ts_get(table_name, key) self.assertIsNotNone(ts_obj) self.validate_data(ts_obj) def test_get_single_value_via_table(self): key = ['hash1', 'user2', self.fiveMinsAgo] - table = Table(self.client, 'GeoCheckin') + table = Table(self.client, table_name) ts_obj = table.get(key) self.assertIsNotNone(ts_obj) self.validate_data(ts_obj) def test_stream_keys(self): - table = Table(self.client, 'GeoCheckin') + table = Table(self.client, table_name) streamed_keys = [] for keylist in table.stream_keys(): self.assertNotEqual([], keylist) @@ -413,7 +415,13 @@ def test_stream_keys(self): def test_delete_single_value(self): key = ['hash1', 'user2', self.twentyFiveMinsAgo] - rslt = self.client.ts_delete('GeoCheckin', key) + rslt = self.client.ts_delete(table_name, key) self.assertTrue(rslt) - ts_obj = self.client.ts_get('GeoCheckin', key) + ts_obj = self.client.ts_get(table_name, key) self.assertEqual(len(ts_obj.rows), 0) + + def test_create_error_via_put(self): + table = Table(self.client, table_name) + ts_obj = table.new([]) + with self.assertRaises(RiakError): + ts_obj.store() diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 81c0554c..ba368a1c 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -1,11 +1,13 @@ # -*- coding: utf-8 -*- import datetime +import logging import six import unittest from erlastic import decode, encode from erlastic.types import Atom +from riak import RiakError from riak.table import Table from riak.ts_object import TsObject from riak.codecs.ttb import TtbCodec @@ -52,7 +54,7 @@ def test_encode_data_for_get(self): test_key = ['hash1', 'user2', ts0] c = TtbCodec() - msg = c._encode_timeseries_keyreq(self.table, test_key) + msg = c.encode_timeseries_keyreq(self.table, test_key) self.assertEqual(req_test, msg.data) # def test_decode_riak_error(self): @@ -84,7 +86,7 @@ def test_decode_data_from_get(self): tsobj = TsObject(None, self.table, [], []) c = TtbCodec() - c._decode_timeseries(decode(rsp_ttb), tsobj) + c.decode_timeseries(decode(rsp_ttb), tsobj) for i in range(0, 1): self.assertEqual(tsrow_a, rows[i][0]) @@ -145,13 +147,15 @@ def test_encode_data_for_put(self): tsobj = TsObject(None, self.table, rows_to_encode, None) c = TtbCodec() - msg = c._encode_timeseries_put(tsobj) + msg = c.encode_timeseries_put(tsobj) self.assertEqual(req_test, msg.data) @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, 'Timeseries not supported or RUN_TIMESERIES is 0') class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): + client_options = {'transport_options': {'use_ttb': True}} + @classmethod def setUpClass(cls): super(TimeseriesTtbTests, cls).setUpClass() @@ -164,10 +168,7 @@ def test_store_and_fetch_ttb(self): twentyMinsAgo = fifteenMinsAgo - fiveMins twentyFiveMinsAgo = twentyMinsAgo - fiveMins - opts = {'use_ttb': True} - client = self.create_client(transport_options=opts) - - table = client.table(table_name) + table = self.client.table(table_name) rows = [ ['hash1', 'user2', twentyFiveMinsAgo, 'typhoon', 90.3], ['hash1', 'user2', twentyMinsAgo, 'hurricane', 82.3], @@ -181,9 +182,16 
@@ def test_store_and_fetch_ttb(self): for r in rows: k = r[0:3] - ts_obj = client.ts_get(table_name, k) + ts_obj = self.client.ts_get(table_name, k) self.assertIsNotNone(ts_obj) self.assertEqual(len(ts_obj.rows), 1) self.assertEqual(len(ts_obj.rows[0]), 5) - client.close() + def test_create_error_via_put(self): + table = Table(self.client, table_name) + ts_obj = table.new([]) + with self.assertRaises(RiakError) as cm: + ts_obj.store() + logging.debug( + "[test_timeseries_ttb] saw exception: {}" + .format(cm.exception)) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 8972e480..a8433f30 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -105,12 +105,12 @@ def _auth(self): Note: Riak will sleep for a short period of time upon a failed auth request/response to prevent denial of service attacks """ - c = PbufCodec() + codec = PbufCodec() username = self._client._credentials.username password = self._client._credentials.password if not password: password = '' - msg = c._encode_auth(username, password) + msg = codec.encode_auth(username, password) resp_code, _ = self._non_connect_send_recv_msg(msg) if resp_code == riak.pb.messages.MSG_CODE_AUTH_RESP: return True diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index 46fb5941..3cf0e974 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -16,9 +16,10 @@ class PbufStream(object): _expect = None - def __init__(self, transport): + def __init__(self, transport, codec): self.finished = False self.transport = transport + self.codec = codec self.resource = None def __iter__(self): @@ -29,12 +30,11 @@ def next(self): raise StopIteration try: - # TODO RTS-842 - should be part of passed-in codec resp_code, data = self.transport._recv_msg() - self.transport._maybe_riak_error(resp_code, data) + self.codec.maybe_riak_error(resp_code, data) expect = self._expect - self.transport._maybe_incorrect_code(resp_code, expect) - resp = self.transport._parse_msg(expect, data, is_ttb=False) + self.codec.maybe_incorrect_code(resp_code, expect) + resp = self.codec.parse_msg(expect, data) except: self.finished = True raise @@ -137,8 +137,8 @@ class PbufIndexStream(PbufStream): _expect = riak.pb.messages.MSG_CODE_INDEX_RESP - def __init__(self, transport, index, return_terms=False): - super(PbufIndexStream, self).__init__(transport) + def __init__(self, transport, codec, index, return_terms=False): + super(PbufIndexStream, self).__init__(transport, codec) self.index = index self.return_terms = return_terms @@ -174,10 +174,6 @@ class PbufTsKeyStream(PbufStream, TtbCodec): _expect = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_RESP - def __init__(self, transport, codec): - super(PbufTsKeyStream, self).__init__(transport) - self._codec = codec - def next(self): response = super(PbufTsKeyStream, self).next() @@ -186,7 +182,7 @@ def next(self): keys = [] for tsrow in response.keys: - keys.append(self._codec._decode_timeseries_row(tsrow)) + keys.append(self.codec.decode_timeseries_row(tsrow)) return keys diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index ee8898f2..151f76c2 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -1,18 +1,13 @@ -# TODO RTS-842 codecs should return msg codes too -import erlastic import six import riak.pb.messages from riak import RiakError -from riak.codecs import Msg +from riak.codecs import Codec, Msg from riak.codecs.pbuf import PbufCodec from 
riak.codecs.ttb import TtbCodec from riak.transports.transport import Transport from riak.ts_object import TsObject -# TODO RTS-842 ideally these would not be needed -from riak.util import bytes_to_str - from riak.transports.tcp.connection import TcpConnection from riak.transports.tcp.stream import (PbufKeyStream, PbufMapredStream, @@ -78,9 +73,10 @@ def ping(self): """ Ping the remote server """ - msg = Msg(riak.pb.messages.MSG_CODE_PING_REQ, None, - riak.pb.messages.MSG_CODE_PING_RESP) - resp_code, _ = self._request(msg) + msg_code = riak.pb.messages.MSG_CODE_PING_REQ + codec = self._get_codec(msg_code) + msg = codec.encode_ping() + resp_code, _ = self._request(msg, codec) if resp_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: @@ -95,21 +91,21 @@ def get_server_info(self): codec = PbufCodec() msg = Msg(riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, None, riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) - resp_code, resp = self._request(msg) - return codec._decode_get_server_info(resp) + resp_code, resp = self._request(msg, codec) + return codec.decode_get_server_info(resp) def _get_client_id(self): msg_code = riak.pb.messages.MSG_CODE_GET_CLIENT_ID_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_client_id() - resp_code, resp = self._request(msg) - return codec._decode_get_client_id(resp) + msg = codec.encode_get_client_id() + resp_code, resp = self._request(msg, codec) + return codec.decode_get_client_id(resp) def _set_client_id(self, client_id): msg_code = riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ codec = self._get_codec(msg_code) - msg = codec._encode_set_client_id(client_id) - resp_code, resp = self._request(msg) + msg = codec.encode_set_client_id(client_id) + resp_code, resp = self._request(msg, codec) self._client_id = client_id client_id = property(_get_client_id, _set_client_id, @@ -122,20 +118,20 @@ def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, """ msg_code = riak.pb.messages.MSG_CODE_GET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get(robj, r, pr, - timeout, basic_quorum, - notfound_ok) - resp_code, resp = self._request(msg) - return codec._decode_get(robj, resp) + msg = codec.encode_get(robj, r, pr, + timeout, basic_quorum, + notfound_ok) + resp_code, resp = self._request(msg, codec) + return codec.decode_get(robj, resp) def put(self, robj, w=None, dw=None, pw=None, return_body=True, if_none_match=False, timeout=None): msg_code = riak.pb.messages.MSG_CODE_PUT_REQ codec = self._get_codec(msg_code) - msg = codec._encode_put(robj, w, dw, pw, return_body, - if_none_match, timeout) - resp_code, resp = self._request(msg) - return codec._decode_put(robj, resp) + msg = codec.encode_put(robj, w, dw, pw, return_body, + if_none_match, timeout) + resp_code, resp = self._request(msg, codec) + return codec.decode_put(robj, resp) def ts_describe(self, table): query = 'DESCRIBE {table}'.format(table=table.name) @@ -144,35 +140,24 @@ def ts_describe(self, table): def ts_get(self, table, key): msg_code = riak.pb.messages.MSG_CODE_TS_GET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_timeseries_keyreq(table, key) - # TODO RTS-842 is_ttb - resp_code, resp = self._request(msg, self._use_ttb) + msg = codec.encode_timeseries_keyreq(table, key) + resp_code, resp = self._request(msg, codec) tsobj = TsObject(self._client, table, [], None) - codec._decode_timeseries(resp, tsobj) + codec.decode_timeseries(resp, tsobj) return tsobj def ts_put(self, tsobj): msg_code = riak.pb.messages.MSG_CODE_TS_PUT_REQ codec = 
self._get_codec(msg_code) - msg = codec._encode_timeseries_put(tsobj) - # logging.debug("pbc/transport ts_put _use_ttb: '%s'", - # self._use_ttb) - # TODO RTS-842 use_ttb - resp_code, resp = self._request(msg, self._use_ttb) - if self._use_ttb and \ - resp is None and \ - resp_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: - return True - if resp is not None: - return True - else: - raise RiakError("missing response object") + msg = codec.encode_timeseries_put(tsobj) + resp_code, resp = self._request(msg, codec) + return codec.validate_timeseries_put_resp(resp_code, resp) def ts_delete(self, table, key): msg_code = riak.pb.messages.MSG_CODE_TS_DEL_REQ codec = self._get_codec(msg_code) - msg = codec._encode_timeseries_keyreq(table, key, is_delete=True) - resp_code, resp = self._request(msg) + msg = codec.encode_timeseries_keyreq(table, key, is_delete=True) + resp_code, resp = self._request(msg, codec) if resp is not None: return True else: @@ -181,10 +166,10 @@ def ts_delete(self, table, key): def ts_query(self, table, query, interpolations=None): msg_code = riak.pb.messages.MSG_CODE_TS_QUERY_REQ codec = self._get_codec(msg_code) - msg = codec._encode_timeseries_query(table, query, interpolations) - resp_code, resp = self._request(msg) + msg = codec.encode_timeseries_query(table, query, interpolations) + resp_code, resp = self._request(msg, codec) tsobj = TsObject(self._client, table, [], []) - codec._decode_timeseries(resp, tsobj) + codec.decode_timeseries(resp, tsobj) return tsobj def ts_stream_keys(self, table, timeout=None): @@ -194,16 +179,16 @@ def ts_stream_keys(self, table, timeout=None): """ msg_code = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ codec = self._get_codec(msg_code) - msg = codec._encode_timeseries_listkeysreq(table, timeout) + msg = codec.encode_timeseries_listkeysreq(table, timeout) self._send_msg(msg.msg_code, msg.data) return PbufTsKeyStream(self, codec) - def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, - timeout=None): + def delete(self, robj, rw=None, r=None, w=None, dw=None, + pr=None, pw=None, timeout=None): msg_code = riak.pb.messages.MSG_CODE_DEL_REQ codec = self._get_codec(msg_code) - msg = codec._encode_delete(robj, rw, r, w, dw, pr, pw, timeout) - resp_code, resp = self._request(msg) + msg = codec.encode_delete(robj, rw, r, w, dw, pr, pw, timeout) + resp_code, resp = self._request(msg, codec) return self def get_keys(self, bucket, timeout=None): @@ -213,7 +198,7 @@ def get_keys(self, bucket, timeout=None): msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ codec = self._get_codec(msg_code) stream = self.stream_keys(bucket, timeout=timeout) - return codec._decode_get_keys(stream) + return codec.decode_get_keys(stream) def stream_keys(self, bucket, timeout=None): """ @@ -222,9 +207,9 @@ def stream_keys(self, bucket, timeout=None): """ msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ codec = self._get_codec(msg_code) - msg = codec._encode_stream_keys(bucket, timeout) + msg = codec.encode_stream_keys(bucket, timeout) self._send_msg(msg.msg_code, msg.data) - return PbufKeyStream(self) + return PbufKeyStream(self, codec) def get_buckets(self, bucket_type=None, timeout=None): """ @@ -232,9 +217,9 @@ def get_buckets(self, bucket_type=None, timeout=None): """ msg_code = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_buckets(bucket_type, - timeout, streaming=False) - resp_code, resp = self._request(msg) + msg = codec.encode_get_buckets(bucket_type, + timeout, streaming=False) + 
resp_code, resp = self._request(msg, codec) return resp.buckets def stream_buckets(self, bucket_type=None, timeout=None): @@ -246,10 +231,10 @@ def stream_buckets(self, bucket_type=None, timeout=None): 'supported') msg_code = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_buckets(bucket_type, - timeout, streaming=True) + msg = codec.encode_get_buckets(bucket_type, + timeout, streaming=True) self._send_msg(msg.msg_code, msg.data) - return PbufBucketStream(self) + return PbufBucketStream(self, codec) def get_bucket_props(self, bucket): """ @@ -257,9 +242,9 @@ def get_bucket_props(self, bucket): """ msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_bucket_props(bucket) - resp_code, resp = self._request(msg) - return codec._decode_bucket_props(resp.props) + msg = codec.encode_get_bucket_props(bucket) + resp_code, resp = self._request(msg, codec) + return codec.decode_bucket_props(resp.props) def set_bucket_props(self, bucket, props): """ @@ -272,8 +257,8 @@ def set_bucket_props(self, bucket, props): 'allow_mult properties over PBC') msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_set_bucket_props(bucket, props) - resp_code, resp = self._request(msg) + msg = codec.encode_set_bucket_props(bucket, props) + resp_code, resp = self._request(msg, codec) return True def clear_bucket_props(self, bucket): @@ -284,8 +269,8 @@ def clear_bucket_props(self, bucket): return False msg_code = riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_clear_bucket_props(bucket) - self._request(msg) + msg = codec.encode_clear_bucket_props(bucket) + self._request(msg, codec) return True def get_bucket_type_props(self, bucket_type): @@ -295,9 +280,9 @@ def get_bucket_type_props(self, bucket_type): self._check_bucket_types(bucket_type) msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_bucket_type_props(bucket_type) - resp_code, resp = self._request(msg) - return codec._decode_bucket_props(resp.props) + msg = codec.encode_get_bucket_type_props(bucket_type) + resp_code, resp = self._request(msg, codec) + return codec.decode_bucket_props(resp.props) def set_bucket_type_props(self, bucket_type, props): """ @@ -306,8 +291,8 @@ def set_bucket_type_props(self, bucket_type, props): self._check_bucket_types(bucket_type) msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ codec = self._get_codec(msg_code) - msg = codec._encode_set_bucket_type_props(bucket_type, props) - resp_code, resp = self._request(msg) + msg = codec.encode_set_bucket_type_props(bucket_type, props) + resp_code, resp = self._request(msg, codec) return True def mapred(self, inputs, query, timeout=None): @@ -333,9 +318,9 @@ def stream_mapred(self, inputs, query, timeout=None): msg_code = riak.pb.messages.MSG_CODE_MAP_RED_REQ codec = self._get_codec(msg_code) content = self._construct_mapred_json(inputs, query, timeout) - msg = codec._encode_stream_mapred(content) + msg = codec.encode_stream_mapred(content) self._send_msg(msg.msg_code, msg.data) - return PbufMapredStream(self) + return PbufMapredStream(self, codec) def get_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, @@ -350,13 +335,13 @@ def get_index(self, bucket, index, startkey, endkey=None, msg_code = riak.pb.messages.MSG_CODE_INDEX_REQ codec = 
self._get_codec(msg_code) - msg = codec._encode_index_req(bucket, index, startkey, endkey, - return_terms, max_results, - continuation, timeout, - term_regex, streaming=False) - resp_code, resp = self._request(msg) - return codec._decode_index_req(resp, index, - return_terms, max_results) + msg = codec.encode_index_req(bucket, index, startkey, endkey, + return_terms, max_results, + continuation, timeout, + term_regex, streaming=False) + resp_code, resp = self._request(msg, codec) + return codec.decode_index_req(resp, index, + return_terms, max_results) def stream_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, @@ -369,12 +354,12 @@ def stream_index(self, bucket, index, startkey, endkey=None, "supported") msg_code = riak.pb.messages.MSG_CODE_INDEX_REQ codec = self._get_codec(msg_code) - msg = codec._encode_index_req(bucket, index, startkey, endkey, - return_terms, max_results, - continuation, timeout, - term_regex, streaming=True) + msg = codec.encode_index_req(bucket, index, startkey, endkey, + return_terms, max_results, + continuation, timeout, + term_regex, streaming=True) self._send_msg(msg.msg_code, msg.data) - return PbufIndexStream(self, index, return_terms) + return PbufIndexStream(self, codec, index, return_terms) def create_search_index(self, index, schema=None, n_val=None, timeout=None): @@ -383,8 +368,8 @@ def create_search_index(self, index, schema=None, n_val=None, "supported for this version") msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ codec = self._get_codec(msg_code) - msg = codec._encode_create_search_index(index, schema, n_val, timeout) - self._request(msg) + msg = codec.encode_create_search_index(index, schema, n_val, timeout) + self._request(msg, codec) return True def get_search_index(self, index): @@ -393,10 +378,10 @@ def get_search_index(self, index): "supported for this version") msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_search_index(index) - resp_code, resp = self._request(msg) + msg = codec.encode_get_search_index(index) + resp_code, resp = self._request(msg, codec) if len(resp.index) > 0: - return codec._decode_search_index(resp.index[0]) + return codec.decode_search_index(resp.index[0]) else: raise RiakError('notfound') @@ -406,9 +391,9 @@ def list_search_indexes(self): "supported for this version") msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_list_search_indexes() - resp_code, resp = self._request(msg) - return [codec._decode_search_index(index) for index in resp.index] + msg = codec.encode_list_search_indexes() + resp_code, resp = self._request(msg, codec) + return [codec.decode_search_index(index) for index in resp.index] def delete_search_index(self, index): if not self.pb_search_admin(): @@ -416,8 +401,8 @@ def delete_search_index(self, index): "supported for this version") msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ codec = self._get_codec(msg_code) - msg = codec._encode_delete_search_index(index) - self._request(msg) + msg = codec.encode_delete_search_index(index) + self._request(msg, codec) return True def create_search_schema(self, schema, content): @@ -426,8 +411,8 @@ def create_search_schema(self, schema, content): "supported for this version") msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ codec = self._get_codec(msg_code) - msg = codec._encode_create_search_schema(schema, content) - 
self._request(msg) + msg = codec.encode_create_search_schema(schema, content) + self._request(msg, codec) return True def get_search_schema(self, schema): @@ -436,9 +421,9 @@ def get_search_schema(self, schema): "supported for this version") msg_code = riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_search_schema(schema) - resp_code, resp = self._request(msg) - return codec._decode_get_search_schema(resp) + msg = codec.encode_get_search_schema(schema) + resp_code, resp = self._request(msg, codec) + return codec.decode_get_search_schema(resp) def search(self, index, query, **kwargs): # TODO RTS-842 NUKE THIS @@ -449,9 +434,9 @@ def search(self, index, query, **kwargs): query = query.encode('utf8') msg_code = riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ codec = self._get_codec(msg_code) - msg = codec._encode_search(index, query, **kwargs) - resp_code, resp = self._request(msg) - return codec._decode_search(resp) + msg = codec.encode_search(index, query, **kwargs) + resp_code, resp = self._request(msg, codec) + return codec.decode_search(resp) def get_counter(self, bucket, key, **kwargs): if not bucket.bucket_type.is_default(): @@ -462,8 +447,8 @@ def get_counter(self, bucket, key, **kwargs): raise NotImplementedError("Counters are not supported") msg_code = riak.pb.messages.MSG_CODE_COUNTER_GET_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_counter(bucket, key, **kwargs) - resp_code, resp = self._request(msg) + msg = codec.encode_get_counter(bucket, key, **kwargs) + resp_code, resp = self._request(msg, codec) if resp.HasField('value'): return resp.value else: @@ -478,8 +463,8 @@ def update_counter(self, bucket, key, value, **kwargs): raise NotImplementedError("Counters are not supported") msg_code = riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ codec = self._get_codec(msg_code) - msg = codec._encode_update_counter(bucket, key, value, **kwargs) - resp_code, resp = self._request(msg) + msg = codec.encode_update_counter(bucket, key, value, **kwargs) + resp_code, resp = self._request(msg, codec) if resp.HasField('value'): return resp.value else: @@ -493,9 +478,9 @@ def fetch_datatype(self, bucket, key, **kwargs): raise NotImplementedError("Datatypes are not supported.") msg_code = riak.pb.messages.MSG_CODE_DT_FETCH_REQ codec = self._get_codec(msg_code) - msg = codec._encode_fetch_datatype(bucket, key, **kwargs) - resp_code, resp = self._request(msg) - return codec._decode_dt_fetch(resp) + msg = codec.encode_fetch_datatype(bucket, key, **kwargs) + resp_code, resp = self._request(msg, codec) + return codec.decode_dt_fetch(resp) def update_datatype(self, datatype, **kwargs): if datatype.bucket.bucket_type.is_default(): @@ -505,9 +490,9 @@ def update_datatype(self, datatype, **kwargs): raise NotImplementedError("Datatypes are not supported.") msg_code = riak.pb.messages.MSG_CODE_DT_UPDATE_REQ codec = self._get_codec(msg_code) - msg = codec._encode_update_datatype(datatype, **kwargs) - resp_code, resp = self._request(msg) - codec._decode_update_datatype(datatype, resp, **kwargs) + msg = codec.encode_update_datatype(datatype, **kwargs) + resp_code, resp = self._request(msg, codec) + codec.decode_update_datatype(datatype, resp, **kwargs) return True def get_preflist(self, bucket, key): @@ -522,63 +507,27 @@ def get_preflist(self, bucket, key): """ msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ codec = self._get_codec(msg_code) - msg = codec._encode_get_preflist(bucket, key) - resp_code, resp = 
self._request(msg) - return [codec._decode_preflist(item) for item in resp.preflist] + msg = codec.encode_get_preflist(bucket, key) + resp_code, resp = self._request(msg, codec) + return [codec.decode_preflist(item) for item in resp.preflist] # TODO RTS-842 is_ttb - def _parse_msg(self, code, packet, is_ttb=False): - if is_ttb: - if code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ - code != riak.pb.messages.MSG_CODE_TS_PUT_RESP: - raise RiakError("TTB can't parse code: %d" % code) - if len(packet) > 0: - return erlastic.decode(packet) - else: - return None - else: - try: - pbclass = riak.pb.messages.MESSAGE_CLASSES[code] - except KeyError: - pbclass = None - - if pbclass is None: - return None - - pbo = pbclass() - pbo.ParseFromString(packet) - return pbo - - # TODO RTS-842 move to base Codec object - def _maybe_riak_error(self, msg_code, data=None, is_ttb=False): - if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: - if data is None: - raise RiakError('no error provided!') - # TODO RTS-842 TTB-specific version - err = self._parse_msg(msg_code, data, is_ttb) - if err is None: - raise RiakError('no error provided!') - else: - raise RiakError(bytes_to_str(err.errmsg)) - - def _maybe_incorrect_code(self, resp_code, expect=None): - if expect and resp_code != expect: - raise RiakError("unexpected message code: %d, expected %d" - % (resp_code, expect)) - - # TODO RTS-842 is_ttb - def _request(self, msg, is_ttb=False): + def _request(self, msg, codec=None): if isinstance(msg, Msg): msg_code = msg.msg_code data = msg.data expect = msg.resp_code else: raise ValueError('expected a Msg argument') + + if not isinstance(codec, Codec): + raise ValueError('expected a Codec argument') + resp_code, data = self._send_recv(msg_code, data) - self._maybe_riak_error(resp_code, data, is_ttb) - self._maybe_incorrect_code(resp_code, expect) + codec.maybe_riak_error(resp_code, data) + codec.maybe_incorrect_code(resp_code, expect) if resp_code in riak.pb.messages.MESSAGE_CLASSES: - msg = self._parse_msg(resp_code, data, is_ttb) + msg = codec.parse_msg(resp_code, data) else: raise Exception("unknown msg code %s" % resp_code) # logging.debug("tcp/connection received resp_code %d msg %s", From a2f6b92a4a19b68244cc00a5a85e665f66378754 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 6 Apr 2016 09:09:38 -0700 Subject: [PATCH 153/324] Updating for TTB changes server-side. 
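The key behavioural change in this patch is how TTB errors surface: TtbCodec.maybe_riak_error becomes a no-op, and parse_msg itself decodes the term and raises through maybe_err_ttb when the leading atom is rpberrorresp. Below is a minimal sketch of that path; it is not part of the patch. The message code 104 (MSG_CODE_TS_TTB) and the tuple layout come from the diff below, while the error text is made up.

    from erlastic import encode
    from erlastic.types import Atom

    from riak import RiakError
    from riak.codecs.ttb import MSG_CODE_TS_TTB, TtbCodec

    codec = TtbCodec()
    # A TTB error response is a tuple whose first element is the atom
    # 'rpberrorresp' and whose second element is the error message.
    err_term = (Atom('rpberrorresp'), b'illustrative error message')

    try:
        codec.parse_msg(MSG_CODE_TS_TTB, encode(err_term))
    except RiakError as e:
        print(e)   # the decoded error message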
--- riak/codecs/__init__.py | 4 ++-- riak/codecs/pbuf.py | 3 ++- riak/codecs/ttb.py | 36 +++++++++++++++---------------- riak/tests/test_timeseries.py | 4 ++-- riak/tests/test_timeseries_ttb.py | 2 +- riak/transports/tcp/connection.py | 21 ------------------ riak/transports/tcp/transport.py | 18 +++++++--------- 7 files changed, 32 insertions(+), 56 deletions(-) diff --git a/riak/codecs/__init__.py b/riak/codecs/__init__.py index 0e221d3b..e2ef78de 100644 --- a/riak/codecs/__init__.py +++ b/riak/codecs/__init__.py @@ -16,8 +16,8 @@ def maybe_incorrect_code(self, resp_code, expect=None): raise RiakError("unexpected message code: %d, expected %d" % (resp_code, expect)) - def maybe_riak_error(self, msg_code, data=None): - if msg_code is riak.pb.messages.MSG_CODE_ERROR_RESP: + def maybe_riak_error(self, err_code, msg_code, data=None): + if msg_code == err_code: if data is None: raise RiakError('no error provided!') return data diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index b4da1e16..2e08ec5c 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -97,7 +97,8 @@ def parse_msg(self, msg_code, data): return pbo def maybe_riak_error(self, msg_code, data=None): - err_data = super(PbufCodec, self).maybe_riak_error(msg_code, data) + err_code = riak.pb.messages.MSG_CODE_ERROR_RESP + err_data = super(PbufCodec, self).maybe_riak_error(err_code, msg_code, data) if err_data: err = self.parse_msg(msg_code, err_data) raise RiakError(bytes_to_str(err.errmsg)) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 3ac7651a..98e75da3 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -23,6 +23,9 @@ tscell_empty = (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) +# TODO RTS-842 +MSG_CODE_TS_TTB = 104 + class TtbCodec(Codec): ''' @@ -33,29 +36,25 @@ def __init__(self, **unused_args): super(TtbCodec, self).__init__(**unused_args) def parse_msg(self, msg_code, data): - if msg_code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ - msg_code != riak.pb.messages.MSG_CODE_TS_PUT_RESP and \ - msg_code != riak.pb.messages.MSG_CODE_ERROR_RESP: + if msg_code != MSG_CODE_TS_TTB and \ + msg_code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ + msg_code != riak.pb.messages.MSG_CODE_TS_PUT_RESP: raise RiakError("TTB can't parse code: {}".format(msg_code)) if len(data) > 0: - return decode(data) + decoded = decode(data) + self.maybe_err_ttb(decoded) + return decoded else: return None - def process_err_ttb(self, err_ttb): + def maybe_err_ttb(self, err_ttb): resp_a = err_ttb[0] if resp_a == rpberrorresp_a: errmsg = err_ttb[1] raise RiakError(bytes_to_str(errmsg)) - else: - raise RiakError( - "Unknown TTB error type: {}".format(resp_a)) def maybe_riak_error(self, msg_code, data=None): - err_data = super(TtbCodec, self).maybe_riak_error(msg_code, data) - if err_data: - err_ttb = decode(err_data) - self.process_err_ttb(err_ttb) + pass def encode_to_ts_cell(self, cell): if cell is None: @@ -87,14 +86,13 @@ def encode_timeseries_keyreq(self, table, key, is_delete=False): else: raise ValueError("key must be a list") - mc = riak.pb.messages.MSG_CODE_TS_GET_REQ - rc = riak.pb.messages.MSG_CODE_TS_GET_RESP + mc = MSG_CODE_TS_TTB + rc = MSG_CODE_TS_TTB req_atom = tsgetreq_a if is_delete: - mc = riak.pb.messages.MSG_CODE_TS_DEL_REQ - rc = riak.pb.messages.MSG_CODE_TS_DEL_RESP req_atom = tsdelreq_a + # TODO RTS-842 timeout is last req = req_atom, table.name, \ [self.encode_to_ts_cell(k) for k in key_vals], udef_a return Msg(mc, encode(req), rc) @@ -128,9 +126,9 @@ def encode_timeseries_put(self, tsobj): 
req_r.append(self.encode_to_ts_cell(cell)) req_t = (tsrow_a, req_r) req_rows.append(req_t) - req = tsputreq_a, tsobj.table.name, udef_a, req_rows - mc = riak.pb.messages.MSG_CODE_TS_PUT_REQ - rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP + req = tsputreq_a, tsobj.table.name, [], req_rows + mc = MSG_CODE_TS_TTB + rc = MSG_CODE_TS_TTB return Msg(mc, encode(req), rc) else: raise RiakError("TsObject requires a list of rows") diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries.py index 0ebbe552..5d9e81cb 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries.py @@ -417,8 +417,8 @@ def test_delete_single_value(self): key = ['hash1', 'user2', self.twentyFiveMinsAgo] rslt = self.client.ts_delete(table_name, key) self.assertTrue(rslt) - ts_obj = self.client.ts_get(table_name, key) - self.assertEqual(len(ts_obj.rows), 0) + with self.assertRaises(RiakError): + self.client.ts_get(table_name, key) def test_create_error_via_put(self): table = Table(self.client, table_name) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index ba368a1c..5da40aab 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -137,7 +137,7 @@ def test_encode_data_for_put(self): (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) ]) rows = [r0, r1] - req = tsputreq_a, str_to_bytes(table_name), udef_a, rows + req = tsputreq_a, str_to_bytes(table_name), [], rows req_test = encode(req) rows_to_encode = [ diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index a8433f30..d0ffab13 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -21,9 +21,6 @@ class TcpConnection(object): """ Connection-related methods for TcpTransport. 
""" - def __init__(self): - self._ttb_enabled = False - def _encode_msg(self, msg_code, data=None): if data is None: return struct.pack("!iB", 1, msg_code) @@ -80,24 +77,6 @@ def _starttls(self): else: return False - def _enable_ttb(self): - if self._ttb_enabled: - return True - else: - logging.debug("tcp/connection enabling TTB") - req = riak.pb.riak_pb2.RpbToggleEncodingReq() - req.use_native = True - data = req.SerializeToString() - resp_code, _ = self._send_recv( - riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_REQ, - data) - if resp_code == riak.pb.messages.MSG_CODE_TOGGLE_ENCODING_RESP: - self._ttb_enabled = True - logging.debug("tcp/connection TTB IS ENABLED") - return True - else: - return False - def _auth(self): """ Perform an authorization request against Riak diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 151f76c2..d7e7954b 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -4,7 +4,7 @@ from riak import RiakError from riak.codecs import Codec, Msg from riak.codecs.pbuf import PbufCodec -from riak.codecs.ttb import TtbCodec +from riak.codecs.ttb import TtbCodec, MSG_CODE_TS_TTB from riak.transports.transport import Transport from riak.ts_object import TsObject @@ -46,8 +46,6 @@ def _get_pbuf_codec(self): def _get_ttb_codec(self): if self._use_ttb: - if not self._enable_ttb(): - raise RiakError('could not switch to TTB encoding!') if not self._ttb_c: self._ttb_c = TtbCodec() codec = self._ttb_c @@ -56,7 +54,9 @@ def _get_ttb_codec(self): return codec def _get_codec(self, msg_code): - if msg_code == riak.pb.messages.MSG_CODE_TS_GET_REQ: + if msg_code == MSG_CODE_TS_TTB: + codec = self._get_ttb_codec() + elif msg_code == riak.pb.messages.MSG_CODE_TS_GET_REQ: codec = self._get_ttb_codec() elif msg_code == riak.pb.messages.MSG_CODE_TS_PUT_REQ: codec = self._get_ttb_codec() @@ -138,7 +138,7 @@ def ts_describe(self, table): return self.ts_query(table, query) def ts_get(self, table, key): - msg_code = riak.pb.messages.MSG_CODE_TS_GET_REQ + msg_code = MSG_CODE_TS_TTB codec = self._get_codec(msg_code) msg = codec.encode_timeseries_keyreq(table, key) resp_code, resp = self._request(msg, codec) @@ -147,7 +147,7 @@ def ts_get(self, table, key): return tsobj def ts_put(self, tsobj): - msg_code = riak.pb.messages.MSG_CODE_TS_PUT_REQ + msg_code = MSG_CODE_TS_TTB codec = self._get_codec(msg_code) msg = codec.encode_timeseries_put(tsobj) resp_code, resp = self._request(msg, codec) @@ -511,7 +511,6 @@ def get_preflist(self, bucket, key): resp_code, resp = self._request(msg, codec) return [codec.decode_preflist(item) for item in resp.preflist] - # TODO RTS-842 is_ttb def _request(self, msg, codec=None): if isinstance(msg, Msg): msg_code = msg.msg_code @@ -526,10 +525,9 @@ def _request(self, msg, codec=None): resp_code, data = self._send_recv(msg_code, data) codec.maybe_riak_error(resp_code, data) codec.maybe_incorrect_code(resp_code, expect) - if resp_code in riak.pb.messages.MESSAGE_CLASSES: + if resp_code == MSG_CODE_TS_TTB or \ + resp_code in riak.pb.messages.MESSAGE_CLASSES: msg = codec.parse_msg(resp_code, data) else: raise Exception("unknown msg code %s" % resp_code) - # logging.debug("tcp/connection received resp_code %d msg %s", - # resp_code, msg) return resp_code, msg From b946f0fe3eeff47106480ffac3a3c9a1ab301fe8 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 6 Apr 2016 17:30:52 -0700 Subject: [PATCH 154/324] Continuing to work on TTB changes --- riak/codecs/ttb.py | 20 ++++----- 
riak/tests/test_timeseries_ttb.py | 69 +++++++++++-------------------- 2 files changed, 33 insertions(+), 56 deletions(-) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 98e75da3..63479a55 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -21,8 +21,6 @@ tsrow_a = Atom('tsrow') tscell_a = Atom('tscell') -tscell_empty = (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) - # TODO RTS-842 MSG_CODE_TS_TTB = 104 @@ -58,22 +56,21 @@ def maybe_riak_error(self, msg_code, data=None): def encode_to_ts_cell(self, cell): if cell is None: - return tscell_empty + return [] else: if isinstance(cell, datetime.datetime): ts = unix_time_millis(cell) - return (tscell_a, udef_a, udef_a, ts, udef_a, udef_a) + return ts elif isinstance(cell, bool): - return (tscell_a, udef_a, udef_a, udef_a, cell, udef_a) + return cell elif isinstance(cell, six.text_type) or \ isinstance(cell, six.binary_type) or \ isinstance(cell, six.string_types): - return (tscell_a, cell, - udef_a, udef_a, udef_a, udef_a) + return cell elif (isinstance(cell, six.integer_types)): - return (tscell_a, udef_a, cell, udef_a, udef_a, udef_a) + return cell elif isinstance(cell, float): - return (tscell_a, udef_a, udef_a, udef_a, udef_a, cell) + return cell else: t = type(cell) raise RiakError("can't serialize type '{}', value '{}'" @@ -116,7 +113,7 @@ def encode_timeseries_put(self, tsobj): :rtype: term-to-binary encoded object ''' if tsobj.columns: - raise NotImplementedError("columns are not implemented yet") + raise NotImplementedError('columns are not used') if tsobj.rows and isinstance(tsobj.rows, list): req_rows = [] @@ -124,8 +121,7 @@ def encode_timeseries_put(self, tsobj): req_r = [] for cell in row: req_r.append(self.encode_to_ts_cell(cell)) - req_t = (tsrow_a, req_r) - req_rows.append(req_t) + req_rows.append(tuple(req_r)) req = tsputreq_a, tsobj.table.name, [], req_rows mc = MSG_CODE_TS_TTB rc = MSG_CODE_TS_TTB diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 5da40aab..811f90b3 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -23,8 +23,12 @@ tsputreq_a = Atom('tsputreq') udef_a = Atom('undefined') -tsrow_a = Atom('tsrow') -tscell_a = Atom('tscell') +varchar_a = Atom('varchar') +sint64_a = Atom('sint64') +double_a = Atom('double') +timestamp_a = Atom('timestamp') +boolean_a = Atom('boolean') + table_name = 'GeoCheckin' str0 = 'ascii-0' @@ -45,9 +49,7 @@ def setUp(self): def test_encode_data_for_get(self): keylist = [ - (tscell_a, str_to_bytes('hash1'), udef_a, udef_a, udef_a, udef_a), - (tscell_a, str_to_bytes('user2'), udef_a, udef_a, udef_a, udef_a), - (tscell_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a) + str_to_bytes('hash1'), str_to_bytes('user2'), unix_time_millis(ts0) ] req = tsgetreq_a, str_to_bytes(table_name), keylist, udef_a req_test = encode(req) @@ -57,31 +59,24 @@ def test_encode_data_for_get(self): msg = c.encode_timeseries_keyreq(self.table, test_key) self.assertEqual(req_test, msg.data) - # def test_decode_riak_error(self): - + # {tsgetresp, + # { + # [<<"geohash">>, <<"user">>, <<"time">>, <<"weather">>, <<"temperature">>], + # [varchar, varchar, timestamp, varchar, double] + # }, + # [[<<"hash1">>, <<"user2">>, 144378190987, <<"typhoon">>, 90.3]] + # } def test_decode_data_from_get(self): - cols = [] - r0 = (tsrow_a, [ - (tscell_a, bd0, udef_a, udef_a, udef_a, udef_a), - (tscell_a, udef_a, 0, udef_a, udef_a, udef_a), - (tscell_a, udef_a, udef_a, udef_a, udef_a, 1.2), - (tscell_a, udef_a, udef_a, 
unix_time_millis(ts0), udef_a, udef_a), - (tscell_a, udef_a, udef_a, udef_a, True, udef_a), - (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a), - (tscell_a, str1, udef_a, udef_a, udef_a, udef_a) - ]) - r1 = (tsrow_a, [ - (tscell_a, bd1, udef_a, udef_a, udef_a, udef_a), - (tscell_a, udef_a, 3, udef_a, udef_a, udef_a), - (tscell_a, udef_a, udef_a, udef_a, udef_a, 4.5), - (tscell_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), - (tscell_a, udef_a, udef_a, udef_a, False, udef_a), - (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a), - (tscell_a, str1, udef_a, udef_a, udef_a, udef_a) - ]) + colnames = ["varchar", "sint64", "double", "timestamp", + "boolean", "varchar", "varchar"] + coltypes = [varchar_a, sint64_a, double_a, timestamp_a, + boolean_a, varchar_a, varchar_a] + r0 = (bd0, 0, 1.2, unix_time_millis(ts0), True, [], str1) + r1 = (bd1, 3, 4.5, unix_time_millis(ts1), False, [], str1) rows = [r0, r1] - # { tsgetresp, [cols], [rows] } - rsp_data = tsgetresp_a, cols, rows # NB: Python tuple notation + # { tsgetresp, { [colnames], [coltypes] }, [rows] } + cols_t = colnames, coltypes + rsp_data = tsgetresp_a, cols_t, rows rsp_ttb = encode(rsp_data) tsobj = TsObject(None, self.table, [], []) @@ -120,22 +115,8 @@ def test_decode_data_from_get(self): self.assertEqual(r[6], dr[6][1].encode('ascii')) def test_encode_data_for_put(self): - r0 = (tsrow_a, [ - (tscell_a, bd0, udef_a, udef_a, udef_a, udef_a), - (tscell_a, udef_a, 0, udef_a, udef_a, udef_a), - (tscell_a, udef_a, udef_a, udef_a, udef_a, 1.2), - (tscell_a, udef_a, udef_a, unix_time_millis(ts0), udef_a, udef_a), - (tscell_a, udef_a, udef_a, udef_a, True, udef_a), - (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) - ]) - r1 = (tsrow_a, [ - (tscell_a, bd1, udef_a, udef_a, udef_a, udef_a), - (tscell_a, udef_a, 3, udef_a, udef_a, udef_a), - (tscell_a, udef_a, udef_a, udef_a, udef_a, 4.5), - (tscell_a, udef_a, udef_a, unix_time_millis(ts1), udef_a, udef_a), - (tscell_a, udef_a, udef_a, udef_a, False, udef_a), - (tscell_a, udef_a, udef_a, udef_a, udef_a, udef_a) - ]) + r0 = (bd0, 0, 1.2, unix_time_millis(ts0), True, []) + r1 = (bd1, 3, 4.5, unix_time_millis(ts1), False, []) rows = [r0, r1] req = tsputreq_a, str_to_bytes(table_name), [], rows req_test = encode(req) From 1eed9f253ba80f1c8945c5cc93091192a318ba88 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 7 Apr 2016 07:49:47 -0700 Subject: [PATCH 155/324] TTB and PBUF timeseries tests working correctly. 
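For reference, the flat request shape used from this patch onward (cells become plain Erlang terms rather than tscell records, and each row is a bare tuple) can be sketched roughly as below; the table name and row values are illustrative only, taken from the test fixtures, and erlastic performs the term_to_binary encoding:

    from erlastic import encode
    from erlastic.types import Atom

    tsputreq = Atom('tsputreq')
    # {tsputreq, TableName, [], [RowTuple, ...]} -- one flat tuple per row;
    # an empty list stands in for a null cell
    rows = [(b'hash1', b'user2', 144378190987, b'typhoon', 90.3)]
    req = (tsputreq, b'GeoCheckin', [], rows)
    payload = encode(req)  # sent with msg code 104 (MSG_CODE_TS_TTB)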
--- riak/codecs/__init__.py | 2 +- riak/codecs/pbuf.py | 26 ++++--- riak/codecs/ttb.py | 70 ++++++++----------- ..._timeseries.py => test_timeseries_pbuf.py} | 69 +++++++++--------- riak/tests/test_timeseries_ttb.py | 42 ++++------- riak/transports/tcp/connection.py | 1 - riak/transports/tcp/transport.py | 4 +- riak/ts_object.py | 28 +++++--- 8 files changed, 114 insertions(+), 128 deletions(-) rename riak/tests/{test_timeseries.py => test_timeseries_pbuf.py} (89%) diff --git a/riak/codecs/__init__.py b/riak/codecs/__init__.py index e2ef78de..e356b5f9 100644 --- a/riak/codecs/__init__.py +++ b/riak/codecs/__init__.py @@ -1,5 +1,5 @@ import collections -import riak.pb.messages + from riak import RiakError Msg = collections.namedtuple('Msg', diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 2e08ec5c..b17c5b6a 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -11,6 +11,7 @@ from riak.codecs import Codec, Msg from riak.content import RiakContent from riak.riak_object import VClock +from riak.ts_object import TsColumns from riak.util import decode_index_value, str_to_bytes, bytes_to_str, \ unix_time_millis, datetime_from_unix_time_millis from riak.multidict import MultiDict @@ -98,7 +99,8 @@ def parse_msg(self, msg_code, data): def maybe_riak_error(self, msg_code, data=None): err_code = riak.pb.messages.MSG_CODE_ERROR_RESP - err_data = super(PbufCodec, self).maybe_riak_error(err_code, msg_code, data) + err_data = super(PbufCodec, self).maybe_riak_error( + err_code, msg_code, data) if err_data: err = self.parse_msg(msg_code, err_data) raise RiakError(bytes_to_str(err.errmsg)) @@ -782,16 +784,20 @@ def decode_timeseries(self, resp, tsobj): :param tsobj: a TsObject :type tsobj: TsObject """ - if tsobj.columns is not None: + if resp.columns is not None: + col_names = [] + col_types = [] for col in resp.columns: - col_name = bytes_to_str(col.name) - col_type = col.type - col = (col_name, col_type) - tsobj.columns.append(col) - - for row in resp.rows: - tsobj.rows.append( - self.decode_timeseries_row(row, resp.columns)) + col_names.append(bytes_to_str(col.name)) + col_types.append(col.type) + tsobj.columns = TsColumns(col_names, col_types) + + tsobj.rows = [] + if resp.rows is not None: + for row in resp.rows: + tsobj.rows.append( + self.decode_timeseries_row( + row, resp.columns)) def decode_timeseries_row(self, tsrow, tscols=None): """ diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 63479a55..f852dea7 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -8,6 +8,7 @@ from riak import RiakError from riak.codecs import Codec, Msg +from riak.ts_object import TsColumns from riak.util import bytes_to_str, unix_time_millis, \ datetime_from_unix_time_millis @@ -18,8 +19,7 @@ tsgetresp_a = Atom('tsgetresp') tsputreq_a = Atom('tsputreq') tsdelreq_a = Atom('tsdelreq') -tsrow_a = Atom('tsrow') -tscell_a = Atom('tscell') +timestamp_a = Atom('timestamp') # TODO RTS-842 MSG_CODE_TS_TTB = 104 @@ -139,17 +139,6 @@ def decode_timeseries(self, resp_ttb, tsobj): :param tsobj: a TsObject :type tsobj: TsObject """ - # TODO TODO RTS-842 CLIENTS-814 GH-445 - # TODO COLUMNS - # TODO TODO RTS-842 CLIENTS-814 GH-445 - # if tsobj.columns is not None: - # for col in resp.columns: - # col_name = bytes_to_str(col.name) - # col_type = col.type - # col = (col_name, col_type) - # tsobj.columns.append(col) - # - # TODO RTS-842 is this correct? 
if resp_ttb is None: return tsobj @@ -157,45 +146,42 @@ def decode_timeseries(self, resp_ttb, tsobj): if resp_a == rpberrorresp_a: self.process_err_ttb(resp_ttb) elif resp_a == tsgetresp_a: - # TODO resp_cols = resp_ttb[1] + resp_cols = resp_ttb[1] + tsobj.columns = self.decode_timeseries_cols(resp_cols) resp_rows = resp_ttb[2] - for row_ttb in resp_rows: + tsobj.rows = [] + for resp_row in resp_rows: tsobj.rows.append( - self.decode_timeseries_row(row_ttb, None)) + self.decode_timeseries_row(resp_row, resp_cols)) else: raise RiakError("Unknown TTB response type: {}".format(resp_a)) - def decode_timeseries_row(self, tsrow_ttb, tscols=None): + def decode_timeseries_cols(self, tscols): + cn, ct = tscols + cnames = [bytes_to_str(cname) for cname in cn] + ctypes = [str(ctype) for ctype in ct] + return TsColumns(cnames, ctypes) + + def decode_timeseries_row(self, tsrow, tscols): """ Decodes a TTB-encoded TsRow into a list - :param tsrow: the TTB-encoded TsRow to decode. - :type tsrow: TTB encoded row - :param tscols: the TTB-encoded TsColumn data to help decode. + :param tsrow: the TTB decoded TsRow to decode. + :type tsrow: TTB dncoded row + :param tscols: the TTB decoded TsColumn data to help decode rows. :type tscols: list :rtype list """ - if tsrow_ttb[0] == tsrow_a: - row = [] - for tsc_ttb in tsrow_ttb[1]: - if tsc_ttb[0] == tscell_a: - if tsc_ttb[1] != udef_a: - row.append(tsc_ttb[1]) - elif tsc_ttb[2] != udef_a: - row.append(tsc_ttb[2]) - elif tsc_ttb[3] != udef_a: - row.append( - datetime_from_unix_time_millis(tsc_ttb[3])) - elif tsc_ttb[4] != udef_a: - row.append(tsc_ttb[4]) - elif tsc_ttb[5] != udef_a: - row.append(tsc_ttb[5]) - else: - row.append(None) + cn, ct = tscols + row = [] + for i, cell in enumerate(tsrow): + if cell is None: + row.append(None) + elif cell is list and len(cell) == 0: + row.append(None) + else: + if ct[i] == timestamp_a: + row.append(datetime_from_unix_time_millis(cell)) else: - raise RiakError( - "Expected tscell atom, got: {}".format(tsc_ttb[0])) - else: - raise RiakError( - "Expected tsrow atom, got: {}".format(tsrow_ttb[0])) + row.append(cell) return row diff --git a/riak/tests/test_timeseries.py b/riak/tests/test_timeseries_pbuf.py similarity index 89% rename from riak/tests/test_timeseries.py rename to riak/tests/test_timeseries_pbuf.py index 5d9e81cb..93e088a2 100644 --- a/riak/tests/test_timeseries.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -32,7 +32,7 @@ @unittest.skipUnless(is_timeseries_supported(), "Timeseries not supported") -class TimeseriesUnitTests(unittest.TestCase): +class TimeseriesPbufUnitTests(unittest.TestCase): @classmethod def setUpClass(cls): cls.ts0ms = unix_time_millis(ts0) @@ -159,24 +159,25 @@ def test_decode_data_from_query(self): r1c4 = r1.cells.add() r1c4.boolean_value = self.rows[1][4] - tsobj = TsObject(None, self.table, [], []) + tsobj = TsObject(None, self.table) c = PbufCodec() c.decode_timeseries(tqr, tsobj) - self.assertEqual(len(self.rows), len(tsobj.rows)) - self.assertEqual(len(tqr.columns), len(tsobj.columns)) - - c = tsobj.columns - self.assertEqual(c[0][0], 'col_varchar') - self.assertEqual(c[0][1], TsColumnType.Value('VARCHAR')) - self.assertEqual(c[1][0], 'col_integer') - self.assertEqual(c[1][1], TsColumnType.Value('SINT64')) - self.assertEqual(c[2][0], 'col_double') - self.assertEqual(c[2][1], TsColumnType.Value('DOUBLE')) - self.assertEqual(c[3][0], 'col_timestamp') - self.assertEqual(c[3][1], TsColumnType.Value('TIMESTAMP')) - self.assertEqual(c[4][0], 'col_boolean') - self.assertEqual(c[4][1], 
TsColumnType.Value('BOOLEAN')) + self.assertEqual(len(tsobj.rows), len(self.rows)) + self.assertEqual(len(tsobj.columns.names), len(tqr.columns)) + self.assertEqual(len(tsobj.columns.types), len(tqr.columns)) + + cn, ct = tsobj.columns + self.assertEqual(cn[0], 'col_varchar') + self.assertEqual(ct[0], TsColumnType.Value('VARCHAR')) + self.assertEqual(cn[1], 'col_integer') + self.assertEqual(ct[1], TsColumnType.Value('SINT64')) + self.assertEqual(cn[2], 'col_double') + self.assertEqual(ct[2], TsColumnType.Value('DOUBLE')) + self.assertEqual(cn[3], 'col_timestamp') + self.assertEqual(ct[3], TsColumnType.Value('TIMESTAMP')) + self.assertEqual(cn[4], 'col_boolean') + self.assertEqual(ct[4], TsColumnType.Value('BOOLEAN')) r0 = tsobj.rows[0] self.assertEqual(bytes_to_str(r0[0]), self.rows[0][0]) @@ -195,12 +196,12 @@ def test_decode_data_from_query(self): @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, 'Timeseries not supported or RUN_TIMESERIES is 0') -class TimeseriesTests(IntegrationTestBase, unittest.TestCase): +class TimeseriesPbufTests(IntegrationTestBase, unittest.TestCase): client_options = {'transport_options': {'use_ttb': False}} @classmethod def setUpClass(cls): - super(TimeseriesTests, cls).setUpClass() + super(TimeseriesPbufTests, cls).setUpClass() cls.now = datetime.datetime.utcfromtimestamp(144379690.987000) fiveMinsAgo = cls.now - fiveMins tenMinsAgo = fiveMinsAgo - fiveMins @@ -245,9 +246,15 @@ def setUpClass(cls): ] cls.encoded_rows = encoded_rows + def validate_len(self, ts_obj, expected_len): + self.assertEqual(len(ts_obj.columns.names), expected_len) + self.assertEqual(len(ts_obj.columns.types), expected_len) + self.assertEqual(len(ts_obj.rows), expected_len) + def validate_data(self, ts_obj): if ts_obj.columns is not None: - self.assertEqual(len(ts_obj.columns), self.numCols) + self.assertEqual(len(ts_obj.columns.names), self.numCols) + self.assertEqual(len(ts_obj.columns.types), self.numCols) self.assertEqual(len(ts_obj.rows), 1) row = ts_obj.rows[0] self.assertEqual(bytes_to_str(row[0]), 'hash1') @@ -272,44 +279,38 @@ def test_query_that_creates_table_using_interpolation(self): """ ts_obj = self.client.ts_query(table, query) self.assertIsNotNone(ts_obj) - self.assertEqual(len(ts_obj.columns), 0) - self.assertEqual(len(ts_obj.rows), 0) + self.validate_len(ts_obj, 0) def test_query_that_returns_table_description(self): fmt = 'DESCRIBE {table}' query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.assertEqual(len(ts_obj.columns), 5) - self.assertEqual(len(ts_obj.rows), 5) + self.validate_len(ts_obj, 5) def test_query_that_returns_table_description_using_interpolation(self): query = 'Describe {table}' ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.assertEqual(len(ts_obj.columns), 5) - self.assertEqual(len(ts_obj.rows), 5) + self.validate_len(ts_obj, 5) def test_query_description_via_table(self): query = 'describe {table}' table = Table(self.client, table_name) ts_obj = table.query(query) self.assertIsNotNone(ts_obj) - self.assertEqual(len(ts_obj.columns), 5) - self.assertEqual(len(ts_obj.rows), 5) + self.validate_len(ts_obj, 5) def test_get_description(self): ts_obj = self.client.ts_describe(table_name) self.assertIsNotNone(ts_obj) - self.assertEqual(len(ts_obj.columns), 5) - self.assertEqual(len(ts_obj.rows), 5) + self.validate_len(ts_obj, 5) def test_get_description_via_table(self): table = Table(self.client, table_name) ts_obj = table.describe() 
self.assertIsNotNone(ts_obj) - self.assertEqual(len(ts_obj.columns), 5) - self.assertEqual(len(ts_obj.rows), 5) + self.validate_len(ts_obj, 5) def test_query_that_returns_no_data(self): fmt = """ @@ -320,8 +321,7 @@ def test_query_that_returns_no_data(self): """ query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) - self.assertEqual(len(ts_obj.columns), 0) - self.assertEqual(len(ts_obj.rows), 0) + self.validate_len(ts_obj, 0) def test_query_that_returns_no_data_using_interpolation(self): query = """ @@ -331,8 +331,7 @@ def test_query_that_returns_no_data_using_interpolation(self): user = 'user1' """ ts_obj = self.client.ts_query(table_name, query) - self.assertEqual(len(ts_obj.columns), 0) - self.assertEqual(len(ts_obj.rows), 0) + self.validate_len(ts_obj, 0) def test_query_that_matches_some_data(self): fmt = """ diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 811f90b3..8936567c 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -61,7 +61,8 @@ def test_encode_data_for_get(self): # {tsgetresp, # { - # [<<"geohash">>, <<"user">>, <<"time">>, <<"weather">>, <<"temperature">>], + # [<<"geohash">>, <<"user">>, <<"time">>, + # <<"weather">>, <<"temperature">>], # [varchar, varchar, timestamp, varchar, double] # }, # [[<<"hash1">>, <<"user2">>, 144378190987, <<"typhoon">>, 90.3]] @@ -71,48 +72,35 @@ def test_decode_data_from_get(self): "boolean", "varchar", "varchar"] coltypes = [varchar_a, sint64_a, double_a, timestamp_a, boolean_a, varchar_a, varchar_a] - r0 = (bd0, 0, 1.2, unix_time_millis(ts0), True, [], str1) - r1 = (bd1, 3, 4.5, unix_time_millis(ts1), False, [], str1) + r0 = (bd0, 0, 1.2, unix_time_millis(ts0), True, + [], str1, None) + r1 = (bd1, 3, 4.5, unix_time_millis(ts1), False, + [], str1, None) rows = [r0, r1] # { tsgetresp, { [colnames], [coltypes] }, [rows] } cols_t = colnames, coltypes rsp_data = tsgetresp_a, cols_t, rows rsp_ttb = encode(rsp_data) - tsobj = TsObject(None, self.table, [], []) + tsobj = TsObject(None, self.table, []) c = TtbCodec() c.decode_timeseries(decode(rsp_ttb), tsobj) for i in range(0, 1): - self.assertEqual(tsrow_a, rows[i][0]) - dr = rows[i][1] + dr = rows[i] r = tsobj.rows[i] # encoded - - # cells - self.assertEqual(tscell_a, dr[0][0]) - self.assertEqual(r[0], dr[0][1].encode('utf-8')) - - self.assertEqual(tscell_a, dr[1][0]) - self.assertEqual(r[1], dr[1][2]) - - self.assertEqual(tscell_a, dr[2][0]) - self.assertEqual(r[2], dr[2][5]) - - self.assertEqual(tscell_a, dr[3][0]) - dt = datetime_from_unix_time_millis(dr[3][3]) + self.assertEqual(r[0], dr[0].encode('utf-8')) + self.assertEqual(r[1], dr[1]) + self.assertEqual(r[2], dr[2]) + dt = datetime_from_unix_time_millis(dr[3]) self.assertEqual(r[3], dt) - - self.assertEqual(tscell_a, dr[4][0]) if i == 0: self.assertEqual(r[4], True) else: self.assertEqual(r[4], False) - - self.assertEqual(tscell_a, dr[5][0]) - self.assertEqual(r[5], None) - - self.assertEqual(tscell_a, dr[6][0]) - self.assertEqual(r[6], dr[6][1].encode('ascii')) + self.assertEqual(r[5], []) + self.assertEqual(r[6], dr[6].encode('ascii')) + self.assertEqual(r[7], None) def test_encode_data_for_put(self): r0 = (bd0, 0, 1.2, unix_time_millis(ts0), True, []) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index d0ffab13..a9d75603 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,4 +1,3 @@ -import logging import socket import struct diff --git 
a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index d7e7954b..02d4b5e4 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -142,7 +142,7 @@ def ts_get(self, table, key): codec = self._get_codec(msg_code) msg = codec.encode_timeseries_keyreq(table, key) resp_code, resp = self._request(msg, codec) - tsobj = TsObject(self._client, table, [], None) + tsobj = TsObject(self._client, table) codec.decode_timeseries(resp, tsobj) return tsobj @@ -168,7 +168,7 @@ def ts_query(self, table, query, interpolations=None): codec = self._get_codec(msg_code) msg = codec.encode_timeseries_query(table, query, interpolations) resp_code, resp = self._request(msg, codec) - tsobj = TsObject(self._client, table, [], []) + tsobj = TsObject(self._client, table) codec.decode_timeseries(resp, tsobj) return tsobj diff --git a/riak/ts_object.py b/riak/ts_object.py index ef01baff..24eccbe1 100644 --- a/riak/ts_object.py +++ b/riak/ts_object.py @@ -1,13 +1,17 @@ +import collections + from riak import RiakError from riak.table import Table +TsColumns = collections.namedtuple('TsColumns', ['names', 'types']) + class TsObject(object): """ The TsObject holds information about Timeseries data, plus the data itself. """ - def __init__(self, client, table, rows=[], columns=[]): + def __init__(self, client, table, rows=None, columns=None): """ Construct a new TsObject. @@ -17,8 +21,8 @@ def __init__(self, client, table, rows=[], columns=[]): :type table: :class:`Table` :param rows: An list of lists with timeseries data :type rows: list - :param columns: An list of Column names and types. Optional. - :type columns: list + :param columns: A TsColumns tuple. Optional + :type columns: :class:`TsColumns` """ if not isinstance(table, Table): @@ -27,13 +31,17 @@ def __init__(self, client, table, rows=[], columns=[]): self.client = client self.table = table - self.rows = rows - if not isinstance(self.rows, list): - raise RiakError("TsObject requires a list of rows") - - self.columns = columns - if self.columns is not None and not isinstance(self.columns, list): - raise RiakError("TsObject columns must be a list") + if rows is not None and not isinstance(rows, list): + raise RiakError("TsObject rows parameter must be a list.") + else: + self.rows = rows + + if columns is not None and \ + not isinstance(columns, TsColumns): + raise RiakError( + "TsObject columns parameter must be a TsColumns instance") + else: + self.columns = columns def store(self): """ From 7736ae3ef78cf0221b4dec5aea9a137d40f42b55 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 7 Apr 2016 08:29:23 -0700 Subject: [PATCH 156/324] Decode column type the same way for ttb and pbuf --- riak/codecs/pbuf.py | 22 +++++++++++++++++++--- riak/tests/test_timeseries_pbuf.py | 10 +++++----- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index b17c5b6a..a976d1ff 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -10,12 +10,12 @@ from riak import RiakError from riak.codecs import Codec, Msg from riak.content import RiakContent +from riak.pb.riak_ts_pb2 import TsColumnType from riak.riak_object import VClock from riak.ts_object import TsColumns from riak.util import decode_index_value, str_to_bytes, bytes_to_str, \ unix_time_millis, datetime_from_unix_time_millis from riak.multidict import MultiDict -from riak.pb.riak_ts_pb2 import TsColumnType def _invert(d): @@ -789,7 +789,8 @@ def decode_timeseries(self, resp, tsobj): col_types = [] for col in 
resp.columns: col_names.append(bytes_to_str(col.name)) - col_types.append(col.type) + col_type = self.decode_timeseries_col_type(col.type) + col_types.append(col_type) tsobj.columns = TsColumns(col_names, col_types) tsobj.rows = [] @@ -799,6 +800,22 @@ def decode_timeseries(self, resp, tsobj): self.decode_timeseries_row( row, resp.columns)) + def decode_timeseries_col_type(self, col_type): + # NB: these match the atom names for column types + if col_type == TsColumnType.Value('VARCHAR'): + return 'varchar' + elif col_type == TsColumnType.Value('SINT64'): + return 'sint64' + elif col_type == TsColumnType.Value('DOUBLE'): + return 'double' + elif col_type == TsColumnType.Value('TIMESTAMP'): + return 'timestamp' + elif col_type == TsColumnType.Value('BOOLEAN'): + return 'boolean' + else: + msg = 'could not decode column type: {}'.format(col_type) + raise RiakError(msg) + def decode_timeseries_row(self, tsrow, tscols=None): """ Decodes a TsRow into a list @@ -818,7 +835,6 @@ def decode_timeseries_row(self, tsrow, tscols=None): if col and col.type != TsColumnType.Value('VARCHAR'): raise TypeError('expected VARCHAR column') else: - # TODO RTS-842 - keep as bytes? row.append(cell.varchar_value) elif cell.HasField('sint64_value'): if col and col.type != TsColumnType.Value('SINT64'): diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 93e088a2..9eddcc81 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -169,15 +169,15 @@ def test_decode_data_from_query(self): cn, ct = tsobj.columns self.assertEqual(cn[0], 'col_varchar') - self.assertEqual(ct[0], TsColumnType.Value('VARCHAR')) + self.assertEqual(ct[0], 'varchar') self.assertEqual(cn[1], 'col_integer') - self.assertEqual(ct[1], TsColumnType.Value('SINT64')) + self.assertEqual(ct[1], 'sint64') self.assertEqual(cn[2], 'col_double') - self.assertEqual(ct[2], TsColumnType.Value('DOUBLE')) + self.assertEqual(ct[2], 'double') self.assertEqual(cn[3], 'col_timestamp') - self.assertEqual(ct[3], TsColumnType.Value('TIMESTAMP')) + self.assertEqual(ct[3], 'timestamp') self.assertEqual(cn[4], 'col_boolean') - self.assertEqual(ct[4], TsColumnType.Value('BOOLEAN')) + self.assertEqual(ct[4], 'boolean') r0 = tsobj.rows[0] self.assertEqual(bytes_to_str(r0[0]), self.rows[0][0]) From 57daac0c451135a3a8964279603a0a2d190412c3 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 13 Apr 2016 16:19:48 -0700 Subject: [PATCH 157/324] More work on TTB encoding --- riak/codecs/ttb.py | 29 +++++++++++++++++++++-------- riak/tests/test_timeseries_ttb.py | 26 +++++++++++++++++++++++--- riak/transports/tcp/transport.py | 2 ++ 3 files changed, 46 insertions(+), 11 deletions(-) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index f852dea7..353f955d 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -1,8 +1,6 @@ import datetime import six -import riak.pb.messages - from erlastic import encode, decode from erlastic.types import Atom @@ -17,6 +15,9 @@ rpberrorresp_a = Atom('rpberrorresp') tsgetreq_a = Atom('tsgetreq') tsgetresp_a = Atom('tsgetresp') +tsqueryreq_a = Atom('tsqueryreq') +tsqueryresp_a = Atom('tsqueryresp') +tsinterpolation_a = Atom('tsinterpolation') tsputreq_a = Atom('tsputreq') tsdelreq_a = Atom('tsdelreq') timestamp_a = Atom('timestamp') @@ -34,9 +35,7 @@ def __init__(self, **unused_args): super(TtbCodec, self).__init__(**unused_args) def parse_msg(self, msg_code, data): - if msg_code != MSG_CODE_TS_TTB and \ - msg_code != riak.pb.messages.MSG_CODE_TS_GET_RESP and \ - 
msg_code != riak.pb.messages.MSG_CODE_TS_PUT_RESP: + if msg_code != MSG_CODE_TS_TTB: raise RiakError("TTB can't parse code: {}".format(msg_code)) if len(data) > 0: decoded = decode(data) @@ -95,8 +94,7 @@ def encode_timeseries_keyreq(self, table, key, is_delete=False): return Msg(mc, encode(req), rc) def validate_timeseries_put_resp(self, resp_code, resp): - if resp is None and \ - resp_code == riak.pb.messages.MSG_CODE_TS_PUT_RESP: + if resp is None and resp_code == MSG_CODE_TS_TTB: return True if resp is not None: return True @@ -129,6 +127,16 @@ def encode_timeseries_put(self, tsobj): else: raise RiakError("TsObject requires a list of rows") + def encode_timeseries_query(self, table, query, interpolations=None): + q = query + if '{table}' in q: + q = q.format(table=table.name) + tsi = tsinterpolation_a, q, [] + req = tsqueryreq_a, tsi, False, [] + mc = MSG_CODE_TS_TTB + rc = MSG_CODE_TS_TTB + return Msg(mc, encode(req), rc) + def decode_timeseries(self, resp_ttb, tsobj): """ Fills an TsObject with the appropriate data and @@ -142,10 +150,13 @@ def decode_timeseries(self, resp_ttb, tsobj): if resp_ttb is None: return tsobj + import sys resp_a = resp_ttb[0] + sys.stderr.write("resp_a: {}".format(resp_a)) if resp_a == rpberrorresp_a: self.process_err_ttb(resp_ttb) - elif resp_a == tsgetresp_a: + elif resp_a == tsgetresp_a or \ + resp_a == tsqueryresp_a: resp_cols = resp_ttb[1] tsobj.columns = self.decode_timeseries_cols(resp_cols) resp_rows = resp_ttb[2] @@ -172,9 +183,11 @@ def decode_timeseries_row(self, tsrow, tscols): :type tscols: list :rtype list """ + import sys cn, ct = tscols row = [] for i, cell in enumerate(tsrow): + sys.stderr.write("\ncell: {}\n".format(cell)) if cell is None: row.append(None) elif cell is list and len(cell) == 0: diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 8936567c..b79993cb 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -129,7 +129,21 @@ class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): def setUpClass(cls): super(TimeseriesTtbTests, cls).setUpClass() - def test_store_and_fetch_ttb(self): + def test_query_that_returns_table_description(self): + import sys + fmt = 'DESCRIBE {table}' + query = fmt.format(table=table_name) + ts_obj = self.client.ts_query(table_name, query) + self.assertIsNotNone(ts_obj) + ts_cols = ts_obj.columns + sys.stderr.write("\n\nts_cols: {}\n\n".format(ts_cols)) + sys.stderr.write("\n\nrows: {}\n\n".format(ts_obj.rows)) + self.assertEqual(len(ts_cols.names), 5) + self.assertEqual(len(ts_cols.types), 5) + row = ts_obj.rows[0] + self.assertEqual(len(row), 5) + + def test_store_and_fetch(self): now = datetime.datetime.utcfromtimestamp(144379690.987000) fiveMinsAgo = now - fiveMins tenMinsAgo = fiveMinsAgo - fiveMins @@ -149,12 +163,18 @@ def test_store_and_fetch_ttb(self): result = ts_obj.store() self.assertTrue(result) - for r in rows: + for i, r in enumerate(rows): k = r[0:3] ts_obj = self.client.ts_get(table_name, k) self.assertIsNotNone(ts_obj) + ts_cols = ts_obj.columns + self.assertEqual(len(ts_cols.names), 5) + self.assertEqual(len(ts_cols.types), 5) self.assertEqual(len(ts_obj.rows), 1) - self.assertEqual(len(ts_obj.rows[0]), 5) + row = ts_obj.rows[0] + exp = rows[i] + self.assertEqual(len(row), 5) + self.assertEqual(row, exp) def test_create_error_via_put(self): table = Table(self.client, table_name) diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 02d4b5e4..58420767 100644 --- 
a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -60,6 +60,8 @@ def _get_codec(self, msg_code): codec = self._get_ttb_codec() elif msg_code == riak.pb.messages.MSG_CODE_TS_PUT_REQ: codec = self._get_ttb_codec() + elif msg_code == riak.pb.messages.MSG_CODE_TS_QUERY_REQ: + codec = self._get_ttb_codec() else: codec = self._get_pbuf_codec() return codec From 4eec9379ca48ae6a3c40f36230073755b2901e24 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 14 Apr 2016 14:16:06 -0700 Subject: [PATCH 158/324] finish up Riak TS 1.3 changes --- riak/codecs/ttb.py | 49 ++++++++++++++++-------------- riak/tests/test_timeseries_pbuf.py | 7 +++-- riak/tests/test_timeseries_ttb.py | 34 +++++++++++++-------- 3 files changed, 53 insertions(+), 37 deletions(-) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 353f955d..7333943b 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -19,6 +19,7 @@ tsqueryresp_a = Atom('tsqueryresp') tsinterpolation_a = Atom('tsinterpolation') tsputreq_a = Atom('tsputreq') +tsputresp_a = Atom('tsputresp') tsdelreq_a = Atom('tsdelreq') timestamp_a = Atom('timestamp') @@ -150,50 +151,52 @@ def decode_timeseries(self, resp_ttb, tsobj): if resp_ttb is None: return tsobj - import sys resp_a = resp_ttb[0] - sys.stderr.write("resp_a: {}".format(resp_a)) if resp_a == rpberrorresp_a: self.process_err_ttb(resp_ttb) - elif resp_a == tsgetresp_a or \ - resp_a == tsqueryresp_a: - resp_cols = resp_ttb[1] - tsobj.columns = self.decode_timeseries_cols(resp_cols) - resp_rows = resp_ttb[2] - tsobj.rows = [] - for resp_row in resp_rows: - tsobj.rows.append( - self.decode_timeseries_row(resp_row, resp_cols)) + elif resp_a == tsputresp_a: + return + elif resp_a == tsgetresp_a or resp_a == tsqueryresp_a: + resp_data = resp_ttb[1] + if len(resp_data) == 0: + return + elif len(resp_data) == 3: + resp_colnames = resp_data[0] + resp_coltypes = resp_data[1] + tsobj.columns = self.decode_timeseries_cols(resp_colnames, resp_coltypes) + resp_rows = resp_data[2] + tsobj.rows = [] + for resp_row in resp_rows: + tsobj.rows.append( + self.decode_timeseries_row(resp_row, resp_coltypes)) + else: + raise RiakError("Expected 3-tuple in response, got: {}".format(resp_data)) else: raise RiakError("Unknown TTB response type: {}".format(resp_a)) - def decode_timeseries_cols(self, tscols): - cn, ct = tscols - cnames = [bytes_to_str(cname) for cname in cn] - ctypes = [str(ctype) for ctype in ct] + def decode_timeseries_cols(self, cnames, ctypes): + cnames = [bytes_to_str(cname) for cname in cnames] + ctypes = [str(ctype) for ctype in ctypes] return TsColumns(cnames, ctypes) - def decode_timeseries_row(self, tsrow, tscols): + def decode_timeseries_row(self, tsrow, tsct): """ Decodes a TTB-encoded TsRow into a list :param tsrow: the TTB decoded TsRow to decode. :type tsrow: TTB dncoded row - :param tscols: the TTB decoded TsColumn data to help decode rows. - :type tscols: list + :param tsct: the TTB decoded column types (atoms). 
+ :type tsct: list :rtype list """ - import sys - cn, ct = tscols row = [] for i, cell in enumerate(tsrow): - sys.stderr.write("\ncell: {}\n".format(cell)) if cell is None: row.append(None) - elif cell is list and len(cell) == 0: + elif isinstance(cell, list) and len(cell) == 0: row.append(None) else: - if ct[i] == timestamp_a: + if tsct[i] == timestamp_a: row.append(datetime_from_unix_time_millis(cell)) else: row.append(cell) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 9eddcc81..50c0818c 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -416,8 +416,11 @@ def test_delete_single_value(self): key = ['hash1', 'user2', self.twentyFiveMinsAgo] rslt = self.client.ts_delete(table_name, key) self.assertTrue(rslt) - with self.assertRaises(RiakError): - self.client.ts_get(table_name, key) + ts_obj = self.client.ts_get(table_name, key) + self.assertIsNotNone(ts_obj) + self.assertEqual(len(ts_obj.rows), 0) + self.assertEqual(len(ts_obj.columns.names), 0) + self.assertEqual(len(ts_obj.columns.types), 0) def test_create_error_via_put(self): table = Table(self.client, table_name) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index b79993cb..339dd040 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -63,9 +63,9 @@ def test_encode_data_for_get(self): # { # [<<"geohash">>, <<"user">>, <<"time">>, # <<"weather">>, <<"temperature">>], - # [varchar, varchar, timestamp, varchar, double] - # }, - # [[<<"hash1">>, <<"user2">>, 144378190987, <<"typhoon">>, 90.3]] + # [varchar, varchar, timestamp, varchar, double], + # [(<<"hash1">>, <<"user2">>, 144378190987, <<"typhoon">>, 90.3)] + # } # } def test_decode_data_from_get(self): colnames = ["varchar", "sint64", "double", "timestamp", @@ -77,12 +77,12 @@ def test_decode_data_from_get(self): r1 = (bd1, 3, 4.5, unix_time_millis(ts1), False, [], str1, None) rows = [r0, r1] - # { tsgetresp, { [colnames], [coltypes] }, [rows] } - cols_t = colnames, coltypes - rsp_data = tsgetresp_a, cols_t, rows + # { tsgetresp, { [colnames], [coltypes], [rows] } } + data_t = colnames, coltypes, rows + rsp_data = tsgetresp_a, data_t rsp_ttb = encode(rsp_data) - tsobj = TsObject(None, self.table, []) + tsobj = TsObject(None, self.table) c = TtbCodec() c.decode_timeseries(decode(rsp_ttb), tsobj) @@ -98,7 +98,7 @@ def test_decode_data_from_get(self): self.assertEqual(r[4], True) else: self.assertEqual(r[4], False) - self.assertEqual(r[5], []) + self.assertEqual(r[5], None) self.assertEqual(r[6], dr[6].encode('ascii')) self.assertEqual(r[7], None) @@ -130,14 +130,11 @@ def setUpClass(cls): super(TimeseriesTtbTests, cls).setUpClass() def test_query_that_returns_table_description(self): - import sys fmt = 'DESCRIBE {table}' query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) ts_cols = ts_obj.columns - sys.stderr.write("\n\nts_cols: {}\n\n".format(ts_cols)) - sys.stderr.write("\n\nrows: {}\n\n".format(ts_obj.rows)) self.assertEqual(len(ts_cols.names), 5) self.assertEqual(len(ts_cols.types), 5) row = ts_obj.rows[0] @@ -159,6 +156,19 @@ def test_store_and_fetch(self): ['hash1', 'user2', fiveMinsAgo, 'wind', None], ['hash1', 'user2', now, 'snow', 20.1] ] + # NB: response data is binary + exp_rows = [ + [six.b('hash1'), six.b('user2'), twentyFiveMinsAgo, + six.b('typhoon'), 90.3], + [six.b('hash1'), six.b('user2'), twentyMinsAgo, + six.b('hurricane'), 82.3], + 
[six.b('hash1'), six.b('user2'), fifteenMinsAgo, + six.b('rain'), 79.0], + [six.b('hash1'), six.b('user2'), fiveMinsAgo, + six.b('wind'), None], + [six.b('hash1'), six.b('user2'), now, + six.b('snow'), 20.1] + ] ts_obj = table.new(rows) result = ts_obj.store() self.assertTrue(result) @@ -172,7 +182,7 @@ def test_store_and_fetch(self): self.assertEqual(len(ts_cols.types), 5) self.assertEqual(len(ts_obj.rows), 1) row = ts_obj.rows[0] - exp = rows[i] + exp = exp_rows[i] self.assertEqual(len(row), 5) self.assertEqual(row, exp) From 2e5b0aab5846a919ceb52fc817ec9e6ca8cfba4f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 19 Apr 2016 11:19:17 -0700 Subject: [PATCH 159/324] process rpberrorresp correctly --- riak/codecs/ttb.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 7333943b..c18912af 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -49,6 +49,7 @@ def maybe_err_ttb(self, err_ttb): resp_a = err_ttb[0] if resp_a == rpberrorresp_a: errmsg = err_ttb[1] + # errcode = err_ttb[2] raise RiakError(bytes_to_str(errmsg)) def maybe_riak_error(self, msg_code, data=None): @@ -151,10 +152,10 @@ def decode_timeseries(self, resp_ttb, tsobj): if resp_ttb is None: return tsobj + self.maybe_err_ttb(resp_ttb) + resp_a = resp_ttb[0] - if resp_a == rpberrorresp_a: - self.process_err_ttb(resp_ttb) - elif resp_a == tsputresp_a: + if resp_a == tsputresp_a: return elif resp_a == tsgetresp_a or resp_a == tsqueryresp_a: resp_data = resp_ttb[1] From c6ecbfb3dead3552c239e7c56661c9d67397d0b6 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 22 Apr 2016 11:14:17 -0700 Subject: [PATCH 160/324] make lint happy --- riak/codecs/ttb.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index c18912af..6bb81f0d 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -164,14 +164,16 @@ def decode_timeseries(self, resp_ttb, tsobj): elif len(resp_data) == 3: resp_colnames = resp_data[0] resp_coltypes = resp_data[1] - tsobj.columns = self.decode_timeseries_cols(resp_colnames, resp_coltypes) + tsobj.columns = self.decode_timeseries_cols( + resp_colnames, resp_coltypes) resp_rows = resp_data[2] tsobj.rows = [] for resp_row in resp_rows: tsobj.rows.append( self.decode_timeseries_row(resp_row, resp_coltypes)) else: - raise RiakError("Expected 3-tuple in response, got: {}".format(resp_data)) + raise RiakError( + "Expected 3-tuple in response, got: {}".format(resp_data)) else: raise RiakError("Unknown TTB response type: {}".format(resp_a)) From c0255b2f1da78770afb55e5a43547841f3a7c89e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 27 Apr 2016 07:12:30 -0700 Subject: [PATCH 161/324] Small fixes, lint --- riak/transports/tcp/connection.py | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index ea78a76e..2ac0369f 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -6,6 +6,7 @@ from riak import RiakError from riak.codecs.pbuf import PbufCodec +from riak.transports.pool import BadResource from riak.security import SecurityError, USE_STDLIB_SSL @@ -16,8 +17,6 @@ import ssl from riak.transports.security import configure_ssl_context -from riak.transports.pool import BadResource - class TcpConnection(object): """ @@ -157,7 +156,7 @@ def _ssl_handshake(self): def _recv_msg(self): try: msgbuf = self._recv_pkt() - except socket.timeout, e: + except 
socket.timeout as e: # A timeout can leave the socket in an inconsistent state because # it might still receive the data later and mix up with a # subsequent request. @@ -168,8 +167,6 @@ def _recv_msg(self): data = mv[1:].tobytes() return (msg_code, data) - - def _recv_pkt(self): # TODO FUTURE re-use buffer msglen_buf = self._recv(4) From bb21de9ff6bb61995383570528cacaa8ac60f36a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 27 Apr 2016 07:13:39 -0700 Subject: [PATCH 162/324] Add basic make.ps1 for Windows --- .gitignore | 1 - make.ps1 | 20 ++++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 make.ps1 diff --git a/.gitignore b/.gitignore index e20b3037..3b1e0f43 100644 --- a/.gitignore +++ b/.gitignore @@ -14,4 +14,3 @@ riak.egg-info/ .eggs/ #*# *~ -*.ps1 diff --git a/make.ps1 b/make.ps1 new file mode 100644 index 00000000..6d3c4181 --- /dev/null +++ b/make.ps1 @@ -0,0 +1,20 @@ +Set-StrictMode -Version Latest +$ErrorActionPreference = 'Stop' + +$env:RIAK_TEST_HOST = 'riak-test' +$env:RIAK_TEST_PROTOCOL = 'pbc' +$env:RIAK_TEST_PB_PORT = 10017 +$env:RUN_DATATYPES = 1 +$env:RUN_INDEXES = 1 +$env:RUN_POOL = 1 +$env:RUN_YZ = 1 + +flake8 --exclude=riak/pb riak commands.py setup.py version.py +if ($LastExitCode -ne 0) { + throw 'flake8 failed!' +} + +python setup.py test +if ($LastExitCode -ne 0) { + throw 'python tests failed!' +} From 5016352deca12bd3143aea976be54b18c6fe25e8 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 27 Apr 2016 07:18:10 -0700 Subject: [PATCH 163/324] add contributor --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index ba773034..529b7a20 100644 --- a/README.md +++ b/README.md @@ -178,6 +178,7 @@ Contributors * Mark Phillips * Mathias Meyer * Matt Heitzenroder +* [Matt Lohier](https://github.com/aquam8) * Mikhail Sobolev * Reid Draper * Russell Brown From 086257f1c56d193f3dec05e8e9fdd32fc946da85 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 27 Apr 2016 08:26:41 -0700 Subject: [PATCH 164/324] Port multiput from features/lrb/protobuf-cpp branch. --- riak/client/__init__.py | 20 +++- riak/client/{multiget.py => multi.py} | 138 +++++++++++++++++++++----- riak/client/operations.py | 20 +++- riak/tests/test_client.py | 102 +++++++++++++++++++ 4 files changed, 248 insertions(+), 32 deletions(-) rename riak/client/{multiget.py => multi.py} (59%) diff --git a/riak/client/__init__.py b/riak/client/__init__.py index ea9abaca..fae5e133 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -16,7 +16,7 @@ from riak.security import SecurityCreds from riak.util import lazy_property, bytes_to_str, str_to_bytes from six import string_types, PY2 -from riak.client.multiget import MultiGetPool +from riak.client.multi import MultiGetPool, MultiPutPool def default_encoder(obj): @@ -67,8 +67,10 @@ class RiakClient(RiakMapReduceChain, RiakClientOperations): #: The supported protocols PROTOCOLS = ['http', 'pbc'] - def __init__(self, protocol='pbc', transport_options={}, nodes=None, - credentials=None, multiget_pool_size=None, **kwargs): + def __init__(self, protocol='pbc', transport_options={}, + nodes=None, credentials=None, + multiget_pool_size=None, multiput_pool_size=None, + **kwargs): """ Construct a new ``RiakClient`` object. @@ -87,6 +89,10 @@ def __init__(self, protocol='pbc', transport_options={}, nodes=None, :meth:`multiget` operations. 
Defaults to a factor of the number of CPUs in the system :type multiget_pool_size: int + :param multiput_pool_size: the number of threads to use in + :meth:`multiput` operations. Defaults to a factor of the number of + CPUs in the system + :type multiput_pool_size: int """ kwargs = kwargs.copy() @@ -96,6 +102,7 @@ def __init__(self, protocol='pbc', transport_options={}, nodes=None, self.nodes = [self._create_node(n) for n in nodes] self._multiget_pool_size = multiget_pool_size + self._multiput_pool_size = multiput_pool_size self.protocol = protocol or 'pbc' self._resolver = None self._credentials = self._create_credentials(credentials) @@ -358,6 +365,13 @@ def _multiget_pool(self): else: return None + @lazy_property + def _multiput_pool(self): + if self._multiput_pool_size: + return MultiPutPool(self._multiput_pool_size) + else: + return None + def __hash__(self): return hash(frozenset([(n.host, n.http_port, n.pb_port) for n in self.nodes])) diff --git a/riak/client/multiget.py b/riak/client/multi.py similarity index 59% rename from riak/client/multiget.py rename to riak/client/multi.py index 9b5d7522..672c32b9 100644 --- a/riak/client/multiget.py +++ b/riak/client/multi.py @@ -3,13 +3,12 @@ from threading import Thread, Lock, Event from multiprocessing import cpu_count from six import PY2 - if PY2: from Queue import Queue else: from queue import Queue -__all__ = ['multiget', 'MultiGetPool'] +__all__ = ['multiget', 'multiput', 'MultiGetPool', 'MultiPutPool'] try: @@ -21,18 +20,21 @@ POOL_SIZE = 6 #: A :class:`namedtuple` for tasks that are fed to workers in the -#: multiget pool. -Task = namedtuple('Task', ['client', 'outq', 'bucket_type', 'bucket', 'key', - 'options']) +#: multi pool. +Task = namedtuple( + 'Task', + ['client', 'outq', + 'bucket_type', 'bucket', 'key', + 'object', 'options']) -class MultiGetPool(object): +class MultiPool(object): """ - Encapsulates a pool of fetcher threads. These threads can be used - across many multi-get requests. + Encapsulates a pool of threads. These threads can be used + across many multi requests. """ - def __init__(self, size=POOL_SIZE): + def __init__(self, size=POOL_SIZE, name='unknown'): """ :param size: the desired size of the worker pool :type size: int @@ -40,6 +42,7 @@ def __init__(self, size=POOL_SIZE): self._inq = Queue() self._size = size + self._name = name self._started = Event() self._stop = Event() self._lock = Lock() @@ -57,14 +60,14 @@ def enq(self, task): if not self._stop.is_set(): self._inq.put(task) else: - raise RuntimeError("Attempted to enqueue a fetch operation while " - "multi-get pool was shutdown!") + raise RuntimeError("Attempted to enqueue an operation while " + "multi pool was shutdown!") def start(self): """ Starts the worker threads if they are not already started. This method is thread-safe and will be called automatically - when executing a MultiGet operation. + when executing an operation. """ # Check whether we are already started, skip if we are. if not self._started.is_set(): @@ -73,8 +76,9 @@ def start(self): # If we got the lock, go ahead and start the worker # threads, set the started flag, and release the lock. for i in range(self._size): - name = "riak.client.multiget-worker-{0}".format(i) - worker = Thread(target=self._fetcher, name=name) + name = "riak.client.multi-worker-{0}-{1}".format( + self._name, i) + worker = Thread(target=self._worker_method, name=name) worker.daemon = True worker.start() self._workers.append(worker) @@ -105,7 +109,26 @@ def __del__(self): # shutting down. 
self.stop() - def _fetcher(self): + def _worker_method(self): + raise NotImplementedError + + def _should_quit(self): + """ + Worker threads should exit when the stop flag is set and the + input queue is empty. Once the stop flag is set, new enqueues + are disallowed, meaning that the workers can safely drain the + queue before exiting. + + :rtype: bool + """ + return self.stopped() and self._inq.empty() + + +class MultiGetPool(MultiPool): + def __init__(self, size=POOL_SIZE): + super(MultiGetPool, self).__init__(size=size, name='get') + + def _worker_method(self): """ The body of the multi-get worker. Loops until :meth:`_should_quit` returns ``True``, taking tasks off the @@ -121,24 +144,40 @@ def _fetcher(self): except KeyboardInterrupt: raise except Exception as err: - task.outq.put((task.bucket_type, task.bucket, task.key, err), ) + errdata = (task.bucket_type, task.bucket, task.key, err) + task.outq.put(errdata) finally: self._inq.task_done() - def _should_quit(self): - """ - Worker threads should exit when the stop flag is set and the - input queue is empty. Once the stop flag is set, new enqueues - are disallowed, meaning that the workers can safely drain the - queue before exiting. - :rtype: bool +class MultiPutPool(MultiPool): + def __init__(self, size=POOL_SIZE): + super(MultiPutPool, self).__init__(size=size, name='put') + + def _worker_method(self): """ - return self.stopped() and self._inq.empty() + The body of the multi-put worker. Loops until + :meth:`_should_quit` returns ``True``, taking tasks off the + input queue, storing the object, and putting the result on + the output queue. + """ + while not self._should_quit(): + task = self._inq.get() + try: + robj = task.object + rv = task.client.put(robj, **task.options) + task.outq.put(rv) + except KeyboardInterrupt: + raise + except Exception as err: + errdata = (task.object, err) + task.outq.put(errdata) + finally: + self._inq.task_done() -#: The default pool is automatically created and stored in this constant. RIAK_MULTIGET_POOL = MultiGetPool() +RIAK_MULTIPUT_POOL = MultiPutPool() def multiget(client, keys, **options): @@ -160,8 +199,8 @@ def multiget(client, keys, **options): :meth:`RiakBucket.get ` :type options: dict :rtype: list - """ + """ outq = Queue() if 'pool' in options: @@ -172,7 +211,7 @@ def multiget(client, keys, **options): pool.start() for bucket_type, bucket, key in keys: - task = Task(client, outq, bucket_type, bucket, key, options) + task = Task(client, outq, bucket_type, bucket, key, None, options) pool.enq(task) results = [] @@ -184,3 +223,48 @@ def multiget(client, keys, **options): outq.task_done() return results + + +def multiput(client, objs, **options): + """Executes a parallel-store across multiple threads. Returns a list + containing booleans or :class:`~riak.riak_object.RiakObject` + + If a ``pool`` option is included, the request will use the given worker + pool and not the default :data:`RIAK_MULTIPUT_POOL`. This option will + be passed by the client if the ``multiput_pool_size`` option was set on + client initialization. 
+ + :param client: the client to use + :type client: :class:`RiakClient ` + :param objs: the Riak Objects to store in parallel + :type keys: list of `RiakObject ` + :param options: request options to + :meth:`RiakClient.put ` + :type options: dict + :rtype: list + """ + outq = Queue() + + if 'pool' in options: + pool = options['pool'] + del options['pool'] + else: + pool = RIAK_MULTIPUT_POOL + + pool.start() + for robj in objs: + bucket_type = robj.bucket.bucket_type + bucket = robj.bucket.name + key = robj.key + task = Task(client, outq, bucket_type, bucket, key, robj, options) + pool.enq(task) + + results = [] + for _ in range(len(objs)): + if pool.stopped(): + raise RuntimeError("Multi-put operation interrupted by pool " + "stopping!") + results.append(outq.get()) + outq.task_done() + + return results diff --git a/riak/client/operations.py b/riak/client/operations.py index d3541b3c..8bf2b9c2 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -1,6 +1,7 @@ +import riak.client.multi + from riak.client.transport import RiakClientTransport, \ retryable, retryableHttpOnly -from riak.client.multiget import multiget from riak.client.index_page import IndexPage from riak.datatypes import TYPES from riak.table import Table @@ -976,7 +977,22 @@ def multiget(self, pairs, **params): """ if self._multiget_pool: params['pool'] = self._multiget_pool - return multiget(self, pairs, **params) + return riak.client.multi.multiget(self, pairs, **params) + + def multiput(self, objs, **params): + """ + Stores objects in parallel via threads. + + :param objs: the objects to store + :type objs: list of `RiakObject ` + :param params: additional request flags, e.g. w, dw, pw + :type params: dict + :rtype: list of boolean or + :class:`RiakObjects `, + """ + if self._multiput_pool: + params['pool'] = self._multiput_pool + return riak.client.multi.multiput(self, objs, **params) @retryable def get_counter(self, transport, bucket, key, r=None, pr=None, diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index 19379d06..d39b3290 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -149,6 +149,40 @@ def test_multiget_errors(self): self.assertIsInstance(failure[3], Exception) client.close() + def test_multiput_errors(self): + """ + Unrecoverable errors are captured along with the bucket/key + and not propagated. + """ + client = self.create_client(http_port=DUMMY_HTTP_PORT, + pb_port=DUMMY_PB_PORT) + bucket = client.bucket(self.bucket_name) + k1 = self.randname() + k2 = self.randname() + o1 = RiakObject(client, bucket, k1) + o2 = RiakObject(client, bucket, k2) + + if PY2: + o1.encoded_data = k1 + o2.encoded_data = k2 + else: + o1.data = k1 + o2.data = k2 + + objs = [o1, o2] + for robj in objs: + robj.content_type = 'text/plain' + + results = client.multiput(objs, return_body=True) + for failure in results: + self.assertIsInstance(failure, tuple) + self.assertIsInstance(failure[0], RiakObject) + if PY2: + self.assertIsInstance(failure[1], StandardError) # noqa + else: + self.assertIsInstance(failure[1], Exception) + client.close() + def test_multiget_notfounds(self): """ Not founds work in multiget just the same as get. @@ -189,6 +223,74 @@ def test_multiget_pool_size(self): self.assertEqual(obj.key, obj.data) client.close() + def test_multiput_pool_size(self): + """ + The pool size for multiputs can be configured at client initiation + time. Multiput still works as expected. 
+ """ + client = self.create_client(multiput_pool_size=2) + self.assertEqual(2, client._multiput_pool._size) + + bucket = client.bucket(self.bucket_name) + k1 = self.randname() + k2 = self.randname() + o1 = RiakObject(client, bucket, k1) + o2 = RiakObject(client, bucket, k2) + + if PY2: + o1.encoded_data = k1 + o2.encoded_data = k2 + else: + o1.data = k1 + o2.data = k2 + + objs = [o1, o2] + for robj in objs: + robj.content_type = 'text/plain' + + results = client.multiput(objs, return_body=True) + for obj in results: + self.assertIsInstance(obj, RiakObject) + self.assertTrue(obj.exists) + self.assertEqual(obj.content_type, 'text/plain') + if PY2: + self.assertEqual(obj.key, obj.encoded_data) + else: + self.assertEqual(obj.key, obj.data) + client.close() + + def test_multiput_pool_options(self): + sz = 4 + client = self.create_client(multiput_pool_size=sz) + self.assertEqual(sz, client._multiput_pool._size) + + bucket = client.bucket(self.bucket_name) + k1 = self.randname() + k2 = self.randname() + o1 = RiakObject(client, bucket, k1) + o2 = RiakObject(client, bucket, k2) + + if PY2: + o1.encoded_data = k1 + o2.encoded_data = k2 + else: + o1.data = k1 + o2.data = k2 + + objs = [o1, o2] + for robj in objs: + robj.content_type = 'text/plain' + + results = client.multiput(objs, return_body=False) + for obj in results: + if client.protocol == 'pbc': + self.assertIsInstance(obj, RiakObject) + self.assertFalse(obj.exists) + self.assertEqual(obj.content_type, 'text/plain') + else: + self.assertIsNone(obj) + client.close() + @unittest.skipUnless(RUN_POOL, 'RUN_POOL is 0') def test_pool_close(self): """ From 6e2969a9770b0d347bae7cdcbefdc1893df3d43f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 27 Apr 2016 08:51:37 -0700 Subject: [PATCH 165/324] remove file --- riak/transports/pbc/transport.py | 774 ------------------------------- 1 file changed, 774 deletions(-) delete mode 100644 riak/transports/pbc/transport.py diff --git a/riak/transports/pbc/transport.py b/riak/transports/pbc/transport.py deleted file mode 100644 index 6f7063c0..00000000 --- a/riak/transports/pbc/transport.py +++ /dev/null @@ -1,774 +0,0 @@ -import riak.pb.messages -import riak.pb.riak_pb2 -import riak.pb.riak_kv_pb2 -import riak.pb.riak_ts_pb2 - -from riak import RiakError -from riak.transports.transport import RiakTransport -from riak.riak_object import VClock -from riak.ts_object import TsObject -from riak.util import decode_index_value, str_to_bytes, bytes_to_str -from riak.transports.pbc.connection import RiakPbcConnection -from riak.transports.pbc.stream import (RiakPbcKeyStream, - RiakPbcMapredStream, - RiakPbcBucketStream, - RiakPbcIndexStream, - RiakPbcTsKeyStream) -from riak.transports.pbc.codec import RiakPbcCodec -from six import PY2, PY3 - - -class RiakPbcTransport(RiakTransport, RiakPbcConnection, RiakPbcCodec): - """ - The RiakPbcTransport object holds a connection to the protocol - buffers interface on the riak server. - """ - - def __init__(self, - node=None, - client=None, - timeout=None, - socket_keepalive=False, - socket_keepalive_options=None, - *unused_options): - """ - Construct a new RiakPbcTransport object. 
- """ - super(RiakPbcTransport, self).__init__() - - self._client = client - self._node = node - self._address = (node.host, node.pb_port) - self._timeout = timeout - self._socket = None - self._socket_keepalive = socket_keepalive - self._socket_keepalive_options = socket_keepalive_options - - # FeatureDetection API - def _server_version(self): - return bytes_to_str(self.get_server_info()['server_version']) - - def ping(self): - """ - Ping the remote server - """ - - msg_code, msg = self._request(riak.pb.messages.MSG_CODE_PING_REQ) - if msg_code == riak.pb.messages.MSG_CODE_PING_RESP: - return True - else: - return False - - def get_server_info(self): - """ - Get information about the server - """ - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, - expect=riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP) - return {'node': bytes_to_str(resp.node), - 'server_version': bytes_to_str(resp.server_version)} - - def _get_client_id(self): - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_CLIENT_ID_REQ, - expect=riak.pb.messages.MSG_CODE_GET_CLIENT_ID_RESP) - return bytes_to_str(resp.client_id) - - def _set_client_id(self, client_id): - req = riak.pb.riak_kv_pb2.RpbSetClientIdReq() - req.client_id = str_to_bytes(client_id) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_CLIENT_ID_REQ, req, - riak.pb.messages.MSG_CODE_SET_CLIENT_ID_RESP) - - self._client_id = client_id - - client_id = property(_get_client_id, _set_client_id, - doc="""the client ID for this connection""") - - def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, - notfound_ok=None): - """ - Serialize get request and deserialize response - """ - bucket = robj.bucket - - req = riak.pb.riak_kv_pb2.RpbGetReq() - if r: - req.r = self._encode_quorum(r) - if self.quorum_controls(): - if pr: - req.pr = self._encode_quorum(pr) - if basic_quorum is not None: - req.basic_quorum = basic_quorum - if notfound_ok is not None: - req.notfound_ok = notfound_ok - if self.client_timeouts() and timeout: - req.timeout = timeout - if self.tombstone_vclocks(): - req.deletedvclock = True - - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - - req.key = str_to_bytes(robj.key) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_REQ, req, - riak.pb.messages.MSG_CODE_GET_RESP) - - if resp is not None: - if resp.HasField('vclock'): - robj.vclock = VClock(resp.vclock, 'binary') - # We should do this even if there are no contents, i.e. 
- # the object is tombstoned - self._decode_contents(resp.content, robj) - else: - # "not found" returns an empty message, - # so let's make sure to clear the siblings - robj.siblings = [] - - return robj - - def put(self, robj, w=None, dw=None, pw=None, return_body=True, - if_none_match=False, timeout=None): - bucket = robj.bucket - - req = riak.pb.riak_kv_pb2.RpbPutReq() - if w: - req.w = self._encode_quorum(w) - if dw: - req.dw = self._encode_quorum(dw) - if self.quorum_controls() and pw: - req.pw = self._encode_quorum(pw) - - if return_body: - req.return_body = 1 - if if_none_match: - req.if_none_match = 1 - if self.client_timeouts() and timeout: - req.timeout = timeout - - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - - if robj.key: - req.key = str_to_bytes(robj.key) - if robj.vclock: - req.vclock = robj.vclock.encode('binary') - - self._encode_content(robj, req.content) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_PUT_REQ, req, - riak.pb.messages.MSG_CODE_PUT_RESP) - - if resp is not None: - if resp.HasField('key'): - robj.key = bytes_to_str(resp.key) - if resp.HasField("vclock"): - robj.vclock = VClock(resp.vclock, 'binary') - if resp.content: - self._decode_contents(resp.content, robj) - elif not robj.key: - raise RiakError("missing response object") - - return robj - - def ts_describe(self, table): - query = 'DESCRIBE {table}'.format(table=table.name) - return self.ts_query(table, query) - - def ts_get(self, table, key): - req = riak.pb.riak_ts_pb2.TsGetReq() - self._encode_timeseries_keyreq(table, key, req) - - msg_code, ts_get_resp = self._request( - riak.pb.messages.MSG_CODE_TS_GET_REQ, req, - riak.pb.messages.MSG_CODE_TS_GET_RESP) - - tsobj = TsObject(self._client, table, [], None) - self._decode_timeseries(ts_get_resp, tsobj) - return tsobj - - def ts_put(self, tsobj): - req = riak.pb.riak_ts_pb2.TsPutReq() - self._encode_timeseries_put(tsobj, req) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_TS_PUT_REQ, req, - riak.pb.messages.MSG_CODE_TS_PUT_RESP) - - if resp is not None: - return True - else: - raise RiakError("missing response object") - - def ts_delete(self, table, key): - req = riak.pb.riak_ts_pb2.TsDelReq() - self._encode_timeseries_keyreq(table, key, req) - - msg_code, ts_del_resp = self._request( - riak.pb.messages.MSG_CODE_TS_DEL_REQ, req, - riak.pb.messages.MSG_CODE_TS_DEL_RESP) - - if ts_del_resp is not None: - return True - else: - raise RiakError("missing response object") - - def ts_query(self, table, query, interpolations=None): - req = riak.pb.riak_ts_pb2.TsQueryReq() - - q = query - if '{table}' in q: - q = q.format(table=table.name) - - req.query.base = str_to_bytes(q) - - msg_code, ts_query_resp = self._request( - riak.pb.messages.MSG_CODE_TS_QUERY_REQ, req, - riak.pb.messages.MSG_CODE_TS_QUERY_RESP) - - tsobj = TsObject(self._client, table, [], []) - self._decode_timeseries(ts_query_resp, tsobj) - return tsobj - - def ts_stream_keys(self, table, timeout=None): - """ - Streams keys from a timeseries table, returning an iterator that - yields lists of keys. 
- """ - req = riak.pb.riak_ts_pb2.TsListKeysReq() - t = None - if self.client_timeouts() and timeout: - t = timeout - self._encode_timeseries_listkeysreq(table, req, t) - - self._send_msg(riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ, req) - - return RiakPbcTsKeyStream(self) - - def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, - timeout=None): - req = riak.pb.riak_kv_pb2.RpbDelReq() - if rw: - req.rw = self._encode_quorum(rw) - if r: - req.r = self._encode_quorum(r) - if w: - req.w = self._encode_quorum(w) - if dw: - req.dw = self._encode_quorum(dw) - - if self.quorum_controls(): - if pr: - req.pr = self._encode_quorum(pr) - if pw: - req.pw = self._encode_quorum(pw) - - if self.client_timeouts() and timeout: - req.timeout = timeout - - use_vclocks = (self.tombstone_vclocks() and - hasattr(robj, 'vclock') and robj.vclock) - if use_vclocks: - req.vclock = robj.vclock.encode('binary') - - bucket = robj.bucket - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - req.key = str_to_bytes(robj.key) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DEL_REQ, req, - riak.pb.messages.MSG_CODE_DEL_RESP) - return self - - def get_keys(self, bucket, timeout=None): - """ - Lists all keys within a bucket. - """ - keys = [] - for keylist in self.stream_keys(bucket, timeout=timeout): - for key in keylist: - keys.append(bytes_to_str(key)) - - return keys - - def stream_keys(self, bucket, timeout=None): - """ - Streams keys from a bucket, returning an iterator that yields - lists of keys. - """ - req = riak.pb.riak_kv_pb2.RpbListKeysReq() - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - if self.client_timeouts() and timeout: - req.timeout = timeout - - self._send_msg(riak.pb.messages.MSG_CODE_LIST_KEYS_REQ, req) - - return RiakPbcKeyStream(self) - - def get_buckets(self, bucket_type=None, timeout=None): - """ - Serialize bucket listing request and deserialize response - """ - req = riak.pb.riak_kv_pb2.RpbListBucketsReq() - self._add_bucket_type(req, bucket_type) - - if self.client_timeouts() and timeout: - req.timeout = timeout - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req, - riak.pb.messages.MSG_CODE_LIST_BUCKETS_RESP) - return resp.buckets - - def stream_buckets(self, bucket_type=None, timeout=None): - """ - Stream list of buckets through an iterator - """ - - if not self.bucket_stream(): - raise NotImplementedError('Streaming list-buckets is not ' - 'supported') - - req = riak.pb.riak_kv_pb2.RpbListBucketsReq() - req.stream = True - self._add_bucket_type(req, bucket_type) - # Bucket streaming landed in the same release as timeouts, so - # we don't need to check the capability. 
- if timeout: - req.timeout = timeout - - self._send_msg(riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ, req) - - return RiakPbcBucketStream(self) - - def get_bucket_props(self, bucket): - """ - Serialize bucket property request and deserialize response - """ - req = riak.pb.riak_pb2.RpbGetBucketReq() - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_REQ, req, - riak.pb.messages.MSG_CODE_GET_BUCKET_RESP) - - return self._decode_bucket_props(resp.props) - - def set_bucket_props(self, bucket, props): - """ - Serialize set bucket property request and deserialize response - """ - req = riak.pb.riak_pb2.RpbSetBucketReq() - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - - if not self.pb_all_bucket_props(): - for key in props: - if key not in ('n_val', 'allow_mult'): - raise NotImplementedError('Server only supports n_val and ' - 'allow_mult properties over PBC') - - self._encode_bucket_props(props, req) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_BUCKET_REQ, req, - riak.pb.messages.MSG_CODE_SET_BUCKET_RESP) - return True - - def clear_bucket_props(self, bucket): - """ - Clear bucket properties, resetting them to their defaults - """ - if not self.pb_clear_bucket_props(): - return False - - req = riak.pb.riak_pb2.RpbResetBucketReq() - req.bucket = str_to_bytes(bucket.name) - self._add_bucket_type(req, bucket.bucket_type) - self._request( - riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ, req, - riak.pb.messages.MSG_CODE_RESET_BUCKET_RESP) - return True - - def get_bucket_type_props(self, bucket_type): - """ - Fetch bucket-type properties - """ - self._check_bucket_types(bucket_type) - - req = riak.pb.riak_pb2.RpbGetBucketTypeReq() - req.type = str_to_bytes(bucket_type.name) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ, req, - riak.pb.messages.MSG_CODE_GET_BUCKET_RESP) - - return self._decode_bucket_props(resp.props) - - def set_bucket_type_props(self, bucket_type, props): - """ - Set bucket-type properties - """ - self._check_bucket_types(bucket_type) - - req = riak.pb.riak_pb2.RpbSetBucketTypeReq() - req.type = str_to_bytes(bucket_type.name) - - self._encode_bucket_props(props, req) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ, req, - riak.pb.messages.MSG_CODE_SET_BUCKET_RESP) - - return True - - def mapred(self, inputs, query, timeout=None): - # dictionary of phase results - each content should be an encoded array - # which is appended to the result for that phase. - result = {} - for phase, content in self.stream_mapred(inputs, query, timeout): - if phase in result: - result[phase] += content - else: - result[phase] = content - - # If a single result - return the same as the HTTP interface does - # otherwise return all the phase information - if not len(result): - return None - elif len(result) == 1: - return result[max(result.keys())] - else: - return result - - def stream_mapred(self, inputs, query, timeout=None): - # Construct the job, optionally set the timeout... 
- content = self._construct_mapred_json(inputs, query, timeout) - - req = riak.pb.riak_kv_pb2.RpbMapRedReq() - req.request = str_to_bytes(content) - req.content_type = str_to_bytes("application/json") - - self._send_msg(riak.pb.messages.MSG_CODE_MAP_RED_REQ, req) - - return RiakPbcMapredStream(self) - - def get_index(self, bucket, index, startkey, endkey=None, - return_terms=None, max_results=None, continuation=None, - timeout=None, term_regex=None): - if not self.pb_indexes(): - return self._get_index_mapred_emu(bucket, index, startkey, endkey) - - if term_regex and not self.index_term_regex(): - raise NotImplementedError("Secondary index term_regex is not " - "supported") - - req = self._encode_index_req(bucket, index, startkey, endkey, - return_terms, max_results, continuation, - timeout, term_regex) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_INDEX_REQ, req, - riak.pb.messages.MSG_CODE_INDEX_RESP) - - if return_terms and resp.results: - results = [(decode_index_value(index, pair.key), - bytes_to_str(pair.value)) - for pair in resp.results] - else: - results = resp.keys[:] - if PY3: - results = [bytes_to_str(key) for key in resp.keys] - - if max_results is not None and resp.HasField('continuation'): - return (results, bytes_to_str(resp.continuation)) - else: - return (results, None) - - def stream_index(self, bucket, index, startkey, endkey=None, - return_terms=None, max_results=None, continuation=None, - timeout=None, term_regex=None): - if not self.stream_indexes(): - raise NotImplementedError("Secondary index streaming is not " - "supported") - - if term_regex and not self.index_term_regex(): - raise NotImplementedError("Secondary index term_regex is not " - "supported") - - req = self._encode_index_req(bucket, index, startkey, endkey, - return_terms, max_results, continuation, - timeout, term_regex) - req.stream = True - - self._send_msg(riak.pb.messages.MSG_CODE_INDEX_REQ, req) - - return RiakPbcIndexStream(self, index, return_terms) - - def create_search_index(self, index, schema=None, n_val=None, - timeout=None): - if not self.pb_search_admin(): - raise NotImplementedError("Search 2.0 administration is not " - "supported for this version") - index = str_to_bytes(index) - idx = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex(name=index) - if schema: - idx.schema = str_to_bytes(schema) - if n_val: - idx.n_val = n_val - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexPutReq(index=idx) - if timeout is not None: - req.timeout = timeout - - self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_PUT_REQ, req, - riak.pb.messages.MSG_CODE_PUT_RESP) - - return True - - def get_search_index(self, index): - if not self.pb_search_admin(): - raise NotImplementedError("Search 2.0 administration is not " - "supported for this version") - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq( - name=str_to_bytes(index)) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) - if len(resp.index) > 0: - return self._decode_search_index(resp.index[0]) - else: - raise RiakError('notfound') - - def list_search_indexes(self): - if not self.pb_search_admin(): - raise NotImplementedError("Search 2.0 administration is not " - "supported for this version") - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexGetReq() - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_REQ, req, - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_GET_RESP) - - return 
[self._decode_search_index(index) for index in resp.index] - - def delete_search_index(self, index): - if not self.pb_search_admin(): - raise NotImplementedError("Search 2.0 administration is not " - "supported for this version") - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaIndexDeleteReq( - name=str_to_bytes(index)) - - self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_INDEX_DELETE_REQ, req, - riak.pb.messages.MSG_CODE_DEL_RESP) - - return True - - def create_search_schema(self, schema, content): - if not self.pb_search_admin(): - raise NotImplementedError("Search 2.0 administration is not " - "supported for this version") - scma = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchema( - name=str_to_bytes(schema), - content=str_to_bytes(content)) - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaPutReq( - schema=scma) - - self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_PUT_REQ, req, - riak.pb.messages.MSG_CODE_PUT_RESP) - - return True - - def get_search_schema(self, schema): - if not self.pb_search_admin(): - raise NotImplementedError("Search 2.0 administration is not " - "supported for this version") - req = riak.pb.riak_yokozuna_pb2.RpbYokozunaSchemaGetReq( - name=str_to_bytes(schema)) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_REQ, req, - riak.pb.messages.MSG_CODE_YOKOZUNA_SCHEMA_GET_RESP) - - result = {} - result['name'] = bytes_to_str(resp.schema.name) - result['content'] = bytes_to_str(resp.schema.content) - return result - - def search(self, index, query, **params): - if not self.pb_search(): - return self._search_mapred_emu(index, query) - - if PY2 and isinstance(query, unicode): # noqa - query = query.encode('utf8') - - req = riak.pb.riak_search_pb2.RpbSearchQueryReq( - index=str_to_bytes(index), - q=str_to_bytes(query)) - self._encode_search_query(req, params) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ, req, - riak.pb.messages.MSG_CODE_SEARCH_QUERY_RESP) - - result = {} - if resp.HasField('max_score'): - result['max_score'] = resp.max_score - if resp.HasField('num_found'): - result['num_found'] = resp.num_found - result['docs'] = [self._decode_search_doc(doc) for doc in resp.docs] - return result - - def get_counter(self, bucket, key, **params): - if not bucket.bucket_type.is_default(): - raise NotImplementedError("Counters are not " - "supported with bucket-types, " - "use datatypes instead.") - - if not self.counters(): - raise NotImplementedError("Counters are not supported") - - req = riak.pb.riak_kv_pb2.RpbCounterGetReq() - req.bucket = str_to_bytes(bucket.name) - req.key = str_to_bytes(key) - if params.get('r') is not None: - req.r = self._encode_quorum(params['r']) - if params.get('pr') is not None: - req.pr = self._encode_quorum(params['pr']) - if params.get('basic_quorum') is not None: - req.basic_quorum = params['basic_quorum'] - if params.get('notfound_ok') is not None: - req.notfound_ok = params['notfound_ok'] - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_COUNTER_GET_REQ, req, - riak.pb.messages.MSG_CODE_COUNTER_GET_RESP) - if resp.HasField('value'): - return resp.value - else: - return None - - def update_counter(self, bucket, key, value, **params): - if not bucket.bucket_type.is_default(): - raise NotImplementedError("Counters are not " - "supported with bucket-types, " - "use datatypes instead.") - - if not self.counters(): - raise NotImplementedError("Counters are not supported") - - req = riak.pb.riak_kv_pb2.RpbCounterUpdateReq() - req.bucket = 
str_to_bytes(bucket.name) - req.key = str_to_bytes(key) - req.amount = value - if params.get('w') is not None: - req.w = self._encode_quorum(params['w']) - if params.get('dw') is not None: - req.dw = self._encode_quorum(params['dw']) - if params.get('pw') is not None: - req.pw = self._encode_quorum(params['pw']) - if params.get('returnvalue') is not None: - req.returnvalue = params['returnvalue'] - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_COUNTER_UPDATE_REQ, req, - riak.pb.messages.MSG_CODE_COUNTER_UPDATE_RESP) - if resp.HasField('value'): - return resp.value - else: - return True - - def fetch_datatype(self, bucket, key, **options): - - if bucket.bucket_type.is_default(): - raise NotImplementedError("Datatypes cannot be used in the default" - " bucket-type.") - - if not self.datatypes(): - raise NotImplementedError("Datatypes are not supported.") - - req = riak.pb.riak_dt_pb2.DtFetchReq() - req.type = str_to_bytes(bucket.bucket_type.name) - req.bucket = str_to_bytes(bucket.name) - req.key = str_to_bytes(key) - self._encode_dt_options(req, options) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DT_FETCH_REQ, req, - riak.pb.messages.MSG_CODE_DT_FETCH_RESP) - - return self._decode_dt_fetch(resp) - - def update_datatype(self, datatype, **options): - - if datatype.bucket.bucket_type.is_default(): - raise NotImplementedError("Datatypes cannot be used in the default" - " bucket-type.") - - if not self.datatypes(): - raise NotImplementedError("Datatypes are not supported.") - - op = datatype.to_op() - type_name = datatype.type_name - if not op: - raise ValueError("No operation to send on datatype {!r}". - format(datatype)) - - req = riak.pb.riak_dt_pb2.DtUpdateReq() - req.bucket = str_to_bytes(datatype.bucket.name) - req.type = str_to_bytes(datatype.bucket.bucket_type.name) - - if datatype.key: - req.key = str_to_bytes(datatype.key) - if datatype._context: - req.context = datatype._context - - self._encode_dt_options(req, options) - - self._encode_dt_op(type_name, req, op) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_DT_UPDATE_REQ, req, - riak.pb.messages.MSG_CODE_DT_UPDATE_RESP) - if resp.HasField('key'): - datatype.key = resp.key[:] - if resp.HasField('context'): - datatype._context = resp.context[:] - - if options.get('return_body'): - datatype._set_value(self._decode_dt_value(type_name, resp)) - - return True - - def get_preflist(self, bucket, key): - """ - Get the preflist for a bucket/key - - :param bucket: Riak Bucket - :type bucket: :class:`~riak.bucket.RiakBucket` - :param key: Riak Key - :type key: string - :rtype: list of dicts - """ - req = riak.pb.riak_kv_pb2.RpbGetBucketKeyPreflistReq() - req.bucket = str_to_bytes(bucket.name) - req.key = str_to_bytes(key) - req.type = str_to_bytes(bucket.bucket_type.name) - - msg_code, resp = self._request( - riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ, req, - riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_RESP) - - return [self._decode_preflist(item) for item in resp.preflist] From 0c557aecbafb548ff2c1d40dfe4b07d0a334477f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 27 Apr 2016 10:00:42 -0700 Subject: [PATCH 166/324] Raise BadResource when a recv_into call returns zero bytes. This indicates that the connection has been closed, and the operation should be re-tried. 
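
A minimal sketch of the reasoning above (illustration only, not the code in this patch; the read_exact helper and the standalone BadResource class here are assumptions): on a blocking socket, recv_into returning zero bytes means the peer has closed the connection, so the read loop must bail out instead of spinning, and the pooled connection should be discarded so the operation can be retried on another node.

class BadResource(Exception):
    """Signals that a pooled connection is unusable and should be discarded."""

def read_exact(sock, msglen):
    # sock is an already-connected, blocking socket.socket.
    # Read exactly msglen bytes from it, or raise BadResource.
    buf = bytearray(msglen)
    view = memoryview(buf)
    toread = msglen
    while toread:
        nbytes = sock.recv_into(view, toread)
        if nbytes == 0:
            # recv_into returning 0 on a blocking socket means the remote
            # end closed the connection; without this check the loop would
            # never terminate. Surface it so the pool can drop the socket
            # and the caller can retry elsewhere.
            raise BadResource('recv_into returned zero bytes unexpectedly')
        view = view[nbytes:]   # slicing a memoryview is cheap
        toread -= nbytes
    return bytes(buf)
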
--- riak/client/transport.py | 4 ++-- riak/transports/pool.py | 9 ++++++++- riak/transports/tcp/__init__.py | 2 +- riak/transports/tcp/connection.py | 9 ++++++++- 4 files changed, 19 insertions(+), 5 deletions(-) diff --git a/riak/client/transport.py b/riak/client/transport.py index 6aca7f24..bb2aaef9 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -1,6 +1,6 @@ from contextlib import contextmanager from riak.transports.pool import BadResource -from riak.transports.tcp import is_retryable as is_pbc_retryable +from riak.transports.tcp import is_retryable as is_tcp_retryable from riak.transports.http import is_retryable as is_http_retryable import threading from six import PY2 @@ -162,7 +162,7 @@ def _is_retryable(error): :type error: Exception :rtype: boolean """ - return is_pbc_retryable(error) or is_http_retryable(error) + return is_tcp_retryable(error) or is_http_retryable(error) def retryable(fn, protocol=None): diff --git a/riak/transports/pool.py b/riak/transports/pool.py index d0a9ee7f..308e31d6 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -12,7 +12,14 @@ class BadResource(Exception): resource currently in-use is bad and should be removed from the pool. """ - pass + def __init__(self, value=None): + self.value = value + + def __str__(self): + if self.value is None: + return 'BadResource' + else: + return repr(self.value) class Resource(object): diff --git a/riak/transports/tcp/__init__.py b/riak/transports/tcp/__init__.py index 312f9194..2634af0a 100644 --- a/riak/transports/tcp/__init__.py +++ b/riak/transports/tcp/__init__.py @@ -42,7 +42,7 @@ def destroy_resource(self, tcp): def is_retryable(err): """ Determines if the given exception is something that is - network/socket-related and should thus cause the PBC connection to + network/socket-related and should thus cause the TCP connection to close and the operation retried on another node. :rtype: boolean diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index a8ca4e4d..4808e20b 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -7,6 +7,7 @@ from riak import RiakError from riak.codecs.pbuf import PbufCodec from riak.security import SecurityError, USE_STDLIB_SSL +from riak.transports.pool import BadResource if not USE_STDLIB_SSL: from OpenSSL.SSL import Connection @@ -174,6 +175,10 @@ def _recv(self, msglen): toread = msglen while toread: nbytes = self._socket.recv_into(view, toread) + # https://docs.python.org/2/howto/sockets.html#using-a-socket + # https://github.com/basho/riak-python-client/issues/399 + if nbytes == 0: + raise BadResource('recv_into returned zero bytes unexpectedly') view = view[nbytes:] # slicing views is cheap toread -= nbytes nread += nbytes @@ -190,7 +195,8 @@ def _connect(self): else: self._socket = socket.create_connection(self._address) if self._socket_keepalive: - self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) + self._socket.setsockopt( + socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) ka_opts = self._socket_keepalive_options or {} for k, v in ka_opts.iteritems(): self._socket.setsockopt(socket.SOL_TCP, k, v) @@ -202,6 +208,7 @@ def close(self): Closes the underlying socket of the PB connection. 
""" if self._socket: + self._socket.shutdown(socket.SHUT_RDWR) self._socket.close() del self._socket From fe46c6d69f0d733e27a753196abdda7c95be2ee2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 27 Apr 2016 11:35:16 -0700 Subject: [PATCH 167/324] Subclass Exception correctly --- riak/riak_error.py | 10 +++++++--- riak/transports/pool.py | 9 +-------- 2 files changed, 8 insertions(+), 11 deletions(-) diff --git a/riak/riak_error.py b/riak/riak_error.py index ce582bbb..b99eb7fe 100644 --- a/riak/riak_error.py +++ b/riak/riak_error.py @@ -21,8 +21,12 @@ class RiakError(Exception): """ Base class for exceptions generated in the Riak API. """ - def __init__(self, value): - self.value = value + def __init__(self, *args, **kwargs): + super(RiakError, self).__init__(*args, **kwargs) + if len(args) > 0: + self.value = args[0] + else: + self.value = 'unknown' def __str__(self): return repr(self.value) @@ -34,5 +38,5 @@ class ConflictError(RiakError): :class:`~riak.riak_object.RiakObject` that has more than one sibling. """ - def __init__(self, message="Object in conflict"): + def __init__(self, message='Object in conflict'): super(ConflictError, self).__init__(message) diff --git a/riak/transports/pool.py b/riak/transports/pool.py index 308e31d6..d0a9ee7f 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -12,14 +12,7 @@ class BadResource(Exception): resource currently in-use is bad and should be removed from the pool. """ - def __init__(self, value=None): - self.value = value - - def __str__(self): - if self.value is None: - return 'BadResource' - else: - return repr(self.value) + pass class Resource(object): From 866259f91d4a7112f9a4c41bb2375dd92132555a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 27 Apr 2016 15:45:17 -0700 Subject: [PATCH 168/324] Fixes in using socket.shutdown on older Python --- riak/transports/tcp/connection.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 4808e20b..2380a3d9 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,3 +1,4 @@ +import errno import socket import struct @@ -9,12 +10,12 @@ from riak.security import SecurityError, USE_STDLIB_SSL from riak.transports.pool import BadResource -if not USE_STDLIB_SSL: - from OpenSSL.SSL import Connection - from riak.transports.security import configure_pyopenssl_context -else: +if USE_STDLIB_SSL: import ssl from riak.transports.security import configure_ssl_context +else: + from OpenSSL.SSL import Connection + from riak.transports.security import configure_pyopenssl_context class TcpConnection(object): @@ -208,7 +209,16 @@ def close(self): Closes the underlying socket of the PB connection. 
""" if self._socket: - self._socket.shutdown(socket.SHUT_RDWR) + if USE_STDLIB_SSL: + # NB: Python 2.7.8 and earlier does not have a compatible + # shutdown() method due to the SSL lib + try: + self._socket.shutdown(socket.SHUT_RDWR) + except IOError as e: + # NB: sometimes this is the exception if the initial + # connection didn't succeed correctly + if e.errno != errno.EBADF: + raise self._socket.close() del self._socket From 30ef3e17003984b445a47158cba1455a0f58172c Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 28 Apr 2016 08:19:03 -0700 Subject: [PATCH 169/324] prevent duplicate loading of test data --- riak/tests/test_btypes.py | 19 ++++++++++++------- riak/tests/test_kv.py | 25 ++++++++++++++++++------- 2 files changed, 30 insertions(+), 14 deletions(-) diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index d0fe728b..97d1b1a6 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -151,13 +151,18 @@ def test_multiget_bucket_types(self): self.assertEqual(btype, mobj.bucket.bucket_type) def test_write_once_bucket_type(self): - btype = self.client.bucket_type('write_once') - bucket = btype.bucket(self.bucket_name) - - for i in range(100): - obj = bucket.new(self.key_name + str(i)) - obj.data = {'id': i} - obj.store() + bt = 'write_once' + skey = 'write_once-init' + btype = self.client.bucket_type(bt) + bucket = btype.bucket(bt) + sobj = bucket.get(skey) + if not sobj.exists: + for i in range(100): + o = bucket.new(self.key_name + str(i)) + o.data = {'id': i} + o.store() + o = bucket.new(skey, data={'id': skey}) + o.store() mget = bucket.multiget([self.key_name + str(i) for i in range(100)]) for mobj in mget: diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 5513c603..aeebed68 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -180,17 +180,29 @@ def test_string_bucket_name(self): def test_generate_key(self): # Ensure that Riak generates a random key when # the key passed to bucket.new() is None. 
- bucket = self.client.bucket('random_key_bucket') - existing_keys = bucket.get_keys() + bucket = self.client.bucket(self.bucket_name) o = bucket.new(None, data={}) self.assertIsNone(o.key) o.store() self.assertIsNotNone(o.key) self.assertNotIn('/', o.key) - self.assertNotIn(o.key, existing_keys) - self.assertEqual(len(bucket.get_keys()), len(existing_keys) + 1) + existing_keys = bucket.get_keys() + self.assertEqual(len(existing_keys), 1) + + def maybe_store_keys(self): + skey = 'rkb-init' + bucket = self.client.bucket('random_key_bucket') + sobj = bucket.get(skey) + if sobj.exists: + return + for key in range(1, 1000): + o = bucket.new(None, data={}) + o.store() + o = bucket.new(skey, data={}) + o.store() def test_stream_keys(self): + self.maybe_store_keys() bucket = self.client.bucket('random_key_bucket') regular_keys = bucket.get_keys() self.assertNotEqual(len(regular_keys), 0) @@ -203,10 +215,8 @@ def test_stream_keys(self): self.assertEqual(sorted(regular_keys), sorted(streamed_keys)) def test_stream_keys_timeout(self): + self.maybe_store_keys() bucket = self.client.bucket('random_key_bucket') - for key in range(1, 1000): - o = bucket.new(None, data={}) - o.store() streamed_keys = [] with self.assertRaises(RiakError): for keylist in self.client.stream_keys(bucket, timeout=1): @@ -216,6 +226,7 @@ def test_stream_keys_timeout(self): streamed_keys += keylist def test_stream_keys_abort(self): + self.maybe_store_keys() bucket = self.client.bucket('random_key_bucket') regular_keys = bucket.get_keys() self.assertNotEqual(len(regular_keys), 0) From 4bfc758431f31217ab9f8d1bfe77ab0f4508486e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 28 Apr 2016 10:39:06 -0700 Subject: [PATCH 170/324] Tweak code to set socket keepalives and options, and add test --- riak/tests/test_client.py | 12 ++++++++++++ riak/transports/tcp/connection.py | 7 ++++--- riak/transports/tcp/transport.py | 11 ++++++----- 3 files changed, 22 insertions(+), 8 deletions(-) diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index 19379d06..18f3d558 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -3,6 +3,7 @@ from six import PY2 from threading import Thread from riak.riak_object import RiakObject +from riak.transports.tcp import TcpTransport from riak.tests import DUMMY_HTTP_PORT, DUMMY_PB_PORT, RUN_POOL from riak.tests.base import IntegrationTestBase @@ -13,6 +14,17 @@ class ClientTests(IntegrationTestBase, unittest.TestCase): + def test_can_set_tcp_keepalive(self): + if self.protocol == 'pbc': + topts = {'socket_keepalive': True} + c = self.create_client(transport_options=topts) + for i, r in enumerate(c._tcp_pool.resources): + self.assertIsInstance(r, TcpTransport) + self.assertTrue(r._socket_keepalive) + c.close() + else: + pass + def test_uses_client_id_if_given(self): if self.protocol == 'pbc': zero_client_id = "\0\0\0\0" diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 2380a3d9..10adf191 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -195,12 +195,13 @@ def _connect(self): self._timeout) else: self._socket = socket.create_connection(self._address) + if self._socket_tcp_options: + ka_opts = self._socket_tcp_options + for k, v in ka_opts.iteritems(): + self._socket.setsockopt(socket.SOL_TCP, k, v) if self._socket_keepalive: self._socket.setsockopt( socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1) - ka_opts = self._socket_keepalive_options or {} - for k, v in ka_opts.iteritems(): - 
self._socket.setsockopt(socket.SOL_TCP, k, v) if self._client._credentials: self._init_security() diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 46fa1c8a..7f440d7c 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -25,8 +25,6 @@ def __init__(self, node=None, client=None, timeout=None, - socket_keepalive=False, - socket_keepalive_options=None, **kwargs): super(TcpTransport, self).__init__() @@ -35,11 +33,14 @@ def __init__(self, self._address = (node.host, node.pb_port) self._timeout = timeout self._socket = None - self._socket_keepalive = socket_keepalive - self._socket_keepalive_options = socket_keepalive_options self._pbuf_c = None self._ttb_c = None - self._use_ttb = kwargs.get('use_ttb', True) + self._socket_tcp_options = \ + kwargs.get('socket_tcp_options', {}) + self._socket_keepalive = \ + kwargs.get('socket_keepalive', False) + self._use_ttb = \ + kwargs.get('use_ttb', True) def _get_pbuf_codec(self): if not self._pbuf_c: From 23b19294fb2593739558d4bb2ad7d03616be7bd1 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 28 Apr 2016 10:41:23 -0700 Subject: [PATCH 171/324] Fix instance variable declaration --- riak/transports/tcp/connection.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 10adf191..aabcb52e 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -19,6 +19,12 @@ class TcpConnection(object): + # These are set in the TcpTransport initializer + _address = None + _timeout = None + _socket_keepalive = None + _socket_tcp_options = None + """ Connection-related methods for TcpTransport. """ @@ -222,9 +228,3 @@ def close(self): raise self._socket.close() del self._socket - - # These are set in the TcpTransport initializer - _address = None - _timeout = None - _socket_keepalive = None - _socket_keepalive_options = None From 001865c138ef773c6ff13a35992019b25c6c6344 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 29 Apr 2016 12:51:41 -0700 Subject: [PATCH 172/324] Improve unix timestamp -> datetime conversion, update to riak_pb 2.1.3.0 --- riak/codecs/ttb.py | 23 +++---- riak/pb/messages.py | 8 +-- riak/pb/riak_pb2.py | 72 +------------------- riak/pb/riak_ts_pb2.py | 109 ++++++++---------------------- riak/tests/__init__.py | 8 +++ riak/tests/base.py | 37 +++------- riak/tests/test_timeseries_ttb.py | 2 +- riak/tests/test_util.py | 65 +++++++++++++++++- riak/transports/tcp/transport.py | 17 ++--- riak/transports/transport.py | 4 +- riak/util.py | 15 ++-- riak_pb | 2 +- 12 files changed, 149 insertions(+), 213 deletions(-) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 6bb81f0d..e8358326 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -6,6 +6,7 @@ from riak import RiakError from riak.codecs import Codec, Msg +from riak.pb.messages import MSG_CODE_TS_TTB_MSG from riak.ts_object import TsColumns from riak.util import bytes_to_str, unix_time_millis, \ datetime_from_unix_time_millis @@ -23,9 +24,6 @@ tsdelreq_a = Atom('tsdelreq') timestamp_a = Atom('timestamp') -# TODO RTS-842 -MSG_CODE_TS_TTB = 104 - class TtbCodec(Codec): ''' @@ -36,7 +34,7 @@ def __init__(self, **unused_args): super(TtbCodec, self).__init__(**unused_args) def parse_msg(self, msg_code, data): - if msg_code != MSG_CODE_TS_TTB: + if msg_code != MSG_CODE_TS_TTB_MSG: raise RiakError("TTB can't parse code: {}".format(msg_code)) if len(data) > 0: decoded = decode(data) @@ 
-61,6 +59,7 @@ def encode_to_ts_cell(self, cell): else: if isinstance(cell, datetime.datetime): ts = unix_time_millis(cell) + # logging.debug('encoded datetime %s as %s', cell, ts) return ts elif isinstance(cell, bool): return cell @@ -84,19 +83,19 @@ def encode_timeseries_keyreq(self, table, key, is_delete=False): else: raise ValueError("key must be a list") - mc = MSG_CODE_TS_TTB - rc = MSG_CODE_TS_TTB + mc = MSG_CODE_TS_TTB_MSG + rc = MSG_CODE_TS_TTB_MSG req_atom = tsgetreq_a if is_delete: req_atom = tsdelreq_a - # TODO RTS-842 timeout is last + # TODO FUTURE add timeout as last param req = req_atom, table.name, \ [self.encode_to_ts_cell(k) for k in key_vals], udef_a return Msg(mc, encode(req), rc) def validate_timeseries_put_resp(self, resp_code, resp): - if resp is None and resp_code == MSG_CODE_TS_TTB: + if resp is None and resp_code == MSG_CODE_TS_TTB_MSG: return True if resp is not None: return True @@ -123,8 +122,8 @@ def encode_timeseries_put(self, tsobj): req_r.append(self.encode_to_ts_cell(cell)) req_rows.append(tuple(req_r)) req = tsputreq_a, tsobj.table.name, [], req_rows - mc = MSG_CODE_TS_TTB - rc = MSG_CODE_TS_TTB + mc = MSG_CODE_TS_TTB_MSG + rc = MSG_CODE_TS_TTB_MSG return Msg(mc, encode(req), rc) else: raise RiakError("TsObject requires a list of rows") @@ -135,8 +134,8 @@ def encode_timeseries_query(self, table, query, interpolations=None): q = q.format(table=table.name) tsi = tsinterpolation_a, q, [] req = tsqueryreq_a, tsi, False, [] - mc = MSG_CODE_TS_TTB - rc = MSG_CODE_TS_TTB + mc = MSG_CODE_TS_TTB_MSG + rc = MSG_CODE_TS_TTB_MSG return Msg(mc, encode(req), rc) def decode_timeseries(self, resp_ttb, tsobj): diff --git a/riak/pb/messages.py b/riak/pb/messages.py index 9bbf284a..76c25e82 100644 --- a/riak/pb/messages.py +++ b/riak/pb/messages.py @@ -81,9 +81,7 @@ MSG_CODE_TS_COVERAGE_RESP = 101 MSG_CODE_TS_COVERAGE_ENTRY = 102 MSG_CODE_TS_RANGE = 103 -MSG_CODE_TS_TTB_PUT_REQ = 104 -MSG_CODE_TOGGLE_ENCODING_REQ = 110 -MSG_CODE_TOGGLE_ENCODING_RESP = 111 +MSG_CODE_TS_TTB_MSG = 104 MSG_CODE_AUTH_REQ = 253 MSG_CODE_AUTH_RESP = 254 MSG_CODE_START_TLS = 255 @@ -168,9 +166,7 @@ MSG_CODE_TS_COVERAGE_RESP: riak.pb.riak_ts_pb2.TsCoverageResp, MSG_CODE_TS_COVERAGE_ENTRY: riak.pb.riak_ts_pb2.TsCoverageEntry, MSG_CODE_TS_RANGE: riak.pb.riak_ts_pb2.TsRange, - MSG_CODE_TS_TTB_PUT_REQ: riak.pb.riak_ts_pb2.TsTtbPutReq, - MSG_CODE_TOGGLE_ENCODING_REQ: riak.pb.riak_pb2.RpbToggleEncodingReq, - MSG_CODE_TOGGLE_ENCODING_RESP: riak.pb.riak_pb2.RpbToggleEncodingResp, + MSG_CODE_TS_TTB_MSG: None, MSG_CODE_AUTH_REQ: riak.pb.riak_pb2.RpbAuthReq, MSG_CODE_AUTH_RESP: None, MSG_CODE_START_TLS: None diff --git a/riak/pb/riak_pb2.py b/riak/pb/riak_pb2.py index d55a142c..a757940a 100644 --- a/riak/pb/riak_pb2.py +++ b/riak/pb/riak_pb2.py @@ -14,7 +14,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak.proto', package='', - serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 
\x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\"*\n\x14RpbToggleEncodingReq\x12\x12\n\nuse_native\x18\x01 \x02(\x08\"+\n\x15RpbToggleEncodingResp\x12\x12\n\nuse_native\x18\x01 \x02(\x08\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') + serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 
\x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') @@ -678,62 +678,6 @@ serialized_end=1344, ) - -_RPBTOGGLEENCODINGREQ = _descriptor.Descriptor( - name='RpbToggleEncodingReq', - full_name='RpbToggleEncodingReq', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='use_native', full_name='RpbToggleEncodingReq.use_native', index=0, - number=1, type=8, cpp_type=7, label=2, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1346, - serialized_end=1388, -) - - -_RPBTOGGLEENCODINGRESP = _descriptor.Descriptor( - name='RpbToggleEncodingResp', - full_name='RpbToggleEncodingResp', - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='use_native', full_name='RpbToggleEncodingResp.use_native', index=0, - number=1, type=8, cpp_type=7, label=2, - has_default_value=False, default_value=False, - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=1390, - serialized_end=1433, -) - _RPBGETBUCKETRESP.fields_by_name['props'].message_type = _RPBBUCKETPROPS _RPBSETBUCKETREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS _RPBSETBUCKETTYPEREQ.fields_by_name['props'].message_type = _RPBBUCKETPROPS @@ -757,8 +701,6 @@ DESCRIPTOR.message_types_by_name['RpbCommitHook'] = _RPBCOMMITHOOK DESCRIPTOR.message_types_by_name['RpbBucketProps'] = _RPBBUCKETPROPS DESCRIPTOR.message_types_by_name['RpbAuthReq'] = _RPBAUTHREQ -DESCRIPTOR.message_types_by_name['RpbToggleEncodingReq'] = _RPBTOGGLEENCODINGREQ -DESCRIPTOR.message_types_by_name['RpbToggleEncodingResp'] = _RPBTOGGLEENCODINGRESP @add_metaclass(_reflection.GeneratedProtocolMessageType) class RpbErrorResp(_message.Message): @@ -838,18 +780,6 @@ class RpbAuthReq(_message.Message): # 
@@protoc_insertion_point(class_scope:RpbAuthReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbToggleEncodingReq(_message.Message): - DESCRIPTOR = _RPBTOGGLEENCODINGREQ - - # @@protoc_insertion_point(class_scope:RpbToggleEncodingReq) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbToggleEncodingResp(_message.Message): - DESCRIPTOR = _RPBTOGGLEENCODINGRESP - - # @@protoc_insertion_point(class_scope:RpbToggleEncodingResp) - DESCRIPTOR.has_options = True DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\006RiakPB') diff --git a/riak/pb/riak_ts_pb2.py b/riak/pb/riak_ts_pb2.py index ce9b250f..6e2ee149 100644 --- a/riak/pb/riak_ts_pb2.py +++ b/riak/pb/riak_ts_pb2.py @@ -16,7 +16,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_ts.proto', package='', - serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"Y\n\x0bTsTtbPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 \x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 
\x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') + serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 \x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 \x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') _TSCOLUMNTYPE = _descriptor.EnumDescriptor( name='TsColumnType', @@ -47,8 +47,8 @@ ], containing_type=None, options=None, - serialized_start=1450, - serialized_end=1529, + serialized_start=1359, + serialized_end=1438, ) TsColumnType = enum_type_wrapper.EnumTypeWrapper(_TSCOLUMNTYPE) @@ -263,48 +263,6 @@ ) -_TSTTBPUTREQ = _descriptor.Descriptor( - name='TsTtbPutReq', - full_name='TsTtbPutReq', - filename=None, 
- file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name='table', full_name='TsTtbPutReq.table', index=0, - number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='columns', full_name='TsTtbPutReq.columns', index=1, - number=2, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - _descriptor.FieldDescriptor( - name='rows', full_name='TsTtbPutReq.rows', index=2, - number=3, type=11, cpp_type=10, label=3, - has_default_value=False, default_value=[], - message_type=None, enum_type=None, containing_type=None, - is_extension=False, extension_scope=None, - options=None), - ], - extensions=[ - ], - nested_types=[], - enum_types=[ - ], - options=None, - is_extendable=False, - extension_ranges=[], - serialized_start=446, - serialized_end=535, -) - - _TSPUTRESP = _descriptor.Descriptor( name='TsPutResp', full_name='TsPutResp', @@ -321,8 +279,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=537, - serialized_end=548, + serialized_start=446, + serialized_end=457, ) @@ -370,8 +328,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=550, - serialized_end=630, + serialized_start=459, + serialized_end=539, ) @@ -391,8 +349,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=632, - serialized_end=643, + serialized_start=541, + serialized_end=552, ) @@ -426,8 +384,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=645, - serialized_end=710, + serialized_start=554, + serialized_end=619, ) @@ -461,8 +419,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=712, - serialized_end=776, + serialized_start=621, + serialized_end=685, ) @@ -489,8 +447,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=778, - serialized_end=809, + serialized_start=687, + serialized_end=718, ) @@ -545,8 +503,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=811, - serialized_end=934, + serialized_start=720, + serialized_end=843, ) @@ -580,8 +538,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=936, - serialized_end=983, + serialized_start=845, + serialized_end=892, ) @@ -615,8 +573,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=985, - serialized_end=1037, + serialized_start=894, + serialized_end=946, ) @@ -664,8 +622,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1039, - serialized_end=1152, + serialized_start=948, + serialized_end=1061, ) @@ -692,8 +650,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1154, - serialized_end=1205, + serialized_start=1063, + serialized_end=1114, ) @@ -741,8 +699,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1207, - serialized_end=1298, + serialized_start=1116, + serialized_end=1207, ) @@ -804,8 +762,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1301, - serialized_end=1448, + serialized_start=1210, + serialized_end=1357, ) _TSQUERYREQ.fields_by_name['query'].message_type = _TSINTERPOLATION @@ -816,8 +774,6 @@ 
_TSGETRESP.fields_by_name['rows'].message_type = _TSROW _TSPUTREQ.fields_by_name['columns'].message_type = _TSCOLUMNDESCRIPTION _TSPUTREQ.fields_by_name['rows'].message_type = _TSROW -_TSTTBPUTREQ.fields_by_name['columns'].message_type = _TSCOLUMNDESCRIPTION -_TSTTBPUTREQ.fields_by_name['rows'].message_type = _TSROW _TSDELREQ.fields_by_name['key'].message_type = _TSCELL _TSINTERPOLATION.fields_by_name['interpolations'].message_type = riak.pb.riak_pb2._RPBPAIR _TSCOLUMNDESCRIPTION.fields_by_name['type'].enum_type = _TSCOLUMNTYPE @@ -831,7 +787,6 @@ DESCRIPTOR.message_types_by_name['TsGetReq'] = _TSGETREQ DESCRIPTOR.message_types_by_name['TsGetResp'] = _TSGETRESP DESCRIPTOR.message_types_by_name['TsPutReq'] = _TSPUTREQ -DESCRIPTOR.message_types_by_name['TsTtbPutReq'] = _TSTTBPUTREQ DESCRIPTOR.message_types_by_name['TsPutResp'] = _TSPUTRESP DESCRIPTOR.message_types_by_name['TsDelReq'] = _TSDELREQ DESCRIPTOR.message_types_by_name['TsDelResp'] = _TSDELRESP @@ -876,12 +831,6 @@ class TsPutReq(_message.Message): # @@protoc_insertion_point(class_scope:TsPutReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsTtbPutReq(_message.Message): - DESCRIPTOR = _TSTTBPUTREQ - - # @@protoc_insertion_point(class_scope:TsTtbPutReq) - @add_metaclass(_reflection.GeneratedProtocolMessageType) class TsPutResp(_message.Message): DESCRIPTOR = _TSPUTRESP diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 0cf7d1d4..a3b82800 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -1,5 +1,7 @@ +import logging import os import socket +import sys from riak.test_server import TestServer from riak.security import SecurityCreds @@ -27,6 +29,12 @@ def hostname_resolves(hostname): except socket.error: return 0 +distutils_debug = os.environ.get('DISTUTILS_DEBUG', '0') +if distutils_debug == '1': + logger = logging.getLogger() + logger.level = logging.DEBUG + logger.addHandler(logging.StreamHandler(sys.stdout)) + HOST = os.environ.get('RIAK_TEST_HOST', '127.0.0.1') PROTOCOL = os.environ.get('RIAK_TEST_PROTOCOL', 'pbc') diff --git a/riak/tests/base.py b/riak/tests/base.py index 97b9607c..4e9f5122 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -45,16 +45,16 @@ def create_client(cls, host=None, http_port=None, pb_port=None, if hasattr(cls, 'client_options'): kwargs.update(cls.client_options) - if hasattr(cls, 'logging_enabled') and cls.logging_enabled: - cls.logger.debug("RiakClient(protocol='%s', host='%s', " + - "pb_port='%d', http_port='%d', " + - "credentials='%s', kwargs='%s')", - protocol, - host, - pb_port, - http_port, - credentials, - kwargs) + logger = logging.getLogger() + logger.debug("RiakClient(protocol='%s', host='%s', " + + "pb_port='%d', http_port='%d', " + + "credentials='%s', kwargs='%s')", + protocol, + host, + pb_port, + http_port, + credentials, + kwargs) return RiakClient(protocol=protocol, host=host, @@ -63,23 +63,6 @@ def create_client(cls, host=None, http_port=None, pb_port=None, pb_port=pb_port, **kwargs) - @classmethod - def setUpClass(cls): - cls.logging_enabled = False - distutils_debug = os.environ.get('DISTUTILS_DEBUG', '0') - if distutils_debug == '1': - cls.logging_enabled = True - cls.logger = logging.getLogger() - cls.logger.level = logging.DEBUG - cls.logging_stream_handler = logging.StreamHandler(sys.stdout) - cls.logger.addHandler(cls.logging_stream_handler) - - @classmethod - def tearDownClass(cls): - if hasattr(cls, 'logging_enabled') and cls.logging_enabled: - cls.logger.removeHandler(cls.logging_stream_handler) - 
cls.logging_enabled = False - def setUp(self): self.bucket_name = self.randname() self.key_name = self.randname() diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 339dd040..58b1322b 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -38,7 +38,7 @@ bd1 = six.u('временные ряды') fiveMins = datetime.timedelta(0, 300) -ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0) +ts0 = datetime.datetime(2015, 1, 1, 12, 1, 2, 987000) ts1 = ts0 + fiveMins diff --git a/riak/tests/test_util.py b/riak/tests/test_util.py index af704516..becfd06a 100644 --- a/riak/tests/test_util.py +++ b/riak/tests/test_util.py @@ -1,10 +1,67 @@ +import datetime import unittest -from riak.util import is_timeseries_supported +from riak.util import is_timeseries_supported, \ + datetime_from_unix_time_millis, \ + unix_time_millis class UtilUnitTests(unittest.TestCase): + # NB: + # 144379690 secs, 987 msecs past epoch + # 144379690987 total msecs past epoch + def test_conv_ms_timestamp_to_datetime_and_back(self): + if is_timeseries_supported(): + # this is what would be stored in Riak TS + v = 144379690987 + dt = datetime_from_unix_time_millis(v) + + # This is how Python represents the above + utp = 144379690.987000 + dtp = datetime.datetime.utcfromtimestamp(utp) + self.assertEqual(dt, dtp) + + utm = unix_time_millis(dt) + self.assertEqual(v, utm) + else: + pass + + def test_conv_datetime_to_unix_millis(self): + # This is the "native" Python unix timestamp including + # microseconds, as float. timedelta "total_seconds()" + # returns a value like this + if is_timeseries_supported(): + v = 144379690.987000 + d = datetime.datetime.utcfromtimestamp(v) + utm = unix_time_millis(d) + self.assertEqual(utm, 144379690987) + else: + pass + + def test_unix_millis_validation(self): + v = 144379690.987 + with self.assertRaises(ValueError): + datetime_from_unix_time_millis(v) + + def test_unix_millis_small_value(self): + if is_timeseries_supported(): + # this is what would be stored in Riak TS + v = 1001 + dt = datetime_from_unix_time_millis(v) + + # This is how Python represents the above + utp = 1.001 + dtp = datetime.datetime.utcfromtimestamp(utp) + self.assertEqual(dt, dtp) + + utm = unix_time_millis(dt) + self.assertEqual(v, utm) + else: + pass + def test_is_timeseries_supported(self): + v = (2, 7, 10) + self.assertEqual(True, is_timeseries_supported(v)) v = (2, 7, 11) self.assertEqual(True, is_timeseries_supported(v)) v = (2, 7, 12) @@ -13,3 +70,9 @@ def test_is_timeseries_supported(self): self.assertEqual(False, is_timeseries_supported(v)) v = (3, 4, 3) self.assertEqual(False, is_timeseries_supported(v)) + v = (3, 4, 4) + self.assertEqual(True, is_timeseries_supported(v)) + v = (3, 4, 5) + self.assertEqual(True, is_timeseries_supported(v)) + v = (3, 5, 1) + self.assertEqual(True, is_timeseries_supported(v)) diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 58420767..7d99dcd1 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -1,10 +1,12 @@ import six + import riak.pb.messages from riak import RiakError from riak.codecs import Codec, Msg from riak.codecs.pbuf import PbufCodec -from riak.codecs.ttb import TtbCodec, MSG_CODE_TS_TTB +from riak.codecs.ttb import TtbCodec +from riak.pb.messages import MSG_CODE_TS_TTB_MSG from riak.transports.transport import Transport from riak.ts_object import TsObject @@ -54,7 +56,7 @@ def _get_ttb_codec(self): return codec def _get_codec(self, msg_code): - if 
msg_code == MSG_CODE_TS_TTB: + if msg_code == MSG_CODE_TS_TTB_MSG: codec = self._get_ttb_codec() elif msg_code == riak.pb.messages.MSG_CODE_TS_GET_REQ: codec = self._get_ttb_codec() @@ -140,7 +142,7 @@ def ts_describe(self, table): return self.ts_query(table, query) def ts_get(self, table, key): - msg_code = MSG_CODE_TS_TTB + msg_code = MSG_CODE_TS_TTB_MSG codec = self._get_codec(msg_code) msg = codec.encode_timeseries_keyreq(table, key) resp_code, resp = self._request(msg, codec) @@ -149,7 +151,7 @@ def ts_get(self, table, key): return tsobj def ts_put(self, tsobj): - msg_code = MSG_CODE_TS_TTB + msg_code = MSG_CODE_TS_TTB_MSG codec = self._get_codec(msg_code) msg = codec.encode_timeseries_put(tsobj) resp_code, resp = self._request(msg, codec) @@ -327,7 +329,7 @@ def stream_mapred(self, inputs, query, timeout=None): def get_index(self, bucket, index, startkey, endkey=None, return_terms=None, max_results=None, continuation=None, timeout=None, term_regex=None): - # TODO RTS-842 NUKE THIS + # TODO FUTURE NUKE THIS MAPRED if not self.pb_indexes(): return self._get_index_mapred_emu(bucket, index, startkey, endkey) @@ -428,10 +430,9 @@ def get_search_schema(self, schema): return codec.decode_get_search_schema(resp) def search(self, index, query, **kwargs): - # TODO RTS-842 NUKE THIS + # TODO FUTURE NUKE THIS MAPRED if not self.pb_search(): return self._search_mapred_emu(index, query) - # TODO RTS-842 six.u() instead? if six.PY2 and isinstance(query, unicode): # noqa query = query.encode('utf8') msg_code = riak.pb.messages.MSG_CODE_SEARCH_QUERY_REQ @@ -527,7 +528,7 @@ def _request(self, msg, codec=None): resp_code, data = self._send_recv(msg_code, data) codec.maybe_riak_error(resp_code, data) codec.maybe_incorrect_code(resp_code, expect) - if resp_code == MSG_CODE_TS_TTB or \ + if resp_code == MSG_CODE_TS_TTB_MSG or \ resp_code in riak.pb.messages.MESSAGE_CLASSES: msg = codec.parse_msg(resp_code, data) else: diff --git a/riak/transports/transport.py b/riak/transports/transport.py index 6e5fee2c..bda18e35 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -295,7 +295,7 @@ def get_preflist(self, bucket, key): """ raise NotImplementedError - # TODO RTS-842 NUKE THIS + # TODO FUTURE NUKE THIS MAPRED def _search_mapred_emu(self, index, query): """ Emulates a search request via MapReduce. Used in the case @@ -321,7 +321,7 @@ def _search_mapred_emu(self, index, query): result['docs'].append({u'id': key}) return result - # TODO RTS-842 NUKE THIS + # TODO FUTURE NUKE THIS MAPRED def _get_index_mapred_emu(self, bucket, index, startkey, endkey=None): """ Emulates a secondary index request via MapReduce. 
Used in the diff --git a/riak/util.py b/riak/util.py index 4cbe6c0f..b9ca0e2b 100644 --- a/riak/util.py +++ b/riak/util.py @@ -1,28 +1,35 @@ from __future__ import print_function +import datetime +import decimal import sys import warnings from collections import Mapping from six import string_types, PY2 -import datetime epoch = datetime.datetime.utcfromtimestamp(0) def unix_time_millis(dt): td = dt - epoch - return int(td.total_seconds() * 1000.0) + tdms = ((td.days * 24 * 3600) + td.seconds) * 1000 + ms = td.microseconds // 1000 + return tdms + ms def datetime_from_unix_time_millis(ut): - return datetime.datetime.utcfromtimestamp(ut / 1000.0) + if isinstance(ut, float): + raise ValueError('unix timestamp must not be a float, \ + it must be total milliseconds since epoch as an integer') + utms = ut / 1000.0 + return datetime.datetime.utcfromtimestamp(utms) def is_timeseries_supported(v=None): if v is None: v = sys.version_info - return v < (3,) or v >= (3, 4, 4) + return v < (3,) or v[:3] >= (3, 4, 4) def quacks_like_dict(object): diff --git a/riak_pb b/riak_pb index 341269c1..d14b2c97 160000 --- a/riak_pb +++ b/riak_pb @@ -1 +1 @@ -Subproject commit 341269c19c75fa0557d5aa5fd5ac1f0dfe18cfae +Subproject commit d14b2c9758427f47106ef8064d39415b59076f72 From cabb3737815f8e8d20d801969fa3811a841ee3e9 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 29 Apr 2016 13:54:10 -0700 Subject: [PATCH 173/324] Add ts_convert_timestamp transport option --- riak/codecs/pbuf.py | 16 +++++++++++----- riak/codecs/ttb.py | 14 ++++++++++---- riak/tests/test_timeseries_pbuf.py | 5 +++-- riak/tests/test_timeseries_ttb.py | 8 +++++--- riak/transports/tcp/stream.py | 7 ++++++- riak/transports/tcp/transport.py | 10 +++++++--- 6 files changed, 42 insertions(+), 18 deletions(-) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index a976d1ff..db941a34 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -773,7 +773,8 @@ def encode_timeseries_query(self, table, query, interpolations=None): rc = riak.pb.messages.MSG_CODE_TS_QUERY_RESP return Msg(mc, req.SerializeToString(), rc) - def decode_timeseries(self, resp, tsobj): + def decode_timeseries(self, resp, tsobj, + convert_timestamp=False): """ Fills an TsObject with the appropriate data and metadata from a TsGetResp / TsQueryResp. 
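The timestamp plumbing in this hunk leans on the reworked helpers in `riak/util.py` shown above: `unix_time_millis()` now builds the millisecond count from the timedelta's integer days, seconds and microseconds instead of `total_seconds() * 1000.0`, and `datetime_from_unix_time_millis()` rejects float input. A rough round-trip sketch, reusing the values from `riak/tests/test_util.py`:

```
import datetime
from riak.util import unix_time_millis, datetime_from_unix_time_millis

# 144379690 seconds + 987 milliseconds past the epoch, i.e. the integer
# 144379690987 as it would be stored in Riak TS.
millis = 144379690987
dt = datetime_from_unix_time_millis(millis)
assert dt == datetime.datetime.utcfromtimestamp(144379690.987)
# unix_time_millis() uses integer arithmetic only, so the round trip is exact here.
assert unix_time_millis(dt) == millis
```
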
@@ -783,6 +784,8 @@ def decode_timeseries(self, resp, tsobj): riak.pb.riak_ts_pb2.TsGetResp :param tsobj: a TsObject :type tsobj: TsObject + :param convert_timestamp: Convert timestamps to datetime objects + :type tsobj: boolean """ if resp.columns is not None: col_names = [] @@ -798,7 +801,7 @@ def decode_timeseries(self, resp, tsobj): for row in resp.rows: tsobj.rows.append( self.decode_timeseries_row( - row, resp.columns)) + row, resp.columns, convert_timestamp)) def decode_timeseries_col_type(self, col_type): # NB: these match the atom names for column types @@ -816,7 +819,8 @@ def decode_timeseries_col_type(self, col_type): msg = 'could not decode column type: {}'.format(col_type) raise RiakError(msg) - def decode_timeseries_row(self, tsrow, tscols=None): + def decode_timeseries_row(self, tsrow, tscols=None, + convert_timestamp=False): """ Decodes a TsRow into a list @@ -850,8 +854,10 @@ def decode_timeseries_row(self, tsrow, tscols=None): if col and col.type != TsColumnType.Value('TIMESTAMP'): raise TypeError('expected TIMESTAMP column') else: - dt = datetime_from_unix_time_millis( - cell.timestamp_value) + dt = cell.timestamp_value + if convert_timestamp: + dt = datetime_from_unix_time_millis( + cell.timestamp_value) row.append(dt) elif cell.HasField('boolean_value'): if col and col.type != TsColumnType.Value('BOOLEAN'): diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index e8358326..3a9b1144 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -138,7 +138,8 @@ def encode_timeseries_query(self, table, query, interpolations=None): rc = MSG_CODE_TS_TTB_MSG return Msg(mc, encode(req), rc) - def decode_timeseries(self, resp_ttb, tsobj): + def decode_timeseries(self, resp_ttb, tsobj, + convert_timestamp=False): """ Fills an TsObject with the appropriate data and metadata from a TTB-encoded TsGetResp / TsQueryResp. @@ -147,6 +148,8 @@ def decode_timeseries(self, resp_ttb, tsobj): :type resp_ttb: TTB-encoded tsqueryrsp or tsgetresp :param tsobj: a TsObject :type tsobj: TsObject + :param convert_timestamp: Convert timestamps to datetime objects + :type tsobj: boolean """ if resp_ttb is None: return tsobj @@ -169,7 +172,8 @@ def decode_timeseries(self, resp_ttb, tsobj): tsobj.rows = [] for resp_row in resp_rows: tsobj.rows.append( - self.decode_timeseries_row(resp_row, resp_coltypes)) + self.decode_timeseries_row(resp_row, resp_coltypes, + convert_timestamp)) else: raise RiakError( "Expected 3-tuple in response, got: {}".format(resp_data)) @@ -181,7 +185,7 @@ def decode_timeseries_cols(self, cnames, ctypes): ctypes = [str(ctype) for ctype in ctypes] return TsColumns(cnames, ctypes) - def decode_timeseries_row(self, tsrow, tsct): + def decode_timeseries_row(self, tsrow, tsct, convert_timestamp=False): """ Decodes a TTB-encoded TsRow into a list @@ -189,6 +193,8 @@ def decode_timeseries_row(self, tsrow, tsct): :type tsrow: TTB dncoded row :param tsct: the TTB decoded column types (atoms). 
:type tsct: list + :param convert_timestamp: Convert timestamps to datetime objects + :type tsobj: boolean :rtype list """ row = [] @@ -198,7 +204,7 @@ def decode_timeseries_row(self, tsrow, tsct): elif isinstance(cell, list) and len(cell) == 0: row.append(None) else: - if tsct[i] == timestamp_a: + if convert_timestamp and tsct[i] == timestamp_a: row.append(datetime_from_unix_time_millis(cell)) else: row.append(cell) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 50c0818c..8ef75470 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -161,7 +161,7 @@ def test_decode_data_from_query(self): tsobj = TsObject(None, self.table) c = PbufCodec() - c.decode_timeseries(tqr, tsobj) + c.decode_timeseries(tqr, tsobj, True) self.assertEqual(len(tsobj.rows), len(self.rows)) self.assertEqual(len(tsobj.columns.names), len(tqr.columns)) @@ -197,7 +197,8 @@ def test_decode_data_from_query(self): @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, 'Timeseries not supported or RUN_TIMESERIES is 0') class TimeseriesPbufTests(IntegrationTestBase, unittest.TestCase): - client_options = {'transport_options': {'use_ttb': False}} + client_options = {'transport_options': + {'use_ttb': False, 'ts_convert_timestamp': True}} @classmethod def setUpClass(cls): diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 58b1322b..45c2289f 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -92,8 +92,9 @@ def test_decode_data_from_get(self): self.assertEqual(r[0], dr[0].encode('utf-8')) self.assertEqual(r[1], dr[1]) self.assertEqual(r[2], dr[2]) - dt = datetime_from_unix_time_millis(dr[3]) - self.assertEqual(r[3], dt) + # NB *not* decoding timestamps + # dt = datetime_from_unix_time_millis(dr[3]) + self.assertEqual(r[3], dr[3]) if i == 0: self.assertEqual(r[4], True) else: @@ -123,7 +124,8 @@ def test_encode_data_for_put(self): @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, 'Timeseries not supported or RUN_TIMESERIES is 0') class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): - client_options = {'transport_options': {'use_ttb': True}} + client_options = {'transport_options': + {'use_ttb': True, 'ts_convert_timestamp': True}} @classmethod def setUpClass(cls): diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index 3cf0e974..3ef29ce1 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -174,6 +174,10 @@ class PbufTsKeyStream(PbufStream, TtbCodec): _expect = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_RESP + def __init__(self, transport, codec, convert_timestamp=False): + super(PbufTsKeyStream, self).__init__(transport, codec) + self._convert_timestamp = convert_timestamp + def next(self): response = super(PbufTsKeyStream, self).next() @@ -182,7 +186,8 @@ def next(self): keys = [] for tsrow in response.keys: - keys.append(self.codec.decode_timeseries_row(tsrow)) + keys.append(self.codec.decode_timeseries_row(tsrow, + convert_timestamp=self._convert_timestamp)) return keys diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 7d99dcd1..dcb597f2 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -38,6 +38,8 @@ def __init__(self, self._pbuf_c = None self._ttb_c = None self._use_ttb = kwargs.get('use_ttb', True) + self._ts_convert_timestamp = \ + kwargs.get('ts_convert_timestamp', False) def _get_pbuf_codec(self): if 
not self._pbuf_c: @@ -147,7 +149,8 @@ def ts_get(self, table, key): msg = codec.encode_timeseries_keyreq(table, key) resp_code, resp = self._request(msg, codec) tsobj = TsObject(self._client, table) - codec.decode_timeseries(resp, tsobj) + codec.decode_timeseries(resp, tsobj, + self._ts_convert_timestamp) return tsobj def ts_put(self, tsobj): @@ -173,7 +176,8 @@ def ts_query(self, table, query, interpolations=None): msg = codec.encode_timeseries_query(table, query, interpolations) resp_code, resp = self._request(msg, codec) tsobj = TsObject(self._client, table) - codec.decode_timeseries(resp, tsobj) + codec.decode_timeseries(resp, tsobj, + self._ts_convert_timestamp) return tsobj def ts_stream_keys(self, table, timeout=None): @@ -185,7 +189,7 @@ def ts_stream_keys(self, table, timeout=None): codec = self._get_codec(msg_code) msg = codec.encode_timeseries_listkeysreq(table, timeout) self._send_msg(msg.msg_code, msg.data) - return PbufTsKeyStream(self, codec) + return PbufTsKeyStream(self, codec, self._ts_convert_timestamp) def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None, timeout=None): From 5e0a909ebdd5828667b57bc7b71f1eb6f4a0a259 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 30 Apr 2016 11:29:37 -0700 Subject: [PATCH 174/324] v2.5.0 RELNOTES --- RELNOTES.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/RELNOTES.md b/RELNOTES.md index b36303ea..7894dcba 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,5 +1,11 @@ # Riak Python Client Release Notes +## [2.5.0 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.0) + +* [Socket Enhancements](https://github.com/basho/riak-python-client/pull/453) - Resolves [#399](https://github.com/basho/riak-python-client/issues/399) +* [Add multi-put](https://github.com/basho/riak-python-client/pull/452) +* [Add support for term-to-binary encoding](https://github.com/basho/riak-python-client/pull/448) + ## 2.4.2 Patch Release - 2016-02-20 * [Fix SSL host name](https://github.com/basho/riak-python-client/pull/436) From 568648275d4b7e13ad03cb335c21432bd94e4a04 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 07:22:46 -0700 Subject: [PATCH 175/324] make grouchy linter happy --- riak/codecs/pbuf.py | 4 ++-- riak/codecs/ttb.py | 4 ++-- riak/tests/base.py | 14 +++----------- riak/tests/test_timeseries_pbuf.py | 2 +- riak/tests/test_timeseries_ttb.py | 5 ++--- riak/transports/tcp/stream.py | 2 +- riak/transports/tcp/transport.py | 4 ++-- riak/util.py | 1 - 8 files changed, 13 insertions(+), 23 deletions(-) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index db941a34..73914352 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -774,7 +774,7 @@ def encode_timeseries_query(self, table, query, interpolations=None): return Msg(mc, req.SerializeToString(), rc) def decode_timeseries(self, resp, tsobj, - convert_timestamp=False): + convert_timestamp=False): """ Fills an TsObject with the appropriate data and metadata from a TsGetResp / TsQueryResp. 
@@ -820,7 +820,7 @@ def decode_timeseries_col_type(self, col_type): raise RiakError(msg) def decode_timeseries_row(self, tsrow, tscols=None, - convert_timestamp=False): + convert_timestamp=False): """ Decodes a TsRow into a list diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 3a9b1144..1c0b3bfe 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -139,7 +139,7 @@ def encode_timeseries_query(self, table, query, interpolations=None): return Msg(mc, encode(req), rc) def decode_timeseries(self, resp_ttb, tsobj, - convert_timestamp=False): + convert_timestamp=False): """ Fills an TsObject with the appropriate data and metadata from a TTB-encoded TsGetResp / TsQueryResp. @@ -173,7 +173,7 @@ def decode_timeseries(self, resp_ttb, tsobj, for resp_row in resp_rows: tsobj.rows.append( self.decode_timeseries_row(resp_row, resp_coltypes, - convert_timestamp)) + convert_timestamp)) else: raise RiakError( "Expected 3-tuple in response, got: {}".format(resp_data)) diff --git a/riak/tests/base.py b/riak/tests/base.py index 4e9f5122..b2891b54 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -1,8 +1,6 @@ # -*- coding: utf-8 -*- import logging -import os import random -import sys from riak.client import RiakClient from riak.tests import HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS @@ -46,15 +44,9 @@ def create_client(cls, host=None, http_port=None, pb_port=None, kwargs.update(cls.client_options) logger = logging.getLogger() - logger.debug("RiakClient(protocol='%s', host='%s', " + - "pb_port='%d', http_port='%d', " + - "credentials='%s', kwargs='%s')", - protocol, - host, - pb_port, - http_port, - credentials, - kwargs) + logger.debug("RiakClient(protocol='%s', host='%s', pb_port='%d', " + "http_port='%d', credentials='%s', kwargs='%s')", + protocol, host, pb_port, http_port, credentials, kwargs) return RiakClient(protocol=protocol, host=host, diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 8ef75470..72f13e0f 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -198,7 +198,7 @@ def test_decode_data_from_query(self): 'Timeseries not supported or RUN_TIMESERIES is 0') class TimeseriesPbufTests(IntegrationTestBase, unittest.TestCase): client_options = {'transport_options': - {'use_ttb': False, 'ts_convert_timestamp': True}} + {'use_ttb': False, 'ts_convert_timestamp': True}} @classmethod def setUpClass(cls): diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 45c2289f..45ba6faf 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -12,8 +12,7 @@ from riak.ts_object import TsObject from riak.codecs.ttb import TtbCodec from riak.util import str_to_bytes, \ - unix_time_millis, datetime_from_unix_time_millis, \ - is_timeseries_supported + unix_time_millis, is_timeseries_supported from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase @@ -125,7 +124,7 @@ def test_encode_data_for_put(self): 'Timeseries not supported or RUN_TIMESERIES is 0') class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): client_options = {'transport_options': - {'use_ttb': True, 'ts_convert_timestamp': True}} + {'use_ttb': True, 'ts_convert_timestamp': True}} @classmethod def setUpClass(cls): diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index 3ef29ce1..1e913bda 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -187,7 +187,7 @@ def next(self): keys = [] for 
tsrow in response.keys: keys.append(self.codec.decode_timeseries_row(tsrow, - convert_timestamp=self._convert_timestamp)) + convert_timestamp=self._convert_timestamp)) return keys diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index d42d4dcc..466ac8d8 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -155,7 +155,7 @@ def ts_get(self, table, key): resp_code, resp = self._request(msg, codec) tsobj = TsObject(self._client, table) codec.decode_timeseries(resp, tsobj, - self._ts_convert_timestamp) + self._ts_convert_timestamp) return tsobj def ts_put(self, tsobj): @@ -182,7 +182,7 @@ def ts_query(self, table, query, interpolations=None): resp_code, resp = self._request(msg, codec) tsobj = TsObject(self._client, table) codec.decode_timeseries(resp, tsobj, - self._ts_convert_timestamp) + self._ts_convert_timestamp) return tsobj def ts_stream_keys(self, table, timeout=None): diff --git a/riak/util.py b/riak/util.py index b9ca0e2b..c422293e 100644 --- a/riak/util.py +++ b/riak/util.py @@ -1,7 +1,6 @@ from __future__ import print_function import datetime -import decimal import sys import warnings From 7b4b01aebf748aae4a59cb4b42ace508484d5885 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 08:59:23 -0700 Subject: [PATCH 176/324] clean up multi-line string --- riak/util.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/riak/util.py b/riak/util.py index c422293e..7c2ca66d 100644 --- a/riak/util.py +++ b/riak/util.py @@ -19,8 +19,9 @@ def unix_time_millis(dt): def datetime_from_unix_time_millis(ut): if isinstance(ut, float): - raise ValueError('unix timestamp must not be a float, \ - it must be total milliseconds since epoch as an integer') + raise ValueError('unix timestamp must not be a float, ' + 'it must be total milliseconds since ' + 'epoch as an integer') utms = ut / 1000.0 return datetime.datetime.utcfromtimestamp(utms) From d60ea31f2bb6f0c95f0a8324ee783f1150bb9257 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 09:12:23 -0700 Subject: [PATCH 177/324] Use buffering for HTTP headers. 
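Python 2's `httplib.HTTPConnection.getresponse()` accepts a `buffering` keyword, while the Python 3 `http.client` version takes no arguments and raises `TypeError`, hence the fallback below. A minimal sketch of the pattern, assuming an already-open `httplib`/`http.client` connection object:

```
def get_response(connection):
    # Prefer buffered reads where the runtime supports it (Python 2 httplib);
    # fall back cleanly on Python 3, whose getresponse() takes no keyword.
    try:
        return connection.getresponse(buffering=True)
    except TypeError:
        return connection.getresponse()
```
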
Resolves #277 --- riak/transports/http/connection.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/riak/transports/http/connection.py b/riak/transports/http/connection.py index d455813f..c6b3b9c5 100644 --- a/riak/transports/http/connection.py +++ b/riak/transports/http/connection.py @@ -31,7 +31,10 @@ def _request(self, method, uri, headers={}, body='', stream=False): try: self._connection.request(method, uri, body, headers) - response = self._connection.getresponse(buffering=True) + try: + response = self._connection.getresponse(buffering=True) + except TypeError: + response = self._connection.getresponse() if stream: # The caller is responsible for fully reading the From e5b675b2a11a2ef90735f8975eb43f8b4edc9aef Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 09:18:26 -0700 Subject: [PATCH 178/324] add contributor --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 529b7a20..8e7dca5f 100644 --- a/README.md +++ b/README.md @@ -157,6 +157,7 @@ Contributors * Daniel Lindsley * Daniel Néri * Daniel Reverri +* [David Basden](https://github.com/dbasden) * David Koblas * Dmitry Rozhkov * Eric Florenzano From 711428bbc1a4db1a83685e34bf7cb34bb5fe801a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 09:35:47 -0700 Subject: [PATCH 179/324] Port fix to new pbuf.py codec --- riak/codecs/pbuf.py | 2 +- riak/transports/pbc/codec.py | 773 ----------------------------------- 2 files changed, 1 insertion(+), 774 deletions(-) delete mode 100644 riak/transports/pbc/codec.py diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 73914352..1d96c831 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -322,7 +322,7 @@ def encode_bucket_props(self, props, msg): else: setattr(msg.props, prop, value) if 'repl' in props: - msg.props.repl = REPL_TO_PY[props['repl']] + msg.props.repl = REPL_TO_PB[props['repl']] return msg diff --git a/riak/transports/pbc/codec.py b/riak/transports/pbc/codec.py deleted file mode 100644 index 1727bd6b..00000000 --- a/riak/transports/pbc/codec.py +++ /dev/null @@ -1,773 +0,0 @@ -import datetime -import logging -import riak.pb -import riak.pb.riak_pb2 -import riak.pb.riak_dt_pb2 -import riak.pb.riak_kv_pb2 -import riak.pb.riak_ts_pb2 - -from riak import RiakError -from riak.content import RiakContent -from riak.util import decode_index_value, str_to_bytes, bytes_to_str -from riak.multidict import MultiDict -from riak.pb.riak_ts_pb2 import TsColumnType - -from six import string_types, PY2 - -epoch = datetime.datetime.utcfromtimestamp(0) - - -def _invert(d): - out = {} - for key in d: - value = d[key] - out[value] = key - return out - -REPL_TO_PY = { - riak.pb.riak_pb2.RpbBucketProps.FALSE: False, - riak.pb.riak_pb2.RpbBucketProps.TRUE: True, - riak.pb.riak_pb2.RpbBucketProps.REALTIME: 'realtime', - riak.pb.riak_pb2.RpbBucketProps.FULLSYNC: 'fullsync' -} - -REPL_TO_PB = _invert(REPL_TO_PY) - -RIAKC_RW_ONE = 4294967294 -RIAKC_RW_QUORUM = 4294967293 -RIAKC_RW_ALL = 4294967292 -RIAKC_RW_DEFAULT = 4294967291 - -QUORUM_TO_PB = {'default': RIAKC_RW_DEFAULT, - 'all': RIAKC_RW_ALL, - 'quorum': RIAKC_RW_QUORUM, - 'one': RIAKC_RW_ONE} - -QUORUM_TO_PY = _invert(QUORUM_TO_PB) - -NORMAL_PROPS = ['n_val', 'allow_mult', 'last_write_wins', 'old_vclock', - 'young_vclock', 'big_vclock', 'small_vclock', 'basic_quorum', - 'notfound_ok', 'search', 'backend', 'search_index', 'datatype', - 'write_once'] -COMMIT_HOOK_PROPS = ['precommit', 'postcommit'] -MODFUN_PROPS = ['chash_keyfun', 'linkfun'] 
-QUORUM_PROPS = ['r', 'pr', 'w', 'pw', 'dw', 'rw'] - -MAP_FIELD_TYPES = { - riak.pb.riak_dt_pb2.MapField.COUNTER: 'counter', - riak.pb.riak_dt_pb2.MapField.SET: 'set', - riak.pb.riak_dt_pb2.MapField.REGISTER: 'register', - riak.pb.riak_dt_pb2.MapField.FLAG: 'flag', - riak.pb.riak_dt_pb2.MapField.MAP: 'map', - 'counter': riak.pb.riak_dt_pb2.MapField.COUNTER, - 'set': riak.pb.riak_dt_pb2.MapField.SET, - 'register': riak.pb.riak_dt_pb2.MapField.REGISTER, - 'flag': riak.pb.riak_dt_pb2.MapField.FLAG, - 'map': riak.pb.riak_dt_pb2.MapField.MAP -} - -DT_FETCH_TYPES = { - riak.pb.riak_dt_pb2.DtFetchResp.COUNTER: 'counter', - riak.pb.riak_dt_pb2.DtFetchResp.SET: 'set', - riak.pb.riak_dt_pb2.DtFetchResp.MAP: 'map' -} - - -class RiakPbcCodec(object): - """ - Protobuffs Encoding and decoding methods for RiakPbcTransport. - """ - - def __init__(self, **unused_args): - if riak.pb is None: - raise NotImplementedError("this transport is not available") - super(RiakPbcCodec, self).__init__(**unused_args) - - def _unix_time_millis(self, dt): - td = dt - epoch - return int(td.total_seconds() * 1000.0) - - def _datetime_from_unix_time_millis(self, ut): - return datetime.datetime.utcfromtimestamp(ut / 1000.0) - - def _encode_quorum(self, rw): - """ - Converts a symbolic quorum value into its on-the-wire - equivalent. - - :param rw: the quorum - :type rw: string, integer - :rtype: integer - """ - if rw in QUORUM_TO_PB: - return QUORUM_TO_PB[rw] - elif type(rw) is int and rw >= 0: - return rw - else: - return None - - def _decode_quorum(self, rw): - """ - Converts a protobuf quorum value to a symbolic value if - necessary. - - :param rw: the quorum - :type rw: int - :rtype int or string - """ - if rw in QUORUM_TO_PY: - return QUORUM_TO_PY[rw] - else: - return rw - - def _decode_contents(self, contents, obj): - """ - Decodes the list of siblings from the protobuf representation - into the object. - - :param contents: a list of RpbContent messages - :type contents: list - :param obj: a RiakObject - :type obj: RiakObject - :rtype RiakObject - """ - obj.siblings = [self._decode_content(c, RiakContent(obj)) - for c in contents] - # Invoke sibling-resolution logic - if len(obj.siblings) > 1 and obj.resolver is not None: - obj.resolver(obj) - return obj - - def _decode_content(self, rpb_content, sibling): - """ - Decodes a single sibling from the protobuf representation into - a RiakObject. 
- - :param rpb_content: a single RpbContent message - :type rpb_content: riak.pb.riak_pb2.RpbContent - :param sibling: a RiakContent sibling container - :type sibling: RiakContent - :rtype: RiakContent - """ - - if rpb_content.HasField("deleted") and rpb_content.deleted: - sibling.exists = False - else: - sibling.exists = True - if rpb_content.HasField("content_type"): - sibling.content_type = bytes_to_str(rpb_content.content_type) - if rpb_content.HasField("charset"): - sibling.charset = bytes_to_str(rpb_content.charset) - if rpb_content.HasField("content_encoding"): - sibling.content_encoding = \ - bytes_to_str(rpb_content.content_encoding) - if rpb_content.HasField("vtag"): - sibling.etag = bytes_to_str(rpb_content.vtag) - - sibling.links = [self._decode_link(link) - for link in rpb_content.links] - if rpb_content.HasField("last_mod"): - sibling.last_modified = float(rpb_content.last_mod) - if rpb_content.HasField("last_mod_usecs"): - sibling.last_modified += rpb_content.last_mod_usecs / 1000000.0 - - sibling.usermeta = dict([(bytes_to_str(usermd.key), - bytes_to_str(usermd.value)) - for usermd in rpb_content.usermeta]) - sibling.indexes = set([(bytes_to_str(index.key), - decode_index_value(index.key, index.value)) - for index in rpb_content.indexes]) - sibling.encoded_data = rpb_content.value - - return sibling - - def _encode_content(self, robj, rpb_content): - """ - Fills an RpbContent message with the appropriate data and - metadata from a RiakObject. - - :param robj: a RiakObject - :type robj: RiakObject - :param rpb_content: the protobuf message to fill - :type rpb_content: riak.pb.riak_pb2.RpbContent - """ - if robj.content_type: - rpb_content.content_type = str_to_bytes(robj.content_type) - if robj.charset: - rpb_content.charset = str_to_bytes(robj.charset) - if robj.content_encoding: - rpb_content.content_encoding = str_to_bytes(robj.content_encoding) - for uk in robj.usermeta: - pair = rpb_content.usermeta.add() - pair.key = str_to_bytes(uk) - pair.value = str_to_bytes(robj.usermeta[uk]) - for link in robj.links: - pb_link = rpb_content.links.add() - try: - bucket, key, tag = link - except ValueError: - raise RiakError("Invalid link tuple %s" % link) - - pb_link.bucket = str_to_bytes(bucket) - pb_link.key = str_to_bytes(key) - if tag: - pb_link.tag = str_to_bytes(tag) - else: - pb_link.tag = str_to_bytes('') - - for field, value in robj.indexes: - pair = rpb_content.indexes.add() - pair.key = str_to_bytes(field) - pair.value = str_to_bytes(str(value)) - - # Python 2.x data is stored in a string - if PY2: - rpb_content.value = str(robj.encoded_data) - else: - rpb_content.value = robj.encoded_data - - def _decode_link(self, link): - """ - Decodes an RpbLink message into a tuple - - :param link: an RpbLink message - :type link: riak.pb.riak_pb2.RpbLink - :rtype tuple - """ - - if link.HasField("bucket"): - bucket = bytes_to_str(link.bucket) - else: - bucket = None - if link.HasField("key"): - key = bytes_to_str(link.key) - else: - key = None - if link.HasField("tag"): - tag = bytes_to_str(link.tag) - else: - tag = None - - return (bucket, key, tag) - - def _decode_index_value(self, index, value): - """ - Decodes a secondary index value into the correct Python type. 
- :param index: the name of the index - :type index: str - :param value: the value of the index entry - :type value: str - :rtype str or int - """ - if index.endswith("_int"): - return int(value) - else: - return bytes_to_str(value) - - def _encode_bucket_props(self, props, msg): - """ - Encodes a dict of bucket properties into the protobuf message. - - :param props: bucket properties - :type props: dict - :param msg: the protobuf message to fill - :type msg: riak.pb.riak_pb2.RpbSetBucketReq - """ - for prop in NORMAL_PROPS: - if prop in props and props[prop] is not None: - if isinstance(props[prop], string_types): - setattr(msg.props, prop, str_to_bytes(props[prop])) - else: - setattr(msg.props, prop, props[prop]) - for prop in COMMIT_HOOK_PROPS: - if prop in props: - setattr(msg.props, 'has_' + prop, True) - self._encode_hooklist(props[prop], getattr(msg.props, prop)) - for prop in MODFUN_PROPS: - if prop in props and props[prop] is not None: - self._encode_modfun(props[prop], getattr(msg.props, prop)) - for prop in QUORUM_PROPS: - if prop in props and props[prop] not in (None, 'default'): - value = self._encode_quorum(props[prop]) - if value is not None: - if isinstance(value, string_types): - setattr(msg.props, prop, str_to_bytes(value)) - else: - setattr(msg.props, prop, value) - if 'repl' in props: - msg.props.repl = REPL_TO_PB[props['repl']] - - return msg - - def _decode_bucket_props(self, msg): - """ - Decodes the protobuf bucket properties message into a dict. - - :param msg: the protobuf message to decode - :type msg: riak.pb.riak_pb2.RpbBucketProps - :rtype dict - """ - props = {} - - for prop in NORMAL_PROPS: - if msg.HasField(prop): - props[prop] = getattr(msg, prop) - if isinstance(props[prop], bytes): - props[prop] = bytes_to_str(props[prop]) - for prop in COMMIT_HOOK_PROPS: - if getattr(msg, 'has_' + prop): - props[prop] = self._decode_hooklist(getattr(msg, prop)) - for prop in MODFUN_PROPS: - if msg.HasField(prop): - props[prop] = self._decode_modfun(getattr(msg, prop)) - for prop in QUORUM_PROPS: - if msg.HasField(prop): - props[prop] = self._decode_quorum(getattr(msg, prop)) - if msg.HasField('repl'): - props['repl'] = REPL_TO_PY[msg.repl] - - return props - - def _decode_modfun(self, modfun): - """ - Decodes a protobuf modfun pair into a dict with 'mod' and - 'fun' keys. Used in bucket properties. - - :param modfun: the protobuf message to decode - :type modfun: riak.pb.riak_pb2.RpbModFun - :rtype dict - """ - return {'mod': bytes_to_str(modfun.module), - 'fun': bytes_to_str(modfun.function)} - - def _encode_modfun(self, props, msg=None): - """ - Encodes a dict with 'mod' and 'fun' keys into a protobuf - modfun pair. Used in bucket properties. - - :param props: the module/function pair - :type props: dict - :param msg: the protobuf message to fill - :type msg: riak.pb.riak_pb2.RpbModFun - :rtype riak.pb.riak_pb2.RpbModFun - """ - if msg is None: - msg = riak.pb.riak_pb2.RpbModFun() - msg.module = str_to_bytes(props['mod']) - msg.function = str_to_bytes(props['fun']) - return msg - - def _decode_hooklist(self, hooklist): - """ - Decodes a list of protobuf commit hooks into their python - equivalents. Used in bucket properties. - - :param hooklist: a list of protobuf commit hooks - :type hooklist: list - :rtype list - """ - return [self._decode_hook(hook) for hook in hooklist] - - def _encode_hooklist(self, hooklist, msg): - """ - Encodes a list of commit hooks into their protobuf equivalent. - Used in bucket properties. 
- - :param hooklist: a list of commit hooks - :type hooklist: list - :param msg: a protobuf field that is a list of commit hooks - """ - for hook in hooklist: - pbhook = msg.add() - self._encode_hook(hook, pbhook) - - def _decode_hook(self, hook): - """ - Decodes a protobuf commit hook message into a dict. Used in - bucket properties. - - :param hook: the hook to decode - :type hook: riak.pb.riak_pb2.RpbCommitHook - :rtype dict - """ - if hook.HasField('modfun'): - return self._decode_modfun(hook.modfun) - else: - return {'name': bytes_to_str(hook.name)} - - def _encode_hook(self, hook, msg): - """ - Encodes a commit hook dict into the protobuf message. Used in - bucket properties. - - :param hook: the hook to encode - :type hook: dict - :param msg: the protobuf message to fill - :type msg: riak.pb.riak_pb2.RpbCommitHook - :rtype riak.pb.riak_pb2.RpbCommitHook - """ - if 'name' in hook: - msg.name = str_to_bytes(hook['name']) - else: - self._encode_modfun(hook, msg.modfun) - return msg - - def _encode_index_req(self, bucket, index, startkey, endkey=None, - return_terms=None, max_results=None, - continuation=None, timeout=None, term_regex=None): - """ - Encodes a secondary index request into the protobuf message. - - :param bucket: the bucket whose index to query - :type bucket: string - :param index: the index to query - :type index: string - :param startkey: the value or beginning of the range - :type startkey: integer, string - :param endkey: the end of the range - :type endkey: integer, string - :param return_terms: whether to return the index term with the key - :type return_terms: bool - :param max_results: the maximum number of results to return (page size) - :type max_results: integer - :param continuation: the opaque continuation returned from a - previous paginated request - :type continuation: string - :param timeout: a timeout value in milliseconds, or 'infinity' - :type timeout: int - :param term_regex: a regular expression used to filter index terms - :type term_regex: string - :rtype riak.pb.riak_kv_pb2.RpbIndexReq - """ - req = riak.pb.riak_kv_pb2.RpbIndexReq( - bucket=str_to_bytes(bucket.name), - index=str_to_bytes(index)) - self._add_bucket_type(req, bucket.bucket_type) - if endkey is not None: - req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.range - req.range_min = str_to_bytes(str(startkey)) - req.range_max = str_to_bytes(str(endkey)) - else: - req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.eq - req.key = str_to_bytes(str(startkey)) - if return_terms is not None: - req.return_terms = return_terms - if max_results: - req.max_results = max_results - if continuation: - req.continuation = str_to_bytes(continuation) - if timeout: - if timeout == 'infinity': - req.timeout = 0 - else: - req.timeout = timeout - if term_regex: - req.term_regex = str_to_bytes(term_regex) - return req - - def _decode_search_index(self, index): - """ - Fills an RpbYokozunaIndex message with the appropriate data. 
- - :param index: a yz index message - :type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex - :rtype dict - """ - result = {} - result['name'] = bytes_to_str(index.name) - if index.HasField('schema'): - result['schema'] = bytes_to_str(index.schema) - if index.HasField('n_val'): - result['n_val'] = index.n_val - return result - - def _add_bucket_type(self, req, bucket_type): - if bucket_type and not bucket_type.is_default(): - if not self.bucket_types(): - raise NotImplementedError( - 'Server does not support bucket-types') - req.type = str_to_bytes(bucket_type.name) - - def _encode_search_query(self, req, params): - if 'rows' in params: - req.rows = params['rows'] - if 'start' in params: - req.start = params['start'] - if 'sort' in params: - req.sort = str_to_bytes(params['sort']) - if 'filter' in params: - req.filter = str_to_bytes(params['filter']) - if 'df' in params: - req.df = str_to_bytes(params['df']) - if 'op' in params: - req.op = str_to_bytes(params['op']) - if 'q.op' in params: - req.op = params['q.op'] - if 'fl' in params: - if isinstance(params['fl'], list): - req.fl.extend(params['fl']) - else: - req.fl.append(params['fl']) - if 'presort' in params: - req.presort = params['presort'] - - def _decode_search_doc(self, doc): - resultdoc = MultiDict() - for pair in doc.fields: - if PY2: - ukey = unicode(pair.key, 'utf-8') # noqa - uval = unicode(pair.value, 'utf-8') # noqa - else: - ukey = bytes_to_str(pair.key) - uval = bytes_to_str(pair.value) - resultdoc.add(ukey, uval) - return resultdoc.mixed() - - def _decode_dt_fetch(self, resp): - dtype = DT_FETCH_TYPES.get(resp.type) - if dtype is None: - raise ValueError("Unknown datatype on wire: {}".format(resp.type)) - - value = self._decode_dt_value(dtype, resp.value) - - if resp.HasField('context'): - context = resp.context[:] - else: - context = None - - return dtype, value, context - - def _decode_dt_value(self, dtype, msg): - if dtype == 'counter': - return msg.counter_value - elif dtype == 'set': - return self._decode_set_value(msg.set_value) - elif dtype == 'map': - return self._decode_map_value(msg.map_value) - - def _encode_dt_options(self, req, params): - for q in ['r', 'pr', 'w', 'dw', 'pw']: - if q in params and params[q] is not None: - setattr(req, q, self._encode_quorum(params[q])) - - for o in ['basic_quorum', 'notfound_ok', 'timeout', 'return_body', - 'include_context']: - if o in params and params[o] is not None: - setattr(req, o, params[o]) - - def _decode_map_value(self, entries): - out = {} - for entry in entries: - name = bytes_to_str(entry.field.name[:]) - dtype = MAP_FIELD_TYPES[entry.field.type] - if dtype == 'counter': - value = entry.counter_value - elif dtype == 'set': - value = self._decode_set_value(entry.set_value) - elif dtype == 'register': - value = bytes_to_str(entry.register_value[:]) - elif dtype == 'flag': - value = entry.flag_value - elif dtype == 'map': - value = self._decode_map_value(entry.map_value) - out[(name, dtype)] = value - return out - - def _decode_set_value(self, set_value): - return [bytes_to_str(string[:]) for string in set_value] - - def _encode_dt_op(self, dtype, req, op): - if dtype == 'counter': - req.op.counter_op.increment = op[1] - elif dtype == 'set': - self._encode_set_op(req.op, op) - elif dtype == 'map': - self._encode_map_op(req.op.map_op, op) - else: - raise TypeError("Cannot send operation on datatype {!r}". 
- format(dtype)) - - def _encode_set_op(self, msg, op): - if 'adds' in op: - msg.set_op.adds.extend(str_to_bytes(op['adds'])) - if 'removes' in op: - msg.set_op.removes.extend(str_to_bytes(op['removes'])) - - def _encode_map_op(self, msg, ops): - for op in ops: - name, dtype = op[1] - ftype = MAP_FIELD_TYPES[dtype] - if op[0] == 'add': - add = msg.adds.add() - add.name = str_to_bytes(name) - add.type = ftype - elif op[0] == 'remove': - remove = msg.removes.add() - remove.name = str_to_bytes(name) - remove.type = ftype - elif op[0] == 'update': - update = msg.updates.add() - update.field.name = str_to_bytes(name) - update.field.type = ftype - self._encode_map_update(dtype, update, op[2]) - - def _encode_map_update(self, dtype, msg, op): - if dtype == 'counter': - # ('increment', some_int) - msg.counter_op.increment = op[1] - elif dtype == 'set': - self._encode_set_op(msg, op) - elif dtype == 'map': - self._encode_map_op(msg.map_op, op) - elif dtype == 'register': - # ('assign', some_str) - msg.register_op = str_to_bytes(op[1]) - elif dtype == 'flag': - if op == 'enable': - msg.flag_op = riak.pb.riak_dt_pb2.MapUpdate.ENABLE - else: - msg.flag_op = riak.pb.riak_dt_pb2.MapUpdate.DISABLE - - def _encode_to_ts_cell(self, cell, ts_cell): - if cell is not None: - if isinstance(cell, datetime.datetime): - ts_cell.timestamp_value = self._unix_time_millis(cell) - elif isinstance(cell, bool): - ts_cell.boolean_value = cell - elif isinstance(cell, string_types): - logging.debug("cell -> str: '%s'", cell) - ts_cell.varchar_value = str_to_bytes(cell) - elif (isinstance(cell, int) or - (PY2 and isinstance(cell, long))): # noqa - logging.debug("cell -> int/long: '%s'", cell) - ts_cell.sint64_value = cell - elif isinstance(cell, float): - ts_cell.double_value = cell - else: - t = type(cell) - raise RiakError("can't serialize type '{}', value '{}'" - .format(t, cell)) - - def _encode_timeseries_keyreq(self, table, key, req): - key_vals = None - if isinstance(key, list): - key_vals = key - else: - raise ValueError("key must be a list") - - req.table = str_to_bytes(table.name) - for cell in key_vals: - ts_cell = req.key.add() - self._encode_to_ts_cell(cell, ts_cell) - - def _encode_timeseries_listkeysreq(self, table, req, timeout=None): - req.table = str_to_bytes(table.name) - if timeout: - req.timeout = timeout - - def _encode_timeseries_put(self, tsobj, req): - """ - Fills an TsPutReq message with the appropriate data and - metadata from a TsObject. - - :param tsobj: a TsObject - :type tsobj: TsObject - :param req: the protobuf message to fill - :type req: riak.pb.riak_ts_pb2.TsPutReq - """ - req.table = str_to_bytes(tsobj.table.name) - - if tsobj.columns: - raise NotImplementedError("columns are not implemented yet") - - if tsobj.rows and isinstance(tsobj.rows, list): - for row in tsobj.rows: - tsr = req.rows.add() # NB: type TsRow - if not isinstance(row, list): - raise ValueError("TsObject row must be a list of values") - for cell in row: - tsc = tsr.cells.add() # NB: type TsCell - self._encode_to_ts_cell(cell, tsc) - else: - raise RiakError("TsObject requires a list of rows") - - def _decode_timeseries(self, resp, tsobj): - """ - Fills an TsObject with the appropriate data and - metadata from a TsQueryResp. 
- - :param resp: the protobuf message from which to process data - :type resp: riak.pb.TsQueryRsp or riak.pb.riak_ts_pb2.TsGetResp - :param tsobj: a TsObject - :type tsobj: TsObject - """ - if tsobj.columns is not None: - for col in resp.columns: - col_name = bytes_to_str(col.name) - col_type = col.type - col = (col_name, col_type) - tsobj.columns.append(col) - - for row in resp.rows: - tsobj.rows.append( - self._decode_timeseries_row(row, resp.columns)) - - def _decode_timeseries_row(self, tsrow, tscols=None): - """ - Decodes a TsRow into a list - - :param tsrow: the protobuf TsRow to decode. - :type tsrow: riak.pb.riak_ts_pb2.TsRow - :param tscols: the protobuf TsColumn data to help decode. - :type tscols: list - :rtype list - """ - row = [] - for i, cell in enumerate(tsrow.cells): - col = None - if tscols is not None: - col = tscols[i] - if cell.HasField('varchar_value'): - if col and col.type != TsColumnType.Value('VARCHAR'): - raise TypeError('expected VARCHAR column') - else: - row.append(bytes_to_str(cell.varchar_value)) - elif cell.HasField('sint64_value'): - if col and col.type != TsColumnType.Value('SINT64'): - raise TypeError('expected SINT64 column') - else: - row.append(cell.sint64_value) - elif cell.HasField('double_value'): - if col and col.type != TsColumnType.Value('DOUBLE'): - raise TypeError('expected DOUBLE column') - else: - row.append(cell.double_value) - elif cell.HasField('timestamp_value'): - if col and col.type != TsColumnType.Value('TIMESTAMP'): - raise TypeError('expected TIMESTAMP column') - else: - dt = self._datetime_from_unix_time_millis( - cell.timestamp_value) - row.append(dt) - elif cell.HasField('boolean_value'): - if col and col.type != TsColumnType.Value('BOOLEAN'): - raise TypeError('expected BOOLEAN column') - else: - row.append(cell.boolean_value) - else: - row.append(None) - return row - - def _decode_preflist(self, item): - """ - Decodes a preflist response - - :param preflist: a bucket/key preflist - :type preflist: list of - riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem - :rtype dict - """ - result = {'partition': item.partition, - 'node': bytes_to_str(item.node), - 'primary': item. 
primary} - return result From 0df94272350538ffb699e6dc56b11de5f2764384 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 09:36:38 -0700 Subject: [PATCH 180/324] add contributor --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 529b7a20..91d456cd 100644 --- a/README.md +++ b/README.md @@ -192,4 +192,5 @@ Contributors * Soren Hansen * Sreejith Kesavan * Timothée Peignier +* [Vitaly](https://github.com/lamp0chka) * William Kral From 4694597bcdd2efdf356d8158ab28cbac92697fb2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 09:37:40 -0700 Subject: [PATCH 181/324] full name for Vitaly --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 91d456cd..edd9b58a 100644 --- a/README.md +++ b/README.md @@ -192,5 +192,5 @@ Contributors * Soren Hansen * Sreejith Kesavan * Timothée Peignier -* [Vitaly](https://github.com/lamp0chka) +* [Vitaly Shestovskiy](https://github.com/lamp0chka) * William Kral From db405194cde86e1a5fae833176e0ee4325ec96f5 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 10:48:33 -0700 Subject: [PATCH 182/324] Add scripts to publish to GH releases --- Makefile | 11 +++ build/publish | 193 ++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 204 insertions(+) create mode 100755 build/publish diff --git a/Makefile b/Makefile index efda2036..17bc1145 100644 --- a/Makefile +++ b/Makefile @@ -14,6 +14,9 @@ pb_compile: pb_clean @python setup.py build_messages release_sdist: +ifeq ($(VERSION),) + $(error VERSION must be set to build a release and deploy this package) +endif ifeq ($(PANDOC_VERSION),) $(error The pandoc command is required to correctly convert README.md to rst format) endif @@ -21,8 +24,16 @@ ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif @python -c 'import pypandoc' + @echo "==> Python tagging version $(VERSION)" + # NB: Python client version strings do NOT start with 'v'. Le Sigh. 
+ # validate VERSION and allow pre-releases + @bash ./build/publish $(VERSION) validate + @git tag "$(VERSION)" + @git push --tags + @git push @echo "==> Python (sdist release)" @python setup.py sdist upload -s -i $(RELEASE_GPG_KEYNAME) + @bash ./build/publish $(VERSION) release: release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) diff --git a/build/publish b/build/publish new file mode 100755 index 00000000..b7223f54 --- /dev/null +++ b/build/publish @@ -0,0 +1,193 @@ +#!/usr/bin/env bash + +set -o errexit +set -o nounset + +declare -r debug='false' +declare -r tmpfile_file="/tmp/publish.$$.tmpfiles" + +function make_temp_file +{ + local template="${1:-publish.$$.XXXXXX}" + if [[ $template != *XXXXXX ]] + then + template="$template.XXXXXX" + fi + local tmp=$(mktemp -t "$template") + echo "$tmp" >> "$tmpfile_file" + echo "$tmp" +} + +function now +{ + date '+%Y-%m-%d %H:%M:%S' +} + +function pwarn +{ + echo "$(now) [warning]: $@" 1>&2 +} + +function perr +{ + echo "$(now) [error]: $@" 1>&2 +} + +function pinfo +{ + echo "$(now) [info]: $@" +} + +function pdebug +{ + if [[ $debug == 'true' ]] + then + echo "$(now) [debug]: $@" + fi +} + +function errexit +{ + perr "$@" + exit 1 +} + +function onexit +{ + if [[ -f $tmpfile_file ]] + then + for tmpfile in $(< $tmpfile_file) + do + pdebug "removing temp file $tmpfile" + rm -f $tmpfile + done + rm -f $tmpfile_file + fi +} + +function gh_publish { + if [[ -z $version_string ]] + then + errexit 'gh_publish: version_string required' + fi + + # NB: no 'v' here at start of version_string + local -r package_name="riak-$version_string.tar.gz" + local -r package="./dist/riak-$version_string.tar.gz" + if [[ ! -s $package ]] + then + errexit "gh_publish: expected to find $package in dist/" + fi + + # NB: we use a X.Y.Z tag + local -r release_json="{ + \"tag_name\" : \"$version_string\", + \"name\" : \"Riak Python Client $version_string\", + \"body\" : \"riak-python-client $version_string\nhttps://github.com/basho/riak-python-client/blob/master/RELNOTES.md\", + \"draft\" : false, + \"prerelease\" : $is_prerelease + }" + + pdebug "Release JSON: $release_json" + + local curl_content_file="$(make_temp_file)" + local curl_stdout_file="$(make_temp_file)" + local curl_stderr_file="$(make_temp_file)" + + curl -4so $curl_content_file -w '%{http_code}' -XPOST \ + -H "Authorization: token $(< $github_api_key_file)" -H 'Content-type: application/json' \ + 'https://api.github.com/repos/basho/riak-python-client/releases' -d "$release_json" 1> "$curl_stdout_file" 2> "$curl_stderr_file" + if [[ $? != 0 ]] + then + errexit "curl error exited with code: '$?' see '$curl_stderr_file'" + fi + + local -i curl_rslt="$(< $curl_stdout_file)" + if (( curl_rslt == 422 )) + then + pwarn "Release in GitHub already exists! (http code: '$curl_rslt')" + curl -4so $curl_content_file -w '%{http_code}' -XGET \ + -H "Authorization: token $(< $github_api_key_file)" -H 'Content-type: application/json' \ + "https://api.github.com/repos/basho/riak-python-client/releases/tags/$version_string" 1> "$curl_stdout_file" 2> "$curl_stderr_file" + if [[ $? != 0 ]] + then + errexit "curl error exited with code: '$?' see '$curl_stderr_file'" + fi + elif (( curl_rslt != 201 )) + then + errexit "Creating release in GitHub failed with http code '$curl_rslt'" + fi + + if [[ ! 
-s $curl_content_file ]] + then + errexit 'no release info to parse for asset uploads' + fi + + # "upload_url": "https://uploads.github.com/repos/basho/riak-python-client/releases/1115734/assets{?name,label}" + # https://uploads.github.com/repos/basho/riak-python-client/releases/1115734/assets{?name,label} + local -r upload_url_with_name=$(perl -ne 'print qq($1\n) and exit if /"upload_url"[ :]+"(https:\/\/[^"]+)"/' "$curl_content_file") + local -r upload_url="${upload_url_with_name/\{?name,label\}/?name=$package_name}" + + local curl_content_file="$(make_temp_file)" + local curl_stdout_file="$(make_temp_file)" + local curl_stderr_file="$(make_temp_file)" + + curl -4so $curl_content_file -w '%{http_code}' -XPOST \ + -H "Authorization: token $(< $github_api_key_file)" -H 'Content-type: application/x-compressed, application/x-tar' \ + "$upload_url" --data-binary "@$package" 1> "$curl_stdout_file" 2> "$curl_stderr_file" + if [[ $? != 0 ]] + then + errexit "curl error exited with code: '$?' see '$curl_stderr_file'" + fi + + curl_rslt="$(< $curl_stdout_file)" + if (( curl_rslt != 201 )) + then + errexit "Uploading release assets to GitHub failed with http code '$curl_rslt'" + fi +} + +trap onexit EXIT + +declare -r version_string="${1:-unknown}" + +if [[ ! $version_string =~ ^[0-9].[0-9].[0-9](-[a-z]+[0-9]+)?$ ]] +then + errexit 'first argument must be valid version string in X.Y.Z format' +fi + +is_prerelease='false' +if [[ $version_string =~ ^[0-9].[0-9].[0-9]-[a-z]+[0-9]+$ ]] +then + pinfo "publishing pre-release version: $version_string" + is_prerelease='true' +else + pinfo "publishing version $version_string" +fi + +declare -r current_branch="$(git rev-parse --abbrev-ref HEAD)" + +if [[ $debug == 'false' && $is_prerelease == 'false' && $current_branch != 'master' ]] +then + errexit 'publish must be run on master branch' +fi + +declare -r github_api_key_file="$HOME/.ghapi" +if [[ ! -s $github_api_key_file ]] +then + errexit "please save your GitHub API token in $github_api_key_file" +fi + +# Validate commands +if ! hash curl 2>/dev/null +then + errexit "'curl' must be in your PATH" +fi + +validate=${2:-''} +if [[ $validate == 'validate' ]] +then + exit 0 +fi + +gh_publish From 80daa45601a0c7be5b3eee0de479f655fe460696 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 11:13:49 -0700 Subject: [PATCH 183/324] Add GPG to tagging --- Makefile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 17bc1145..258de49a 100644 --- a/Makefile +++ b/Makefile @@ -28,9 +28,8 @@ endif # NB: Python client version strings do NOT start with 'v'. Le Sigh. # validate VERSION and allow pre-releases @bash ./build/publish $(VERSION) validate - @git tag "$(VERSION)" + @git tag --sign -a "$(VERSION)" -m "riak-python-client $(VERSION)" --local-user "$(RELEASE_GPG_KEYNAME)" @git push --tags - @git push @echo "==> Python (sdist release)" @python setup.py sdist upload -s -i $(RELEASE_GPG_KEYNAME) @bash ./build/publish $(VERSION) From 3e99362da1d20434046ad4fabf50ce263bf7b4cc Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 15:50:25 -0700 Subject: [PATCH 184/324] Add Riak TS compatibility note. 
--- RELNOTES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RELNOTES.md b/RELNOTES.md index 7894dcba..b444dd24 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -4,7 +4,7 @@ * [Socket Enhancements](https://github.com/basho/riak-python-client/pull/453) - Resolves [#399](https://github.com/basho/riak-python-client/issues/399) * [Add multi-put](https://github.com/basho/riak-python-client/pull/452) -* [Add support for term-to-binary encoding](https://github.com/basho/riak-python-client/pull/448) +* [Add support for term-to-binary encoding](https://github.com/basho/riak-python-client/pull/448) *Note:* This requires at least version `1.3.0` of Riak TS. ## 2.4.2 Patch Release - 2016-02-20 From 8dad0cbac60ab5f0de95d9a697ea21bd92147493 Mon Sep 17 00:00:00 2001 From: Dan Root Date: Mon, 2 May 2016 18:54:41 -0700 Subject: [PATCH 185/324] Don't use six.PY2 in setup.py MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit six is not guaranteed to be installed before running pip/setuptools. The client fails to install on a machine or fresh virtualenv without six with errors like the following: ``` ; virtualenv ve ; ./ve/bin/pip install riak Collecting riak Downloading riak-2.5.0.tar.gz (193kB) 100% |████████████████████████████████| 196kB 9.2MB/s Complete output from command python setup.py egg_info: Traceback (most recent call last): File "", line 20, in File "/tmp/pip-build-ReAmHf/riak/setup.py", line 4, in import six ImportError: No module named six ``` This snuck past tox testing, because six is listed as an explict dependency in tox.ini. --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 6338efd8..54ae8418 100755 --- a/setup.py +++ b/setup.py @@ -1,7 +1,6 @@ #!/usr/bin/env python import codecs -import six import sys from setuptools import setup, find_packages @@ -15,7 +14,7 @@ install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") -if six.PY2: +if sys.version_info[0:3] <= (3, 0, 0): install_requires.append('protobuf >=2.4.1, <2.7.0') requires.append('protobuf(>=2.4.1, <2.7.0)') else: From 052dc73c38714a521229c6b69470f3479deadf68 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 20:44:04 -0700 Subject: [PATCH 186/324] Update basho-erlastic dependency --- setup.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/setup.py b/setup.py index 54ae8418..f130ff7a 100755 --- a/setup.py +++ b/setup.py @@ -7,14 +7,14 @@ from version import get_version from commands import setup_timeseries, build_messages -install_requires = ['six >= 1.8.0', 'basho_erlastic >= 2.1.0'] -requires = ['six(>=1.8.0)', 'basho_erlastic(>= 2.1.0)'] +install_requires = ['six >= 1.8.0', 'basho_erlastic >= 2.1.1'] +requires = ['six(>=1.8.0)', 'basho_erlastic(>= 2.1.1)'] -if sys.version_info[0:3] <= (2, 7, 9): +if sys.version_info[:3] <= (2, 7, 9): install_requires.append("pyOpenSSL >= 0.14") requires.append("pyOpenSSL(>=0.14)") -if sys.version_info[0:3] <= (3, 0, 0): +if sys.version_info[:3] <= (3, 0, 0): install_requires.append('protobuf >=2.4.1, <2.7.0') requires.append('protobuf(>=2.4.1, <2.7.0)') else: From 6a440b26f961509b80c6f22118be288dc759bd7a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 20:45:43 -0700 Subject: [PATCH 187/324] Add ignored dir, add contributor --- .gitignore | 1 + README.md | 1 + 2 files changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 3b1e0f43..c7f6e9f4 100644 --- a/.gitignore +++ b/.gitignore @@ -8,6 
+8,7 @@ docs/_build .coverage riak-*/ py-build/ +build/ dist/ riak.egg-info/ *.egg diff --git a/README.md b/README.md index 10075362..0dc09edf 100644 --- a/README.md +++ b/README.md @@ -157,6 +157,7 @@ Contributors * Daniel Lindsley * Daniel Néri * Daniel Reverri +* [Dan Root](https://github.com/daroot) * [David Basden](https://github.com/dbasden) * David Koblas * Dmitry Rozhkov From f5ee619589209da246b669945a9078b578f7ee7b Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 20:48:10 -0700 Subject: [PATCH 188/324] 2.5.1 RELNOTES --- RELNOTES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/RELNOTES.md b/RELNOTES.md index b444dd24..e6c5f285 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,5 +1,9 @@ # Riak Python Client Release Notes +## 2.5.1 Patch Release + +* [Ensure `six` is not required during installation](https://github.com/basho/riak-python-client/pull/459) + ## [2.5.0 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.0) * [Socket Enhancements](https://github.com/basho/riak-python-client/pull/453) - Resolves [#399](https://github.com/basho/riak-python-client/issues/399) From eb91c873739d8a5d00533a9c4cd5fb54a044527e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 2 May 2016 21:01:39 -0700 Subject: [PATCH 189/324] Make a note that 2.5.0 will not install correctly --- RELNOTES.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/RELNOTES.md b/RELNOTES.md index e6c5f285..a0fe9333 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -4,8 +4,9 @@ * [Ensure `six` is not required during installation](https://github.com/basho/riak-python-client/pull/459) -## [2.5.0 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.0) +## [2.5.0 Release - Deprecated](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.0) +* *NOTE*: due to the `basho-erlastic` dependency, this version will not install correctly. Please use `2.5.1`. * [Socket Enhancements](https://github.com/basho/riak-python-client/pull/453) - Resolves [#399](https://github.com/basho/riak-python-client/issues/399) * [Add multi-put](https://github.com/basho/riak-python-client/pull/452) * [Add support for term-to-binary encoding](https://github.com/basho/riak-python-client/pull/448) *Note:* This requires at least version `1.3.0` of Riak TS. 
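
Editor's note: the setup.py hunks in the two patches above amount to one idea — decide install-time dependencies from `sys.version_info` alone, so setup.py never has to import `six` before pip has had a chance to install it. The following is a minimal, self-contained sketch of that gating pattern, not the client's actual setup.py; the Python 3 protobuf branch is elided in the hunks shown here, so it appears below only as a labelled placeholder.

```
#!/usr/bin/env python
# Sketch of the dependency gating from the setup.py patches above:
# requirements are chosen from the interpreter version, never from six.
import sys

install_requires = ['six >= 1.8.0', 'basho_erlastic >= 2.1.1']
requires = ['six(>=1.8.0)', 'basho_erlastic(>= 2.1.1)']

# Interpreters at or below 2.7.9 ship without a modern ssl module,
# so pyOpenSSL is pulled in for them.
if sys.version_info[:3] <= (2, 7, 9):
    install_requires.append('pyOpenSSL >= 0.14')
    requires.append('pyOpenSSL(>=0.14)')

# Python 2 gets the classic protobuf package; the Python 3 branch is not
# shown in the hunk above, so it is left here as a placeholder only.
if sys.version_info[:3] <= (3, 0, 0):
    install_requires.append('protobuf >=2.4.1, <2.7.0')
    requires.append('protobuf(>=2.4.1, <2.7.0)')
else:
    pass  # Python 3 protobuf dependency (elided in the diff)

print(install_requires)
```

Note that the tuple comparison mirrors the patch exactly: a hypothetical Python 3.0.0 would still take the Python 2 branch, which is the behaviour the diff encodes.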
From 77a606e9585ce787f9a82b661b559cf0d6ef913e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 3 May 2016 10:00:04 -0700 Subject: [PATCH 190/324] Remove six module from tox.ini, ensure tox uses pyenv virtualenvs --- buildbot/Makefile | 3 -- buildbot/tox_cleanup.sh | 13 --------- buildbot/tox_setup.sh | 62 +++++++++++++++++++++++++++-------------- tox.ini | 9 +++--- 4 files changed, 45 insertions(+), 42 deletions(-) delete mode 100755 buildbot/tox_cleanup.sh diff --git a/buildbot/Makefile b/buildbot/Makefile index ecc933b8..e41e220f 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -18,9 +18,6 @@ preconfigure: configure: $(TOOLS_DIR)/riak-cluster-config $(RIAK_ADMIN) 8098 true true -configure_timeseries: - @../setup.py setup_timeseries --riak-admin=$(RIAK_ADMIN) - compile: @echo NO-OP diff --git a/buildbot/tox_cleanup.sh b/buildbot/tox_cleanup.sh deleted file mode 100755 index bd5324c7..00000000 --- a/buildbot/tox_cleanup.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env bash - -for pbin in .tox/*/bin -do - echo $pbin - pip="$pbin/pip" - $pip uninstall riak_pb --yes - $pip uninstall riak --yes - $pip uninstall protobuf --yes - $pip uninstall python3-riak-pb --yes - $pip uninstall python3-protobuf --yes - echo ----- -done diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh index df28ea34..2b45963f 100755 --- a/buildbot/tox_setup.sh +++ b/buildbot/tox_setup.sh @@ -1,5 +1,7 @@ #!/usr/bin/env bash +unset PYENV_VERSION + if [[ ! -d $PYENV_ROOT ]] then export PYENV_ROOT="$HOME/.pyenv" @@ -12,10 +14,12 @@ then exit 1 fi +rm -f $PROJDIR/.python-version + # Install pyenv if it's missing if [[ ! -d $PYENV_ROOT ]] then - git clone https://github.com/yyuu/pyenv.git $PYENV_ROOT + git clone 'https://github.com/yyuu/pyenv.git' $PYENV_ROOT else (cd $PYENV_ROOT && git fetch --all) fi @@ -25,7 +29,7 @@ fi declare -r pyenv_virtualenv_dir="$PYENV_ROOT/plugins/pyenv-virtualenv" if [[ ! -d $pyenv_virtualenv_dir ]] then - git clone https://github.com/yyuu/pyenv-virtualenv.git $pyenv_virtualenv_dir + git clone 'https://github.com/yyuu/pyenv-virtualenv.git' $pyenv_virtualenv_dir else (cd $pyenv_virtualenv_dir && git fetch --all) fi @@ -35,7 +39,7 @@ fi declare -r pyenv_alias_dir="$PYENV_ROOT/plugins/pyenv-alias" if [[ ! -d $pyenv_alias_dir ]] then - git clone https://github.com/s1341/pyenv-alias.git $pyenv_alias_dir + git clone 'https://github.com/s1341/pyenv-alias.git' $pyenv_alias_dir else (cd $pyenv_alias_dir && git pull origin master) fi @@ -55,50 +59,66 @@ then eval "$(pyenv virtualenv-init -)" fi +do_pip_upgrades='false' + # NB: 2.7.8 is special-cased for pyver in 2.7 3.3 3.4 3.5 do - if ! pyenv versions | fgrep -v 'riak_2.7.8' | fgrep -q "riak_$pyver" + riak_py_alias="riak_$pyver" + if ! pyenv versions | fgrep -v 'riak_2.7.8' | fgrep -q "$riak_py_alias" then + # Need to install it + do_pip_upgrades='true' + declare -i pymaj="${pyver%.*}" declare -i pymin="${pyver#*.}" pyver_latest="$(pyenv install --list | grep -E "^[[:space:]]+$pymaj\\.$pymin\\.[[:digit:]]+\$" | tail -n1 | sed -e 's/[[:space:]]//g')" echo "[INFO] installing Python $pyver_latest" - riak_pyver="riak_$pyver_latest" - VERSION_ALIAS="$riak_pyver" pyenv install "$pyver_latest" - pyenv virtualenv "$riak_pyver" "riak-py$pymaj$pymin" + VERSION_ALIAS="$riak_py_alias" pyenv install "$pyver_latest" + pyenv virtualenv "$riak_py_alias" "riak-py$pymaj$pymin" fi done if ! 
pyenv versions | fgrep -q 'riak_2.7.8' then + # Need to install it + do_pip_upgrades='true' + echo "[INFO] installing Python 2.7.8" VERSION_ALIAS='riak_2.7.8' pyenv install '2.7.8' pyenv virtualenv 'riak_2.7.8' 'riak-py278' fi -(cd $PROJDIR && pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278) +pushd $PROJDIR +pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278 -pyenv versions +pyenv rehash -if [[ $(python --version) == Python\ 3.* ]] +if [[ $do_pip_upgrades == 'true' ]] then - pip install --upgrade pip - for module in six tox python3-protobuf + for PY in $(pyenv versions --bare --skip-aliases | grep '^riak_') do - if ! pip show --quiet $module - then - pip install --ignore-installed $module - if ! pip show --quiet $module - then - echo "[ERROR] install of $module failed" 1>&2 - exit 1 - fi - fi + echo "[INFO] $PY - upgrading pip / setuptools" + PYENV_VERSION="$PY" pip install --upgrade pip setuptools done +fi + +python_version="$(python --version)" +if [[ $python_version == Python\ 3* ]] +then + pip install --ignore-installed tox + if ! pip show --quiet tox + then + echo "[ERROR] install of 'tox' failed" 1>&2 + popd + exit 1 + fi pyenv rehash else echo "[ERROR] expected Python 3 to be 'python' at this point" 1>&2 + popd exit 1 fi + +popd diff --git a/tox.ini b/tox.ini index 87e74bb1..9fa2a230 100644 --- a/tox.ini +++ b/tox.ini @@ -1,17 +1,16 @@ -# Tox (http://tox.testrun.org/) is a tool for running tests +#pyver Tox (http://tox.testrun.org/) is a tool for running tests # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. [tox] -envlist = py278, py27, py33, py34, py35 +envlist = riak-py278, riak-py27, riak-py33, riak-py34, riak-py35 -[testenv:py278] +[testenv:riak-py278] basepython = {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 [testenv] install_command = pip install --upgrade {packages} commands = {envpython} setup.py test -deps = six - pip +deps = pip passenv = RUN_* SKIP_* RIAK_* From a2ab0998e6f9340abd4c550fbc2fc93136e16562 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 3 May 2016 10:09:36 -0700 Subject: [PATCH 191/324] add script to clean env --- buildbot/clean-env | 100 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 100 insertions(+) create mode 100755 buildbot/clean-env diff --git a/buildbot/clean-env b/buildbot/clean-env new file mode 100755 index 00000000..da865250 --- /dev/null +++ b/buildbot/clean-env @@ -0,0 +1,100 @@ +#!/usr/bin/env bash + +set -o nounset +set -o errexit + +function now +{ + date '+%Y-%m-%d %H:%M:%S' +} + +function perr +{ + echo "$(now) [error]: $@" 1>&2 +} + +function pinfo +{ + echo "$(now) [info]: $@" +} + +function errexit +{ + perr "$@" + exit 1 +} + +function clean_venvs +{ + for VENV in $(pyenv virtualenvs | awk '/^[[:space:]]*riak-/ { print $1 }') + do + pinfo Uninstalling virtualenv "$VENV" + pyenv uninstall --force "$VENV" + done +} + +function clean_pythons +{ + for RPY in $(pyenv versions | awk '/^[[:space:]]*riak_/ { print $1 }') + do + pinfo Uninstalling python "$RPY" + pyenv uninstall --force "$RPY" + done +} + +function clean_tox +{ + if [[ -d ./.tox ]] + then + pinfo Removing ./.tox + rm -rf ./.tox + fi +} + +function usage +{ + echo " +clean-env: Clean up your pyenv + +Usage: + +clean-env [-p] [-t] [-v] + +-p Clean up Riak-specific Python versions +-t Clean up tox +-v Clean up Riak-specific virtualenvs +" + exit 0 +} + +opt_clean_pythons='false' 
+opt_clean_venvs='false' +opt_clean_tox='false' + +while getopts 'ptv' opt; do + case $opt in + p) + opt_clean_pythons='true';; + t) + opt_clean_tox='true';; + v) + opt_clean_venvs='true';; + *) + usage;; + esac +done + +if [[ $opt_clean_venvs == 'true' ]] +then + clean_venvs +fi + +if [[ $opt_clean_pythons == 'true' ]] +then + clean_pythons +fi + +if [[ $opt_clean_tox == 'true' ]] +then + clean_tox +fi From 1fb6f00b12784beca7cac101878a448877120ebf Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 3 May 2016 10:40:10 -0700 Subject: [PATCH 192/324] Catch environment exceptions when socket.shutdown() is called --- riak/transports/tcp/connection.py | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 054889ad..fe67d1d4 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,4 +1,4 @@ -import errno +import logging import socket import struct @@ -19,12 +19,6 @@ class TcpConnection(object): - # These are set in the TcpTransport initializer - _address = None - _timeout = None - _socket_keepalive = None - _socket_tcp_options = None - """ Connection-related methods for TcpTransport. """ @@ -228,10 +222,10 @@ def close(self): # shutdown() method due to the SSL lib try: self._socket.shutdown(socket.SHUT_RDWR) - except IOError as e: - # NB: sometimes this is the exception if the initial - # connection didn't succeed correctly - if e.errno != errno.EBADF: - raise + except EnvironmentError: + # NB: sometimes these exceptions are raised if the initial + # connection didn't succeed correctly, or if shutdown() is + # called after the connection dies + logging.exception('Exception occurred while shutting down socket.') self._socket.close() del self._socket From f4ebb9e0c7da53b2923e13a5cba6abff78a6682e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 3 May 2016 11:15:44 -0700 Subject: [PATCH 193/324] Change regex to determine if pre-release --- build/publish | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/build/publish b/build/publish index b7223f54..39b5ef6a 100755 --- a/build/publish +++ b/build/publish @@ -151,13 +151,14 @@ trap onexit EXIT declare -r version_string="${1:-unknown}" -if [[ ! $version_string =~ ^[0-9].[0-9].[0-9](-[a-z]+[0-9]+)?$ ]] +# https://www.python.org/dev/peps/pep-0440/ +if [[ ! 
$version_string =~ ^[0-9].[0-9].[0-9]([abcr]+[0-9]+)?$ ]] then - errexit 'first argument must be valid version string in X.Y.Z format' + errexit 'first argument must be valid version string in X.Y.Z, X.Y.ZaN, X.Y.ZbN or X.Y.ZrcN format' fi is_prerelease='false' -if [[ $version_string =~ ^[0-9].[0-9].[0-9]-[a-z]+[0-9]+$ ]] +if [[ $version_string =~ ^[0-9].[0-9].[0-9][abcr]+[0-9]+$ ]] then pinfo "publishing pre-release version: $version_string" is_prerelease='true' From dc90be6971ff622f0920c22af4bdbb195d0ebc66 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 3 May 2016 12:58:06 -0700 Subject: [PATCH 194/324] dat linter --- riak/transports/tcp/connection.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index fe67d1d4..30b53681 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -226,6 +226,7 @@ def close(self): # NB: sometimes these exceptions are raised if the initial # connection didn't succeed correctly, or if shutdown() is # called after the connection dies - logging.exception('Exception occurred while shutting down socket.') + logging.exception('Exception occurred while shutting ' + 'down socket.') self._socket.close() del self._socket From 4664cdfd731333bfc54db053ddf722ba43a8ea1a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 3 May 2016 13:07:25 -0700 Subject: [PATCH 195/324] No longer needed as we use riak-client-tools --- riak/tests/resources/Makefile | 138 ------------ riak/tests/resources/README.md | 16 -- riak/tests/resources/bad_ca.crt | 21 -- riak/tests/resources/ca.crt | 22 -- riak/tests/resources/client.crt | 95 --------- riak/tests/resources/client.key | 27 --- riak/tests/resources/openssl.cnf | 355 ------------------------------- riak/tests/resources/server.crl | 13 -- riak/tests/resources/server.crt | 95 --------- riak/tests/resources/server.key | 27 --- 10 files changed, 809 deletions(-) delete mode 100644 riak/tests/resources/Makefile delete mode 100644 riak/tests/resources/README.md delete mode 100644 riak/tests/resources/bad_ca.crt delete mode 100644 riak/tests/resources/ca.crt delete mode 100644 riak/tests/resources/client.crt delete mode 100644 riak/tests/resources/client.key delete mode 100644 riak/tests/resources/openssl.cnf delete mode 100644 riak/tests/resources/server.crl delete mode 100644 riak/tests/resources/server.crt delete mode 100644 riak/tests/resources/server.key diff --git a/riak/tests/resources/Makefile b/riak/tests/resources/Makefile deleted file mode 100644 index 9a726a91..00000000 --- a/riak/tests/resources/Makefile +++ /dev/null @@ -1,138 +0,0 @@ -# -# Copyright 2014 Basho Technologies, Inc. -# -# This file is provided to you under the Apache License, -# Version 2.0 (the "License"); you may not use this file -# except in compliance with the License. You may obtain -# a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -#under the License. 
- -CNTRY=US -STATE=Washington -CITY=Seattle -ORG=Basho -UNIT=Riak Python Client -EMAIL=clients@basho.com -PASSWD=#testpasswd -COMPANY=Basho Technologies -DAYS=3650 - -SSL=openssl -SSL_CONF=openssl.cnf - -CA_DIR=ca -CA_KEY=${CA_DIR}/ca.key -# Certification Signing Request -CA_CSR=${CA_DIR}/ca.csr -CA_CERT=${CA_DIR}/ca.crt -# Common Name -CA_CN=CA -CA_STRING="${CNTRY}\n${STATE}\n${CITY}\n${ORG}\n${UNIT}\n${CA_CN}\n${EMAIL}\n${PASSWD}\n${COMPANY}\n" - -SERVER_DIR=server -SERVER_KEY=${SERVER_DIR}/server.key -# Certification Signing Request -SERVER_CSR=${SERVER_DIR}/server.csr -SERVER_CERT=${SERVER_DIR}/server.crt -# Certificate Revocation List -SERVER_CRL=${SERVER_DIR}/server.crl -# Common Name (must match nodename) -SERVER_CN=riak@127.0.0.1 -SERVER_STRING="${CNTRY}\n${STATE}\n${CITY}\n${ORG}\n${UNIT}\n${SERVER_CN}\n${EMAIL}\n${PASSWD}\n${COMPANY}\n" - -CLIENT_DIR=client -CLIENT_KEY=${CLIENT_DIR}/client.key -CLIENT_CSR=${CLIENT_DIR}/client.csr -CLIENT_CERT=${CLIENT_DIR}/client.crt -# Common Name (must name Riak username) -CLIENT_CN=certuser -CLIENT_STRING="${CNTRY}\n${STATE}\n${CITY}\n${ORG}\n${UNIT}\n${CLIENT_CN}\n${EMAIL}\n${PASSWD}\n${COMPANY}\n" - -RANDOM=od -vAn -N3 -tu4 < /dev/urandom| awk '{print $1;}' -SERIAL=serial -CRL=crlnumber -NEWCERT_DIR=newcerts -INDEX=index.txt -INDEX_ATTR=index.txt.attr - -all: ${CA_CERT} ${SERVER_CERT} ${CLIENT_CERT} ${SERVER_CRL} - -install: - cp ${CA_CERT} . - cp ${SERVER_CERT} . - cp ${SERVER_KEY} . - cp ${SERVER_CRL} . - cp ${CLIENT_KEY} . - cp ${CLIENT_CERT} . - -# Certificate Serial Number -${SERIAL}: - printf "%06x" `${RANDOM}` > $@ - -# Certificate Revocation List Number -${CRL}: - printf "%06x" `${RANDOM}` > $@ - -${CA_KEY}: ${CA_DIR} ${SERIAL} ${CRL} ${INDEX} ${INDEX_ATTR} ${NEWCERT_DIR} - ${SSL} genrsa -out $@ 2048 - -${CA_CSR}: ${CA_KEY} - printf ${CA_STRING} | ${SSL} req -config ${SSL_CONF} -new -key $< -out $@ - -${CA_CERT}: ${CA_CSR} - ${SSL} x509 -req -days ${DAYS} -in $< -out $@ -signkey ${CA_KEY} - -${SERVER_KEY}: ${SERVER_DIR} ${SERIAL} ${CRL} - ${SSL} genrsa -out $@ 2048 - -${SERVER_CSR}: ${SERVER_KEY} - printf ${SERVER_STRING} | ${SSL} req -config ${SSL_CONF} -new -key $< -out $@ - -${SERVER_CERT}: ${SERVER_CSR} - yes | OPENSSL_CONF=${SSL_CONF} ${SSL} ca -days ${DAYS} -in $< -cert ${CA_CERT} -out $@ - -${SERVER_CRL}: ${CRL} ${SERVER_CERT} ${CA_KEY} ${CA_CERT} - rm -f ${INDEX} - touch ${INDEX} - OPENSSL_CONF=${SSL_CONF} ${SSL} ca -gencrl -keyfile ${CA_KEY} -cert ${CA_CERT} -out $@ - OPENSSL_CONF=${SSL_CONF} ${SSL} ca -revoke ${SERVER_CERT} -keyfile ${CA_KEY} -cert ${CA_CERT} - OPENSSL_CONF=${SSL_CONF} ${SSL} ca -gencrl -keyfile ${CA_KEY} -cert ${CA_CERT} -out $@ - -${CLIENT_KEY}: ${CLIENT_DIR} ${SERIAL} ${CRL} - ${SSL} genrsa -out $@ 2048 - -${CLIENT_CSR}: ${CLIENT_KEY} - printf ${CLIENT_STRING} | ${SSL} req -config ${SSL_CONF} -new -key $< -out $@ - -${CLIENT_CERT}: ${CLIENT_CSR} - yes | OPENSSL_CONF=${SSL_CONF} ${SSL} ca -days ${DAYS} -in $< -cert ${CA_CERT} -key ${CA_KEY} -out $@ - -clean: - rm -rf ${CA_DIR} ${SERVER_DIR} ${CLIENT_DIR} ${NEWCERT_DIR} ${SERIAL}* ${CRL}* ${INDEX}* - -${CA_DIR}: - mkdir -p $@ - -${SERVER_DIR}: - mkdir -p $@ - -${CLIENT_DIR}: - mkdir -p $@ - -${NEWCERT_DIR}: - mkdir -p $@ - -${INDEX}: - touch ${INDEX} - -${INDEX_ATTR}: - touch ${INDEX_ATTR} - diff --git a/riak/tests/resources/README.md b/riak/tests/resources/README.md deleted file mode 100644 index 2bc4a6dc..00000000 --- a/riak/tests/resources/README.md +++ /dev/null @@ -1,16 +0,0 @@ -**DO NOT USE THESE IN PRODUCTION** - -This directory has certificates and 
a key for testing Riak authentication. - -* server.key - a private key for a Riak server (PEM format) -* server.crt - the certificate for server.key (PEM format) -* server.crl - certificate revocation list -* ca.crt - a certificate for the CA that issued server.crt (PEM format) -* empty_ca.crt - a certificate for a CA that has and cannot ever issue a - certificate (I deleted its private key) -* client.crt - certificate for client authenication (PEM format) - -**DO NOT USE THESE IN PRODUCTION** - -Generation of values inspired by https://github.com/basho-labs/riak-ruby-ca - diff --git a/riak/tests/resources/bad_ca.crt b/riak/tests/resources/bad_ca.crt deleted file mode 100644 index 265001d9..00000000 --- a/riak/tests/resources/bad_ca.crt +++ /dev/null @@ -1,21 +0,0 @@ ------BEGIN CERTIFICATE----- -IIDfjCCAuegAwIBAgIJAO0pDelK8iopMA0GCSqGSIb3DQEBBQUAMIGHMQswCQYD -VQQGEwJVUzEQMA4GA1UECBMHRmxvcmlkYTEOMAwGA1UEBxMFTWlhbWkxGzAZBgNV -BAoTEkJhc2hvIFRlY2hub2xvZ2llczEZMBcGA1UECxMQUmlhayBSdWJ5IENsaWVu -dDEeMBwGCSqGSIb3DQEJARYPYnJ5Y2VAYmFzaG8uY29tMB4XDTE0MDIwNDIyNTAw -NFoXDTI0MDIwMjIyNTAwNFowgYcxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdGbG9y -aWRhMQ4wDAYDVQQHEwVNaWFtaTEbMBkGA1UEChMSQmFzaG8gVGVjaG5vbG9naWVz -MRkwFwYDVQQLExBSaWFrIFJ1YnkgQ2xpZW50MR4wHAYJKoZIhvcNAQkBFg9icnlj -ZUBiYXNoby5jb20wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAN4yelOGl+MW -FY7Pf9vZjNHDuVQfBkiY9myriNpr3YGGou0xEIJvikkhl4eQAzDsw52qTsESlfwK -+uFmCBvhPBgeWYRd2LnAvRSrD4c7fDp+2eVUL3EKDHKdVNwnobvMiN2GQRZT2E+J -gBX3Wx3VGDtI0+M1Q9QPI7J1iewE0rB/AgMBAAGjge8wgewwHQYDVR0OBBYEFAkx -E0bwW0jX8FhWFW9XMhzGkMkhMIG8BgNVHSMEgbQwgbGAFAkxE0bwW0jX8FhWFW9X -MhzGkMkhoYGNpIGKMIGHMQswCQYDVQQGEwJVUzEQMA4GA1UECBMHRmxvcmlkYTEO -MAwGA1UEBxMFTWlhbWkxGzAZBgNVBAoTEkJhc2hvIFRlY2hub2xvZ2llczEZMBcG -A1UECxMQUmlhayBSdWJ5IENsaWVudDEeMBwGCSqGSIb3DQEJARYPYnJ5Y2VAYmFz -aG8uY29tggkA7SkN6UryKikwDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOB -gQCPgOgPnv33+/LrfSSDh/6OdtYAGdrxMkCsuPdwmyZlUl9W7gxFjX7EPxYycUgO -HNGuI10vOipgXrsJZUtQFi9OZ8+2m2Y4JHZR1xqSoHmXL/LoZYggY0BcwfjpSujL -pMhBUfzTLlULaaaBEGCVwxTabP+qzRma/d1FjkMUzbHrmQ== ------END CERTIFICATE----- diff --git a/riak/tests/resources/ca.crt b/riak/tests/resources/ca.crt deleted file mode 100644 index cefc7396..00000000 --- a/riak/tests/resources/ca.crt +++ /dev/null @@ -1,22 +0,0 @@ ------BEGIN CERTIFICATE----- -MIIDnjCCAoYCCQDb6VQV9V3A/zANBgkqhkiG9w0BAQUFADCBkDELMAkGA1UEBhMC -VVMxEzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxDjAMBgNV -BAoMBUJhc2hvMRswGQYDVQQLDBJSaWFrIFB5dGhvbiBDbGllbnQxCzAJBgNVBAMM -AkNBMSAwHgYJKoZIhvcNAQkBFhFjbGllbnRzQGJhc2hvLmNvbTAeFw0xNDA5MjQy -MjU1MjVaFw0yNDA5MjEyMjU1MjVaMIGQMQswCQYDVQQGEwJVUzETMBEGA1UECAwK -V2FzaGluZ3RvbjEQMA4GA1UEBwwHU2VhdHRsZTEOMAwGA1UECgwFQmFzaG8xGzAZ -BgNVBAsMElJpYWsgUHl0aG9uIENsaWVudDELMAkGA1UEAwwCQ0ExIDAeBgkqhkiG -9w0BCQEWEWNsaWVudHNAYmFzaG8uY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A -MIIBCgKCAQEA1NQZ/aMes16da5Qs1czlmn9kXunhrOVwhIljXijCTBNAZU7gkqLx -XcbNGIfPRfx5IWNSEcn8ZEAjMe7i29zuMdjvtZVhjOw5u0PJ1TGFqpmLRsMYnMqS -HRXNBQq/t+RsriunqxkHYFSfaNEepckosuQF+ao6eIfjbCnAo/0YMM4DY4zfFlZc -XWdVscQxq3piNJEt7Ob/p8TrBZM9bdkks+Sk/l1ZabYmbRo+AtCmzdvcsqI3uqAA -rm7rKkcuS+A/0z0g/vhJILFcVl+RDexTmVifM8iQE4buUi4CJMqy6fwmDBSlt4MB -8DW8MWNZ/RVGoC9hAhaAq3D9t1rudTpqnQIDAQABMA0GCSqGSIb3DQEBBQUAA4IB -AQAc9Dgbq8Ca/6I2u7uN9hVk7hhgLTmOXWokhfY4tnpVNu4M1TB9dXSbvaIAQd6g -40GQ0W3nZaN1x1LdxgG4El+WxO12rhTjQEEge7mDQMcFjCXIJvrbqDyZ/J2tLG7k -Z9ZqigtTt1VpDE8OjqI/K50R2YU5/CwBDwa33QB6t6GWjL/72vrNoKkQhzd0olkk -xJjoBde7FSfXBuef3a2IMcUyU7ukm2DRvLUslG332ow3oQoL7na8fdsGQ9bDP+HI -lbq0xLvqQbgmbdwwxfa0r9nhqArsSG4q+k3kCpQcy1E2k1NdTn9yNDiMtcWpC+G0 
-7eKc5VzGTi8NwadJLtpYDElq ------END CERTIFICATE----- diff --git a/riak/tests/resources/client.crt b/riak/tests/resources/client.crt deleted file mode 100644 index ef81a369..00000000 --- a/riak/tests/resources/client.crt +++ /dev/null @@ -1,95 +0,0 @@ -Certificate: - Data: - Version: 3 (0x2) - Serial Number: 11568451 (0xb08543) - Signature Algorithm: sha1WithRSAEncryption - Issuer: C=US, ST=Washington, L=Seattle, O=Basho, OU=Riak Python Client, CN=CA/emailAddress=clients@basho.com - Validity - Not Before: Sep 24 22:55:25 2014 GMT - Not After : Sep 21 22:55:25 2024 GMT - Subject: C=US, ST=Washington, O=Basho, OU=Riak Python Client, CN=certuser/emailAddress=clients@basho.com - Subject Public Key Info: - Public Key Algorithm: rsaEncryption - Public-Key: (2048 bit) - Modulus: - 00:c7:e0:44:3a:eb:60:8a:86:70:66:0d:90:f6:29: - a9:85:17:21:3c:d0:7b:62:24:7d:a5:b6:d8:95:b1: - ef:8e:03:24:51:89:5a:34:8a:7a:e4:23:48:34:ce: - e9:f7:8c:d5:41:f7:fe:e5:7e:15:71:3f:6e:d3:07: - 10:cd:60:92:39:24:d4:89:b4:74:34:4b:0d:28:47: - c1:ff:72:d0:e6:e4:a8:c5:95:eb:60:b8:f1:af:d0: - e4:3a:8c:5f:5d:d7:e4:20:85:11:cc:b0:fc:05:95: - df:d1:0b:1f:b2:4a:9d:21:40:28:2d:c6:a7:37:ee: - 3c:f1:f9:c1:ee:7b:bd:ec:74:e4:9a:4f:d5:db:fe: - 91:e7:9f:95:1f:19:a1:c7:d3:3e:18:4b:d2:58:5b: - 26:80:f2:7e:1d:94:4e:c6:b3:4a:ae:b2:ea:50:b0: - eb:3d:c5:76:f1:18:ba:73:cf:87:ec:f4:bd:dc:4a: - 59:1d:c7:bc:79:88:c6:e8:2c:89:09:8c:1b:4b:93: - 8f:23:f5:2d:40:f8:70:66:0c:3d:c5:e7:99:cb:58: - f6:46:b4:60:bf:b7:02:f4:1b:04:30:ca:aa:30:6a: - a8:b0:ad:ec:ad:40:d2:fb:78:b2:51:2a:d3:40:4a: - 60:bb:24:40:6f:21:49:58:fa:56:b7:e9:5e:9f:b9: - 51:bd - Exponent: 65537 (0x10001) - X509v3 extensions: - X509v3 Basic Constraints: - CA:FALSE - Netscape Cert Type: - SSL Client, SSL Server - X509v3 Key Usage: - Digital Signature, Non Repudiation, Key Encipherment - Netscape Comment: - Riak Python Client Testing Certificate - X509v3 Subject Key Identifier: - 7C:AD:B9:A3:4C:9B:59:3E:2F:F3:4B:07:64:92:2D:1C:28:99:AC:A2 - X509v3 Authority Key Identifier: - DirName:/C=US/ST=Washington/L=Seattle/O=Basho/OU=Riak Python Client/CN=CA/emailAddress=clients@basho.com - serial:DB:E9:54:15:F5:5D:C0:FF - - X509v3 Extended Key Usage: - TLS Web Server Authentication, TLS Web Client Authentication - Signature Algorithm: sha1WithRSAEncryption - ad:f7:51:6a:c5:1b:eb:93:81:a3:b2:de:3e:a6:15:9d:4f:e7: - f9:37:19:f7:0d:fc:e5:7e:02:11:92:be:da:e4:c3:78:ed:90: - a9:a7:57:f0:08:72:a5:90:cc:5e:27:9b:8d:ad:9f:38:95:26: - d3:79:c8:03:0b:7e:40:dd:a8:0b:13:98:2f:6c:52:01:a9:b9: - eb:fe:0c:19:2e:36:82:b0:fc:a5:46:88:64:fa:8d:d3:73:b4: - be:9c:f1:74:a8:a0:28:2a:81:9d:cd:62:8a:e0:12:5f:c3:c4: - 0b:d0:15:f6:02:0e:41:da:50:f2:c8:70:91:24:71:e9:89:e7: - ac:47:73:05:97:7c:3f:4e:24:22:05:06:29:1e:08:b3:49:97: - 3b:11:4f:56:ba:83:c1:0f:8d:20:ed:80:9a:0d:6d:53:ee:63: - bf:3a:24:e3:62:9c:eb:6f:b8:af:01:0d:89:63:47:b3:fc:f7: - 30:f6:3f:96:ed:2e:52:bc:75:c9:27:82:70:b6:e6:d2:f4:0c: - aa:fc:39:c7:54:97:44:98:3f:5f:e5:27:2d:33:d9:74:98:e0: - 96:aa:71:b3:5a:27:78:3b:ed:70:93:3a:bd:df:f9:35:78:68: - 70:36:d6:16:61:83:66:8a:f9:96:c3:e0:ca:a3:20:3e:50:1b: - 9a:fd:7c:4c ------BEGIN CERTIFICATE----- -MIIE7DCCA9SgAwIBAgIEALCFQzANBgkqhkiG9w0BAQUFADCBkDELMAkGA1UEBhMC -VVMxEzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxDjAMBgNV -BAoMBUJhc2hvMRswGQYDVQQLDBJSaWFrIFB5dGhvbiBDbGllbnQxCzAJBgNVBAMM -AkNBMSAwHgYJKoZIhvcNAQkBFhFjbGllbnRzQGJhc2hvLmNvbTAeFw0xNDA5MjQy -MjU1MjVaFw0yNDA5MjEyMjU1MjVaMIGEMQswCQYDVQQGEwJVUzETMBEGA1UECAwK -V2FzaGluZ3RvbjEOMAwGA1UECgwFQmFzaG8xGzAZBgNVBAsMElJpYWsgUHl0aG9u 
-IENsaWVudDERMA8GA1UEAwwIY2VydHVzZXIxIDAeBgkqhkiG9w0BCQEWEWNsaWVu -dHNAYmFzaG8uY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAx+BE -OutgioZwZg2Q9imphRchPNB7YiR9pbbYlbHvjgMkUYlaNIp65CNINM7p94zVQff+ -5X4VcT9u0wcQzWCSOSTUibR0NEsNKEfB/3LQ5uSoxZXrYLjxr9DkOoxfXdfkIIUR -zLD8BZXf0QsfskqdIUAoLcanN+488fnB7nu97HTkmk/V2/6R55+VHxmhx9M+GEvS -WFsmgPJ+HZROxrNKrrLqULDrPcV28Ri6c8+H7PS93EpZHce8eYjG6CyJCYwbS5OP -I/UtQPhwZgw9xeeZy1j2RrRgv7cC9BsEMMqqMGqosK3srUDS+3iyUSrTQEpguyRA -byFJWPpWt+len7lRvQIDAQABo4IBVjCCAVIwCQYDVR0TBAIwADARBglghkgBhvhC -AQEEBAMCBsAwCwYDVR0PBAQDAgXgMDUGCWCGSAGG+EIBDQQoFiZSaWFrIFB5dGhv -biBDbGllbnQgVGVzdGluZyBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUfK25o0ybWT4v -80sHZJItHCiZrKIwga8GA1UdIwSBpzCBpKGBlqSBkzCBkDELMAkGA1UEBhMCVVMx -EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxDjAMBgNVBAoM -BUJhc2hvMRswGQYDVQQLDBJSaWFrIFB5dGhvbiBDbGllbnQxCzAJBgNVBAMMAkNB -MSAwHgYJKoZIhvcNAQkBFhFjbGllbnRzQGJhc2hvLmNvbYIJANvpVBX1XcD/MB0G -A1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjANBgkqhkiG9w0BAQUFAAOCAQEA -rfdRasUb65OBo7LePqYVnU/n+TcZ9w385X4CEZK+2uTDeO2QqadX8AhypZDMXieb -ja2fOJUm03nIAwt+QN2oCxOYL2xSAam56/4MGS42grD8pUaIZPqN03O0vpzxdKig -KCqBnc1iiuASX8PEC9AV9gIOQdpQ8shwkSRx6YnnrEdzBZd8P04kIgUGKR4Is0mX -OxFPVrqDwQ+NIO2Amg1tU+5jvzok42Kc62+4rwENiWNHs/z3MPY/lu0uUrx1ySeC -cLbm0vQMqvw5x1SXRJg/X+UnLTPZdJjglqpxs1oneDvtcJM6vd/5NXhocDbWFmGD -Zor5lsPgyqMgPlAbmv18TA== ------END CERTIFICATE----- diff --git a/riak/tests/resources/client.key b/riak/tests/resources/client.key deleted file mode 100644 index f397c320..00000000 --- a/riak/tests/resources/client.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAx+BEOutgioZwZg2Q9imphRchPNB7YiR9pbbYlbHvjgMkUYla -NIp65CNINM7p94zVQff+5X4VcT9u0wcQzWCSOSTUibR0NEsNKEfB/3LQ5uSoxZXr -YLjxr9DkOoxfXdfkIIURzLD8BZXf0QsfskqdIUAoLcanN+488fnB7nu97HTkmk/V -2/6R55+VHxmhx9M+GEvSWFsmgPJ+HZROxrNKrrLqULDrPcV28Ri6c8+H7PS93EpZ -Hce8eYjG6CyJCYwbS5OPI/UtQPhwZgw9xeeZy1j2RrRgv7cC9BsEMMqqMGqosK3s -rUDS+3iyUSrTQEpguyRAbyFJWPpWt+len7lRvQIDAQABAoIBAARWO2TD4q5eyGWO -ecy9jq4SmGgHZgX1ILzNwNlOqRH9w3X5cvmc35m/ojtzGeSDK8VoqiE0oSZ559+w -UY8DP2j6AZqTTcosrrZnCDCQvzOjVn4jCJ5qpOPZtnoGvbL9kjeaa45zcKR2YgrZ -IPDvaYVlLPoBS3ONOclOUATqrm5I+SWzsl45pLrbKUxDpMYcoezY6ok1q1o79i+7 -QBkBliO2IXquMFF/JJwHua4r1R9KqdPKFTynbCpaZ477flCHEWcfWPgFpOpfT6Go -7rqCiR5ug8CyBjNsHnYx1pVZs8I0c2F7WVUQilEh/PqmQcGoy6L5OOCOeQCVMw1i -QfiGIkECgYEA+4W2tGikhAdNRixlvPQeELmGTQzl5rBT7HwIvyjcv8waYBNg5Roy -MKH03R1C5hGFVw+p3JpWQb4uMsX6SHJUZYzLZiqjjknZQ8/fYGJZLhCCu1w3H7wk -Fr1kZLy8hKcbmr/c3Xd4VUhp9mIHehRRYqfccBgeUkBfPFjRINPoY7kCgYEAy28u -Lm06jq+7xvNA2VllPPI+QCQsxtp24w8aiLqzDP3pVpr8q9JMrzv/MOPHctj0D5WN -8FGjBQipzpK0W/OA0FrpRM/NT7/+DF3nqgBQMqqzRX/z6UKGs6DgP9MPBvLEgeQV -UtOCwF5jr+/+6NlrnftcuVYMZ/a5adbB8a7+KCUCgYEA8XaYw/GBns5zvN56fT/O -bTfuWqH7Q9AbgXhB3WKZKfgDiiCQcOEJNe8FaBDjXIONgtsiswnnrQ2qxEuTz2ES -7LqRue9NPRhgX918EMfZ3YM0PjJ1KR4xdzMy4hLe2Gqk8ZcnreU3vIfcUhAiJWzE -BPYpheNhmIz74K9TdTR6cOECgYBchW4tc0QEjOwL6an+r+eZNlsVdN5geg9D6SSa -Nr8kE37CAq8TvgteTx0asR2OoBkv1Ua+m4JW0b/Y0WPxxec5237n6tJniNwT58lq -ycWvpW7vFuhpl/YHUA1tOaJF+Ldik8cW6lc3Aja4V6BJakjFiwJ60CXISq+88Q03 -y+yPeQKBgCFx40pPW6o9thCM96P/K1AtVXAjTUxMZmIcZbch01r0ahwdu/ITBwon -fCrTAYaqDEAs4y85XVWmdWn9bjUwSk3M6wewY86rtOp3f+PmyCxdyyNR4xq/C7nn -W3GZ3hNu55OI0yMWbd3tLjtyUY0x9Mg/Oyz+k7VRQkhNKagyEoDp ------END RSA PRIVATE KEY----- diff --git a/riak/tests/resources/openssl.cnf b/riak/tests/resources/openssl.cnf deleted file mode 100644 index e23b3a7b..00000000 --- a/riak/tests/resources/openssl.cnf +++ /dev/null @@ -1,355 +0,0 @@ -# -# OpenSSL example configuration file. 
-# This is mostly being used for generation of certificate requests. -# - -# This definition stops the following lines choking if HOME isn't -# defined. -HOME = . -RANDFILE = $ENV::HOME/.rnd - -# Extra OBJECT IDENTIFIER info: -#oid_file = $ENV::HOME/.oid -oid_section = new_oids - -# To use this configuration file with the "-extfile" option of the -# "openssl x509" utility, name here the section containing the -# X.509v3 extensions to use: -# extensions = -# (Alternatively, use a configuration file that has only -# X.509v3 extensions in its main [= default] section.) - -[ new_oids ] - -# We can add new OIDs in here for use by 'ca', 'req' and 'ts'. -# Add a simple OID like this: -# testoid1=1.2.3.4 -# Or use config file substitution like this: -# testoid2=${testoid1}.5.6 - -# Policies used by the TSA examples. -tsa_policy1 = 1.2.3.4.1 -tsa_policy2 = 1.2.3.4.5.6 -tsa_policy3 = 1.2.3.4.5.7 - -#################################################################### -[ ca ] -default_ca = CA_default # The default ca section - -#################################################################### -[ CA_default ] - -dir = . # Where everything is kept -certs = $dir/certs # Where the issued certs are kept -crl_dir = $dir/crl # Where the issued crl are kept -database = $dir/index.txt # database index file. -#unique_subject = no # Set to 'no' to allow creation of - # several ctificates with same subject. -new_certs_dir = $dir/newcerts # default place for new certs. - -certificate = $dir/cacert.pem # The CA certificate -serial = $dir/serial # The current serial number -crlnumber = $dir/crlnumber # the current crl number - # must be commented out to leave a V1 CRL -crl = $dir/crl.pem # The current CRL -private_key = $dir/ca/ca.key # The private key -RANDFILE = $dir/private/.rand # private random number file - -x509_extensions = usr_cert # The extentions to add to the cert - -# Comment out the following two lines for the "traditional" -# (and highly broken) format. -name_opt = ca_default # Subject Name options -cert_opt = ca_default # Certificate field options - -# Extension copying option: use with caution. -# copy_extensions = copy - -# Extensions to add to a CRL. Note: Netscape communicator chokes on V2 CRLs -# so this is commented out by default to leave a V1 CRL. -# crlnumber must also be commented out to leave a V1 CRL. -# crl_extensions = crl_ext - -default_days = 365 # how long to certify for -default_crl_days= 30 # how long before next CRL -default_md = default # use public key default MD -preserve = no # keep passed DN ordering - -# A few difference way of specifying how similar the request should look -# For type CA, the listed attributes must be the same, and the optional -# and supplied fields are just that :-) -policy = policy_match - -# For the CA policy -[ policy_match ] -countryName = match -stateOrProvinceName = match -organizationName = match -organizationalUnitName = optional -commonName = supplied -emailAddress = optional - -# For the 'anything' policy -# At this point in time, you must list all acceptable 'object' -# types. 
-[ policy_anything ] -countryName = optional -stateOrProvinceName = optional -localityName = optional -organizationName = optional -organizationalUnitName = optional -commonName = supplied -emailAddress = optional - -#################################################################### -[ req ] -default_bits = 2048 -default_keyfile = privkey.pem -distinguished_name = req_distinguished_name -attributes = req_attributes -x509_extensions = v3_ca # The extentions to add to the self signed cert - -# Passwords for private keys if not present they will be prompted for -# input_password = secret -# output_password = secret - -# This sets a mask for permitted string types. There are several options. -# default: PrintableString, T61String, BMPString. -# pkix : PrintableString, BMPString (PKIX recommendation before 2004) -# utf8only: only UTF8Strings (PKIX recommendation after 2004). -# nombstr : PrintableString, T61String (no BMPStrings or UTF8Strings). -# MASK:XXXX a literal mask value. -# WARNING: ancient versions of Netscape crash on BMPStrings or UTF8Strings. -string_mask = utf8only - -# req_extensions = v3_req # The extensions to add to a certificate request - -[ req_distinguished_name ] -countryName = Country Name (2 letter code) -countryName_default = US -countryName_min = 2 -countryName_max = 2 - -stateOrProvinceName = State or Province Name (full name) -stateOrProvinceName_default = Washington - -localityName = Locality Name (eg, city) -localityName_default = Seattle - -0.organizationName = Organization Name (eg, company) -0.organizationName_default = Basho - -# we can do this but it is not needed normally :-) -#1.organizationName = Second Organization Name (eg, company) -#1.organizationName_default = Clients - -organizationalUnitName = Organizational Unit Name (eg, section) -organizationalUnitName_default = Riak Python Client - -commonName = Common Name (e.g. server FQDN or YOUR name) -commonName_max = 64 - -emailAddress = Email Address -emailAddress_max = 64 -emailAddress_default = clients@basho.com - -# SET-ex3 = SET extension number 3 - -[ req_attributes ] -challengePassword = A challenge password -challengePassword_min = 4 -challengePassword_max = 20 - -unstructuredName = Basho Technologies - -[ usr_cert ] - -# These extensions are added when 'ca' signs a request. - -# This goes against PKIX guidelines but some CAs do it and some software -# requires this to avoid interpreting an end user certificate as a CA. - -basicConstraints=CA:FALSE - -# Here are some examples of the usage of nsCertType. If it is omitted -# the certificate can be used for anything *except* object signing. - -nsCertType = server, client - -# This is OK for an SSL server. -# nsCertType = server - -# For an object signing certificate this would be used. -# nsCertType = objsign - -# For normal client use this is typical -# nsCertType = client, email - -# and for everything including object signing: -# nsCertType = client, email, objsign - -# This is typical in keyUsage for a client certificate. -keyUsage = nonRepudiation, digitalSignature, keyEncipherment - -# This will be displayed in Netscape's comment listbox. -nsComment = "Riak Python Client Testing Certificate" - -# PKIX recommendations harmless if included in all certificates. -subjectKeyIdentifier=hash -authorityKeyIdentifier=keyid,issuer - -# This stuff is for subjectAltName and issuerAltname. -# Import the email address. -# subjectAltName=email:copy -# An alternative to produce certificates that aren't -# deprecated according to PKIX. 
-# subjectAltName=email:move - -# Copy subject details -# issuerAltName=issuer:copy - -#nsCaRevocationUrl = http://www.domain.dom/ca-crl.pem -#nsBaseUrl -#nsRevocationUrl -#nsRenewalUrl -#nsCaPolicyUrl -#nsSslServerName - -# This is required for TSA certificates. -# extendedKeyUsage = critical,timeStamping -extendedKeyUsage = serverAuth,clientAuth - -[ v3_req ] - -# Extensions to add to a certificate request - -basicConstraints = CA:FALSE -keyUsage = nonRepudiation, digitalSignature, keyEncipherment - -[ v3_ca ] - - -# Extensions for a typical CA - - -# PKIX recommendation. - -subjectKeyIdentifier=hash - -authorityKeyIdentifier=keyid:always,issuer - -# This is what PKIX recommends but some broken software chokes on critical -# extensions. -#basicConstraints = critical,CA:true -# So we do this instead. -basicConstraints = CA:true - -# Key usage: this is typical for a CA certificate. However since it will -# prevent it being used as an test self-signed certificate it is best -# left out by default. -# keyUsage = cRLSign, keyCertSign - -# Some might want this also -# nsCertType = sslCA, emailCA - -# Include email address in subject alt name: another PKIX recommendation -# subjectAltName=email:copy -# Copy issuer details -# issuerAltName=issuer:copy - -# DER hex encoding of an extension: beware experts only! -# obj=DER:02:03 -# Where 'obj' is a standard or added object -# You can even override a supported extension: -# basicConstraints= critical, DER:30:03:01:01:FF - -[ crl_ext ] - -# CRL extensions. -# Only issuerAltName and authorityKeyIdentifier make any sense in a CRL. - -# issuerAltName=issuer:copy -authorityKeyIdentifier=keyid:always - -[ proxy_cert_ext ] -# These extensions should be added when creating a proxy certificate - -# This goes against PKIX guidelines but some CAs do it and some software -# requires this to avoid interpreting an end user certificate as a CA. - -basicConstraints=CA:FALSE - -# Here are some examples of the usage of nsCertType. If it is omitted -# the certificate can be used for anything *except* object signing. - -# This is OK for an SSL server. -# nsCertType = server - -# For an object signing certificate this would be used. -# nsCertType = objsign - -# For normal client use this is typical -# nsCertType = client, email - -# and for everything including object signing: -# nsCertType = client, email, objsign - -# This is typical in keyUsage for a client certificate. -# keyUsage = nonRepudiation, digitalSignature, keyEncipherment - -# This will be displayed in Netscape's comment listbox. -nsComment = "OpenSSL Generated Certificate" - -# PKIX recommendations harmless if included in all certificates. -subjectKeyIdentifier=hash -authorityKeyIdentifier=keyid,issuer - -# This stuff is for subjectAltName and issuerAltname. -# Import the email address. -# subjectAltName=email:copy -# An alternative to produce certificates that aren't -# deprecated according to PKIX. -# subjectAltName=email:move - -# Copy subject details -# issuerAltName=issuer:copy - -#nsCaRevocationUrl = http://www.domain.dom/ca-crl.pem -#nsBaseUrl -#nsRevocationUrl -#nsRenewalUrl -#nsCaPolicyUrl -#nsSslServerName - -# This really needs to be in place for it to be a proxy certificate. -proxyCertInfo=critical,language:id-ppl-anyLanguage,pathlen:3,policy:foo - -#################################################################### -[ tsa ] - -default_tsa = tsa_config1 # the default TSA section - -[ tsa_config1 ] - -# These are used by the TSA reply generation only. 
-dir = ./demoCA # TSA root directory -serial = $dir/tsaserial # The current serial number (mandatory) -crypto_device = builtin # OpenSSL engine to use for signing -signer_cert = $dir/tsacert.pem # The TSA signing certificate - # (optional) -certs = $dir/cacert.pem # Certificate chain to include in reply - # (optional) -signer_key = $dir/private/tsakey.pem # The TSA private key (optional) - -default_policy = tsa_policy1 # Policy if request did not specify it - # (optional) -other_policies = tsa_policy2, tsa_policy3 # acceptable policies (optional) -digests = md5, sha1 # Acceptable message digests (mandatory) -accuracy = secs:1, millisecs:500, microsecs:100 # (optional) -clock_precision_digits = 0 # number of digits after dot. (optional) -ordering = yes # Is ordering defined for timestamps? - # (optional, default: no) -tsa_name = yes # Must the TSA name be included in the reply? - # (optional, default: no) -ess_cert_id_chain = no # Must the ESS cert id chain be included? - # (optional, default: no) diff --git a/riak/tests/resources/server.crl b/riak/tests/resources/server.crl deleted file mode 100644 index 317b7a5c..00000000 --- a/riak/tests/resources/server.crl +++ /dev/null @@ -1,13 +0,0 @@ ------BEGIN X509 CRL----- -MIICBjCB7wIBATANBgkqhkiG9w0BAQUFADCBkDELMAkGA1UEBhMCVVMxEzARBgNV -BAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxDjAMBgNVBAoMBUJhc2hv -MRswGQYDVQQLDBJSaWFrIFB5dGhvbiBDbGllbnQxCzAJBgNVBAMMAkNBMSAwHgYJ -KoZIhvcNAQkBFhFjbGllbnRzQGJhc2hvLmNvbRcNMTQwOTI0MjI1NTI1WhcNMTQx -MDI0MjI1NTI1WjAXMBUCBACwhUIXDTE0MDkyNDIyNTUyNVqgETAPMA0GA1UdFAQG -AgQA8ocDMA0GCSqGSIb3DQEBBQUAA4IBAQC4ReWlkd6Ld7Unk3WPyUsvh8gUn5PJ -J5usc9KVO3iiLZEK57LGtwVFoUPVrt9F4Mg9+0qp1NTzGmgLzkkTyUGzEbTJce/L -3m5zTJW6x8wOFT2b/dQRoO6WUsSaJ4ZkUX04RZc7NQ8SWozxW6mZSrQrEqXNtjUo -1ifsnNyN5OxPZ/PV8DQN5Rtl87j5ETTUJ62tDucnEwoFHqN4AY0riLqLWmyHHokx -gQtQscv7LmCZHPF8hgwYxzatxAEBR1MhZu1jku/j7Im7EDwxGyfvMbPZOhuVkuGI -y7SFCmvcwIR4APHtB3io93UngiQ64PxBOFQxNh9P2tf2fi0dI8oJvm8F ------END X509 CRL----- diff --git a/riak/tests/resources/server.crt b/riak/tests/resources/server.crt deleted file mode 100644 index 052697f2..00000000 --- a/riak/tests/resources/server.crt +++ /dev/null @@ -1,95 +0,0 @@ -Certificate: - Data: - Version: 3 (0x2) - Serial Number: 11568450 (0xb08542) - Signature Algorithm: sha1WithRSAEncryption - Issuer: C=US, ST=Washington, L=Seattle, O=Basho, OU=Riak Python Client, CN=CA/emailAddress=clients@basho.com - Validity - Not Before: Sep 24 22:55:25 2014 GMT - Not After : Sep 21 22:55:25 2024 GMT - Subject: C=US, ST=Washington, O=Basho, OU=Riak Python Client, CN=riak@127.0.0.1/emailAddress=clients@basho.com - Subject Public Key Info: - Public Key Algorithm: rsaEncryption - Public-Key: (2048 bit) - Modulus: - 00:eb:38:1c:40:d6:8d:e9:65:4d:d1:8f:1a:c3:1f: - 03:7f:a0:1c:cc:c3:e2:53:fb:b0:27:60:2d:2a:0e: - ad:5c:67:7c:c3:62:f1:79:d5:04:c0:83:b6:5a:41: - f2:a7:8d:f8:4a:50:17:35:c7:6f:75:af:72:e7:44: - 65:99:e3:cb:c7:88:86:66:64:20:ce:6f:f8:14:5d: - 96:dc:19:7a:5c:4e:24:f7:50:df:d7:71:f5:2e:ce: - 73:d4:a2:5e:98:52:0e:66:e3:88:22:d9:8d:88:8e: - ac:96:2d:b1:0c:05:e8:59:30:4c:0e:fa:e7:8d:29: - 7f:b4:93:93:92:9c:8b:07:b9:b1:da:02:c4:d2:41: - 57:df:d1:ab:4c:15:e2:9f:da:65:5d:48:88:fa:51: - 0b:79:b9:3c:99:0a:16:de:66:58:13:cb:98:48:bd: - 2b:bd:d2:56:35:bf:16:c5:42:5d:39:1f:3a:26:8e: - 0c:7f:a4:a0:cb:4b:90:d9:49:a0:1d:52:c9:64:d6: - 10:01:25:ae:15:a5:aa:92:dd:cf:91:92:16:0d:9d: - 95:ec:1f:e6:3c:8c:00:7f:30:c4:e1:f5:87:c9:5e: - 08:a2:2b:8c:63:eb:d0:46:9d:83:66:42:d8:60:ed: - 77:c2:6e:93:ad:89:bc:3d:5b:a5:c9:5f:dd:8f:69: - c5:a9 - Exponent: 65537 
(0x10001) - X509v3 extensions: - X509v3 Basic Constraints: - CA:FALSE - Netscape Cert Type: - SSL Client, SSL Server - X509v3 Key Usage: - Digital Signature, Non Repudiation, Key Encipherment - Netscape Comment: - Riak Python Client Testing Certificate - X509v3 Subject Key Identifier: - 6C:09:ED:0E:F7:5B:0C:A0:8E:7C:31:2F:78:F6:78:45:8D:69:EF:36 - X509v3 Authority Key Identifier: - DirName:/C=US/ST=Washington/L=Seattle/O=Basho/OU=Riak Python Client/CN=CA/emailAddress=clients@basho.com - serial:DB:E9:54:15:F5:5D:C0:FF - - X509v3 Extended Key Usage: - TLS Web Server Authentication, TLS Web Client Authentication - Signature Algorithm: sha1WithRSAEncryption - 85:7a:8e:95:68:8e:e5:4f:be:89:b4:9b:ab:bc:43:b0:4f:7d: - ad:14:e4:a4:3b:c2:a8:b3:42:78:f7:91:78:34:96:1a:93:57: - d5:4f:23:7d:b7:62:cf:0c:cf:59:09:4b:99:93:41:b6:ed:a7: - d9:51:6f:4f:83:c3:93:2f:9d:59:96:c0:63:47:1d:9b:e9:5d: - 2f:aa:4e:7c:bf:9a:5e:12:66:4a:83:df:e2:e3:14:49:ad:96: - 61:9c:55:fa:7e:ed:3a:7d:a2:bf:fd:8d:e6:5f:fb:d8:c0:a2: - c3:32:a7:c7:e6:65:77:d7:94:cf:54:67:e0:b9:86:bc:28:1c: - 19:71:1a:e7:23:42:81:52:50:29:07:10:6f:d3:c0:42:92:ba: - 36:9e:f5:8e:0f:ab:3d:a4:e7:79:16:e5:ec:fd:fd:dc:fd:1f: - 35:87:67:d8:dd:15:68:74:01:b9:cb:57:a0:9b:7d:bd:b6:12: - 8d:7f:b8:5e:c5:f3:fb:4d:74:72:78:59:1d:f2:b6:80:ae:fe: - ee:4d:c6:a6:89:89:ca:ba:48:67:4d:35:e5:9f:bc:cc:ea:ef: - 26:3e:02:37:5f:b3:c6:ba:2e:ac:0a:fd:60:f5:85:2e:fd:81: - 68:78:b9:47:81:c7:f8:0e:6f:0a:08:a6:b8:41:97:ac:db:3b: - 75:aa:24:c8 ------BEGIN CERTIFICATE----- -MIIE8jCCA9qgAwIBAgIEALCFQjANBgkqhkiG9w0BAQUFADCBkDELMAkGA1UEBhMC -VVMxEzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxDjAMBgNV -BAoMBUJhc2hvMRswGQYDVQQLDBJSaWFrIFB5dGhvbiBDbGllbnQxCzAJBgNVBAMM -AkNBMSAwHgYJKoZIhvcNAQkBFhFjbGllbnRzQGJhc2hvLmNvbTAeFw0xNDA5MjQy -MjU1MjVaFw0yNDA5MjEyMjU1MjVaMIGKMQswCQYDVQQGEwJVUzETMBEGA1UECAwK -V2FzaGluZ3RvbjEOMAwGA1UECgwFQmFzaG8xGzAZBgNVBAsMElJpYWsgUHl0aG9u -IENsaWVudDEXMBUGA1UEAwwOcmlha0AxMjcuMC4wLjExIDAeBgkqhkiG9w0BCQEW -EWNsaWVudHNAYmFzaG8uY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC -AQEA6zgcQNaN6WVN0Y8awx8Df6AczMPiU/uwJ2AtKg6tXGd8w2LxedUEwIO2WkHy -p434SlAXNcdvda9y50RlmePLx4iGZmQgzm/4FF2W3Bl6XE4k91Df13H1Ls5z1KJe -mFIOZuOIItmNiI6sli2xDAXoWTBMDvrnjSl/tJOTkpyLB7mx2gLE0kFX39GrTBXi -n9plXUiI+lELebk8mQoW3mZYE8uYSL0rvdJWNb8WxUJdOR86Jo4Mf6Sgy0uQ2Umg -HVLJZNYQASWuFaWqkt3PkZIWDZ2V7B/mPIwAfzDE4fWHyV4IoiuMY+vQRp2DZkLY -YO13wm6TrYm8PVulyV/dj2nFqQIDAQABo4IBVjCCAVIwCQYDVR0TBAIwADARBglg -hkgBhvhCAQEEBAMCBsAwCwYDVR0PBAQDAgXgMDUGCWCGSAGG+EIBDQQoFiZSaWFr -IFB5dGhvbiBDbGllbnQgVGVzdGluZyBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUbAnt -DvdbDKCOfDEvePZ4RY1p7zYwga8GA1UdIwSBpzCBpKGBlqSBkzCBkDELMAkGA1UE -BhMCVVMxEzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxDjAM -BgNVBAoMBUJhc2hvMRswGQYDVQQLDBJSaWFrIFB5dGhvbiBDbGllbnQxCzAJBgNV -BAMMAkNBMSAwHgYJKoZIhvcNAQkBFhFjbGllbnRzQGJhc2hvLmNvbYIJANvpVBX1 -XcD/MB0GA1UdJQQWMBQGCCsGAQUFBwMBBggrBgEFBQcDAjANBgkqhkiG9w0BAQUF -AAOCAQEAhXqOlWiO5U++ibSbq7xDsE99rRTkpDvCqLNCePeReDSWGpNX1U8jfbdi -zwzPWQlLmZNBtu2n2VFvT4PDky+dWZbAY0cdm+ldL6pOfL+aXhJmSoPf4uMUSa2W -YZxV+n7tOn2iv/2N5l/72MCiwzKnx+Zld9eUz1Rn4LmGvCgcGXEa5yNCgVJQKQcQ -b9PAQpK6Np71jg+rPaTneRbl7P393P0fNYdn2N0VaHQBuctXoJt9vbYSjX+4XsXz -+010cnhZHfK2gK7+7k3GpomJyrpIZ0015Z+8zOrvJj4CN1+zxrourAr9YPWFLv2B -aHi5R4HH+A5vCgimuEGXrNs7daokyA== ------END CERTIFICATE----- diff --git a/riak/tests/resources/server.key b/riak/tests/resources/server.key deleted file mode 100644 index 23531a6d..00000000 --- a/riak/tests/resources/server.key +++ /dev/null @@ -1,27 +0,0 @@ ------BEGIN RSA PRIVATE KEY----- 
-MIIEowIBAAKCAQEA6zgcQNaN6WVN0Y8awx8Df6AczMPiU/uwJ2AtKg6tXGd8w2Lx -edUEwIO2WkHyp434SlAXNcdvda9y50RlmePLx4iGZmQgzm/4FF2W3Bl6XE4k91Df -13H1Ls5z1KJemFIOZuOIItmNiI6sli2xDAXoWTBMDvrnjSl/tJOTkpyLB7mx2gLE -0kFX39GrTBXin9plXUiI+lELebk8mQoW3mZYE8uYSL0rvdJWNb8WxUJdOR86Jo4M -f6Sgy0uQ2UmgHVLJZNYQASWuFaWqkt3PkZIWDZ2V7B/mPIwAfzDE4fWHyV4IoiuM -Y+vQRp2DZkLYYO13wm6TrYm8PVulyV/dj2nFqQIDAQABAoIBAB09jW2V/nA8Mq9R -Xd9RtKqh6dKeqz0Ldbt/Xj3zMyPgjYbwftpJW9zewsV3m0WvBWWfjTMXRsjXda5A -N20o/0UfuK1z1wwyqSFG/SEBXc+puzivahQrS3J1IbsWMDI5SScLM/o5FzoZmmTU -60R2W4ry0RGvqSdIOGLnUZoRMctU0j85+M6gZy2a4CB8eVdH9Awp6IDnn2s6VNOI -ZVDUJMa0I5INQD3JCN6u12nYSf82jGT3OKmB+VbMxl4moWJ1IqwSDBcy+YyJY37J -nEtrtShw1UOvwBU8BQKMEF2XQ3uZxGvVPt5WDXKJh7OQur88YzqphtTj3BoGyu/H -x/qaZIECgYEA9tWB0wffwCdZizqvZCfUTx3SPqYwc96Siase773Dr+Gzcg/MwBV6 -y4veIXRb5VWZOoaytHGKhpoYzmiLpuPcuqRzOpSv2gMhMmtl6vMIO+l30snI9lKF -BDcFpvxEGG1ztoxTSA3lfNRbUCToQr7669e7fHlU+zBOyEZh3TAoHxECgYEA8/Qw -QAujrjgLGORqbWH0sysyJSlbb8RSmDlzmDXc1SqOOhNU9rOVJAvVA7+0qx+ODhgX -f5/qD9x6tkg9B5z93LBWgs5O51mI1goVueo8SG4Nw78LLzswTGxVYghFBlFJX5QE -XLut6Neg7o/9uItRpe7i7oesmaaNks1Y0gL87RkCgYEAwtjr8MBoenEVmHis920R -hZrO2rGp0e3C03YHp8yu6upKEyIxyPerxX4VzWbjG/gkSzUFYLe4WGTQbC/O/eEl -3xft9jJlVr3duMVa4MsUlubHtdegEgI2Cou8pILC5l2QWGwRWfHOVGn14yAfUIEd -5oqX77x/vwH1TtdwbYCUS2ECgYAvz2+VXbKadklWe42QGl6GrrSJK+3LncJCKyBI -eGXrMQfmwdso7lEQW3FH5s5Vqz1/7aDNVl3c9ezmxImRcGcgVT7fK+ey141FxXw0 -j395AniYIFzkyMjScXjaWZyNfGjQ3oVsVyviMkBMANRM0qER0BuRe+2Lv2SHnM8H -eaZwIQKBgCb5Mc1D7pyyGW5SusfVjxUWMKBdsUaOsgr9jbBvjH3URHHoBpNd1Her -muJZdrKjEs4k1TUMs3GX2bGkHW/fSVPAhsk0DjaMfh1Q/PrudRur15eyVkM5sup9 -Rd8MzUiQ1ybJgrhV0T6ssXRY7cPwbxtX/wMQnmwAvuAHO3X8o0Zq ------END RSA PRIVATE KEY----- From 8eec2199bf8be4f264e431313052e76cb6c3215e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 3 May 2016 15:09:30 -0700 Subject: [PATCH 196/324] Ensure msg code 0 is always handled. 
Fix cover context arg in tsqueryreq --- riak/codecs/__init__.py | 14 +++++++++----- riak/codecs/pbuf.py | 16 ++-------------- riak/codecs/ttb.py | 5 +---- riak/codecs/util.py | 10 ++++++++++ riak/tests/test_timeseries_ttb.py | 27 +++++++++++++++++++++++++-- riak/transports/tcp/transport.py | 1 + 6 files changed, 48 insertions(+), 25 deletions(-) create mode 100644 riak/codecs/util.py diff --git a/riak/codecs/__init__.py b/riak/codecs/__init__.py index e356b5f9..b155bdff 100644 --- a/riak/codecs/__init__.py +++ b/riak/codecs/__init__.py @@ -1,6 +1,10 @@ import collections +import riak.pb.messages + from riak import RiakError +from riak.codecs.util import parse_pbuf_msg +from riak.util import bytes_to_str Msg = collections.namedtuple('Msg', ['msg_code', 'data', 'resp_code'], @@ -16,10 +20,10 @@ def maybe_incorrect_code(self, resp_code, expect=None): raise RiakError("unexpected message code: %d, expected %d" % (resp_code, expect)) - def maybe_riak_error(self, err_code, msg_code, data=None): - if msg_code == err_code: + def maybe_riak_error(self, msg_code, data=None): + if msg_code == riak.pb.messages.MSG_CODE_ERROR_RESP: if data is None: raise RiakError('no error provided!') - return data - else: - return None + else: + err = parse_pbuf_msg(msg_code, data) + raise RiakError(bytes_to_str(err.errmsg)) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 1d96c831..b78b585e 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -9,6 +9,7 @@ from riak import RiakError from riak.codecs import Codec, Msg +from riak.codecs.util import parse_pbuf_msg from riak.content import RiakContent from riak.pb.riak_ts_pb2 import TsColumnType from riak.riak_object import VClock @@ -90,20 +91,7 @@ def __init__(self, self._bucket_types = bucket_types def parse_msg(self, msg_code, data): - pbclass = riak.pb.messages.MESSAGE_CLASSES.get(msg_code, None) - if pbclass is None: - return None - pbo = pbclass() - pbo.ParseFromString(data) - return pbo - - def maybe_riak_error(self, msg_code, data=None): - err_code = riak.pb.messages.MSG_CODE_ERROR_RESP - err_data = super(PbufCodec, self).maybe_riak_error( - err_code, msg_code, data) - if err_data: - err = self.parse_msg(msg_code, err_data) - raise RiakError(bytes_to_str(err.errmsg)) + return parse_pbuf_msg(msg_code, data) def encode_auth(self, username, password): req = riak.pb.riak_pb2.RpbAuthReq() diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 1c0b3bfe..00def900 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -50,9 +50,6 @@ def maybe_err_ttb(self, err_ttb): # errcode = err_ttb[2] raise RiakError(bytes_to_str(errmsg)) - def maybe_riak_error(self, msg_code, data=None): - pass - def encode_to_ts_cell(self, cell): if cell is None: return [] @@ -133,7 +130,7 @@ def encode_timeseries_query(self, table, query, interpolations=None): if '{table}' in q: q = q.format(table=table.name) tsi = tsinterpolation_a, q, [] - req = tsqueryreq_a, tsi, False, [] + req = tsqueryreq_a, tsi, False, udef_a mc = MSG_CODE_TS_TTB_MSG rc = MSG_CODE_TS_TTB_MSG return Msg(mc, encode(req), rc) diff --git a/riak/codecs/util.py b/riak/codecs/util.py new file mode 100644 index 00000000..52aecb9f --- /dev/null +++ b/riak/codecs/util.py @@ -0,0 +1,10 @@ +import riak.pb.messages + + +def parse_pbuf_msg(msg_code, data): + pbclass = riak.pb.messages.MESSAGE_CLASSES.get(msg_code, None) + if pbclass is None: + return None + pbo = pbclass() + pbo.ParseFromString(data) + return pbo diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 
45ba6faf..e1bf96a8 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -11,7 +11,7 @@ from riak.table import Table from riak.ts_object import TsObject from riak.codecs.ttb import TtbCodec -from riak.util import str_to_bytes, \ +from riak.util import str_to_bytes, bytes_to_str, \ unix_time_millis, is_timeseries_supported from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase @@ -141,7 +141,7 @@ def test_query_that_returns_table_description(self): row = ts_obj.rows[0] self.assertEqual(len(row), 5) - def test_store_and_fetch(self): + def test_store_and_fetch_and_query(self): now = datetime.datetime.utcfromtimestamp(144379690.987000) fiveMinsAgo = now - fiveMins tenMinsAgo = fiveMinsAgo - fiveMins @@ -187,6 +187,29 @@ def test_store_and_fetch(self): self.assertEqual(len(row), 5) self.assertEqual(row, exp) + fmt = """ + select * from {table} where + time > {t1} and time < {t2} and + geohash = 'hash1' and + user = 'user2' + """ + query = fmt.format( + table=table_name, + t1=unix_time_millis(tenMinsAgo), + t2=unix_time_millis(now)) + ts_obj = self.client.ts_query(table_name, query) + if ts_obj.columns is not None: + self.assertEqual(len(ts_obj.columns.names), 5) + self.assertEqual(len(ts_obj.columns.types), 5) + self.assertEqual(len(ts_obj.rows), 1) + row = ts_obj.rows[0] + self.assertEqual(bytes_to_str(row[0]), 'hash1') + self.assertEqual(bytes_to_str(row[1]), 'user2') + self.assertEqual(row[2], fiveMinsAgo) + self.assertEqual(row[2].microsecond, 987000) + self.assertEqual(bytes_to_str(row[3]), 'wind') + self.assertIsNone(row[4]) + def test_create_error_via_put(self): table = Table(self.client, table_name) ts_obj = table.new([]) diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 466ac8d8..8b7841b9 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -535,6 +535,7 @@ def _request(self, msg, codec=None): raise ValueError('expected a Codec argument') resp_code, data = self._send_recv(msg_code, data) + # NB: decodes errors with msg code 0 codec.maybe_riak_error(resp_code, data) codec.maybe_incorrect_code(resp_code, expect) if resp_code == MSG_CODE_TS_TTB_MSG or \ From 89f4dfbcd2729b263e42f73246efa04d3e06fc93 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 3 May 2016 16:11:55 -0700 Subject: [PATCH 197/324] Remove added text --- tox.ini | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 9fa2a230..079cb0b0 100644 --- a/tox.ini +++ b/tox.ini @@ -1,4 +1,4 @@ -#pyver Tox (http://tox.testrun.org/) is a tool for running tests +# Tox (http://tox.testrun.org/) is a tool for running tests # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. 
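The codec change in PATCH 196 above centralizes error decoding in the shared Codec base class: any response carrying msg code 0 (MSG_CODE_ERROR_RESP) is decoded with the new parse_pbuf_msg helper and re-raised as a RiakError, so no transport can silently drop a server error. What follows is a minimal, self-contained sketch of that flow; FakeErrorResp and the MESSAGE_CLASSES dict are stand-ins for the generated riak.pb.messages types, not the client's real code.

MSG_CODE_ERROR_RESP = 0   # RpbErrorResp is message code 0 in the Riak protocol


class RiakError(Exception):
    pass


class FakeErrorResp(object):
    # stand-in for the protobuf RpbErrorResp class
    def ParseFromString(self, data):
        self.errmsg = data


MESSAGE_CLASSES = {MSG_CODE_ERROR_RESP: FakeErrorResp}


def parse_pbuf_msg(msg_code, data):
    # same shape as riak/codecs/util.py: unknown codes decode to None
    pbclass = MESSAGE_CLASSES.get(msg_code, None)
    if pbclass is None:
        return None
    pbo = pbclass()
    pbo.ParseFromString(data)
    return pbo


def maybe_riak_error(msg_code, data=None):
    # mirrors the reworked Codec.maybe_riak_error: msg code 0 always raises
    if msg_code == MSG_CODE_ERROR_RESP:
        if data is None:
            raise RiakError('no error provided!')
        err = parse_pbuf_msg(msg_code, data)
        raise RiakError(err.errmsg.decode('utf-8'))


if __name__ == '__main__':
    maybe_riak_error(12, b'some non-error payload')  # non-error codes fall through
    try:
        maybe_riak_error(0, b'overload')
    except RiakError as e:
        print('server error surfaced: {}'.format(e))
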
From 7c39f3fb58e3536c6963237cc593bd62ad3caf4f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 4 May 2016 10:07:21 -0700 Subject: [PATCH 198/324] Change logging of shutdown exception to DEBUG level --- riak/transports/tcp/connection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 30b53681..90967043 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -226,7 +226,7 @@ def close(self): # NB: sometimes these exceptions are raised if the initial # connection didn't succeed correctly, or if shutdown() is # called after the connection dies - logging.exception('Exception occurred while shutting ' - 'down socket.') + logging.debug('Exception occurred while shutting ' + 'down socket.', exc_info=True) self._socket.close() del self._socket From 1b9ee6ca5c76fb2db3f0e2d8f753702ff98c3d51 Mon Sep 17 00:00:00 2001 From: Brett Hazen Date: Thu, 5 May 2016 23:26:07 +0000 Subject: [PATCH 199/324] Handle creation of tables via SQL --- riak/codecs/ttb.py | 7 +++++-- riak/tests/test_timeseries_ttb.py | 20 ++++++++++++++++++++ 2 files changed, 25 insertions(+), 2 deletions(-) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 00def900..238bcc91 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -154,7 +154,10 @@ def decode_timeseries(self, resp_ttb, tsobj, self.maybe_err_ttb(resp_ttb) resp_a = resp_ttb[0] - if resp_a == tsputresp_a: + + if resp_ttb == tsqueryresp_a: + return tsobj + elif resp_a == tsputresp_a: return elif resp_a == tsgetresp_a or resp_a == tsqueryresp_a: resp_data = resp_ttb[1] @@ -175,7 +178,7 @@ def decode_timeseries(self, resp_ttb, tsobj, raise RiakError( "Expected 3-tuple in response, got: {}".format(resp_data)) else: - raise RiakError("Unknown TTB response type: {}".format(resp_a)) + raise RiakError("Unknown TTB response type: {}".format(resp_ttb)) def decode_timeseries_cols(self, cnames, ctypes): cnames = [bytes_to_str(cname) for cname in cnames] diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index e1bf96a8..0e4badaa 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -1,5 +1,7 @@ # -*- coding: utf-8 -*- import datetime +import random +import string import logging import six import unittest @@ -130,6 +132,24 @@ class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): def setUpClass(cls): super(TimeseriesTtbTests, cls).setUpClass() + def test_query_that_creates_table_using_interpolation(self): + table = ''.join( + [random.choice(string.ascii_letters + string.digits) + for n in range(32)]) + query = """CREATE TABLE test-{table} ( + geohash varchar not null, + user varchar not null, + time timestamp not null, + weather varchar not null, + temperature double, + PRIMARY KEY((geohash, user, quantum(time, 15, m)), + geohash, user, time)) + """ + ts_obj = self.client.ts_query(table, query) + self.assertIsNotNone(ts_obj) + self.assertFalse(hasattr(ts_obj, 'ts_cols')) + self.assertIsNone(ts_obj.rows) + def test_query_that_returns_table_description(self): fmt = 'DESCRIBE {table}' query = fmt.format(table=table_name) From 6de4fd0e821514194ccf9148ad6b03e4ab6507b6 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 5 May 2016 15:30:49 -0700 Subject: [PATCH 200/324] More explicitly check for bare tsqueryresp atom --- riak/codecs/ttb.py | 9 ++++++--- riak/tests/test_timeseries_ttb.py | 6 +----- 2 files changed, 7 insertions(+), 8 deletions(-) diff --git 
a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 238bcc91..15c0af9b 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -153,11 +153,14 @@ def decode_timeseries(self, resp_ttb, tsobj, self.maybe_err_ttb(resp_ttb) - resp_a = resp_ttb[0] - + # NB: some queries return a BARE 'tsqueryresp' atom + # catch that here: if resp_ttb == tsqueryresp_a: return tsobj - elif resp_a == tsputresp_a: + + # The response atom is the first element in the response tuple + resp_a = resp_ttb[0] + if resp_a == tsputresp_a: return elif resp_a == tsgetresp_a or resp_a == tsqueryresp_a: resp_data = resp_ttb[1] diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 0e4badaa..cd7b181c 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- import datetime -import random -import string import logging import six import unittest @@ -133,9 +131,7 @@ def setUpClass(cls): super(TimeseriesTtbTests, cls).setUpClass() def test_query_that_creates_table_using_interpolation(self): - table = ''.join( - [random.choice(string.ascii_letters + string.digits) - for n in range(32)]) + table = self.randname() query = """CREATE TABLE test-{table} ( geohash varchar not null, user varchar not null, From d57eeb543ce02ede3a5b848a8d02a33913769936 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 5 May 2016 15:32:10 -0700 Subject: [PATCH 201/324] Revert exception message --- riak/codecs/ttb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 15c0af9b..5cec7038 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -181,7 +181,7 @@ def decode_timeseries(self, resp_ttb, tsobj, raise RiakError( "Expected 3-tuple in response, got: {}".format(resp_data)) else: - raise RiakError("Unknown TTB response type: {}".format(resp_ttb)) + raise RiakError("Unknown TTB response type: {}".format(resp_a)) def decode_timeseries_cols(self, cnames, ctypes): cnames = [bytes_to_str(cname) for cname in cnames] From 2c95699e532b13ef516ebce091f7a41c814f4b04 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 5 May 2016 15:54:18 -0700 Subject: [PATCH 202/324] 2.5.2 RELNOTES --- RELNOTES.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/RELNOTES.md b/RELNOTES.md index a0fe9333..cfbe0ce6 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,12 +1,13 @@ # Riak Python Client Release Notes -## 2.5.1 Patch Release +## [2.5.2 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.2) +* Miscellaneous fixes for term-to-binary encoding of messages for Riak TS. * [Ensure `six` is not required during installation](https://github.com/basho/riak-python-client/pull/459) ## [2.5.0 Release - Deprecated](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.0) -* *NOTE*: due to the `basho-erlastic` dependency, this version will not install correctly. Please use `2.5.1`. +* *NOTE*: due to the `basho-erlastic` dependency, this version will not install correctly. Please use `2.5.2`. * [Socket Enhancements](https://github.com/basho/riak-python-client/pull/453) - Resolves [#399](https://github.com/basho/riak-python-client/issues/399) * [Add multi-put](https://github.com/basho/riak-python-client/pull/452) * [Add support for term-to-binary encoding](https://github.com/basho/riak-python-client/pull/448) *Note:* This requires at least version `1.3.0` of Riak TS. 
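PATCHes 200 and 201 above adjust the TTB decoder so that a bare 'tsqueryresp' atom (which some statements such as CREATE TABLE return instead of a response tuple) is recognized before the code indexes into the response, while unknown responses again report only the response atom. Below is a condensed sketch of that dispatch, with plain strings standing in for the erlastic atoms and a dict standing in for TsObject; both are simplifications, not the real codec types.

tsqueryresp_a = 'tsqueryresp'   # stand-ins for the erlastic Atom values
tsgetresp_a = 'tsgetresp'
tsputresp_a = 'tsputresp'


class RiakError(Exception):
    pass


def decode_timeseries(resp_ttb, tsobj):
    # Some queries (e.g. CREATE TABLE) return a BARE 'tsqueryresp' atom,
    # so check for it before treating the response as a tuple.
    if resp_ttb == tsqueryresp_a:
        return tsobj

    resp_a = resp_ttb[0]
    if resp_a == tsputresp_a:
        return None
    elif resp_a == tsgetresp_a or resp_a == tsqueryresp_a:
        cnames, ctypes, rows = resp_ttb[1]    # simplified 3-tuple payload
        tsobj['columns'] = list(zip(cnames, ctypes))
        tsobj['rows'] = rows
        return tsobj
    raise RiakError('Unknown TTB response type: {}'.format(resp_a))


if __name__ == '__main__':
    print(decode_timeseries('tsqueryresp', {'columns': None, 'rows': None}))
    get_resp = ('tsgetresp', (['weather'], ['varchar'], [['windy']]))
    print(decode_timeseries(get_resp, {}))
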
From 4be87cc3a3ee1b2d6788a2bca17a769d6974ca76 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 5 May 2016 16:54:19 -0700 Subject: [PATCH 203/324] Add loud note about timestamp conversion --- RELNOTES.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/RELNOTES.md b/RELNOTES.md index cfbe0ce6..6a0b5df6 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -2,12 +2,14 @@ ## [2.5.2 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.2) +* *NOTE*: for Riak TS data, automatic conversion from epoch values *to* Python `datetime` objects has been removed. If you would like to have automatic conversion, use `RiakClient(transport_options={'ts_convert_timestamp': True})` * Miscellaneous fixes for term-to-binary encoding of messages for Riak TS. * [Ensure `six` is not required during installation](https://github.com/basho/riak-python-client/pull/459) ## [2.5.0 Release - Deprecated](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.0) * *NOTE*: due to the `basho-erlastic` dependency, this version will not install correctly. Please use `2.5.2`. +* *NOTE*: for Riak TS data, automatic conversion from epoch values *to* Python `datetime` objects has been removed. If you would like to have automatic conversion, use `RiakClient(transport_options={'ts_convert_timestamp': True})` * [Socket Enhancements](https://github.com/basho/riak-python-client/pull/453) - Resolves [#399](https://github.com/basho/riak-python-client/issues/399) * [Add multi-put](https://github.com/basho/riak-python-client/pull/452) * [Add support for term-to-binary encoding](https://github.com/basho/riak-python-client/pull/448) *Note:* This requires at least version `1.3.0` of Riak TS. From fc224918e514d93bddb3776b115ded40149c2c81 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 5 May 2016 23:58:00 -0700 Subject: [PATCH 204/324] Multi-put for Riak TS data --- riak/client/multi.py | 43 +++++++++++++++++++------------ riak/transports/tcp/connection.py | 1 + 2 files changed, 28 insertions(+), 16 deletions(-) diff --git a/riak/client/multi.py b/riak/client/multi.py index 672c32b9..84b19a3a 100644 --- a/riak/client/multi.py +++ b/riak/client/multi.py @@ -3,6 +3,10 @@ from threading import Thread, Lock, Event from multiprocessing import cpu_count from six import PY2 + +from riak.riak_object import RiakObject +from riak.ts_object import TsObject + if PY2: from Queue import Queue else: @@ -20,12 +24,16 @@ POOL_SIZE = 6 #: A :class:`namedtuple` for tasks that are fed to workers in the -#: multi pool. -Task = namedtuple( - 'Task', - ['client', 'outq', - 'bucket_type', 'bucket', 'key', - 'object', 'options']) +#: multi get pool. +Task = namedtuple('Task', + ['client', 'outq', 'bucket_type', 'bucket', 'key', + 'object', 'options']) + + +#: A :class:`namedtuple` for tasks that are fed to workers in the +#: multi put pool. +PutTask = namedtuple('PutTask', + ['client', 'outq', 'object', 'options']) class MultiPool(object): @@ -55,7 +63,7 @@ def enq(self, task): stopping. 
:param task: the Task object - :type task: Task + :type task: Task or PutTask """ if not self._stop.is_set(): self._inq.put(task) @@ -164,8 +172,13 @@ def _worker_method(self): while not self._should_quit(): task = self._inq.get() try: - robj = task.object - rv = task.client.put(robj, **task.options) + obj = task.object + if isinstance(obj, RiakObject): + rv = task.client.put(obj, **task.options) + elif isinstance(obj, TsObject): + rv = task.client.ts_put(obj, **task.options) + else: + raise ValueError('unknown obj type: %s'.format(type(obj))) task.outq.put(rv) except KeyboardInterrupt: raise @@ -236,8 +249,9 @@ def multiput(client, objs, **options): :param client: the client to use :type client: :class:`RiakClient ` - :param objs: the Riak Objects to store in parallel - :type keys: list of `RiakObject ` + :param objs: the objects to store in parallel + :type objs: list of `RiakObject ` or + `TsObject ` :param options: request options to :meth:`RiakClient.put ` :type options: dict @@ -252,11 +266,8 @@ def multiput(client, objs, **options): pool = RIAK_MULTIPUT_POOL pool.start() - for robj in objs: - bucket_type = robj.bucket.bucket_type - bucket = robj.bucket.name - key = robj.key - task = Task(client, outq, bucket_type, bucket, key, robj, options) + for obj in objs: + task = PutTask(client, outq, obj, options) pool.enq(task) results = [] diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 90967043..a80d9193 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -202,6 +202,7 @@ def _connect(self): self._timeout) else: self._socket = socket.create_connection(self._address) + # logging.debug('[TcpConnection] new connection created') if self._socket_tcp_options: ka_opts = self._socket_tcp_options for k, v in ka_opts.iteritems(): From 714ed2dfd31bdb5e0b9c8b1c6459ba7f312cd08f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sun, 8 May 2016 08:43:04 -0700 Subject: [PATCH 205/324] remove comment --- riak/transports/tcp/connection.py | 1 - 1 file changed, 1 deletion(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index a80d9193..90967043 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -202,7 +202,6 @@ def _connect(self): self._timeout) else: self._socket = socket.create_connection(self._address) - # logging.debug('[TcpConnection] new connection created') if self._socket_tcp_options: ka_opts = self._socket_tcp_options for k, v in ka_opts.iteritems(): From e195e6c712634c9f0cb3a84836dd77dc5555e517 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 9 May 2016 14:13:44 -0700 Subject: [PATCH 206/324] Workaround for version returned by Riak TS OSS --- riak/transports/feature_detect.py | 34 ++++++++----------------------- riak/transports/tcp/transport.py | 11 +++++++++- 2 files changed, 18 insertions(+), 27 deletions(-) diff --git a/riak/transports/feature_detect.py b/riak/transports/feature_detect.py index 8f5808ac..87e30fbf 100644 --- a/riak/transports/feature_detect.py +++ b/riak/transports/feature_detect.py @@ -1,34 +1,16 @@ -""" -Copyright 2012 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. 
You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from distutils.version import LooseVersion from riak.util import lazy_property versions = { - 1: LooseVersion("1.0.0"), - 1.1: LooseVersion("1.1.0"), - 1.2: LooseVersion("1.2.0"), - 1.4: LooseVersion("1.4.0"), - 1.44: LooseVersion("1.4.4"), - 2.0: LooseVersion("2.0.0"), - 2.1: LooseVersion("2.1.0"), - 2.12: LooseVersion("2.1.2") + 1: LooseVersion('1.0.0'), + 1.1: LooseVersion('1.1.0'), + 1.2: LooseVersion('1.2.0'), + 1.4: LooseVersion('1.4.0'), + 1.44: LooseVersion('1.4.4'), + 2.0: LooseVersion('2.0.0'), + 2.1: LooseVersion('2.1.0'), + 2.12: LooseVersion('2.1.2') } diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 8b7841b9..0d845875 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -78,7 +78,16 @@ def _get_codec(self, msg_code): # FeatureDetection API def _server_version(self): server_info = self.get_server_info() - return server_info['server_version'] + ver = server_info['server_version'] + (maj, min, patch) = [int(v) for v in ver.split('.')] + if maj == 0: + import datetime + now = datetime.datetime.now() + if now.year == 2016: + # GH-471 As of 20160509 Riak TS OSS 1.3.0 returns '0.8.0' as + # the version string. + return '2.1.1' + return ver def ping(self): """ From c6136f20befdab8a6b19716c59e374499574f426 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 10 May 2016 16:55:43 -0700 Subject: [PATCH 207/324] Add INSERT sql tests --- riak/client/__init__.py | 1 + riak/tests/test_timeseries_pbuf.py | 17 ++++++++++++----- riak/tests/test_timeseries_ttb.py | 11 +++++++++++ 3 files changed, 24 insertions(+), 5 deletions(-) diff --git a/riak/client/__init__.py b/riak/client/__init__.py index fae5e133..491324a4 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -4,6 +4,7 @@ import json import random + from weakref import WeakValueDictionary from riak.client.operations import RiakClientOperations from riak.node import RiakNode diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 72f13e0f..4428da02 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -1,7 +1,5 @@ # -*- coding: utf-8 -*- import datetime -import random -import string import unittest import riak.pb.riak_ts_pb2 @@ -265,10 +263,19 @@ def validate_data(self, ts_obj): self.assertEqual(bytes_to_str(row[3]), 'wind') self.assertIsNone(row[4]) + def test_insert_data_via_sql(self): + query = """ + INSERT INTO GeoCheckin_Wide + (geohash, user, time, weather, temperature, uv_index, observed) + VALUES + ('hash3', 'user3', 1460203200000, 'tornado', 43.5, 128, True); + """ + ts_obj = self.client.ts_query('GeoCheckin_Wide', query) + self.assertIsNotNone(ts_obj) + self.validate_len(ts_obj, 0) + def test_query_that_creates_table_using_interpolation(self): - table = ''.join( - [random.choice(string.ascii_letters + string.digits) - for n in range(32)]) + table = self.randname() query = """CREATE TABLE test-{table} ( geohash varchar not null, user varchar not null, diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index cd7b181c..a2dc6ea8 100644 
--- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -130,6 +130,17 @@ class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): def setUpClass(cls): super(TimeseriesTtbTests, cls).setUpClass() + def test_insert_data_via_sql(self): + query = """ + INSERT INTO GeoCheckin_Wide + (geohash, user, time, weather, temperature, uv_index, observed) + VALUES + ('hash3', 'user3', 1460203200000, 'tornado', 43.5, 128, True); + """ + ts_obj = self.client.ts_query('GeoCheckin_Wide', query) + self.assertIsNotNone(ts_obj) + self.validate_len(ts_obj, 0) + def test_query_that_creates_table_using_interpolation(self): table = self.randname() query = """CREATE TABLE test-{table} ( From 94c507b9d8550b29b6992d134c5e82a0b402c4bc Mon Sep 17 00:00:00 2001 From: Yasser Souri Date: Sun, 15 May 2016 10:50:49 +0430 Subject: [PATCH 208/324] open `readme.md` with utf-8 encoding Since the `README.md` contains some unicode characters, it is better to open it with `codecs.open` --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index f130ff7a..acc6981c 100755 --- a/setup.py +++ b/setup.py @@ -27,7 +27,7 @@ with codecs.open('README.rst', 'w', 'utf-8') as f: f.write(long_description) except(IOError, ImportError): - with open('README.md') as f: + with codecs.open('README.md', 'r', 'utf-8') as f: long_description = f.read() setup( From e882bc20dc0a10678b9d53e912b20932c3e8e5f1 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 16 May 2016 12:48:07 -0700 Subject: [PATCH 209/324] Add contributor Yasser Souri --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 0dc09edf..4838b598 100644 --- a/README.md +++ b/README.md @@ -196,3 +196,4 @@ Contributors * Timothée Peignier * [Vitaly Shestovskiy](https://github.com/lamp0chka) * William Kral +* [Yasser Souri](https://github.com/yassersouri) From 5e5c5506271769aa27e06824b9ac74b2a8b6cd0f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 17 May 2016 07:53:22 -0700 Subject: [PATCH 210/324] Unexport vars related to encoding, ensure files are opened with utf-8 when necessary --- Makefile | 19 ++++++++++++++++++- buildbot/Makefile | 13 +++++++++++++ riak/security.py | 2 +- setup.py | 6 ++++-- version.py | 3 ++- 5 files changed, 38 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index 258de49a..a0f8fae7 100644 --- a/Makefile +++ b/Makefile @@ -1,4 +1,18 @@ -.PHONY: pb_clean pb_compile pb_build release release_sdist +.PHONY: pb_clean pb_compile pb_build release release_sdist test_sdist + +unexport LANG +unexport LC_ADDRESS +unexport LC_COLLATE +unexport LC_CTYPE +unexport LC_IDENTIFICATION +unexport LC_MEASUREMENT +unexport LC_MESSAGES +unexport LC_MONETARY +unexport LC_NAME +unexport LC_NUMERIC +unexport LC_PAPER +unexport LC_TELEPHONE +unexport LC_TIME PANDOC_VERSION := $(shell pandoc --version) @@ -13,6 +27,9 @@ pb_compile: pb_clean @protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto @python setup.py build_messages +test_sdist: + @python setup.py sdist + release_sdist: ifeq ($(VERSION),) $(error VERSION must be set to build a release and deploy this package) diff --git a/buildbot/Makefile b/buildbot/Makefile index e41e220f..7c7db7a1 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -3,6 +3,19 @@ $(error RIAK_DIR is not set) endif unexport PYENV_VERSION +unexport LANG +unexport LC_ADDRESS +unexport LC_COLLATE +unexport LC_CTYPE +unexport LC_IDENTIFICATION +unexport LC_MEASUREMENT +unexport LC_MESSAGES +unexport LC_MONETARY 
+unexport LC_NAME +unexport LC_NUMERIC +unexport LC_PAPER +unexport LC_TELEPHONE +unexport LC_TIME PROJDIR = $(realpath $(CURDIR)/..) TOOLS_DIR = $(PROJDIR)/tools/devrel diff --git a/riak/security.py b/riak/security.py index c1b36123..d6adee91 100644 --- a/riak/security.py +++ b/riak/security.py @@ -254,7 +254,7 @@ def _cached_cert(self, key, loader): if not isinstance(key_file, list): key_file = [key_file] for filename in key_file: - with open(filename, 'r') as f: + with open(filename, 'rb') as f: cert_list.append(loader(OpenSSL.SSL.FILETYPE_PEM, f.read())) # If it is not a list, just store the first element diff --git a/setup.py b/setup.py index acc6981c..37eb8da0 100755 --- a/setup.py +++ b/setup.py @@ -21,14 +21,16 @@ install_requires.append('python3_protobuf >=2.4.1, <2.6.0') requires.append('python3_protobuf(>=2.4.1, <2.6.0)') +with codecs.open('README.md', 'r', 'utf-8') as f: + readme_md = f.read() + try: import pypandoc long_description = pypandoc.convert('README.md', 'rst') with codecs.open('README.rst', 'w', 'utf-8') as f: f.write(long_description) except(IOError, ImportError): - with codecs.open('README.md', 'r', 'utf-8') as f: - long_description = f.read() + long_description = readme_md setup( name='riak', diff --git a/version.py b/version.py index ff30e22f..6c802c58 100644 --- a/version.py +++ b/version.py @@ -80,7 +80,8 @@ def get_version(): else: # Extract the version from the PKG-INFO file. - with open(join(d, 'PKG-INFO')) as f: + import codecs + with codecs.open(join(d, 'PKG-INFO'), 'r', 'utf-8') as f: version = version_re.search(f.read()).group(1) return version From 2991246089eadabb1f1ed6e921ed5114fb6e7203 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 31 May 2016 13:29:24 -0700 Subject: [PATCH 211/324] Ensure that BadResource only wraps other exceptions. --- riak/tests/test_pool.py | 19 ++++++++++++++++++- riak/transports/tcp/connection.py | 3 ++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index a5f8ffd5..eb6e0d54 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -3,11 +3,13 @@ from six import PY2 from threading import Thread, currentThread -from riak.transports.pool import Pool, BadResource from random import SystemRandom from time import sleep + +from riak import RiakError from riak.tests import RUN_POOL from riak.tests.comparison import Comparison +from riak.transports.pool import Pool, BadResource if PY2: from Queue import Queue @@ -36,6 +38,21 @@ def create_resource(self): @unittest.skipUnless(RUN_POOL, 'RUN_POOL is 0') class PoolTest(unittest.TestCase, Comparison): + def test_can_raise_bad_resource(self): + ex_msg = 'exception-message!' + with self.assertRaises(BadResource) as cm: + raise BadResource(ex_msg) + ex = cm.exception + self.assertEqual(ex.args[0], ex_msg) + + def test_bad_resource_inner_exception(self): + ex_msg = 'exception-message!' + ex = RiakError(ex_msg) + with self.assertRaises(BadResource) as cm: + raise BadResource(ex) + br_ex = cm.exception + self.assertEqual(br_ex.args[0], ex) + def test_yields_new_object_when_empty(self): """ The pool should create new resources as needed. 
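# PATCH 211 (the test_pool.py hunk above and the connection.py hunk that
# follows) settles the convention that BadResource wraps either a message or
# another exception in args[0]; _recv now raises BadResource(RiakError(...))
# when recv_into returns zero bytes. A small stand-alone sketch of that
# convention -- RiakError and BadResource here are simplified stand-ins, not
# the classes from riak and riak.transports.pool.


class RiakError(Exception):
    pass


class BadResource(Exception):
    """Signals that a pooled connection should be discarded."""


def check_recv(nbytes):
    # mirrors the guard in TcpConnection._recv: zero bytes means the peer
    # closed the socket, so report the connection as a bad pool resource
    if nbytes == 0:
        ex = RiakError('recv_into returned zero bytes unexpectedly')
        raise BadResource(ex)


try:
    check_recv(0)
except BadResource as br:
    inner = br.args[0]
    assert isinstance(inner, RiakError)   # the wrapped cause is preserved
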
diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 90967043..b92f862f 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -186,7 +186,8 @@ def _recv(self, msglen): # https://docs.python.org/2/howto/sockets.html#using-a-socket # https://github.com/basho/riak-python-client/issues/399 if nbytes == 0: - raise BadResource('recv_into returned zero bytes unexpectedly') + ex = RiakError('recv_into returned zero bytes unexpectedly') + raise BadResource(ex) view = view[nbytes:] # slicing views is cheap toread -= nbytes nread += nbytes From a4e06e5be9bc9c6aa36a2d6174225f3a945a759e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 31 May 2016 15:15:22 -0700 Subject: [PATCH 212/324] Remove requirement for publish to be from master branch --- build/publish | 5 ----- 1 file changed, 5 deletions(-) diff --git a/build/publish b/build/publish index 39b5ef6a..13268ac3 100755 --- a/build/publish +++ b/build/publish @@ -168,11 +168,6 @@ fi declare -r current_branch="$(git rev-parse --abbrev-ref HEAD)" -if [[ $debug == 'false' && $is_prerelease == 'false' && $current_branch != 'master' ]] -then - errexit 'publish must be run on master branch' -fi - declare -r github_api_key_file="$HOME/.ghapi" if [[ ! -s $github_api_key_file ]] then From 4b560b3488fd4bfa3a7e5afbeb2eabc666116400 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 31 May 2016 18:44:01 -0700 Subject: [PATCH 213/324] 2.5.3 RELNOTES --- RELNOTES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/RELNOTES.md b/RELNOTES.md index 6a0b5df6..a99c8bb5 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,5 +1,9 @@ # Riak Python Client Release Notes +## [2.5.3 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.3) + + * [Bug fix for raising `BadResource`](https://github.com/basho/riak-python-client/pull/481) + ## [2.5.2 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.2) * *NOTE*: for Riak TS data, automatic conversion from epoch values *to* Python `datetime` objects has been removed. If you would like to have automatic conversion, use `RiakClient(transport_options={'ts_convert_timestamp': True})` From d3a85b880af4185934399ba2029c2b96e760e004 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 9 Jun 2016 10:26:59 -0700 Subject: [PATCH 214/324] Clarify release info --- RELNOTES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RELNOTES.md b/RELNOTES.md index a99c8bb5..bd36721d 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -45,7 +45,7 @@ Python 3.5.x. There are also many bugfixes and new enhancements: -* [Protocol buffers are now integrated into the Python Client] +* [The `riak_pb` module is now integrated into the Python Client] (https://github.com/basho/riak-python-client/pull/418) * [Support for Preflists and Write-Once bucket types] (https://github.com/basho/riak-python-client/pull/414) From 45c6091519e6bac1c034b53ffb41ce646d983007 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 16 Jun 2016 11:01:11 -0700 Subject: [PATCH 215/324] Sibling resolution works just fine without .reload() --- riak/tests/test_kv.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index aeebed68..18a49948 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -417,7 +417,6 @@ def test_siblings(self): # Make sure the object has five siblings... 
obj = bucket.get(self.key_name) - obj.reload() self.assertEqual(len(obj.siblings), 5) # When the object is in conflict, using the shortcut methods @@ -433,9 +432,9 @@ def test_siblings(self): # Resolve the conflict, and then do a get... resolved_sibling = obj.siblings[3] obj.siblings = [resolved_sibling] + self.assertEqual(len(obj.siblings), 1) obj.store() - obj.reload() self.assertEqual(len(obj.siblings), 1) self.assertEqual(obj.data, resolved_sibling.data) From 4e962216880f2ae53b5203b89be0fee6dfce35fc Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 16 Jun 2016 14:04:03 -0700 Subject: [PATCH 216/324] Only run unit tests for Travis CI --- .travis.yml | 12 ++++++++++++ buildbot/Makefile | 10 +++++----- riak/tests/__init__.py | 18 +++++++++--------- riak/tests/test_client.py | 4 +++- riak/tests/test_kv.py | 10 +++++++++- riak/tests/test_mapreduce.py | 8 ++++++-- 6 files changed, 44 insertions(+), 18 deletions(-) create mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml new file mode 100644 index 00000000..35e7a600 --- /dev/null +++ b/.travis.yml @@ -0,0 +1,12 @@ +language: python +python: + - "2.7" + - "3.3" + - "3.4" + - "3.5" + - "3.5-dev" # 3.5 development branch +install: + - pip install --upgrade flake8 +script: + - python setup.py test + - flake8 --exclude=riak/pb riak *.py diff --git a/buildbot/Makefile b/buildbot/Makefile index 7c7db7a1..a758b746 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -43,19 +43,19 @@ test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. test_security: @echo "Testing Riak Python Client (with security)" @$(RIAK_ADMIN) security enable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=18098 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 RUN_POOL=0 RUN_RESOLVE=0 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=18098 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 ./tox_runner.sh .. test_timeseries: @echo "Testing Riak Python Client (timeseries)" @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_YZ=0 RUN_DATATYPES=0 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=0 RUN_DATATYPES=0 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. 
setup: ./tox_setup.sh diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index a3b82800..aa475c9c 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -50,17 +50,17 @@ def hostname_resolves(hostname): DUMMY_HTTP_PORT = int(os.environ.get('DUMMY_HTTP_PORT', '1023')) DUMMY_PB_PORT = int(os.environ.get('DUMMY_PB_PORT', '1022')) -RUN_SEARCH = int(os.environ.get('RUN_SEARCH', '0')) -RUN_YZ = int(os.environ.get('RUN_YZ', '0')) - +RUN_BTYPES = int(os.environ.get('RUN_BTYPES', '0')) +RUN_DATATYPES = int(os.environ.get('RUN_DATATYPES', '0')) +RUN_CLIENT = int(os.environ.get('RUN_CLIENT', '0')) RUN_INDEXES = int(os.environ.get('RUN_INDEXES', '0')) - -RUN_TIMESERIES = int(os.environ.get('RUN_TIMESERIES', '0')) - +RUN_KV = int(os.environ.get('RUN_KV', '0')) +RUN_MAPREDUCE = int(os.environ.get('RUN_MAPREDUCE', '0')) RUN_POOL = int(os.environ.get('RUN_POOL', '0')) -RUN_RESOLVE = int(os.environ.get('RUN_RESOLVE', '1')) -RUN_BTYPES = int(os.environ.get('RUN_BTYPES', '1')) -RUN_DATATYPES = int(os.environ.get('RUN_DATATYPES', '1')) +RUN_RESOLVE = int(os.environ.get('RUN_RESOLVE', '0')) +RUN_SEARCH = int(os.environ.get('RUN_SEARCH', '0')) +RUN_TIMESERIES = int(os.environ.get('RUN_TIMESERIES', '0')) +RUN_YZ = int(os.environ.get('RUN_YZ', '0')) RUN_SECURITY = int(os.environ.get('RUN_SECURITY', '0')) if RUN_SECURITY: diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index a9c3a380..97bbbed7 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -4,7 +4,8 @@ from threading import Thread from riak.riak_object import RiakObject from riak.transports.tcp import TcpTransport -from riak.tests import DUMMY_HTTP_PORT, DUMMY_PB_PORT, RUN_POOL +from riak.tests import DUMMY_HTTP_PORT, DUMMY_PB_PORT, \ + RUN_POOL, RUN_CLIENT from riak.tests.base import IntegrationTestBase if PY2: @@ -13,6 +14,7 @@ from queue import Queue +@unittest.skipUnless(RUN_CLIENT, 'RUN_CLIENT is 0') class ClientTests(IntegrationTestBase, unittest.TestCase): def test_can_set_tcp_keepalive(self): if self.protocol == 'pbc': diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 18a49948..aec403e2 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -8,7 +8,7 @@ from time import sleep from riak import ConflictError, RiakBucket, RiakError from riak.resolver import default_resolver, last_written_resolver -from riak.tests import RUN_RESOLVE +from riak.tests import RUN_KV, RUN_RESOLVE from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -32,12 +32,16 @@ def setUpModule(): + if not RUN_KV: + return c = IntegrationTestBase.create_client() c.bucket(testrun_sibs_bucket).allow_mult = True c.close() def tearDownModule(): + if not RUN_KV: + return c = IntegrationTestBase.create_client() c.bucket(testrun_sibs_bucket).clear_properties() c.bucket(testrun_props_bucket).clear_properties() @@ -68,6 +72,7 @@ def __eq__(self, other): return True +@unittest.skipUnless(RUN_KV, 'RUN_KV is 0') class BasicKVTests(IntegrationTestBase, unittest.TestCase, Comparison): def test_no_returnbody(self): bucket = self.client.bucket(self.bucket_name) @@ -641,6 +646,7 @@ def generate_siblings(self, original, count=5, delay=None): return vals +@unittest.skipUnless(RUN_KV, 'RUN_KV is 0') class BucketPropsTest(IntegrationTestBase, unittest.TestCase): def test_rw_settings(self): bucket = self.client.bucket(testrun_props_bucket) @@ -694,6 +700,7 @@ def test_clear_bucket_properties(self): self.assertEqual(bucket.n_val, 3) +@unittest.skipUnless(RUN_KV, 'RUN_KV is 0') 
class KVFileTests(IntegrationTestBase, unittest.TestCase): def test_store_binary_object_from_file(self): bucket = self.client.bucket(self.bucket_name) @@ -724,6 +731,7 @@ def test_store_binary_object_from_file_should_fail_if_file_not_found(self): self.assertFalse(obj.exists) +@unittest.skipUnless(RUN_KV, 'RUN_KV is 0') class CounterTests(IntegrationTestBase, unittest.TestCase): def test_counter_requires_allow_mult(self): bucket = self.client.bucket(self.bucket_name) diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index b22a70ba..5a9d2c7a 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -6,10 +6,9 @@ from six import PY2 from riak.mapreduce import RiakMapReduce from riak import key_filter, RiakError -from riak.tests import RUN_YZ +from riak.tests import RUN_MAPREDUCE, RUN_SECURITY, RUN_YZ from riak.tests.base import IntegrationTestBase from riak.tests.test_yokozuna import wait_for_yz_index -from riak.tests import RUN_SECURITY from riak.tests.yz_setup import yzSetUp, yzTearDown @@ -26,6 +25,7 @@ def tearDownModule(): yzTearDown(testrun_yz_mr) +@unittest.skipUnless(RUN_MAPREDUCE, 'RUN_MAPREDUCE is 0') class LinkTests(IntegrationTestBase, unittest.TestCase): def test_store_and_get_links(self): # Create the object... @@ -92,6 +92,7 @@ def test_link_walking(self): self.assertEqual(len(results), 1) +@unittest.skipUnless(RUN_MAPREDUCE, 'RUN_MAPREDUCE is 0') class ErlangMapReduceTests(IntegrationTestBase, unittest.TestCase): def test_erlang_map_reduce(self): # Create the object... @@ -198,6 +199,7 @@ def test_client_exceptional_paths(self): mr.add_key_filter("tokenize", "-", 1) +@unittest.skipUnless(RUN_MAPREDUCE, 'RUN_MAPREDUCE is 0') class JSMapReduceTests(IntegrationTestBase, unittest.TestCase): def test_javascript_source_map(self): @@ -559,6 +561,7 @@ def test_mr_search(self): self.assertEqual(result, [100]) +@unittest.skipUnless(RUN_MAPREDUCE, 'RUN_MAPREDUCE is 0') class MapReduceAliasTests(IntegrationTestBase, unittest.TestCase): """This tests the map reduce aliases""" @@ -754,6 +757,7 @@ def test_filter_not_found(self): self.assertEqual(sorted(result), [1, 2]) +@unittest.skipUnless(RUN_MAPREDUCE, 'RUN_MAPREDUCE is 0') class MapReduceStreamTests(IntegrationTestBase, unittest.TestCase): def test_stream_results(self): bucket = self.client.bucket(self.bucket_name) From 9ea8bba0bf23c633a29abd151d083c332de77276 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 16 Jun 2016 14:07:15 -0700 Subject: [PATCH 217/324] whitespace --- .travis.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 35e7a600..c451e0c9 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,7 +6,7 @@ python: - "3.5" - "3.5-dev" # 3.5 development branch install: - - pip install --upgrade flake8 + - pip install --upgrade flake8 script: - - python setup.py test - - flake8 --exclude=riak/pb riak *.py + - python setup.py test + - flake8 --exclude=riak/pb riak *.py From f8e1dcedbe67bb54b8e00157a61015d17615622c Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 16 Jun 2016 14:28:30 -0700 Subject: [PATCH 218/324] Turns out Python 3.5.0 has the same time conversion bug as some other versions --- riak/tests/test_util.py | 2 ++ riak/util.py | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_util.py b/riak/tests/test_util.py index becfd06a..f3415557 100644 --- a/riak/tests/test_util.py +++ b/riak/tests/test_util.py @@ -74,5 +74,7 @@ def test_is_timeseries_supported(self): self.assertEqual(True, 
is_timeseries_supported(v)) v = (3, 4, 5) self.assertEqual(True, is_timeseries_supported(v)) + v = (3, 5, 0) + self.assertEqual(False, is_timeseries_supported(v)) v = (3, 5, 1) self.assertEqual(True, is_timeseries_supported(v)) diff --git a/riak/util.py b/riak/util.py index 7c2ca66d..4dfc310a 100644 --- a/riak/util.py +++ b/riak/util.py @@ -29,7 +29,7 @@ def datetime_from_unix_time_millis(ut): def is_timeseries_supported(v=None): if v is None: v = sys.version_info - return v < (3,) or v[:3] >= (3, 4, 4) + return v < (3,) or (v[:3] >= (3, 4, 4) and v[:3] != (3, 5, 0)) def quacks_like_dict(object): From ecbac9ba72e1d0481e2dd2d88bbd83958c62b3e8 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 16 Jun 2016 14:34:06 -0700 Subject: [PATCH 219/324] Add Travis build status, ensure all integration tests are run on buildbot --- README.md | 5 +++++ buildbot/Makefile | 10 +++++----- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 4838b598..d5f75feb 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,11 @@ Python Client for Riak ====================== +Build Status +============ + +[![Build Status](https://travis-ci.org/basho/riak-python-client.svg?branch=master)](https://travis-ci.org/basho/riak-python-client) + Documentation ============= diff --git a/buildbot/Makefile b/buildbot/Makefile index a758b746..cd561d48 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -43,19 +43,19 @@ test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. test_security: @echo "Testing Riak Python Client (with security)" @$(RIAK_ADMIN) security enable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=18098 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_BTYPES=1 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=18098 RUN_BTYPES=1 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 ./tox_runner.sh .. test_timeseries: @echo "Testing Riak Python Client (timeseries)" @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=0 RUN_DATATYPES=0 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_KV=1 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. 
setup: ./tox_setup.sh From b5f0bdbd610ab52ecb740c7d66aa6e2d6ef43922 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 17 Jun 2016 13:17:59 -0700 Subject: [PATCH 220/324] Add tests for GH-483 --- riak/tests/test_timeseries_pbuf.py | 24 ++++++++++++++++++++++++ riak/tests/test_timeseries_ttb.py | 24 ++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 4428da02..2db1af9e 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -435,3 +435,27 @@ def test_create_error_via_put(self): ts_obj = table.new([]) with self.assertRaises(RiakError): ts_obj.store() + + def test_store_and_fetch_gh_483(self): + now = datetime.datetime(2015, 1, 1, 12, 0, 0) + table = self.client.table(table_name) + rows = [ + ['hash1', 'user2', now, 'frazzle', 12.3] + ] + + ts_obj = table.new(rows) + result = ts_obj.store() + self.assertTrue(result) + + k = ['hash1', 'user2', now] + ts_obj = self.client.ts_get(table_name, k) + self.assertIsNotNone(ts_obj) + ts_cols = ts_obj.columns + self.assertEqual(len(ts_cols.names), 5) + self.assertEqual(len(ts_cols.types), 5) + self.assertEqual(len(ts_obj.rows), 1) + + row = ts_obj.rows[0] + self.assertEqual(len(row), 5) + exp = rows[0] + self.assertEqual(row, exp) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index a2dc6ea8..99e51a6b 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -168,6 +168,30 @@ def test_query_that_returns_table_description(self): row = ts_obj.rows[0] self.assertEqual(len(row), 5) + def test_store_and_fetch_gh_483(self): + now = datetime.datetime(2015, 1, 1, 12, 0, 0) + table = self.client.table(table_name) + rows = [ + ['hash1', 'user2', now, 'frazzle', 12.3] + ] + + ts_obj = table.new(rows) + result = ts_obj.store() + self.assertTrue(result) + + k = ['hash1', 'user2', now] + ts_obj = self.client.ts_get(table_name, k) + self.assertIsNotNone(ts_obj) + ts_cols = ts_obj.columns + self.assertEqual(len(ts_cols.names), 5) + self.assertEqual(len(ts_cols.types), 5) + self.assertEqual(len(ts_obj.rows), 1) + + row = ts_obj.rows[0] + self.assertEqual(len(row), 5) + exp = rows[0] + self.assertEqual(row, exp) + def test_store_and_fetch_and_query(self): now = datetime.datetime.utcfromtimestamp(144379690.987000) fiveMinsAgo = now - fiveMins From c83d1b50cc89dc2af054079e7e5c234380a64524 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 21 Jun 2016 09:20:18 -0700 Subject: [PATCH 221/324] Add fix for old Python versions, and better clarify which versions to use --- README.md | 2 +- riak/transports/tcp/connection.py | 14 ++++++++++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index d5f75feb..c180bcfb 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ Repository Cloning Install ======= -The recommended versions of Python for use with this client are Python `2.7.x`, `3.3.x`, `3.4.x` and `3.5.x`. The latest version from each series should be preferred. +The recommended versions of Python for use with this client are Python `2.7.8` (or greater, `2.7.11` as of `2016-06-21`), `3.3.x`, `3.4.x` and `3.5.x`. The latest version from each series should be preferred. Older versions of the Python `2.7.X` and `3.X` series should be used with caution as they are not covered by integration tests. 
Riak TS (Timeseries) =================== diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index b92f862f..666b12a4 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -163,7 +163,12 @@ def _recv_msg(self): # https://github.com/basho/riak-python-client/issues/425 raise BadResource(e) mv = memoryview(msgbuf) - msg_code, = struct.unpack("B", mv[0:1]) + try: + msg_code, = struct.unpack("B", mv[0:1]) + except struct.error: + # NB: Python 2.7.3 requires this + # http://bugs.python.org/issue10212 + msg_code, = struct.unpack("B", mv[0:1].tobytes()) data = mv[1:].tobytes() return (msg_code, data) @@ -171,7 +176,12 @@ def _recv_pkt(self): # TODO FUTURE re-use buffer msglen_buf = self._recv(4) # NB: msg length is an unsigned int - msglen, = struct.unpack('!I', msglen_buf) + try: + msglen, = struct.unpack('!I', msglen_buf) + except struct.error: + # NB: Python 2.7.3 requires this + # http://bugs.python.org/issue10212 + msglen, = struct.unpack('!I', bytes(msglen_buf)) return self._recv(msglen) def _recv(self, msglen): From 519b9fd244a3e329708cac5238922282d23707ee Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 7 Jul 2016 15:45:00 -0700 Subject: [PATCH 222/324] Fix TTB tests for Python 3 --- riak/tests/test_timeseries_ttb.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 99e51a6b..06139b43 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -189,7 +189,8 @@ def test_store_and_fetch_gh_483(self): row = ts_obj.rows[0] self.assertEqual(len(row), 5) - exp = rows[0] + exp = [six.b('hash1'), six.b('user2'), now, + six.b('frazzle'), 12.3] self.assertEqual(row, exp) def test_store_and_fetch_and_query(self): From 3c95a9830b91b97c093d4e343622a6b38bd1141d Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 7 Jul 2016 15:48:22 -0700 Subject: [PATCH 223/324] Make the linter happy --- riak/tests/test_timeseries_ttb.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 06139b43..aed4879d 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -190,7 +190,7 @@ def test_store_and_fetch_gh_483(self): row = ts_obj.rows[0] self.assertEqual(len(row), 5) exp = [six.b('hash1'), six.b('user2'), now, - six.b('frazzle'), 12.3] + six.b('frazzle'), 12.3] self.assertEqual(row, exp) def test_store_and_fetch_and_query(self): From 40a3a1c46ae86e16aef1e78f7540a44448055b8f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 8 Jul 2016 09:31:28 -0700 Subject: [PATCH 224/324] Make 2.7.3 compatibility a bit smarter --- riak/transports/tcp/connection.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 666b12a4..da2eef19 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -19,6 +19,9 @@ class TcpConnection(object): + def __init__(self): + self.bytes_required = False + """ Connection-related methods for TcpTransport. 
""" @@ -163,12 +166,16 @@ def _recv_msg(self): # https://github.com/basho/riak-python-client/issues/425 raise BadResource(e) mv = memoryview(msgbuf) + mcb = mv[0:1] + if self.bytes_required: + mcb = mcb.tobytes() try: - msg_code, = struct.unpack("B", mv[0:1]) + msg_code, = struct.unpack("B", mcb) except struct.error: # NB: Python 2.7.3 requires this # http://bugs.python.org/issue10212 msg_code, = struct.unpack("B", mv[0:1].tobytes()) + self.bytes_required = True data = mv[1:].tobytes() return (msg_code, data) @@ -176,12 +183,15 @@ def _recv_pkt(self): # TODO FUTURE re-use buffer msglen_buf = self._recv(4) # NB: msg length is an unsigned int + if self.bytes_required: + msglen_buf = bytes(msglen_buf) try: msglen, = struct.unpack('!I', msglen_buf) except struct.error: # NB: Python 2.7.3 requires this # http://bugs.python.org/issue10212 msglen, = struct.unpack('!I', bytes(msglen_buf)) + self.bytes_required = True return self._recv(msglen) def _recv(self, msglen): From 944a7950f9ccb85743321fc21a4aaec5a2365df5 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 8 Jul 2016 11:57:37 -0700 Subject: [PATCH 225/324] Take datetime objects into account that have tzinfo information. Fixes #483 --- riak/tests/test_datetime.py | 30 ++++++++++++++++++++++++++++++ riak/tz.py | 17 +++++++++++++++++ riak/util.py | 12 ++++++++++-- tox.ini | 2 +- 4 files changed, 58 insertions(+), 3 deletions(-) create mode 100644 riak/tests/test_datetime.py create mode 100644 riak/tz.py diff --git a/riak/tests/test_datetime.py b/riak/tests/test_datetime.py new file mode 100644 index 00000000..2b2aa6a3 --- /dev/null +++ b/riak/tests/test_datetime.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +import datetime +import unittest + +from riak.util import epoch, epoch_tz, \ + unix_time_millis, \ + datetime_from_unix_time_millis + +# NB: without tzinfo, this is UTC +ts0 = datetime.datetime(2015, 1, 1, 12, 1, 2, 987000) +ts0_ts = 1420113662987 +ts0_ts_pst = 1420142462987 + + +class DatetimeUnitTests(unittest.TestCase): + def test_get_unix_time_without_tzinfo(self): + self.assertIsNone(epoch.tzinfo) + self.assertIsNone(ts0.tzinfo) + utm = unix_time_millis(ts0) + self.assertEqual(utm, ts0_ts) + + def test_get_unix_time_with_tzinfo(self): + try: + import pytz + tz = pytz.timezone('America/Los_Angeles') + ts0_pst = tz.localize(ts0) + utm = unix_time_millis(ts0_pst) + self.assertEqual(utm, ts0_ts_pst) + except ImportError: + pass diff --git a/riak/tz.py b/riak/tz.py new file mode 100644 index 00000000..dd4dec01 --- /dev/null +++ b/riak/tz.py @@ -0,0 +1,17 @@ +from datetime import tzinfo, timedelta + +ZERO = timedelta(0) + +class UTC(tzinfo): + """UTC""" + + def utcoffset(self, dt): + return ZERO + + def tzname(self, dt): + return "UTC" + + def dst(self, dt): + return ZERO + +utc = UTC() diff --git a/riak/util.py b/riak/util.py index 4dfc310a..3f826837 100644 --- a/riak/util.py +++ b/riak/util.py @@ -8,10 +8,18 @@ from six import string_types, PY2 epoch = datetime.datetime.utcfromtimestamp(0) - +try: + import pytz + epoch_tz = pytz.utc.localize(epoch) +except ImportError: + from riak.tz import utc + epoch_tz = datetime.datetime.fromtimestamp(0, tz=utc) def unix_time_millis(dt): - td = dt - epoch + if dt.tzinfo: + td = dt - epoch_tz + else: + td = dt - epoch tdms = ((td.days * 24 * 3600) + td.seconds) * 1000 ms = td.microseconds // 1000 return tdms + ms diff --git a/tox.ini b/tox.ini index 079cb0b0..01d165d8 100644 --- a/tox.ini +++ b/tox.ini @@ -12,5 +12,5 @@ basepython = {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 [testenv] 
install_command = pip install --upgrade {packages} commands = {envpython} setup.py test -deps = pip +deps = pip pytz passenv = RUN_* SKIP_* RIAK_* From e8887760d5b500efe19a869ee9a1f2f6c28df5d2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 8 Jul 2016 12:00:05 -0700 Subject: [PATCH 226/324] Fix lint --- riak/tests/test_datetime.py | 4 ++-- riak/tz.py | 1 + riak/util.py | 1 + 3 files changed, 4 insertions(+), 2 deletions(-) diff --git a/riak/tests/test_datetime.py b/riak/tests/test_datetime.py index 2b2aa6a3..a3640105 100644 --- a/riak/tests/test_datetime.py +++ b/riak/tests/test_datetime.py @@ -3,8 +3,7 @@ import unittest from riak.util import epoch, epoch_tz, \ - unix_time_millis, \ - datetime_from_unix_time_millis + unix_time_millis # NB: without tzinfo, this is UTC ts0 = datetime.datetime(2015, 1, 1, 12, 1, 2, 987000) @@ -15,6 +14,7 @@ class DatetimeUnitTests(unittest.TestCase): def test_get_unix_time_without_tzinfo(self): self.assertIsNone(epoch.tzinfo) + self.assertIsNotNone(epoch_tz.tzinfo) self.assertIsNone(ts0.tzinfo) utm = unix_time_millis(ts0) self.assertEqual(utm, ts0_ts) diff --git a/riak/tz.py b/riak/tz.py index dd4dec01..30544b9f 100644 --- a/riak/tz.py +++ b/riak/tz.py @@ -2,6 +2,7 @@ ZERO = timedelta(0) + class UTC(tzinfo): """UTC""" diff --git a/riak/util.py b/riak/util.py index 3f826837..e3124612 100644 --- a/riak/util.py +++ b/riak/util.py @@ -15,6 +15,7 @@ from riak.tz import utc epoch_tz = datetime.datetime.fromtimestamp(0, tz=utc) + def unix_time_millis(dt): if dt.tzinfo: td = dt - epoch_tz From ae33a2993c62cfcc3f872ace4fe7a9d8fed81cf5 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 9 Jul 2016 09:12:20 -0700 Subject: [PATCH 227/324] add pytz to tox.ini --- tox.ini | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/tox.ini b/tox.ini index 01d165d8..10387e42 100644 --- a/tox.ini +++ b/tox.ini @@ -12,5 +12,7 @@ basepython = {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 [testenv] install_command = pip install --upgrade {packages} commands = {envpython} setup.py test -deps = pip pytz +deps = + pip + pytz passenv = RUN_* SKIP_* RIAK_* From b770f7f8cf1c8683c07741c992e581bdf5ea700e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 11 Jul 2016 15:26:57 -0700 Subject: [PATCH 228/324] Fix TS pbuf tests on Python 3.X where results are bytes --- riak/tests/test_timeseries_pbuf.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 2db1af9e..4588d01d 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -1,5 +1,6 @@ # -*- coding: utf-8 -*- import datetime +import six import unittest import riak.pb.riak_ts_pb2 @@ -457,5 +458,6 @@ def test_store_and_fetch_gh_483(self): row = ts_obj.rows[0] self.assertEqual(len(row), 5) - exp = rows[0] + exp = [six.b('hash1'), six.b('user2'), now, + six.b('frazzle'), 12.3] self.assertEqual(row, exp) From 3d78d3509856560319f699c8cff0f7b1873aa28e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 11 Jul 2016 19:15:17 -0700 Subject: [PATCH 229/324] 2.5.4 Release Notes --- RELNOTES.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/RELNOTES.md b/RELNOTES.md index bd36721d..6954bfa0 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,5 +1,10 @@ # Riak Python Client Release Notes +## [2.5.4 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.4) + + * [When converting `datetime` objects to send to Riak TS, `tzinfo` will be used if 
present](https://github.com/basho/riak-python-client/pull/486) + * [Workaround for incorrect version returned by Riak TS OSS](https://github.com/basho/riak-python-client/pull/472) + ## [2.5.3 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.3) * [Bug fix for raising `BadResource`](https://github.com/basho/riak-python-client/pull/481) From cf26d1329498ac40edb831847b88fc01956fc905 Mon Sep 17 00:00:00 2001 From: David Delassus Date: Wed, 13 Jul 2016 16:38:57 +0200 Subject: [PATCH 230/324] Make sure all pools are stopped when shuting down --- riak/client/__init__.py | 7 +++++++ riak/client/multi.py | 19 ++++++++++++++++--- 2 files changed, 23 insertions(+), 3 deletions(-) diff --git a/riak/client/__init__.py b/riak/client/__init__.py index 491324a4..ec06fd0b 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -130,6 +130,13 @@ def __init__(self, protocol='pbc', transport_options={}, self._bucket_types = WeakValueDictionary() self._tables = WeakValueDictionary() + def __del__(self): + if self._multiget_pool: + self._multiget_pool.stop() + + if self._multiput_pool: + self._multiput_pool.stop() + def _get_protocol(self): return self._protocol diff --git a/riak/client/multi.py b/riak/client/multi.py index 84b19a3a..81e58add 100644 --- a/riak/client/multi.py +++ b/riak/client/multi.py @@ -7,6 +7,8 @@ from riak.riak_object import RiakObject from riak.ts_object import TsObject +import atexit + if PY2: from Queue import Queue else: @@ -102,9 +104,11 @@ def stop(self): """ Signals the worker threads to exit and waits on them. """ - self._stop.set() - for worker in self._workers: - worker.join() + + if not self.stopped(): + self._stop.set() + for worker in self._workers: + worker.join() def stopped(self): """ @@ -193,6 +197,15 @@ def _worker_method(self): RIAK_MULTIPUT_POOL = MultiPutPool() +def stop_pools(): + """Stop worker pools at exit.""" + + RIAK_MULTIGET_POOL.stop() + RIAK_MULTIPUT_POOL.stop() + + +atexit.register(stop_pools) + def multiget(client, keys, **options): """Executes a parallel-fetch across multiple threads. Returns a list containing :class:`~riak.riak_object.RiakObject` or From 3d8124401f2911d4cf7fae05a48e4c22cd28d795 Mon Sep 17 00:00:00 2001 From: David Delassus Date: Wed, 13 Jul 2016 16:47:40 +0200 Subject: [PATCH 231/324] Fix typo --- riak/client/multi.py | 1 + 1 file changed, 1 insertion(+) diff --git a/riak/client/multi.py b/riak/client/multi.py index 81e58add..a3fcc301 100644 --- a/riak/client/multi.py +++ b/riak/client/multi.py @@ -206,6 +206,7 @@ def stop_pools(): atexit.register(stop_pools) + def multiget(client, keys, **options): """Executes a parallel-fetch across multiple threads. 
Returns a list containing :class:`~riak.riak_object.RiakObject` or From 301ae54fa6d240213e7c4bf42f4095b2d6fe7e8a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 14 Jul 2016 06:04:37 -0700 Subject: [PATCH 232/324] Add contributor, release notes for 2.5.5 --- README.md | 1 + RELNOTES.md | 4 ++++ buildbot/Makefile | 2 +- 3 files changed, 6 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index c180bcfb..6ccb0aea 100644 --- a/README.md +++ b/README.md @@ -164,6 +164,7 @@ Contributors * Daniel Reverri * [Dan Root](https://github.com/daroot) * [David Basden](https://github.com/dbasden) +* [David Delassus](https://github.com/linkdd) * David Koblas * Dmitry Rozhkov * Eric Florenzano diff --git a/RELNOTES.md b/RELNOTES.md index 6954bfa0..715c3fdf 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,5 +1,9 @@ # Riak Python Client Release Notes +## [2.5.5 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.5) + + * [Stop all pools when client shuts down](https://github.com/basho/riak-python-client/pull/488) + ## [2.5.4 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.4) * [When converting `datetime` objects to send to Riak TS, `tzinfo` will be used if present](https://github.com/basho/riak-python-client/pull/486) diff --git a/buildbot/Makefile b/buildbot/Makefile index cd561d48..18e42507 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -43,7 +43,7 @@ test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_POOL=1 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. test_security: From 9fca06c741a249fceb26259862a8d13b63e9906e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 14 Jul 2016 06:16:11 -0700 Subject: [PATCH 233/324] Add debugging --- riak/client/__init__.py | 1 - riak/client/multi.py | 12 +++++++++--- 2 files changed, 9 insertions(+), 4 deletions(-) diff --git a/riak/client/__init__.py b/riak/client/__init__.py index ec06fd0b..78704f05 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -133,7 +133,6 @@ def __init__(self, protocol='pbc', transport_options={}, def __del__(self): if self._multiget_pool: self._multiget_pool.stop() - if self._multiput_pool: self._multiput_pool.stop() diff --git a/riak/client/multi.py b/riak/client/multi.py index a3fcc301..145adc8d 100644 --- a/riak/client/multi.py +++ b/riak/client/multi.py @@ -8,6 +8,7 @@ from riak.ts_object import TsObject import atexit +import sys if PY2: from Queue import Queue @@ -104,11 +105,15 @@ def stop(self): """ Signals the worker threads to exit and waits on them. """ - - if not self.stopped(): + if self.stopped(): + sys.stderr.write('pool already stopped\n') + else: + sys.stderr.write('stopping pool\n') self._stop.set() for worker in self._workers: + sys.stderr.write('stopping worker {0}\n'.format(worker.name)) worker.join() + sys.stderr.write('all workers joined\n') def stopped(self): """ @@ -148,7 +153,9 @@ def _worker_method(self): output queue. 
""" while not self._should_quit(): + sys.stderr.write('worker {0} waiting for task...\n'.format(self._name)) task = self._inq.get() + sys.stderr.write('worker {0} got task\n'.format(self._name)) try: btype = task.client.bucket_type(task.bucket_type) obj = btype.bucket(task.bucket).get(task.key, **task.options) @@ -199,7 +206,6 @@ def _worker_method(self): def stop_pools(): """Stop worker pools at exit.""" - RIAK_MULTIGET_POOL.stop() RIAK_MULTIPUT_POOL.stop() From 45720cd075b9f2a6164d93b635125147c0ce6ad3 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 14 Jul 2016 06:24:38 -0700 Subject: [PATCH 234/324] Add Queue timeouts, remove debugging --- riak/client/multi.py | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/riak/client/multi.py b/riak/client/multi.py index 145adc8d..40238ad2 100644 --- a/riak/client/multi.py +++ b/riak/client/multi.py @@ -8,12 +8,11 @@ from riak.ts_object import TsObject import atexit -import sys if PY2: - from Queue import Queue + from Queue import Queue, Empty else: - from queue import Queue + from queue import Queue, Empty __all__ = ['multiget', 'multiput', 'MultiGetPool', 'MultiPutPool'] @@ -105,15 +104,10 @@ def stop(self): """ Signals the worker threads to exit and waits on them. """ - if self.stopped(): - sys.stderr.write('pool already stopped\n') - else: - sys.stderr.write('stopping pool\n') + if not self.stopped(): self._stop.set() for worker in self._workers: - sys.stderr.write('stopping worker {0}\n'.format(worker.name)) worker.join() - sys.stderr.write('all workers joined\n') def stopped(self): """ @@ -153,9 +147,11 @@ def _worker_method(self): output queue. """ while not self._should_quit(): - sys.stderr.write('worker {0} waiting for task...\n'.format(self._name)) - task = self._inq.get() - sys.stderr.write('worker {0} got task\n'.format(self._name)) + try: + task = self._inq.get(block=True, timeout=0.25) + except Empty: + continue + try: btype = task.client.bucket_type(task.bucket_type) obj = btype.bucket(task.bucket).get(task.key, **task.options) @@ -181,7 +177,11 @@ def _worker_method(self): the output queue. """ while not self._should_quit(): - task = self._inq.get() + try: + task = self._inq.get(block=True, timeout=0.25) + except Empty: + continue + try: obj = task.object if isinstance(obj, RiakObject): From 3aec1c34d247716daee8e92432d09c2ee5d02b8d Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 14 Jul 2016 07:26:10 -0700 Subject: [PATCH 235/324] remove pool tests from buildbot --- buildbot/Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/buildbot/Makefile b/buildbot/Makefile index 18e42507..cd561d48 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -43,7 +43,7 @@ test: setup test_normal test_security test_normal: @echo "Testing Riak Python Client (without security)" @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_POOL=1 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. 
test_security: From 32d19da6866f734233bb0bdf76820519914c3cbf Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 14 Jul 2016 10:07:22 -0700 Subject: [PATCH 236/324] Remove global multi pools. Close lazy-created multi pools when client object is closed --- riak/client/__init__.py | 23 ++++++--- riak/client/multi.py | 109 ++++++++++++++++++++++------------------ 2 files changed, 74 insertions(+), 58 deletions(-) diff --git a/riak/client/__init__.py b/riak/client/__init__.py index 78704f05..dc97ad52 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -109,6 +109,7 @@ def __init__(self, protocol='pbc', transport_options={}, self._credentials = self._create_credentials(credentials) self._http_pool = HttpPool(self, **transport_options) self._tcp_pool = TcpPool(self, **transport_options) + self._closed = False if PY2: self._encoders = {'application/json': default_encoder, @@ -131,10 +132,7 @@ def __init__(self, protocol='pbc', transport_options={}, self._tables = WeakValueDictionary() def __del__(self): - if self._multiget_pool: - self._multiget_pool.stop() - if self._multiput_pool: - self._multiput_pool.stop() + self.close() def _get_protocol(self): return self._protocol @@ -310,10 +308,19 @@ def close(self): """ Iterate through all of the connections and close each one. """ - if self._http_pool is not None: - self._http_pool.clear() - if self._tcp_pool is not None: - self._tcp_pool.clear() + if not self._closed: + self._closed = True + self._stop_multi_pools() + if self._http_pool is not None: + self._http_pool.clear() + if self._tcp_pool is not None: + self._tcp_pool.clear() + + def _stop_multi_pools(self): + if self._multiget_pool: + self._multiget_pool.stop() + if self._multiput_pool: + self._multiput_pool.stop() def _create_node(self, n): if isinstance(n, RiakNode): diff --git a/riak/client/multi.py b/riak/client/multi.py index 40238ad2..b7a1e11a 100644 --- a/riak/client/multi.py +++ b/riak/client/multi.py @@ -7,8 +7,6 @@ from riak.riak_object import RiakObject from riak.ts_object import TsObject -import atexit - if PY2: from Queue import Queue, Empty else: @@ -89,7 +87,7 @@ def start(self): name = "riak.client.multi-worker-{0}-{1}".format( self._name, i) worker = Thread(target=self._worker_method, name=name) - worker.daemon = True + worker.daemon = False worker.start() self._workers.append(worker) self._started.set() @@ -149,6 +147,11 @@ def _worker_method(self): while not self._should_quit(): try: task = self._inq.get(block=True, timeout=0.25) + except TypeError: + if self._should_quit(): + break + else: + raise except Empty: continue @@ -179,6 +182,11 @@ def _worker_method(self): while not self._should_quit(): try: task = self._inq.get(block=True, timeout=0.25) + except TypeError: + if self._should_quit(): + break + else: + raise except Empty: continue @@ -200,19 +208,6 @@ def _worker_method(self): self._inq.task_done() -RIAK_MULTIGET_POOL = MultiGetPool() -RIAK_MULTIPUT_POOL = MultiPutPool() - - -def stop_pools(): - """Stop worker pools at exit.""" - RIAK_MULTIGET_POOL.stop() - RIAK_MULTIPUT_POOL.stop() - - -atexit.register(stop_pools) - - def multiget(client, keys, **options): """Executes a parallel-fetch across multiple threads. Returns a list containing :class:`~riak.riak_object.RiakObject` or @@ -220,9 +215,9 @@ def multiget(client, keys, **options): bucket-type, bucket, key, and the exception raised. If a ``pool`` option is included, the request will use the given worker - pool and not the default :data:`RIAK_MULTIGET_POOL`. 
This option will - be passed by the client if the ``multiget_pool_size`` option was set on - client initialization. + pool and not a transient :class:`~riak.client.multi.MultiGetPool`. This + option will be passed by the client if the ``multiget_pool_size`` + option was set on client initialization. :param client: the client to use :type client: :class:`~riak.client.RiakClient` @@ -234,26 +229,33 @@ def multiget(client, keys, **options): :rtype: list """ + transient_pool = False outq = Queue() if 'pool' in options: pool = options['pool'] del options['pool'] else: - pool = RIAK_MULTIGET_POOL - - pool.start() - for bucket_type, bucket, key in keys: - task = Task(client, outq, bucket_type, bucket, key, None, options) - pool.enq(task) - - results = [] - for _ in range(len(keys)): - if pool.stopped(): - raise RuntimeError("Multi-get operation interrupted by pool " - "stopping!") - results.append(outq.get()) - outq.task_done() + pool = MultiGetPool() + transient_pool = True + + try: + pool.start() + for bucket_type, bucket, key in keys: + task = Task(client, outq, bucket_type, bucket, key, None, options) + pool.enq(task) + + results = [] + for _ in range(len(keys)): + if pool.stopped(): + raise RuntimeError( + 'Multi-get operation interrupted by pool ' + 'stopping!') + results.append(outq.get()) + outq.task_done() + finally: + if transient_pool: + pool.stop() return results @@ -263,9 +265,9 @@ def multiput(client, objs, **options): containing booleans or :class:`~riak.riak_object.RiakObject` If a ``pool`` option is included, the request will use the given worker - pool and not the default :data:`RIAK_MULTIPUT_POOL`. This option will - be passed by the client if the ``multiput_pool_size`` option was set on - client initialization. + pool and not a transient :class:`~riak.client.multi.MultiPutPool`. This + option will be passed by the client if the ``multiput_pool_size`` + option was set on client initialization. 
:param client: the client to use :type client: :class:`RiakClient ` @@ -277,25 +279,32 @@ def multiput(client, objs, **options): :type options: dict :rtype: list """ + transient_pool = False outq = Queue() if 'pool' in options: pool = options['pool'] del options['pool'] else: - pool = RIAK_MULTIPUT_POOL - - pool.start() - for obj in objs: - task = PutTask(client, outq, obj, options) - pool.enq(task) - - results = [] - for _ in range(len(objs)): - if pool.stopped(): - raise RuntimeError("Multi-put operation interrupted by pool " - "stopping!") - results.append(outq.get()) - outq.task_done() + pool = MultiPutPool() + transient_pool = True + + try: + pool.start() + for obj in objs: + task = PutTask(client, outq, obj, options) + pool.enq(task) + + results = [] + for _ in range(len(objs)): + if pool.stopped(): + raise RuntimeError( + 'Multi-put operation interrupted by pool ' + 'stopping!') + results.append(outq.get()) + outq.task_done() + finally: + if transient_pool: + pool.stop() return results From 6f7d4206d3a70ec7f290aa1f9e04958a0e1e629d Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 14 Jul 2016 13:00:24 -0700 Subject: [PATCH 237/324] Add loud note for 2.5.5 release with regard to multi operations --- RELNOTES.md | 1 + 1 file changed, 1 insertion(+) diff --git a/RELNOTES.md b/RELNOTES.md index 715c3fdf..796142d0 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -3,6 +3,7 @@ ## [2.5.5 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.5) * [Stop all pools when client shuts down](https://github.com/basho/riak-python-client/pull/488) + * [Calling `close` on client closes pools, remove global multi pools](https://github.com/basho/riak-python-client/pull/490). *NOTE*: if you use the multi get or put features of the client, you *MUST* call `close()` on your `RiakClient` instance to correctly clean up the thread pools used for these multi-operations. 
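A minimal usage sketch for the note above, assuming a Riak node is reachable on the default ports; the bucket and keys shown are placeholders.

```python
# Sketch only: bucket/keys are placeholders and a reachable Riak node is assumed.
from riak import RiakClient
from riak.client.multi import multiget

client = RiakClient()                          # default protocol/ports assumed
try:
    keys = [('default', 'mybucket', 'key1'),
            ('default', 'mybucket', 'key2')]   # (bucket_type, bucket, key) tuples
    for result in multiget(client, keys):
        print(result)                          # RiakObject, or (type, bucket, key, exception)
finally:
    client.close()                             # stops any client-managed multi-op thread pools
```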
## [2.5.4 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.4) From 396104a1ab83f29db03f964a37004c3363938013 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 18 Jul 2016 14:43:44 -0700 Subject: [PATCH 238/324] Show response from PyPI --- Makefile | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Makefile b/Makefile index a0f8fae7..a7a6e9f4 100644 --- a/Makefile +++ b/Makefile @@ -48,7 +48,7 @@ endif @git tag --sign -a "$(VERSION)" -m "riak-python-client $(VERSION)" --local-user "$(RELEASE_GPG_KEYNAME)" @git push --tags @echo "==> Python (sdist release)" - @python setup.py sdist upload -s -i $(RELEASE_GPG_KEYNAME) + @python setup.py sdist upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @bash ./build/publish $(VERSION) release: release_sdist @@ -56,10 +56,10 @@ ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif @echo "==> Python 2.7 (release)" - @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.3 (release)" - @python3.3 setup.py build --build-base=py-build/3.3 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + @python3.3 setup.py build --build-base=py-build/3.3 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.4 (release)" - @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.5 (release)" - @python3.5 setup.py build --build-base=py-build/3.5 bdist_egg upload -s -i $(RELEASE_GPG_KEYNAME) + @python3.5 setup.py build --build-base=py-build/3.5 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) From 38d24ea3a2f56b12df0f8b742ee143ae61c9aa1d Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 18 Jul 2016 14:55:05 -0700 Subject: [PATCH 239/324] Multi docs update --- docs/advanced.rst | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/docs/advanced.rst b/docs/advanced.rst index 45475dd8..d77d2bb3 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -36,24 +36,29 @@ Retry logic .. autofunction:: retryableHttpOnly --------- -Multiget --------- +------------------- +Multiget / Multiput +------------------- -.. currentmodule:: riak.client.multiget +.. currentmodule:: riak.client.multi .. autodata:: POOL_SIZE .. autoclass:: Task +.. autoclass:: PutTask .. autoclass:: MultiGetPool :members: :private-members: -.. autodata:: RIAK_MULTIGET_POOL - .. autofunction:: multiget +.. autoclass:: MultiPutPool + :members: + :private-members: + +.. 
autofunction:: multiput + --------- Datatypes --------- From 75b6467912c139893949c8e6a9fe255ec514da20 Mon Sep 17 00:00:00 2001 From: Alex Moore Date: Fri, 26 Aug 2016 15:20:40 -0400 Subject: [PATCH 240/324] Added slack notifications to Travis CI builds --- .idea/.name | 1 + .idea/misc.xml | 14 ++ .idea/modules.xml | 8 + .idea/riak-python-client.iml | 8 + .idea/vcs.xml | 6 + .idea/workspace.xml | 380 +++++++++++++++++++++++++++++++++++ .travis.yml | 19 +- 7 files changed, 428 insertions(+), 8 deletions(-) create mode 100644 .idea/.name create mode 100644 .idea/misc.xml create mode 100644 .idea/modules.xml create mode 100644 .idea/riak-python-client.iml create mode 100644 .idea/vcs.xml create mode 100644 .idea/workspace.xml diff --git a/.idea/.name b/.idea/.name new file mode 100644 index 00000000..efa6340f --- /dev/null +++ b/.idea/.name @@ -0,0 +1 @@ +riak-python-client \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 00000000..d93c344a --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 00000000..b2790f2d --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/riak-python-client.iml b/.idea/riak-python-client.iml new file mode 100644 index 00000000..d0876a78 --- /dev/null +++ b/.idea/riak-python-client.iml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 00000000..94a25f7f --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/.idea/workspace.xml b/.idea/workspace.xml new file mode 100644 index 00000000..b0eca739 --- /dev/null +++ b/.idea/workspace.xml @@ -0,0 +1,380 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1444324606544 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index c451e0c9..928a8cd1 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,12 +1,15 @@ language: python python: - - "2.7" - - "3.3" - - "3.4" - - "3.5" - - "3.5-dev" # 3.5 development branch +- '2.7' +- '3.3' +- '3.4' +- '3.5' +- 3.5-dev install: - - pip install --upgrade flake8 +- pip install --upgrade flake8 script: - - python setup.py test - - flake8 --exclude=riak/pb riak *.py +- python setup.py test +- flake8 --exclude=riak/pb riak *.py +notifications: + slack: + secure: M2DFhniND+ZJAXmN0LgUWrqUPkvxL+kompUww/lj0n0jTrPFEUWDJ+VAhQzg/1Aw7h/Wx0w19/DMwn5oc1KTHI3uY+9eGZHt5ohM0AANuRD8pIjWKa8OU4/kt2yxUPadUFsF+id5gmugxVfOkNnKQkvEy6Nj7WxWqeuN+N+RGgU= From fa51ab11126270fb472fed63ba6be5fc0b57ba79 Mon Sep 17 00:00:00 2001 From: Alex Moore Date: Fri, 26 Aug 2016 15:21:49 -0400 Subject: [PATCH 241/324] Removing .idea files from source --- .gitignore | 1 + .idea/.name | 1 - .idea/misc.xml | 14 -- .idea/modules.xml | 8 - .idea/riak-python-client.iml | 8 - .idea/vcs.xml | 6 - .idea/workspace.xml | 380 
----------------------------------- 7 files changed, 1 insertion(+), 417 deletions(-) delete mode 100644 .idea/.name delete mode 100644 .idea/misc.xml delete mode 100644 .idea/modules.xml delete mode 100644 .idea/riak-python-client.iml delete mode 100644 .idea/vcs.xml delete mode 100644 .idea/workspace.xml diff --git a/.gitignore b/.gitignore index c7f6e9f4..de4adbc9 100644 --- a/.gitignore +++ b/.gitignore @@ -15,3 +15,4 @@ riak.egg-info/ .eggs/ #*# *~ +.idea/ diff --git a/.idea/.name b/.idea/.name deleted file mode 100644 index efa6340f..00000000 --- a/.idea/.name +++ /dev/null @@ -1 +0,0 @@ -riak-python-client \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml deleted file mode 100644 index d93c344a..00000000 --- a/.idea/misc.xml +++ /dev/null @@ -1,14 +0,0 @@ - - - - - - - - - - - - - - \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index b2790f2d..00000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/riak-python-client.iml b/.idea/riak-python-client.iml deleted file mode 100644 index d0876a78..00000000 --- a/.idea/riak-python-client.iml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 94a25f7f..00000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/.idea/workspace.xml b/.idea/workspace.xml deleted file mode 100644 index b0eca739..00000000 --- a/.idea/workspace.xml +++ /dev/null @@ -1,380 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1444324606544 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file From 0eecf2d5793a9d4648bc5ae34879881821aa0583 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 6 Sep 2016 15:20:19 -0700 Subject: [PATCH 242/324] Initial HyperLogLog support, still a WIP --- riak/datatypes/__init__.py | 5 +- riak/datatypes/counter.py | 21 +- riak/datatypes/hll.py | 68 +++++ riak/pb/riak_dt_pb2.py | 286 ++++++++++++++------- riak/pb/riak_kv_pb2.py | 484 +++++++++++++++++++++-------------- riak/pb/riak_pb2.py | 216 ++++++++++------ riak/pb/riak_search_pb2.py | 59 +++-- riak/pb/riak_ts_pb2.py | 242 +++++++++++------- riak/pb/riak_yokozuna_pb2.py | 124 +++++---- riak/tests/test_datatypes.py | 16 +- riak_pb | 2 +- 11 files changed, 983 insertions(+), 540 deletions(-) create mode 100644 riak/datatypes/hll.py diff --git a/riak/datatypes/__init__.py b/riak/datatypes/__init__.py index 21235ce6..90d114e9 100644 --- a/riak/datatypes/__init__.py +++ b/riak/datatypes/__init__.py @@ -24,7 +24,8 @@ from .set import Set from .map import Map from .errors import ContextRequired +from .hll import Hll -__all__ = ['Datatype', 'Flag', 'Counter', 'Register', 'Set', 'Map', 'TYPES', - 'ContextRequired'] +__all__ = ['Datatype', 'TYPES', 'ContextRequired', + 'Flag', 'Counter', 'Register', 'Set', 'Map', 'Hll'] diff --git a/riak/datatypes/counter.py b/riak/datatypes/counter.py index 
7d2de17b..33c69f81 100644 --- a/riak/datatypes/counter.py +++ b/riak/datatypes/counter.py @@ -1,20 +1,4 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" +import six from riak.datatypes.datatype import Datatype from riak.datatypes import TYPES @@ -72,8 +56,7 @@ def decrement(self, amount=1): self._increment -= amount def _check_type(self, new_value): - return (isinstance(new_value, int) or - isinstance(new_value, long)) # noqa + return isinstance(new_value, six.integer_types) TYPES['counter'] = Counter diff --git a/riak/datatypes/hll.py b/riak/datatypes/hll.py new file mode 100644 index 00000000..dbd68d4e --- /dev/null +++ b/riak/datatypes/hll.py @@ -0,0 +1,68 @@ +import collections +import six + +from .datatype import Datatype +from riak.datatypes import TYPES + +__all__ = ['Hll'] + + +class Hll(Datatype): + """A convergent datatype representing a HyperLogLog set. + Currently strings are the only supported value type. + Example:: + + myhll.add('barista') + myhll.add('roaster') + myhll.add('brewer') + """ + + type_name = 'hll' + _type_error_msg = 'Hlls can only be integers' + + def _post_init(self): + self._adds = set() + + def _default_value(self): + return 0 + + @Datatype.modified.getter + def modified(self): + """ + Whether this HyperLogLog has staged adds. + """ + return len(self._adds) > 0 + + def to_op(self): + """ + Extracts the modification operation from the Hll. + + :rtype: dict, None + """ + if not self._adds: + return None + changes = {} + if self._adds: + changes['adds'] = list(self._adds) + return changes + + def add(self, element): + """ + Adds an element to the HyperLogLog. Datatype cardinality will + be updated when the object is saved. + + :param element: the element to add + :type element: str + """ + if not isinstance(element, six.string_types): + raise TypeError("Hll elements can only be strings") + self._adds.add(element) + + def _coerce_value(self, new_value): + return int(new_value) + + def _check_type(self, new_value): + return isinstance(new_value, six.integer_types) + + +TYPES['hll'] = Hll diff --git a/riak/pb/riak_dt_pb2.py b/riak/pb/riak_dt_pb2.py index 58a2f54b..394be5b1 100644 --- a/riak/pb/riak_dt_pb2.py +++ b/riak/pb/riak_dt_pb2.py @@ -2,19 +2,26 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
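For context on the new datatype above, a hypothetical usage sketch: it assumes a bucket type named `hlls` has been created server-side with `datatype = hll`, and that `bucket.new()`/`bucket.get()` resolve to the new `Hll` class the way they do for the existing datatypes. Since the patch is explicitly a work in progress, treat this as an illustration rather than the final API.

```python
# Hypothetical Hll usage; the 'hlls' bucket type (datatype = hll) and all key
# names are assumptions, and the feature above is still marked WIP.
from riak import RiakClient

client = RiakClient()
try:
    bucket = client.bucket_type('hlls').bucket('coffee_crew')
    hll = bucket.new('staff')      # expected to resolve to riak.datatypes.Hll
    hll.add('barista')             # staged adds, per the class docstring above
    hll.add('roaster')
    hll.add('brewer')
    hll.store()                    # cardinality is computed server-side on save
    fetched = bucket.get('staff')  # re-fetch to read the estimated cardinality
    print(fetched.value)           # approximate count of distinct elements
finally:
    client.close()
```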
# source: riak_dt.proto +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) +_sym_db = _symbol_database.Default() + DESCRIPTOR = _descriptor.FileDescriptor( name='riak_dt.proto', package='', - serialized_pb='\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"Q\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\"\x87\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\")\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"V\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"t\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 
\x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 \x03(\x0c\x12\x1c\n\tmap_value\x18\x05 \x03(\x0b\x32\t.MapEntryB#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') + serialized_pb=_b('\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"d\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x04 \x01(\x04\"\x90\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\"2\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\x12\x07\n\x03HLL\x10\x04\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\x15\n\x05HllOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"n\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\x12\x16\n\x06hll_op\x18\x04 \x01(\x0b\x32\x06.HllOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"\x87\x01\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 
\x03(\x0c\x12\x1c\n\tmap_value\x18\x05 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x06 \x01(\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -50,6 +57,7 @@ serialized_start=82, serialized_end=151, ) +_sym_db.RegisterEnumDescriptor(_MAPFIELD_MAPFIELDTYPE) _DTFETCHRESP_DATATYPE = _descriptor.EnumDescriptor( name='DataType', @@ -69,12 +77,17 @@ name='MAP', index=2, number=3, options=None, type=None), + _descriptor.EnumValueDescriptor( + name='HLL', index=3, number=4, + options=None, + type=None), ], containing_type=None, options=None, - serialized_start=696, - serialized_end=737, + serialized_start=715, + serialized_end=765, ) +_sym_db.RegisterEnumDescriptor(_DTFETCHRESP_DATATYPE) _MAPUPDATE_FLAGOP = _descriptor.EnumDescriptor( name='FlagOp', @@ -93,9 +106,10 @@ ], containing_type=None, options=None, - serialized_start=988, - serialized_end=1021, + serialized_start=1039, + serialized_end=1072, ) +_sym_db.RegisterEnumDescriptor(_MAPUPDATE_FLAGOP) _MAPFIELD = _descriptor.Descriptor( @@ -108,7 +122,7 @@ _descriptor.FieldDescriptor( name='name', full_name='MapField.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -129,6 +143,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=18, serialized_end=151, ) @@ -165,7 +181,7 @@ _descriptor.FieldDescriptor( name='register_value', full_name='MapEntry.register_value', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -192,6 +208,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=154, serialized_end=306, ) @@ -207,21 +225,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='DtFetchReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='DtFetchReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', full_name='DtFetchReq.type', index=2, number=3, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -290,6 +308,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=309, serialized_end=516, ) @@ -323,6 +343,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='hll_value', full_name='DtValue.hll_value', index=3, + number=4, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, 
+ is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -332,8 +359,10 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=518, - serialized_end=599, + serialized_end=618, ) @@ -347,7 +376,7 @@ _descriptor.FieldDescriptor( name='context', full_name='DtFetchResp.context', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -375,8 +404,10 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=602, - serialized_end=737, + oneofs=[ + ], + serialized_start=621, + serialized_end=765, ) @@ -403,8 +434,10 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=739, - serialized_end=769, + oneofs=[ + ], + serialized_start=767, + serialized_end=797, ) @@ -438,8 +471,40 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=771, - serialized_end=809, + oneofs=[ + ], + serialized_start=799, + serialized_end=837, +) + + +_HLLOP = _descriptor.Descriptor( + name='HllOp', + full_name='HllOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='adds', full_name='HllOp.adds', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + oneofs=[ + ], + serialized_start=839, + serialized_end=860, ) @@ -474,7 +539,7 @@ _descriptor.FieldDescriptor( name='register_op', full_name='MapUpdate.register_op', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -502,8 +567,10 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=812, - serialized_end=1021, + oneofs=[ + ], + serialized_start=863, + serialized_end=1072, ) @@ -537,8 +604,10 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1023, - serialized_end=1087, + oneofs=[ + ], + serialized_start=1074, + serialized_end=1138, ) @@ -570,6 +639,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='hll_op', full_name='DtOp.hll_op', index=3, + number=4, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -579,8 +655,10 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1089, - serialized_end=1175, + oneofs=[ + ], + serialized_start=1140, + serialized_end=1250, ) @@ -594,28 +672,28 @@ _descriptor.FieldDescriptor( name='bucket', full_name='DtUpdateReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', 
full_name='DtUpdateReq.key', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', full_name='DtUpdateReq.type', index=2, number=3, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='context', full_name='DtUpdateReq.context', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -691,8 +769,10 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1178, - serialized_end=1419, + oneofs=[ + ], + serialized_start=1253, + serialized_end=1494, ) @@ -706,14 +786,14 @@ _descriptor.FieldDescriptor( name='key', full_name='DtUpdateResp.key', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='context', full_name='DtUpdateResp.context', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -738,6 +818,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='hll_value', full_name='DtUpdateResp.hll_value', index=5, + number=6, type=4, cpp_type=4, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -747,29 +834,32 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1421, - serialized_end=1537, + oneofs=[ + ], + serialized_start=1497, + serialized_end=1632, ) _MAPFIELD.fields_by_name['type'].enum_type = _MAPFIELD_MAPFIELDTYPE -_MAPFIELD_MAPFIELDTYPE.containing_type = _MAPFIELD; +_MAPFIELD_MAPFIELDTYPE.containing_type = _MAPFIELD _MAPENTRY.fields_by_name['field'].message_type = _MAPFIELD _MAPENTRY.fields_by_name['map_value'].message_type = _MAPENTRY _DTVALUE.fields_by_name['map_value'].message_type = _MAPENTRY _DTFETCHRESP.fields_by_name['type'].enum_type = _DTFETCHRESP_DATATYPE _DTFETCHRESP.fields_by_name['value'].message_type = _DTVALUE -_DTFETCHRESP_DATATYPE.containing_type = _DTFETCHRESP; +_DTFETCHRESP_DATATYPE.containing_type = _DTFETCHRESP _MAPUPDATE.fields_by_name['field'].message_type = _MAPFIELD _MAPUPDATE.fields_by_name['counter_op'].message_type = _COUNTEROP _MAPUPDATE.fields_by_name['set_op'].message_type = _SETOP _MAPUPDATE.fields_by_name['flag_op'].enum_type = _MAPUPDATE_FLAGOP _MAPUPDATE.fields_by_name['map_op'].message_type = _MAPOP -_MAPUPDATE_FLAGOP.containing_type = _MAPUPDATE; +_MAPUPDATE_FLAGOP.containing_type = _MAPUPDATE _MAPOP.fields_by_name['removes'].message_type = _MAPFIELD 
_MAPOP.fields_by_name['updates'].message_type = _MAPUPDATE _DTOP.fields_by_name['counter_op'].message_type = _COUNTEROP _DTOP.fields_by_name['set_op'].message_type = _SETOP _DTOP.fields_by_name['map_op'].message_type = _MAPOP +_DTOP.fields_by_name['hll_op'].message_type = _HLLOP _DTUPDATEREQ.fields_by_name['op'].message_type = _DTOP _DTUPDATERESP.fields_by_name['map_value'].message_type = _MAPENTRY DESCRIPTOR.message_types_by_name['MapField'] = _MAPFIELD @@ -779,85 +869,105 @@ DESCRIPTOR.message_types_by_name['DtFetchResp'] = _DTFETCHRESP DESCRIPTOR.message_types_by_name['CounterOp'] = _COUNTEROP DESCRIPTOR.message_types_by_name['SetOp'] = _SETOP +DESCRIPTOR.message_types_by_name['HllOp'] = _HLLOP DESCRIPTOR.message_types_by_name['MapUpdate'] = _MAPUPDATE DESCRIPTOR.message_types_by_name['MapOp'] = _MAPOP DESCRIPTOR.message_types_by_name['DtOp'] = _DTOP DESCRIPTOR.message_types_by_name['DtUpdateReq'] = _DTUPDATEREQ DESCRIPTOR.message_types_by_name['DtUpdateResp'] = _DTUPDATERESP -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class MapField(_message.Message): - DESCRIPTOR = _MAPFIELD - +MapField = _reflection.GeneratedProtocolMessageType('MapField', (_message.Message,), dict( + DESCRIPTOR = _MAPFIELD, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:MapField) + )) +_sym_db.RegisterMessage(MapField) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class MapEntry(_message.Message): - DESCRIPTOR = _MAPENTRY - +MapEntry = _reflection.GeneratedProtocolMessageType('MapEntry', (_message.Message,), dict( + DESCRIPTOR = _MAPENTRY, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:MapEntry) + )) +_sym_db.RegisterMessage(MapEntry) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtFetchReq(_message.Message): - DESCRIPTOR = _DTFETCHREQ - +DtFetchReq = _reflection.GeneratedProtocolMessageType('DtFetchReq', (_message.Message,), dict( + DESCRIPTOR = _DTFETCHREQ, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:DtFetchReq) + )) +_sym_db.RegisterMessage(DtFetchReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtValue(_message.Message): - DESCRIPTOR = _DTVALUE - +DtValue = _reflection.GeneratedProtocolMessageType('DtValue', (_message.Message,), dict( + DESCRIPTOR = _DTVALUE, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:DtValue) + )) +_sym_db.RegisterMessage(DtValue) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtFetchResp(_message.Message): - DESCRIPTOR = _DTFETCHRESP - +DtFetchResp = _reflection.GeneratedProtocolMessageType('DtFetchResp', (_message.Message,), dict( + DESCRIPTOR = _DTFETCHRESP, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:DtFetchResp) + )) +_sym_db.RegisterMessage(DtFetchResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class CounterOp(_message.Message): - DESCRIPTOR = _COUNTEROP - +CounterOp = _reflection.GeneratedProtocolMessageType('CounterOp', (_message.Message,), dict( + DESCRIPTOR = _COUNTEROP, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:CounterOp) + )) +_sym_db.RegisterMessage(CounterOp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class SetOp(_message.Message): - DESCRIPTOR = _SETOP - +SetOp = _reflection.GeneratedProtocolMessageType('SetOp', (_message.Message,), dict( + DESCRIPTOR = _SETOP, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:SetOp) - -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class 
MapUpdate(_message.Message): - DESCRIPTOR = _MAPUPDATE - + )) +_sym_db.RegisterMessage(SetOp) + +HllOp = _reflection.GeneratedProtocolMessageType('HllOp', (_message.Message,), dict( + DESCRIPTOR = _HLLOP, + __module__ = 'riak_dt_pb2' + # @@protoc_insertion_point(class_scope:HllOp) + )) +_sym_db.RegisterMessage(HllOp) + +MapUpdate = _reflection.GeneratedProtocolMessageType('MapUpdate', (_message.Message,), dict( + DESCRIPTOR = _MAPUPDATE, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:MapUpdate) + )) +_sym_db.RegisterMessage(MapUpdate) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class MapOp(_message.Message): - DESCRIPTOR = _MAPOP - +MapOp = _reflection.GeneratedProtocolMessageType('MapOp', (_message.Message,), dict( + DESCRIPTOR = _MAPOP, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:MapOp) + )) +_sym_db.RegisterMessage(MapOp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtOp(_message.Message): - DESCRIPTOR = _DTOP - +DtOp = _reflection.GeneratedProtocolMessageType('DtOp', (_message.Message,), dict( + DESCRIPTOR = _DTOP, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:DtOp) + )) +_sym_db.RegisterMessage(DtOp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtUpdateReq(_message.Message): - DESCRIPTOR = _DTUPDATEREQ - +DtUpdateReq = _reflection.GeneratedProtocolMessageType('DtUpdateReq', (_message.Message,), dict( + DESCRIPTOR = _DTUPDATEREQ, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:DtUpdateReq) + )) +_sym_db.RegisterMessage(DtUpdateReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class DtUpdateResp(_message.Message): - DESCRIPTOR = _DTUPDATERESP - +DtUpdateResp = _reflection.GeneratedProtocolMessageType('DtUpdateResp', (_message.Message,), dict( + DESCRIPTOR = _DTUPDATERESP, + __module__ = 'riak_dt_pb2' # @@protoc_insertion_point(class_scope:DtUpdateResp) + )) +_sym_db.RegisterMessage(DtUpdateResp) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakDtPB') +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\010RiakDtPB')) # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_kv_pb2.py b/riak/pb/riak_kv_pb2.py index 09797052..ce8ab338 100644 --- a/riak/pb/riak_kv_pb2.py +++ b/riak/pb/riak_kv_pb2.py @@ -2,12 +2,17 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: riak_kv.proto +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) +_sym_db = _symbol_database.Default() + import riak.pb.riak_pb2 @@ -15,7 +20,10 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_kv.proto', package='', - serialized_pb='\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xf9\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 
\x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 \x01(\x0c\x12\x11\n\trange_min\x18\x05 \x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 \x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\x12\x15\n\rcover_context\x18\x0f \x01(\x0c\x12\x13\n\x0breturn_body\x18\x10 \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"X\n\x10RpbIndexBodyResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xd8\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\x12\x15\n\rcover_context\x18\n \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\"x\n\x0eRpbCoverageReq\x12\x0c\n\x04type\x18\x01 \x01(\x0c\x12\x0e\n\x06\x62ucket\x18\x02 
\x02(\x0c\x12\x16\n\x0emin_partitions\x18\x03 \x01(\r\x12\x15\n\rreplace_cover\x18\x04 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x05 \x03(\x0c\"5\n\x0fRpbCoverageResp\x12\"\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x11.RpbCoverageEntry\"Z\n\x10RpbCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rkeyspace_desc\x18\x03 \x01(\x0c\x12\x15\n\rcover_context\x18\x04 \x02(\x0c\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') + serialized_pb=_b('\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xf9\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 \x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 \x01(\x0c\x12\x11\n\trange_min\x18\x05 
\x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 \x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\x12\x15\n\rcover_context\x18\x0f \x01(\x0c\x12\x13\n\x0breturn_body\x18\x10 \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"X\n\x10RpbIndexBodyResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xd8\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\x12\x15\n\rcover_context\x18\n \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\"x\n\x0eRpbCoverageReq\x12\x0c\n\x04type\x18\x01 \x01(\x0c\x12\x0e\n\x06\x62ucket\x18\x02 \x02(\x0c\x12\x16\n\x0emin_partitions\x18\x03 \x01(\r\x12\x15\n\rreplace_cover\x18\x04 
\x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x05 \x03(\x0c\"5\n\x0fRpbCoverageResp\x12\"\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x11.RpbCoverageEntry\"Z\n\x10RpbCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rkeyspace_desc\x18\x03 \x01(\x0c\x12\x15\n\rcover_context\x18\x04 \x02(\x0c\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') + , + dependencies=[riak.pb.riak_pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -39,6 +47,7 @@ serialized_start=1688, serialized_end=1723, ) +_sym_db.RegisterEnumDescriptor(_RPBINDEXREQ_INDEXQUERYTYPE) _RPBGETCLIENTIDRESP = _descriptor.Descriptor( @@ -51,7 +60,7 @@ _descriptor.FieldDescriptor( name='client_id', full_name='RpbGetClientIdResp.client_id', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -64,6 +73,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=29, serialized_end=68, ) @@ -79,7 +90,7 @@ _descriptor.FieldDescriptor( name='client_id', full_name='RpbSetClientIdReq.client_id', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -92,6 +103,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=70, serialized_end=108, ) @@ -107,14 +120,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbGetReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbGetReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -149,7 +162,7 @@ _descriptor.FieldDescriptor( name='if_modified', full_name='RpbGetReq.if_modified', index=6, number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -191,7 +204,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbGetReq.type', index=12, number=13, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -204,6 +217,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=111, serialized_end=344, ) @@ -226,7 +241,7 @@ _descriptor.FieldDescriptor( name='vclock', full_name='RpbGetResp.vclock', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -246,6 +261,8 @@ options=None, 
is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=346, serialized_end=423, ) @@ -261,21 +278,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbPutReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbPutReq.key', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='vclock', full_name='RpbPutReq.vclock', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -366,7 +383,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbPutReq.type', index=15, number=16, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -379,6 +396,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=426, serialized_end=720, ) @@ -401,14 +420,14 @@ _descriptor.FieldDescriptor( name='vclock', full_name='RpbPutResp.vclock', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbPutResp.key', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -421,6 +440,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=722, serialized_end=793, ) @@ -436,14 +457,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbDelReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbDelReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -457,7 +478,7 @@ _descriptor.FieldDescriptor( name='vclock', full_name='RpbDelReq.vclock', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -520,7 +541,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbDelReq.type', index=12, number=13, 
type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -533,6 +554,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=796, serialized_end=991, ) @@ -562,7 +585,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbListBucketsReq.type', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -575,6 +598,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=993, serialized_end=1059, ) @@ -610,6 +635,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1061, serialized_end=1112, ) @@ -625,7 +652,7 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbListKeysReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -639,7 +666,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbListKeysReq.type', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -652,6 +679,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1114, serialized_end=1177, ) @@ -687,6 +716,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1179, serialized_end=1224, ) @@ -702,14 +733,14 @@ _descriptor.FieldDescriptor( name='request', full_name='RpbMapRedReq.request', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='content_type', full_name='RpbMapRedReq.content_type', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -722,6 +753,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1226, serialized_end=1279, ) @@ -744,7 +777,7 @@ _descriptor.FieldDescriptor( name='response', full_name='RpbMapRedResp.response', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -764,6 +797,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1281, serialized_end=1343, ) @@ -779,14 +814,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbIndexReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, 
default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='index', full_name='RpbIndexReq.index', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -800,21 +835,21 @@ _descriptor.FieldDescriptor( name='key', full_name='RpbIndexReq.key', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='range_min', full_name='RpbIndexReq.range_min', index=4, number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='range_max', full_name='RpbIndexReq.range_max', index=5, number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -842,7 +877,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbIndexReq.continuation', index=9, number=10, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -856,14 +891,14 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbIndexReq.type', index=11, number=12, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='term_regex', full_name='RpbIndexReq.term_regex', index=12, number=13, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -877,7 +912,7 @@ _descriptor.FieldDescriptor( name='cover_context', full_name='RpbIndexReq.cover_context', index=14, number=15, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -898,6 +933,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1346, serialized_end=1723, ) @@ -927,7 +964,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbIndexResp.continuation', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -947,6 +984,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1725, 
serialized_end=1816, ) @@ -969,7 +1008,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbIndexBodyResp.continuation', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -989,6 +1028,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1818, serialized_end=1906, ) @@ -1004,21 +1045,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbCSBucketReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='start_key', full_name='RpbCSBucketReq.start_key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='end_key', full_name='RpbCSBucketReq.end_key', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1039,7 +1080,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbCSBucketReq.continuation', index=5, number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1060,14 +1101,14 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbCSBucketReq.type', index=8, number=9, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='cover_context', full_name='RpbCSBucketReq.cover_context', index=9, number=10, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1080,6 +1121,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1909, serialized_end=2125, ) @@ -1102,7 +1145,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbCSBucketResp.continuation', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1122,6 +1165,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2127, serialized_end=2214, ) @@ -1137,7 +1182,7 @@ _descriptor.FieldDescriptor( name='key', full_name='RpbIndexObject.key', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1157,6 +1202,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2216, serialized_end=2274, ) @@ -1172,35 +1219,35 @@ _descriptor.FieldDescriptor( name='value', full_name='RpbContent.value', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='content_type', full_name='RpbContent.content_type', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='charset', full_name='RpbContent.charset', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='content_encoding', full_name='RpbContent.content_encoding', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='vtag', full_name='RpbContent.vtag', index=4, number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1255,6 +1302,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2277, serialized_end=2522, ) @@ -1270,21 +1319,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbLink.bucket', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbLink.key', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='tag', full_name='RpbLink.tag', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1297,6 +1346,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2524, serialized_end=2575, ) @@ -1312,14 +1363,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbCounterUpdateReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbCounterUpdateReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1367,6 +1418,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2577, serialized_end=2699, ) @@ -1395,6 +1448,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2701, serialized_end=2738, ) @@ -1410,14 +1465,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbCounterGetReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbCounterGetReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1458,6 +1513,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2740, serialized_end=2853, ) @@ -1486,6 +1543,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2855, serialized_end=2889, ) @@ -1501,21 +1560,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbGetBucketKeyPreflistReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbGetBucketKeyPreflistReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', full_name='RpbGetBucketKeyPreflistReq.type', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1528,6 +1587,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2891, serialized_end=2962, ) @@ -1556,6 +1617,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=2964, serialized_end=3038, ) @@ -1578,7 +1641,7 @@ _descriptor.FieldDescriptor( name='node', full_name='RpbBucketKeyPreflistItem.node', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1598,6 +1661,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=3040, serialized_end=3116, ) @@ -1613,14 +1678,14 @@ _descriptor.FieldDescriptor( name='type', 
full_name='RpbCoverageReq.type', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='bucket', full_name='RpbCoverageReq.bucket', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1634,7 +1699,7 @@ _descriptor.FieldDescriptor( name='replace_cover', full_name='RpbCoverageReq.replace_cover', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1654,6 +1719,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=3118, serialized_end=3238, ) @@ -1682,6 +1749,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=3240, serialized_end=3293, ) @@ -1697,7 +1766,7 @@ _descriptor.FieldDescriptor( name='ip', full_name='RpbCoverageEntry.ip', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1711,14 +1780,14 @@ _descriptor.FieldDescriptor( name='keyspace_desc', full_name='RpbCoverageEntry.keyspace_desc', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='cover_context', full_name='RpbCoverageEntry.cover_context', index=3, number=4, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1731,6 +1800,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=3295, serialized_end=3385, ) @@ -1739,7 +1810,7 @@ _RPBPUTREQ.fields_by_name['content'].message_type = _RPBCONTENT _RPBPUTRESP.fields_by_name['content'].message_type = _RPBCONTENT _RPBINDEXREQ.fields_by_name['qtype'].enum_type = _RPBINDEXREQ_INDEXQUERYTYPE -_RPBINDEXREQ_INDEXQUERYTYPE.containing_type = _RPBINDEXREQ; +_RPBINDEXREQ_INDEXQUERYTYPE.containing_type = _RPBINDEXREQ _RPBINDEXRESP.fields_by_name['results'].message_type = riak.pb.riak_pb2._RPBPAIR _RPBINDEXBODYRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT _RPBCSBUCKETRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT @@ -1781,193 +1852,224 @@ DESCRIPTOR.message_types_by_name['RpbCoverageResp'] = _RPBCOVERAGERESP DESCRIPTOR.message_types_by_name['RpbCoverageEntry'] = _RPBCOVERAGEENTRY -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetClientIdResp(_message.Message): - DESCRIPTOR = _RPBGETCLIENTIDRESP - +RpbGetClientIdResp = _reflection.GeneratedProtocolMessageType('RpbGetClientIdResp', (_message.Message,), dict( + DESCRIPTOR = _RPBGETCLIENTIDRESP, + __module__ = 
'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbGetClientIdResp) + )) +_sym_db.RegisterMessage(RpbGetClientIdResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSetClientIdReq(_message.Message): - DESCRIPTOR = _RPBSETCLIENTIDREQ - +RpbSetClientIdReq = _reflection.GeneratedProtocolMessageType('RpbSetClientIdReq', (_message.Message,), dict( + DESCRIPTOR = _RPBSETCLIENTIDREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbSetClientIdReq) + )) +_sym_db.RegisterMessage(RpbSetClientIdReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetReq(_message.Message): - DESCRIPTOR = _RPBGETREQ - +RpbGetReq = _reflection.GeneratedProtocolMessageType('RpbGetReq', (_message.Message,), dict( + DESCRIPTOR = _RPBGETREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbGetReq) + )) +_sym_db.RegisterMessage(RpbGetReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetResp(_message.Message): - DESCRIPTOR = _RPBGETRESP - +RpbGetResp = _reflection.GeneratedProtocolMessageType('RpbGetResp', (_message.Message,), dict( + DESCRIPTOR = _RPBGETRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbGetResp) + )) +_sym_db.RegisterMessage(RpbGetResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbPutReq(_message.Message): - DESCRIPTOR = _RPBPUTREQ - +RpbPutReq = _reflection.GeneratedProtocolMessageType('RpbPutReq', (_message.Message,), dict( + DESCRIPTOR = _RPBPUTREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbPutReq) + )) +_sym_db.RegisterMessage(RpbPutReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbPutResp(_message.Message): - DESCRIPTOR = _RPBPUTRESP - +RpbPutResp = _reflection.GeneratedProtocolMessageType('RpbPutResp', (_message.Message,), dict( + DESCRIPTOR = _RPBPUTRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbPutResp) + )) +_sym_db.RegisterMessage(RpbPutResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbDelReq(_message.Message): - DESCRIPTOR = _RPBDELREQ - +RpbDelReq = _reflection.GeneratedProtocolMessageType('RpbDelReq', (_message.Message,), dict( + DESCRIPTOR = _RPBDELREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbDelReq) + )) +_sym_db.RegisterMessage(RpbDelReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbListBucketsReq(_message.Message): - DESCRIPTOR = _RPBLISTBUCKETSREQ - +RpbListBucketsReq = _reflection.GeneratedProtocolMessageType('RpbListBucketsReq', (_message.Message,), dict( + DESCRIPTOR = _RPBLISTBUCKETSREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbListBucketsReq) + )) +_sym_db.RegisterMessage(RpbListBucketsReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbListBucketsResp(_message.Message): - DESCRIPTOR = _RPBLISTBUCKETSRESP - +RpbListBucketsResp = _reflection.GeneratedProtocolMessageType('RpbListBucketsResp', (_message.Message,), dict( + DESCRIPTOR = _RPBLISTBUCKETSRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbListBucketsResp) + )) +_sym_db.RegisterMessage(RpbListBucketsResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbListKeysReq(_message.Message): - DESCRIPTOR = _RPBLISTKEYSREQ - +RpbListKeysReq = _reflection.GeneratedProtocolMessageType('RpbListKeysReq', (_message.Message,), dict( + DESCRIPTOR = _RPBLISTKEYSREQ, + __module__ = 'riak_kv_pb2' # 
@@protoc_insertion_point(class_scope:RpbListKeysReq) + )) +_sym_db.RegisterMessage(RpbListKeysReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbListKeysResp(_message.Message): - DESCRIPTOR = _RPBLISTKEYSRESP - +RpbListKeysResp = _reflection.GeneratedProtocolMessageType('RpbListKeysResp', (_message.Message,), dict( + DESCRIPTOR = _RPBLISTKEYSRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbListKeysResp) + )) +_sym_db.RegisterMessage(RpbListKeysResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbMapRedReq(_message.Message): - DESCRIPTOR = _RPBMAPREDREQ - +RpbMapRedReq = _reflection.GeneratedProtocolMessageType('RpbMapRedReq', (_message.Message,), dict( + DESCRIPTOR = _RPBMAPREDREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbMapRedReq) + )) +_sym_db.RegisterMessage(RpbMapRedReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbMapRedResp(_message.Message): - DESCRIPTOR = _RPBMAPREDRESP - +RpbMapRedResp = _reflection.GeneratedProtocolMessageType('RpbMapRedResp', (_message.Message,), dict( + DESCRIPTOR = _RPBMAPREDRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbMapRedResp) + )) +_sym_db.RegisterMessage(RpbMapRedResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbIndexReq(_message.Message): - DESCRIPTOR = _RPBINDEXREQ - +RpbIndexReq = _reflection.GeneratedProtocolMessageType('RpbIndexReq', (_message.Message,), dict( + DESCRIPTOR = _RPBINDEXREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbIndexReq) + )) +_sym_db.RegisterMessage(RpbIndexReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbIndexResp(_message.Message): - DESCRIPTOR = _RPBINDEXRESP - +RpbIndexResp = _reflection.GeneratedProtocolMessageType('RpbIndexResp', (_message.Message,), dict( + DESCRIPTOR = _RPBINDEXRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbIndexResp) + )) +_sym_db.RegisterMessage(RpbIndexResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbIndexBodyResp(_message.Message): - DESCRIPTOR = _RPBINDEXBODYRESP - +RpbIndexBodyResp = _reflection.GeneratedProtocolMessageType('RpbIndexBodyResp', (_message.Message,), dict( + DESCRIPTOR = _RPBINDEXBODYRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbIndexBodyResp) + )) +_sym_db.RegisterMessage(RpbIndexBodyResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCSBucketReq(_message.Message): - DESCRIPTOR = _RPBCSBUCKETREQ - +RpbCSBucketReq = _reflection.GeneratedProtocolMessageType('RpbCSBucketReq', (_message.Message,), dict( + DESCRIPTOR = _RPBCSBUCKETREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCSBucketReq) + )) +_sym_db.RegisterMessage(RpbCSBucketReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCSBucketResp(_message.Message): - DESCRIPTOR = _RPBCSBUCKETRESP - +RpbCSBucketResp = _reflection.GeneratedProtocolMessageType('RpbCSBucketResp', (_message.Message,), dict( + DESCRIPTOR = _RPBCSBUCKETRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCSBucketResp) + )) +_sym_db.RegisterMessage(RpbCSBucketResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbIndexObject(_message.Message): - DESCRIPTOR = _RPBINDEXOBJECT - +RpbIndexObject = _reflection.GeneratedProtocolMessageType('RpbIndexObject', (_message.Message,), dict( + DESCRIPTOR = _RPBINDEXOBJECT, + __module__ = 'riak_kv_pb2' # 
@@protoc_insertion_point(class_scope:RpbIndexObject) + )) +_sym_db.RegisterMessage(RpbIndexObject) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbContent(_message.Message): - DESCRIPTOR = _RPBCONTENT - +RpbContent = _reflection.GeneratedProtocolMessageType('RpbContent', (_message.Message,), dict( + DESCRIPTOR = _RPBCONTENT, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbContent) + )) +_sym_db.RegisterMessage(RpbContent) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbLink(_message.Message): - DESCRIPTOR = _RPBLINK - +RpbLink = _reflection.GeneratedProtocolMessageType('RpbLink', (_message.Message,), dict( + DESCRIPTOR = _RPBLINK, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbLink) + )) +_sym_db.RegisterMessage(RpbLink) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCounterUpdateReq(_message.Message): - DESCRIPTOR = _RPBCOUNTERUPDATEREQ - +RpbCounterUpdateReq = _reflection.GeneratedProtocolMessageType('RpbCounterUpdateReq', (_message.Message,), dict( + DESCRIPTOR = _RPBCOUNTERUPDATEREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCounterUpdateReq) + )) +_sym_db.RegisterMessage(RpbCounterUpdateReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCounterUpdateResp(_message.Message): - DESCRIPTOR = _RPBCOUNTERUPDATERESP - +RpbCounterUpdateResp = _reflection.GeneratedProtocolMessageType('RpbCounterUpdateResp', (_message.Message,), dict( + DESCRIPTOR = _RPBCOUNTERUPDATERESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCounterUpdateResp) + )) +_sym_db.RegisterMessage(RpbCounterUpdateResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCounterGetReq(_message.Message): - DESCRIPTOR = _RPBCOUNTERGETREQ - +RpbCounterGetReq = _reflection.GeneratedProtocolMessageType('RpbCounterGetReq', (_message.Message,), dict( + DESCRIPTOR = _RPBCOUNTERGETREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCounterGetReq) + )) +_sym_db.RegisterMessage(RpbCounterGetReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCounterGetResp(_message.Message): - DESCRIPTOR = _RPBCOUNTERGETRESP - +RpbCounterGetResp = _reflection.GeneratedProtocolMessageType('RpbCounterGetResp', (_message.Message,), dict( + DESCRIPTOR = _RPBCOUNTERGETRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCounterGetResp) + )) +_sym_db.RegisterMessage(RpbCounterGetResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketKeyPreflistReq(_message.Message): - DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTREQ - +RpbGetBucketKeyPreflistReq = _reflection.GeneratedProtocolMessageType('RpbGetBucketKeyPreflistReq', (_message.Message,), dict( + DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistReq) + )) +_sym_db.RegisterMessage(RpbGetBucketKeyPreflistReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketKeyPreflistResp(_message.Message): - DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTRESP - +RpbGetBucketKeyPreflistResp = _reflection.GeneratedProtocolMessageType('RpbGetBucketKeyPreflistResp', (_message.Message,), dict( + DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTRESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistResp) + )) +_sym_db.RegisterMessage(RpbGetBucketKeyPreflistResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) 
-class RpbBucketKeyPreflistItem(_message.Message): - DESCRIPTOR = _RPBBUCKETKEYPREFLISTITEM - +RpbBucketKeyPreflistItem = _reflection.GeneratedProtocolMessageType('RpbBucketKeyPreflistItem', (_message.Message,), dict( + DESCRIPTOR = _RPBBUCKETKEYPREFLISTITEM, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbBucketKeyPreflistItem) + )) +_sym_db.RegisterMessage(RpbBucketKeyPreflistItem) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCoverageReq(_message.Message): - DESCRIPTOR = _RPBCOVERAGEREQ - +RpbCoverageReq = _reflection.GeneratedProtocolMessageType('RpbCoverageReq', (_message.Message,), dict( + DESCRIPTOR = _RPBCOVERAGEREQ, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCoverageReq) + )) +_sym_db.RegisterMessage(RpbCoverageReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCoverageResp(_message.Message): - DESCRIPTOR = _RPBCOVERAGERESP - +RpbCoverageResp = _reflection.GeneratedProtocolMessageType('RpbCoverageResp', (_message.Message,), dict( + DESCRIPTOR = _RPBCOVERAGERESP, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCoverageResp) + )) +_sym_db.RegisterMessage(RpbCoverageResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCoverageEntry(_message.Message): - DESCRIPTOR = _RPBCOVERAGEENTRY - +RpbCoverageEntry = _reflection.GeneratedProtocolMessageType('RpbCoverageEntry', (_message.Message,), dict( + DESCRIPTOR = _RPBCOVERAGEENTRY, + __module__ = 'riak_kv_pb2' # @@protoc_insertion_point(class_scope:RpbCoverageEntry) + )) +_sym_db.RegisterMessage(RpbCoverageEntry) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakKvPB') +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\010RiakKvPB')) # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_pb2.py b/riak/pb/riak_pb2.py index a757940a..4ce87953 100644 --- a/riak/pb/riak_pb2.py +++ b/riak/pb/riak_pb2.py @@ -2,19 +2,26 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: riak.proto +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) +_sym_db = _symbol_database.Default() + DESCRIPTOR = _descriptor.FileDescriptor( name='riak.proto', package='', - serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xb0\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') + serialized_pb=_b('\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 
\x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xc7\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\x12\x15\n\rhll_precision\x18\x1d \x01(\r\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -43,9 +50,10 @@ ], containing_type=None, options=None, - serialized_start=1236, - serialized_end=1298, + serialized_start=1259, + serialized_end=1321, ) +_sym_db.RegisterEnumDescriptor(_RPBBUCKETPROPS_RPBREPLMODE) _RPBERRORRESP = _descriptor.Descriptor( @@ -58,7 +66,7 @@ _descriptor.FieldDescriptor( name='errmsg', full_name='RpbErrorResp.errmsg', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -78,6 +86,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=14, serialized_end=61, ) @@ -93,14 +103,14 @@ _descriptor.FieldDescriptor( name='node', full_name='RpbGetServerInfoResp.node', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='server_version', full_name='RpbGetServerInfoResp.server_version', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -113,6 +123,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=63, serialized_end=123, ) @@ -128,14 +140,14 @@ _descriptor.FieldDescriptor( name='key', full_name='RpbPair.key', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='value', full_name='RpbPair.value', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -148,6 +160,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=125, serialized_end=162, ) @@ -163,14 +177,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbGetBucketReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', full_name='RpbGetBucketReq.type', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -183,6 +197,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=164, serialized_end=211, ) @@ -211,6 +227,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=213, serialized_end=263, ) @@ -226,7 +244,7 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbSetBucketReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -240,7 +258,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbSetBucketReq.type', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -253,6 +271,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=265, serialized_end=344, ) @@ -268,14 +288,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbResetBucketReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), 
_descriptor.FieldDescriptor( name='type', full_name='RpbResetBucketReq.type', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -288,6 +308,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=346, serialized_end=395, ) @@ -303,7 +325,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbGetBucketTypeReq.type', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -316,6 +338,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=397, serialized_end=432, ) @@ -331,7 +355,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbSetBucketTypeReq.type', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -351,6 +375,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=434, serialized_end=501, ) @@ -366,14 +392,14 @@ _descriptor.FieldDescriptor( name='module', full_name='RpbModFun.module', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='function', full_name='RpbModFun.function', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -386,6 +412,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=503, serialized_end=548, ) @@ -408,7 +436,7 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbCommitHook.name', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -421,6 +449,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=550, serialized_end=607, ) @@ -583,7 +613,7 @@ _descriptor.FieldDescriptor( name='backend', full_name='RpbBucketProps.backend', index=21, number=22, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -604,14 +634,14 @@ _descriptor.FieldDescriptor( name='search_index', full_name='RpbBucketProps.search_index', index=24, number=25, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='datatype', 
full_name='RpbBucketProps.datatype', index=25, number=26, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -629,6 +659,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='hll_precision', full_name='RpbBucketProps.hll_precision', index=28, + number=29, type=13, cpp_type=3, label=1, + has_default_value=False, default_value=0, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -639,8 +676,10 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=610, - serialized_end=1298, + serialized_end=1321, ) @@ -654,14 +693,14 @@ _descriptor.FieldDescriptor( name='user', full_name='RpbAuthReq.user', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='password', full_name='RpbAuthReq.password', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -674,8 +713,10 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1300, - serialized_end=1344, + oneofs=[ + ], + serialized_start=1323, + serialized_end=1367, ) _RPBGETBUCKETRESP.fields_by_name['props'].message_type = _RPBBUCKETPROPS @@ -687,7 +728,7 @@ _RPBBUCKETPROPS.fields_by_name['chash_keyfun'].message_type = _RPBMODFUN _RPBBUCKETPROPS.fields_by_name['linkfun'].message_type = _RPBMODFUN _RPBBUCKETPROPS.fields_by_name['repl'].enum_type = _RPBBUCKETPROPS_RPBREPLMODE -_RPBBUCKETPROPS_RPBREPLMODE.containing_type = _RPBBUCKETPROPS; +_RPBBUCKETPROPS_RPBREPLMODE.containing_type = _RPBBUCKETPROPS DESCRIPTOR.message_types_by_name['RpbErrorResp'] = _RPBERRORRESP DESCRIPTOR.message_types_by_name['RpbGetServerInfoResp'] = _RPBGETSERVERINFORESP DESCRIPTOR.message_types_by_name['RpbPair'] = _RPBPAIR @@ -702,85 +743,98 @@ DESCRIPTOR.message_types_by_name['RpbBucketProps'] = _RPBBUCKETPROPS DESCRIPTOR.message_types_by_name['RpbAuthReq'] = _RPBAUTHREQ -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbErrorResp(_message.Message): - DESCRIPTOR = _RPBERRORRESP - +RpbErrorResp = _reflection.GeneratedProtocolMessageType('RpbErrorResp', (_message.Message,), dict( + DESCRIPTOR = _RPBERRORRESP, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbErrorResp) + )) +_sym_db.RegisterMessage(RpbErrorResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetServerInfoResp(_message.Message): - DESCRIPTOR = _RPBGETSERVERINFORESP - +RpbGetServerInfoResp = _reflection.GeneratedProtocolMessageType('RpbGetServerInfoResp', (_message.Message,), dict( + DESCRIPTOR = _RPBGETSERVERINFORESP, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbGetServerInfoResp) + )) +_sym_db.RegisterMessage(RpbGetServerInfoResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbPair(_message.Message): - DESCRIPTOR = _RPBPAIR - 
+RpbPair = _reflection.GeneratedProtocolMessageType('RpbPair', (_message.Message,), dict( + DESCRIPTOR = _RPBPAIR, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbPair) + )) +_sym_db.RegisterMessage(RpbPair) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketReq(_message.Message): - DESCRIPTOR = _RPBGETBUCKETREQ - +RpbGetBucketReq = _reflection.GeneratedProtocolMessageType('RpbGetBucketReq', (_message.Message,), dict( + DESCRIPTOR = _RPBGETBUCKETREQ, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbGetBucketReq) + )) +_sym_db.RegisterMessage(RpbGetBucketReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketResp(_message.Message): - DESCRIPTOR = _RPBGETBUCKETRESP - +RpbGetBucketResp = _reflection.GeneratedProtocolMessageType('RpbGetBucketResp', (_message.Message,), dict( + DESCRIPTOR = _RPBGETBUCKETRESP, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbGetBucketResp) + )) +_sym_db.RegisterMessage(RpbGetBucketResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSetBucketReq(_message.Message): - DESCRIPTOR = _RPBSETBUCKETREQ - +RpbSetBucketReq = _reflection.GeneratedProtocolMessageType('RpbSetBucketReq', (_message.Message,), dict( + DESCRIPTOR = _RPBSETBUCKETREQ, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbSetBucketReq) + )) +_sym_db.RegisterMessage(RpbSetBucketReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbResetBucketReq(_message.Message): - DESCRIPTOR = _RPBRESETBUCKETREQ - +RpbResetBucketReq = _reflection.GeneratedProtocolMessageType('RpbResetBucketReq', (_message.Message,), dict( + DESCRIPTOR = _RPBRESETBUCKETREQ, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbResetBucketReq) + )) +_sym_db.RegisterMessage(RpbResetBucketReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbGetBucketTypeReq(_message.Message): - DESCRIPTOR = _RPBGETBUCKETTYPEREQ - +RpbGetBucketTypeReq = _reflection.GeneratedProtocolMessageType('RpbGetBucketTypeReq', (_message.Message,), dict( + DESCRIPTOR = _RPBGETBUCKETTYPEREQ, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbGetBucketTypeReq) + )) +_sym_db.RegisterMessage(RpbGetBucketTypeReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSetBucketTypeReq(_message.Message): - DESCRIPTOR = _RPBSETBUCKETTYPEREQ - +RpbSetBucketTypeReq = _reflection.GeneratedProtocolMessageType('RpbSetBucketTypeReq', (_message.Message,), dict( + DESCRIPTOR = _RPBSETBUCKETTYPEREQ, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbSetBucketTypeReq) + )) +_sym_db.RegisterMessage(RpbSetBucketTypeReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbModFun(_message.Message): - DESCRIPTOR = _RPBMODFUN - +RpbModFun = _reflection.GeneratedProtocolMessageType('RpbModFun', (_message.Message,), dict( + DESCRIPTOR = _RPBMODFUN, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbModFun) + )) +_sym_db.RegisterMessage(RpbModFun) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbCommitHook(_message.Message): - DESCRIPTOR = _RPBCOMMITHOOK - +RpbCommitHook = _reflection.GeneratedProtocolMessageType('RpbCommitHook', (_message.Message,), dict( + DESCRIPTOR = _RPBCOMMITHOOK, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbCommitHook) + )) +_sym_db.RegisterMessage(RpbCommitHook) 
-@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbBucketProps(_message.Message): - DESCRIPTOR = _RPBBUCKETPROPS - +RpbBucketProps = _reflection.GeneratedProtocolMessageType('RpbBucketProps', (_message.Message,), dict( + DESCRIPTOR = _RPBBUCKETPROPS, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbBucketProps) + )) +_sym_db.RegisterMessage(RpbBucketProps) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbAuthReq(_message.Message): - DESCRIPTOR = _RPBAUTHREQ - +RpbAuthReq = _reflection.GeneratedProtocolMessageType('RpbAuthReq', (_message.Message,), dict( + DESCRIPTOR = _RPBAUTHREQ, + __module__ = 'riak.pb.riak_pb2' # @@protoc_insertion_point(class_scope:RpbAuthReq) + )) +_sym_db.RegisterMessage(RpbAuthReq) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\006RiakPB') +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\006RiakPB')) # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_search_pb2.py b/riak/pb/riak_search_pb2.py index 788b7cda..f487a9bd 100644 --- a/riak/pb/riak_search_pb2.py +++ b/riak/pb/riak_search_pb2.py @@ -2,12 +2,17 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_search.proto +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) +_sym_db = _symbol_database.Default() + import riak.pb.riak_pb2 @@ -15,7 +20,10 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_search.proto', package='', - serialized_pb='\n\x11riak_search.proto\x1a\nriak.proto\"(\n\x0cRpbSearchDoc\x12\x18\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x08.RpbPair\"\x9d\x01\n\x11RpbSearchQueryReq\x12\t\n\x01q\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12\x0c\n\x04rows\x18\x03 \x01(\r\x12\r\n\x05start\x18\x04 \x01(\r\x12\x0c\n\x04sort\x18\x05 \x01(\x0c\x12\x0e\n\x06\x66ilter\x18\x06 \x01(\x0c\x12\n\n\x02\x64\x66\x18\x07 \x01(\x0c\x12\n\n\x02op\x18\x08 \x01(\x0c\x12\n\n\x02\x66l\x18\t \x03(\x0c\x12\x0f\n\x07presort\x18\n \x01(\x0c\"W\n\x12RpbSearchQueryResp\x12\x1b\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\r.RpbSearchDoc\x12\x11\n\tmax_score\x18\x02 \x01(\x02\x12\x11\n\tnum_found\x18\x03 \x01(\rB\'\n\x17\x63om.basho.riak.protobufB\x0cRiakSearchPB') + serialized_pb=_b('\n\x11riak_search.proto\x1a\nriak.proto\"(\n\x0cRpbSearchDoc\x12\x18\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x08.RpbPair\"\x9d\x01\n\x11RpbSearchQueryReq\x12\t\n\x01q\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12\x0c\n\x04rows\x18\x03 \x01(\r\x12\r\n\x05start\x18\x04 \x01(\r\x12\x0c\n\x04sort\x18\x05 \x01(\x0c\x12\x0e\n\x06\x66ilter\x18\x06 \x01(\x0c\x12\n\n\x02\x64\x66\x18\x07 \x01(\x0c\x12\n\n\x02op\x18\x08 \x01(\x0c\x12\n\n\x02\x66l\x18\t \x03(\x0c\x12\x0f\n\x07presort\x18\n \x01(\x0c\"W\n\x12RpbSearchQueryResp\x12\x1b\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\r.RpbSearchDoc\x12\x11\n\tmax_score\x18\x02 \x01(\x02\x12\x11\n\tnum_found\x18\x03 \x01(\rB\'\n\x17\x63om.basho.riak.protobufB\x0cRiakSearchPB') + , + dependencies=[riak.pb.riak_pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -43,6 +51,8 @@ options=None, is_extendable=False, 
extension_ranges=[], + oneofs=[ + ], serialized_start=33, serialized_end=73, ) @@ -58,14 +68,14 @@ _descriptor.FieldDescriptor( name='q', full_name='RpbSearchQueryReq.q', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='index', full_name='RpbSearchQueryReq.index', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -86,28 +96,28 @@ _descriptor.FieldDescriptor( name='sort', full_name='RpbSearchQueryReq.sort', index=4, number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='filter', full_name='RpbSearchQueryReq.filter', index=5, number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='df', full_name='RpbSearchQueryReq.df', index=6, number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='op', full_name='RpbSearchQueryReq.op', index=7, number=8, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -121,7 +131,7 @@ _descriptor.FieldDescriptor( name='presort', full_name='RpbSearchQueryReq.presort', index=9, number=10, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -134,6 +144,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=76, serialized_end=233, ) @@ -176,6 +188,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=235, serialized_end=322, ) @@ -186,25 +200,28 @@ DESCRIPTOR.message_types_by_name['RpbSearchQueryReq'] = _RPBSEARCHQUERYREQ DESCRIPTOR.message_types_by_name['RpbSearchQueryResp'] = _RPBSEARCHQUERYRESP -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSearchDoc(_message.Message): - DESCRIPTOR = _RPBSEARCHDOC - +RpbSearchDoc = _reflection.GeneratedProtocolMessageType('RpbSearchDoc', (_message.Message,), dict( + DESCRIPTOR = _RPBSEARCHDOC, + __module__ = 'riak_search_pb2' # @@protoc_insertion_point(class_scope:RpbSearchDoc) + )) +_sym_db.RegisterMessage(RpbSearchDoc) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSearchQueryReq(_message.Message): - DESCRIPTOR = _RPBSEARCHQUERYREQ - +RpbSearchQueryReq = _reflection.GeneratedProtocolMessageType('RpbSearchQueryReq', 
(_message.Message,), dict( + DESCRIPTOR = _RPBSEARCHQUERYREQ, + __module__ = 'riak_search_pb2' # @@protoc_insertion_point(class_scope:RpbSearchQueryReq) + )) +_sym_db.RegisterMessage(RpbSearchQueryReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbSearchQueryResp(_message.Message): - DESCRIPTOR = _RPBSEARCHQUERYRESP - +RpbSearchQueryResp = _reflection.GeneratedProtocolMessageType('RpbSearchQueryResp', (_message.Message,), dict( + DESCRIPTOR = _RPBSEARCHQUERYRESP, + __module__ = 'riak_search_pb2' # @@protoc_insertion_point(class_scope:RpbSearchQueryResp) + )) +_sym_db.RegisterMessage(RpbSearchQueryResp) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\014RiakSearchPB') +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\014RiakSearchPB')) # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_ts_pb2.py b/riak/pb/riak_ts_pb2.py index 6e2ee149..b1031070 100644 --- a/riak/pb/riak_ts_pb2.py +++ b/riak/pb/riak_ts_pb2.py @@ -2,13 +2,18 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_ts.proto +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) +_sym_db = _symbol_database.Default() + import riak.pb.riak_pb2 @@ -16,7 +21,10 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_ts.proto', package='', - serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 
\x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 \x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 \x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') + serialized_pb=_b('\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 
\x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 \x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') + , + dependencies=[riak.pb.riak_pb2.DESCRIPTOR,]) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) _TSCOLUMNTYPE = _descriptor.EnumDescriptor( name='TsColumnType', @@ -50,6 +58,7 @@ serialized_start=1359, serialized_end=1438, ) +_sym_db.RegisterEnumDescriptor(_TSCOLUMNTYPE) TsColumnType = enum_type_wrapper.EnumTypeWrapper(_TSCOLUMNTYPE) VARCHAR = 0 @@ -84,7 +93,7 @@ _descriptor.FieldDescriptor( name='cover_context', full_name='TsQueryReq.cover_context', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -97,6 +106,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=29, serialized_end=120, ) @@ -139,6 +150,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=122, serialized_end=216, ) @@ -154,7 +167,7 @@ _descriptor.FieldDescriptor( name='table', full_name='TsGetReq.table', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -181,6 +194,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=218, serialized_end=282, ) @@ -216,6 +231,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=284, serialized_end=356, ) @@ -231,7 +248,7 @@ _descriptor.FieldDescriptor( name='table', full_name='TsPutReq.table', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -258,6 +275,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=358, serialized_end=444, ) @@ -279,6 +298,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=446, serialized_end=457, ) @@ -294,7 +315,7 @@ _descriptor.FieldDescriptor( name='table', full_name='TsDelReq.table', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -308,7 +329,7 @@ _descriptor.FieldDescriptor( name='vclock', full_name='TsDelReq.vclock', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -328,6 +349,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=459, serialized_end=539, ) @@ -349,6 +372,8 @@ options=None, is_extendable=False, extension_ranges=[], + 
oneofs=[ + ], serialized_start=541, serialized_end=552, ) @@ -364,7 +389,7 @@ _descriptor.FieldDescriptor( name='base', full_name='TsInterpolation.base', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -384,6 +409,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=554, serialized_end=619, ) @@ -399,7 +426,7 @@ _descriptor.FieldDescriptor( name='name', full_name='TsColumnDescription.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -419,6 +446,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=621, serialized_end=685, ) @@ -447,6 +476,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=687, serialized_end=718, ) @@ -462,7 +493,7 @@ _descriptor.FieldDescriptor( name='varchar_value', full_name='TsCell.varchar_value', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -503,6 +534,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=720, serialized_end=843, ) @@ -518,7 +551,7 @@ _descriptor.FieldDescriptor( name='table', full_name='TsListKeysReq.table', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -538,6 +571,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=845, serialized_end=892, ) @@ -573,6 +608,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=894, serialized_end=946, ) @@ -595,14 +632,14 @@ _descriptor.FieldDescriptor( name='table', full_name='TsCoverageReq.table', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='replace_cover', full_name='TsCoverageReq.replace_cover', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -622,6 +659,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=948, serialized_end=1061, ) @@ -650,6 +689,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1063, serialized_end=1114, ) @@ -665,7 +706,7 @@ _descriptor.FieldDescriptor( name='ip', full_name='TsCoverageEntry.ip', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -679,7 +720,7 @@ _descriptor.FieldDescriptor( name='cover_context', full_name='TsCoverageEntry.cover_context', index=2, number=3, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -699,6 +740,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1116, serialized_end=1207, ) @@ -714,7 +757,7 @@ _descriptor.FieldDescriptor( name='field_name', full_name='TsRange.field_name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -749,7 +792,7 @@ _descriptor.FieldDescriptor( name='desc', full_name='TsRange.desc', index=5, number=6, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -762,6 +805,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=1210, serialized_end=1357, ) @@ -800,116 +845,135 @@ DESCRIPTOR.message_types_by_name['TsCoverageResp'] = _TSCOVERAGERESP DESCRIPTOR.message_types_by_name['TsCoverageEntry'] = _TSCOVERAGEENTRY DESCRIPTOR.message_types_by_name['TsRange'] = _TSRANGE +DESCRIPTOR.enum_types_by_name['TsColumnType'] = _TSCOLUMNTYPE -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsQueryReq(_message.Message): - DESCRIPTOR = _TSQUERYREQ - +TsQueryReq = _reflection.GeneratedProtocolMessageType('TsQueryReq', (_message.Message,), dict( + DESCRIPTOR = _TSQUERYREQ, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsQueryReq) + )) +_sym_db.RegisterMessage(TsQueryReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsQueryResp(_message.Message): - DESCRIPTOR = _TSQUERYRESP - +TsQueryResp = _reflection.GeneratedProtocolMessageType('TsQueryResp', (_message.Message,), dict( + DESCRIPTOR = _TSQUERYRESP, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsQueryResp) + )) +_sym_db.RegisterMessage(TsQueryResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsGetReq(_message.Message): - DESCRIPTOR = _TSGETREQ - +TsGetReq = _reflection.GeneratedProtocolMessageType('TsGetReq', (_message.Message,), dict( + DESCRIPTOR = _TSGETREQ, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsGetReq) + )) +_sym_db.RegisterMessage(TsGetReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsGetResp(_message.Message): - DESCRIPTOR = _TSGETRESP - +TsGetResp = _reflection.GeneratedProtocolMessageType('TsGetResp', (_message.Message,), dict( + DESCRIPTOR = _TSGETRESP, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsGetResp) + )) +_sym_db.RegisterMessage(TsGetResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsPutReq(_message.Message): - DESCRIPTOR = _TSPUTREQ - +TsPutReq = _reflection.GeneratedProtocolMessageType('TsPutReq', (_message.Message,), dict( + DESCRIPTOR = _TSPUTREQ, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsPutReq) + )) +_sym_db.RegisterMessage(TsPutReq) 
-@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsPutResp(_message.Message): - DESCRIPTOR = _TSPUTRESP - +TsPutResp = _reflection.GeneratedProtocolMessageType('TsPutResp', (_message.Message,), dict( + DESCRIPTOR = _TSPUTRESP, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsPutResp) + )) +_sym_db.RegisterMessage(TsPutResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsDelReq(_message.Message): - DESCRIPTOR = _TSDELREQ - +TsDelReq = _reflection.GeneratedProtocolMessageType('TsDelReq', (_message.Message,), dict( + DESCRIPTOR = _TSDELREQ, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsDelReq) + )) +_sym_db.RegisterMessage(TsDelReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsDelResp(_message.Message): - DESCRIPTOR = _TSDELRESP - +TsDelResp = _reflection.GeneratedProtocolMessageType('TsDelResp', (_message.Message,), dict( + DESCRIPTOR = _TSDELRESP, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsDelResp) + )) +_sym_db.RegisterMessage(TsDelResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsInterpolation(_message.Message): - DESCRIPTOR = _TSINTERPOLATION - +TsInterpolation = _reflection.GeneratedProtocolMessageType('TsInterpolation', (_message.Message,), dict( + DESCRIPTOR = _TSINTERPOLATION, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsInterpolation) + )) +_sym_db.RegisterMessage(TsInterpolation) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsColumnDescription(_message.Message): - DESCRIPTOR = _TSCOLUMNDESCRIPTION - +TsColumnDescription = _reflection.GeneratedProtocolMessageType('TsColumnDescription', (_message.Message,), dict( + DESCRIPTOR = _TSCOLUMNDESCRIPTION, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsColumnDescription) + )) +_sym_db.RegisterMessage(TsColumnDescription) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsRow(_message.Message): - DESCRIPTOR = _TSROW - +TsRow = _reflection.GeneratedProtocolMessageType('TsRow', (_message.Message,), dict( + DESCRIPTOR = _TSROW, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsRow) + )) +_sym_db.RegisterMessage(TsRow) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsCell(_message.Message): - DESCRIPTOR = _TSCELL - +TsCell = _reflection.GeneratedProtocolMessageType('TsCell', (_message.Message,), dict( + DESCRIPTOR = _TSCELL, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsCell) + )) +_sym_db.RegisterMessage(TsCell) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsListKeysReq(_message.Message): - DESCRIPTOR = _TSLISTKEYSREQ - +TsListKeysReq = _reflection.GeneratedProtocolMessageType('TsListKeysReq', (_message.Message,), dict( + DESCRIPTOR = _TSLISTKEYSREQ, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsListKeysReq) + )) +_sym_db.RegisterMessage(TsListKeysReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsListKeysResp(_message.Message): - DESCRIPTOR = _TSLISTKEYSRESP - +TsListKeysResp = _reflection.GeneratedProtocolMessageType('TsListKeysResp', (_message.Message,), dict( + DESCRIPTOR = _TSLISTKEYSRESP, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsListKeysResp) + )) +_sym_db.RegisterMessage(TsListKeysResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsCoverageReq(_message.Message): - DESCRIPTOR = _TSCOVERAGEREQ - +TsCoverageReq = 
_reflection.GeneratedProtocolMessageType('TsCoverageReq', (_message.Message,), dict( + DESCRIPTOR = _TSCOVERAGEREQ, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsCoverageReq) + )) +_sym_db.RegisterMessage(TsCoverageReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsCoverageResp(_message.Message): - DESCRIPTOR = _TSCOVERAGERESP - +TsCoverageResp = _reflection.GeneratedProtocolMessageType('TsCoverageResp', (_message.Message,), dict( + DESCRIPTOR = _TSCOVERAGERESP, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsCoverageResp) + )) +_sym_db.RegisterMessage(TsCoverageResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsCoverageEntry(_message.Message): - DESCRIPTOR = _TSCOVERAGEENTRY - +TsCoverageEntry = _reflection.GeneratedProtocolMessageType('TsCoverageEntry', (_message.Message,), dict( + DESCRIPTOR = _TSCOVERAGEENTRY, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsCoverageEntry) + )) +_sym_db.RegisterMessage(TsCoverageEntry) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class TsRange(_message.Message): - DESCRIPTOR = _TSRANGE - +TsRange = _reflection.GeneratedProtocolMessageType('TsRange', (_message.Message,), dict( + DESCRIPTOR = _TSRANGE, + __module__ = 'riak_ts_pb2' # @@protoc_insertion_point(class_scope:TsRange) + )) +_sym_db.RegisterMessage(TsRange) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakTsPB') +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\010RiakTsPB')) # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_yokozuna_pb2.py b/riak/pb/riak_yokozuna_pb2.py index 1673f538..7c9b6798 100644 --- a/riak/pb/riak_yokozuna_pb2.py +++ b/riak/pb/riak_yokozuna_pb2.py @@ -2,19 +2,26 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: riak_yokozuna.proto +import sys +_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) +_sym_db = _symbol_database.Default() + DESCRIPTOR = _descriptor.FileDescriptor( name='riak_yokozuna.proto', package='', - serialized_pb='\n\x13riak_yokozuna.proto\"?\n\x10RpbYokozunaIndex\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0e\n\x06schema\x18\x02 \x01(\x0c\x12\r\n\x05n_val\x18\x03 \x01(\r\"&\n\x16RpbYokozunaIndexGetReq\x12\x0c\n\x04name\x18\x01 \x01(\x0c\";\n\x17RpbYokozunaIndexGetResp\x12 \n\x05index\x18\x01 \x03(\x0b\x32\x11.RpbYokozunaIndex\"K\n\x16RpbYokozunaIndexPutReq\x12 \n\x05index\x18\x01 \x02(\x0b\x32\x11.RpbYokozunaIndex\x12\x0f\n\x07timeout\x18\x02 \x01(\r\")\n\x19RpbYokozunaIndexDeleteReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\"2\n\x11RpbYokozunaSchema\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"=\n\x17RpbYokozunaSchemaPutReq\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchema\"\'\n\x17RpbYokozunaSchemaGetReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\">\n\x18RpbYokozunaSchemaGetResp\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchemaB)\n\x17\x63om.basho.riak.protobufB\x0eRiakYokozunaPB') + serialized_pb=_b('\n\x13riak_yokozuna.proto\"?\n\x10RpbYokozunaIndex\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0e\n\x06schema\x18\x02 \x01(\x0c\x12\r\n\x05n_val\x18\x03 \x01(\r\"&\n\x16RpbYokozunaIndexGetReq\x12\x0c\n\x04name\x18\x01 \x01(\x0c\";\n\x17RpbYokozunaIndexGetResp\x12 \n\x05index\x18\x01 \x03(\x0b\x32\x11.RpbYokozunaIndex\"K\n\x16RpbYokozunaIndexPutReq\x12 \n\x05index\x18\x01 \x02(\x0b\x32\x11.RpbYokozunaIndex\x12\x0f\n\x07timeout\x18\x02 \x01(\r\")\n\x19RpbYokozunaIndexDeleteReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\"2\n\x11RpbYokozunaSchema\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"=\n\x17RpbYokozunaSchemaPutReq\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchema\"\'\n\x17RpbYokozunaSchemaGetReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\">\n\x18RpbYokozunaSchemaGetResp\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchemaB)\n\x17\x63om.basho.riak.protobufB\x0eRiakYokozunaPB') +) +_sym_db.RegisterFileDescriptor(DESCRIPTOR) @@ -29,14 +36,14 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaIndex.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='schema', full_name='RpbYokozunaIndex.schema', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -56,6 +63,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=23, serialized_end=86, ) @@ -71,7 +80,7 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaIndexGetReq.name', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -84,6 +93,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=88, serialized_end=126, ) @@ -112,6 +123,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=128, serialized_end=187, ) @@ -147,6 +160,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=189, serialized_end=264, ) @@ -162,7 +177,7 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaIndexDeleteReq.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -175,6 +190,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=266, serialized_end=307, ) @@ -190,14 +207,14 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaSchema.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='content', full_name='RpbYokozunaSchema.content', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -210,6 +227,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=309, serialized_end=359, ) @@ -238,6 +257,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=361, serialized_end=422, ) @@ -253,7 +274,7 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaSchemaGetReq.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value="", + has_default_value=False, default_value=_b(""), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -266,6 +287,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=424, serialized_end=463, ) @@ -294,6 +317,8 @@ options=None, is_extendable=False, extension_ranges=[], + oneofs=[ + ], serialized_start=465, serialized_end=527, ) @@ -312,61 +337,70 @@ DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetReq'] = _RPBYOKOZUNASCHEMAGETREQ DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetResp'] = _RPBYOKOZUNASCHEMAGETRESP -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndex(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEX - +RpbYokozunaIndex = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndex', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNAINDEX, + __module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaIndex) + )) +_sym_db.RegisterMessage(RpbYokozunaIndex) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndexGetReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEXGETREQ - +RpbYokozunaIndexGetReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndexGetReq', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNAINDEXGETREQ, + 
__module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetReq) + )) +_sym_db.RegisterMessage(RpbYokozunaIndexGetReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndexGetResp(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEXGETRESP - +RpbYokozunaIndexGetResp = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndexGetResp', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNAINDEXGETRESP, + __module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetResp) + )) +_sym_db.RegisterMessage(RpbYokozunaIndexGetResp) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndexPutReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEXPUTREQ - +RpbYokozunaIndexPutReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndexPutReq', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNAINDEXPUTREQ, + __module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaIndexPutReq) + )) +_sym_db.RegisterMessage(RpbYokozunaIndexPutReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaIndexDeleteReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNAINDEXDELETEREQ - +RpbYokozunaIndexDeleteReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndexDeleteReq', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNAINDEXDELETEREQ, + __module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaIndexDeleteReq) + )) +_sym_db.RegisterMessage(RpbYokozunaIndexDeleteReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaSchema(_message.Message): - DESCRIPTOR = _RPBYOKOZUNASCHEMA - +RpbYokozunaSchema = _reflection.GeneratedProtocolMessageType('RpbYokozunaSchema', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNASCHEMA, + __module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaSchema) + )) +_sym_db.RegisterMessage(RpbYokozunaSchema) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaSchemaPutReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNASCHEMAPUTREQ - +RpbYokozunaSchemaPutReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaSchemaPutReq', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNASCHEMAPUTREQ, + __module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaPutReq) + )) +_sym_db.RegisterMessage(RpbYokozunaSchemaPutReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaSchemaGetReq(_message.Message): - DESCRIPTOR = _RPBYOKOZUNASCHEMAGETREQ - +RpbYokozunaSchemaGetReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaSchemaGetReq', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNASCHEMAGETREQ, + __module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetReq) + )) +_sym_db.RegisterMessage(RpbYokozunaSchemaGetReq) -@add_metaclass(_reflection.GeneratedProtocolMessageType) -class RpbYokozunaSchemaGetResp(_message.Message): - DESCRIPTOR = _RPBYOKOZUNASCHEMAGETRESP - +RpbYokozunaSchemaGetResp = _reflection.GeneratedProtocolMessageType('RpbYokozunaSchemaGetResp', (_message.Message,), dict( + DESCRIPTOR = _RPBYOKOZUNASCHEMAGETRESP, + __module__ = 'riak_yokozuna_pb2' # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetResp) + )) +_sym_db.RegisterMessage(RpbYokozunaSchemaGetResp) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\016RiakYokozunaPB') 
+DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\016RiakYokozunaPB'))
 # @@protoc_insertion_point(module_scope)
diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py
index 3e945920..b4e44684 100644
--- a/riak/tests/test_datatypes.py
+++ b/riak/tests/test_datatypes.py
@@ -97,19 +97,29 @@ def op(self, dtype):
     def check_op_output(self, op):
         self.assertIn('adds', op)
         self.assertItemsEqual(op['adds'], ['bar', 'foo'])
-        self.assertIn('removes', op)
-        self.assertIn('foo', op['removes'])
 
     def test_removes_require_context(self):
         dtype = self.dtype(self.bucket, 'key')
         with self.assertRaises(datatypes.ContextRequired):
             dtype.discard('foo')
 
-        dtype._context = 'blah'
         dtype.discard('foo')
         self.assertTrue(dtype.modified)
 
 
+class HllUnitTests(DatatypeUnitTestBase, unittest.TestCase):
+    dtype = datatypes.Hll
+
+    def op(self, dtype):
+        dtype._context = 'hll_context'
+        dtype.add('foo')
+        dtype.add('bar')
+
+    def check_op_output(self, op):
+        self.assertIn('adds', op)
+        self.assertItemsEqual(op['adds'], ['bar', 'foo'])
+
+
 class MapUnitTests(DatatypeUnitTestBase, unittest.TestCase):
     dtype = datatypes.Map
 
diff --git a/riak_pb b/riak_pb
index d14b2c97..f5af9ffe 160000
--- a/riak_pb
+++ b/riak_pb
@@ -1 +1 @@
-Subproject commit d14b2c9758427f47106ef8064d39415b59076f72
+Subproject commit f5af9ffe04eb27689d483509de26574bdf70343f

From 1aa2c84b64b63185bbe57542100ce2c3835f0a76 Mon Sep 17 00:00:00 2001
From: Luke Bakken
Date: Tue, 6 Sep 2016 15:23:07 -0700
Subject: [PATCH 243/324] Fix flake error

---
 riak/datatypes/hll.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/riak/datatypes/hll.py b/riak/datatypes/hll.py
index dbd68d4e..16d006f0 100644
--- a/riak/datatypes/hll.py
+++ b/riak/datatypes/hll.py
@@ -1,4 +1,3 @@
-import collections
 import six
 
 from .datatype import Datatype

From c1113402e9f64636ae155cd9828c85f085c44074 Mon Sep 17 00:00:00 2001
From: Luke Bakken
Date: Tue, 6 Sep 2016 15:36:57 -0700
Subject: [PATCH 244/324] Add check for protoc 2.5.0, re-gen files

---
 Makefile                     |   7 +
 riak/pb/riak_dt_pb2.py       | 189 +++++---------
 riak/pb/riak_kv_pb2.py       | 484 ++++++++++++++---------------
 riak/pb/riak_pb2.py          | 199 ++++++--------
 riak/pb/riak_search_pb2.py   |  59 ++---
 riak/pb/riak_ts_pb2.py       | 242 +++++++-----------
 riak/pb/riak_yokozuna_pb2.py | 124 ++++-----
 riak/tests/test_datatypes.py |   2 +-
 8 files changed, 500 insertions(+), 806 deletions(-)

diff --git a/Makefile b/Makefile
index a7a6e9f4..f3c32efd 100644
--- a/Makefile
+++ b/Makefile
@@ -15,6 +15,7 @@ unexport LC_TELEPHONE
 unexport LC_TIME
 
 PANDOC_VERSION := $(shell pandoc --version)
+PROTOC_VERSION := $(shell protoc --version)
 
 clean: pb_clean
 
@@ -23,6 +24,12 @@ pb_clean:
 	@rm -rf riak/pb/*_pb2.py riak/pb/*.pyc riak/pb/__pycache__ __pycache__ py-build
 
 pb_compile: pb_clean
+ifeq ($(PROTOC_VERSION),)
+	$(error The protoc command is required to parse proto files)
+endif
+ifneq ($(PROTOC_VERSION),libprotoc 2.5.0)
+	$(error protoc must be version 2.5.0)
+endif
 	@echo "==> Python (compile)"
 	@protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto
 	@python setup.py build_messages
diff --git a/riak/pb/riak_dt_pb2.py b/riak/pb/riak_dt_pb2.py
index 394be5b1..5a4a2f86 100644
--- a/riak/pb/riak_dt_pb2.py
+++ b/riak/pb/riak_dt_pb2.py
@@ -2,26 +2,19 @@ # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: riak_dt.proto -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) -_sym_db = _symbol_database.Default() - DESCRIPTOR = _descriptor.FileDescriptor( name='riak_dt.proto', package='', - serialized_pb=_b('\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"d\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x04 \x01(\x04\"\x90\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\"2\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\x12\x07\n\x03HLL\x10\x04\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\x15\n\x05HllOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"n\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\x12\x16\n\x06hll_op\x18\x04 \x01(\x0b\x32\x06.HllOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n 
\x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"\x87\x01\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 \x03(\x0c\x12\x1c\n\tmap_value\x18\x05 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x06 \x01(\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') -) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + serialized_pb='\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"d\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x04 \x01(\x04\"\x90\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\"2\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\x12\x07\n\x03HLL\x10\x04\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\x15\n\x05HllOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"n\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\x12\x16\n\x06hll_op\x18\x04 \x01(\x0b\x32\x06.HllOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b 
\x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"\x87\x01\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 \x03(\x0c\x12\x1c\n\tmap_value\x18\x05 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x06 \x01(\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') @@ -57,7 +50,6 @@ serialized_start=82, serialized_end=151, ) -_sym_db.RegisterEnumDescriptor(_MAPFIELD_MAPFIELDTYPE) _DTFETCHRESP_DATATYPE = _descriptor.EnumDescriptor( name='DataType', @@ -87,7 +79,6 @@ serialized_start=715, serialized_end=765, ) -_sym_db.RegisterEnumDescriptor(_DTFETCHRESP_DATATYPE) _MAPUPDATE_FLAGOP = _descriptor.EnumDescriptor( name='FlagOp', @@ -109,7 +100,6 @@ serialized_start=1039, serialized_end=1072, ) -_sym_db.RegisterEnumDescriptor(_MAPUPDATE_FLAGOP) _MAPFIELD = _descriptor.Descriptor( @@ -122,7 +112,7 @@ _descriptor.FieldDescriptor( name='name', full_name='MapField.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -143,8 +133,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=18, serialized_end=151, ) @@ -181,7 +169,7 @@ _descriptor.FieldDescriptor( name='register_value', full_name='MapEntry.register_value', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -208,8 +196,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=154, serialized_end=306, ) @@ -225,21 +211,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='DtFetchReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='DtFetchReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', full_name='DtFetchReq.type', index=2, number=3, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -308,8 +294,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=309, serialized_end=516, ) @@ -359,8 +343,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=518, serialized_end=618, ) @@ -376,7 +358,7 @@ _descriptor.FieldDescriptor( name='context', full_name='DtFetchResp.context', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, 
options=None), @@ -404,8 +386,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=621, serialized_end=765, ) @@ -434,8 +414,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=767, serialized_end=797, ) @@ -471,8 +449,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=799, serialized_end=837, ) @@ -501,8 +477,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=839, serialized_end=860, ) @@ -539,7 +513,7 @@ _descriptor.FieldDescriptor( name='register_op', full_name='MapUpdate.register_op', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -567,8 +541,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=863, serialized_end=1072, ) @@ -604,8 +576,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1074, serialized_end=1138, ) @@ -655,8 +625,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1140, serialized_end=1250, ) @@ -672,28 +640,28 @@ _descriptor.FieldDescriptor( name='bucket', full_name='DtUpdateReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='DtUpdateReq.key', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', full_name='DtUpdateReq.type', index=2, number=3, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='context', full_name='DtUpdateReq.context', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -769,8 +737,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1253, serialized_end=1494, ) @@ -786,14 +752,14 @@ _descriptor.FieldDescriptor( name='key', full_name='DtUpdateResp.key', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='context', full_name='DtUpdateResp.context', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -834,26 +800,24 @@ options=None, 
is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1497, serialized_end=1632, ) _MAPFIELD.fields_by_name['type'].enum_type = _MAPFIELD_MAPFIELDTYPE -_MAPFIELD_MAPFIELDTYPE.containing_type = _MAPFIELD +_MAPFIELD_MAPFIELDTYPE.containing_type = _MAPFIELD; _MAPENTRY.fields_by_name['field'].message_type = _MAPFIELD _MAPENTRY.fields_by_name['map_value'].message_type = _MAPENTRY _DTVALUE.fields_by_name['map_value'].message_type = _MAPENTRY _DTFETCHRESP.fields_by_name['type'].enum_type = _DTFETCHRESP_DATATYPE _DTFETCHRESP.fields_by_name['value'].message_type = _DTVALUE -_DTFETCHRESP_DATATYPE.containing_type = _DTFETCHRESP +_DTFETCHRESP_DATATYPE.containing_type = _DTFETCHRESP; _MAPUPDATE.fields_by_name['field'].message_type = _MAPFIELD _MAPUPDATE.fields_by_name['counter_op'].message_type = _COUNTEROP _MAPUPDATE.fields_by_name['set_op'].message_type = _SETOP _MAPUPDATE.fields_by_name['flag_op'].enum_type = _MAPUPDATE_FLAGOP _MAPUPDATE.fields_by_name['map_op'].message_type = _MAPOP -_MAPUPDATE_FLAGOP.containing_type = _MAPUPDATE +_MAPUPDATE_FLAGOP.containing_type = _MAPUPDATE; _MAPOP.fields_by_name['removes'].message_type = _MAPFIELD _MAPOP.fields_by_name['updates'].message_type = _MAPUPDATE _DTOP.fields_by_name['counter_op'].message_type = _COUNTEROP @@ -876,98 +840,85 @@ DESCRIPTOR.message_types_by_name['DtUpdateReq'] = _DTUPDATEREQ DESCRIPTOR.message_types_by_name['DtUpdateResp'] = _DTUPDATERESP -MapField = _reflection.GeneratedProtocolMessageType('MapField', (_message.Message,), dict( - DESCRIPTOR = _MAPFIELD, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapField(_message.Message): + DESCRIPTOR = _MAPFIELD + # @@protoc_insertion_point(class_scope:MapField) - )) -_sym_db.RegisterMessage(MapField) -MapEntry = _reflection.GeneratedProtocolMessageType('MapEntry', (_message.Message,), dict( - DESCRIPTOR = _MAPENTRY, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapEntry(_message.Message): + DESCRIPTOR = _MAPENTRY + # @@protoc_insertion_point(class_scope:MapEntry) - )) -_sym_db.RegisterMessage(MapEntry) -DtFetchReq = _reflection.GeneratedProtocolMessageType('DtFetchReq', (_message.Message,), dict( - DESCRIPTOR = _DTFETCHREQ, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtFetchReq(_message.Message): + DESCRIPTOR = _DTFETCHREQ + # @@protoc_insertion_point(class_scope:DtFetchReq) - )) -_sym_db.RegisterMessage(DtFetchReq) -DtValue = _reflection.GeneratedProtocolMessageType('DtValue', (_message.Message,), dict( - DESCRIPTOR = _DTVALUE, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtValue(_message.Message): + DESCRIPTOR = _DTVALUE + # @@protoc_insertion_point(class_scope:DtValue) - )) -_sym_db.RegisterMessage(DtValue) -DtFetchResp = _reflection.GeneratedProtocolMessageType('DtFetchResp', (_message.Message,), dict( - DESCRIPTOR = _DTFETCHRESP, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtFetchResp(_message.Message): + DESCRIPTOR = _DTFETCHRESP + # @@protoc_insertion_point(class_scope:DtFetchResp) - )) -_sym_db.RegisterMessage(DtFetchResp) -CounterOp = _reflection.GeneratedProtocolMessageType('CounterOp', (_message.Message,), dict( - DESCRIPTOR = _COUNTEROP, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class CounterOp(_message.Message): + DESCRIPTOR = _COUNTEROP + # 
@@protoc_insertion_point(class_scope:CounterOp) - )) -_sym_db.RegisterMessage(CounterOp) -SetOp = _reflection.GeneratedProtocolMessageType('SetOp', (_message.Message,), dict( - DESCRIPTOR = _SETOP, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class SetOp(_message.Message): + DESCRIPTOR = _SETOP + # @@protoc_insertion_point(class_scope:SetOp) - )) -_sym_db.RegisterMessage(SetOp) -HllOp = _reflection.GeneratedProtocolMessageType('HllOp', (_message.Message,), dict( - DESCRIPTOR = _HLLOP, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class HllOp(_message.Message): + DESCRIPTOR = _HLLOP + # @@protoc_insertion_point(class_scope:HllOp) - )) -_sym_db.RegisterMessage(HllOp) -MapUpdate = _reflection.GeneratedProtocolMessageType('MapUpdate', (_message.Message,), dict( - DESCRIPTOR = _MAPUPDATE, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapUpdate(_message.Message): + DESCRIPTOR = _MAPUPDATE + # @@protoc_insertion_point(class_scope:MapUpdate) - )) -_sym_db.RegisterMessage(MapUpdate) -MapOp = _reflection.GeneratedProtocolMessageType('MapOp', (_message.Message,), dict( - DESCRIPTOR = _MAPOP, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class MapOp(_message.Message): + DESCRIPTOR = _MAPOP + # @@protoc_insertion_point(class_scope:MapOp) - )) -_sym_db.RegisterMessage(MapOp) -DtOp = _reflection.GeneratedProtocolMessageType('DtOp', (_message.Message,), dict( - DESCRIPTOR = _DTOP, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtOp(_message.Message): + DESCRIPTOR = _DTOP + # @@protoc_insertion_point(class_scope:DtOp) - )) -_sym_db.RegisterMessage(DtOp) -DtUpdateReq = _reflection.GeneratedProtocolMessageType('DtUpdateReq', (_message.Message,), dict( - DESCRIPTOR = _DTUPDATEREQ, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtUpdateReq(_message.Message): + DESCRIPTOR = _DTUPDATEREQ + # @@protoc_insertion_point(class_scope:DtUpdateReq) - )) -_sym_db.RegisterMessage(DtUpdateReq) -DtUpdateResp = _reflection.GeneratedProtocolMessageType('DtUpdateResp', (_message.Message,), dict( - DESCRIPTOR = _DTUPDATERESP, - __module__ = 'riak_dt_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class DtUpdateResp(_message.Message): + DESCRIPTOR = _DTUPDATERESP + # @@protoc_insertion_point(class_scope:DtUpdateResp) - )) -_sym_db.RegisterMessage(DtUpdateResp) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\010RiakDtPB')) +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakDtPB') # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_kv_pb2.py b/riak/pb/riak_kv_pb2.py index ce8ab338..09797052 100644 --- a/riak/pb/riak_kv_pb2.py +++ b/riak/pb/riak_kv_pb2.py @@ -2,17 +2,12 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: riak_kv.proto -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) -_sym_db = _symbol_database.Default() - import riak.pb.riak_pb2 @@ -20,10 +15,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_kv.proto', package='', - serialized_pb=_b('\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xf9\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 
\x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 \x01(\x0c\x12\x11\n\trange_min\x18\x05 \x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 \x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\x12\x15\n\rcover_context\x18\x0f \x01(\x0c\x12\x13\n\x0breturn_body\x18\x10 \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"X\n\x10RpbIndexBodyResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xd8\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\x12\x15\n\rcover_context\x18\n \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\"x\n\x0eRpbCoverageReq\x12\x0c\n\x04type\x18\x01 \x01(\x0c\x12\x0e\n\x06\x62ucket\x18\x02 
\x02(\x0c\x12\x16\n\x0emin_partitions\x18\x03 \x01(\r\x12\x15\n\rreplace_cover\x18\x04 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x05 \x03(\x0c\"5\n\x0fRpbCoverageResp\x12\"\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x11.RpbCoverageEntry\"Z\n\x10RpbCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rkeyspace_desc\x18\x03 \x01(\x0c\x12\x15\n\rcover_context\x18\x04 \x02(\x0c\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') - , - dependencies=[riak.pb.riak_pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + serialized_pb='\n\rriak_kv.proto\x1a\nriak.proto\"\'\n\x12RpbGetClientIdResp\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"&\n\x11RpbSetClientIdReq\x12\x11\n\tclient_id\x18\x01 \x02(\x0c\"\xe9\x01\n\tRpbGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\x12\x13\n\x0bif_modified\x18\x07 \x01(\x0c\x12\x0c\n\x04head\x18\x08 \x01(\x08\x12\x15\n\rdeletedvclock\x18\t \x01(\x08\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"M\n\nRpbGetResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x11\n\tunchanged\x18\x03 \x01(\x08\"\xa6\x02\n\tRpbPutReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x1c\n\x07\x63ontent\x18\x04 \x02(\x0b\x32\x0b.RpbContent\x12\t\n\x01w\x18\x05 \x01(\r\x12\n\n\x02\x64w\x18\x06 \x01(\r\x12\x13\n\x0breturn_body\x18\x07 \x01(\x08\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x17\n\x0fif_not_modified\x18\t \x01(\x08\x12\x15\n\rif_none_match\x18\n \x01(\x08\x12\x13\n\x0breturn_head\x18\x0b \x01(\x08\x12\x0f\n\x07timeout\x18\x0c \x01(\r\x12\x0c\n\x04\x61sis\x18\r \x01(\x08\x12\x15\n\rsloppy_quorum\x18\x0e \x01(\x08\x12\r\n\x05n_val\x18\x0f \x01(\r\x12\x0c\n\x04type\x18\x10 \x01(\x0c\"G\n\nRpbPutResp\x12\x1c\n\x07\x63ontent\x18\x01 \x03(\x0b\x32\x0b.RpbContent\x12\x0e\n\x06vclock\x18\x02 \x01(\x0c\x12\x0b\n\x03key\x18\x03 \x01(\x0c\"\xc3\x01\n\tRpbDelReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\n\n\x02rw\x18\x03 \x01(\r\x12\x0e\n\x06vclock\x18\x04 \x01(\x0c\x12\t\n\x01r\x18\x05 \x01(\r\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02pr\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\n\n\x02\x64w\x18\t \x01(\r\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x0c\n\x04type\x18\r \x01(\x0c\"B\n\x11RpbListBucketsReq\x12\x0f\n\x07timeout\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\x08\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"3\n\x12RpbListBucketsResp\x12\x0f\n\x07\x62uckets\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"?\n\x0eRpbListKeysReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"-\n\x0fRpbListKeysResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"5\n\x0cRpbMapRedReq\x12\x0f\n\x07request\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x02(\x0c\">\n\rRpbMapRedResp\x12\r\n\x05phase\x18\x01 \x01(\r\x12\x10\n\x08response\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xf9\x02\n\x0bRpbIndexReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12*\n\x05qtype\x18\x03 
\x02(\x0e\x32\x1b.RpbIndexReq.IndexQueryType\x12\x0b\n\x03key\x18\x04 \x01(\x0c\x12\x11\n\trange_min\x18\x05 \x01(\x0c\x12\x11\n\trange_max\x18\x06 \x01(\x0c\x12\x14\n\x0creturn_terms\x18\x07 \x01(\x08\x12\x0e\n\x06stream\x18\x08 \x01(\x08\x12\x13\n\x0bmax_results\x18\t \x01(\r\x12\x14\n\x0c\x63ontinuation\x18\n \x01(\x0c\x12\x0f\n\x07timeout\x18\x0b \x01(\r\x12\x0c\n\x04type\x18\x0c \x01(\x0c\x12\x12\n\nterm_regex\x18\r \x01(\x0c\x12\x17\n\x0fpagination_sort\x18\x0e \x01(\x08\x12\x15\n\rcover_context\x18\x0f \x01(\x0c\x12\x13\n\x0breturn_body\x18\x10 \x01(\x08\"#\n\x0eIndexQueryType\x12\x06\n\x02\x65q\x10\x00\x12\t\n\x05range\x10\x01\"[\n\x0cRpbIndexResp\x12\x0c\n\x04keys\x18\x01 \x03(\x0c\x12\x19\n\x07results\x18\x02 \x03(\x0b\x32\x08.RpbPair\x12\x14\n\x0c\x63ontinuation\x18\x03 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x04 \x01(\x08\"X\n\x10RpbIndexBodyResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\"\xd8\x01\n\x0eRpbCSBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x11\n\tstart_key\x18\x02 \x02(\x0c\x12\x0f\n\x07\x65nd_key\x18\x03 \x01(\x0c\x12\x18\n\nstart_incl\x18\x04 \x01(\x08:\x04true\x12\x17\n\x08\x65nd_incl\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\x14\n\x0c\x63ontinuation\x18\x06 \x01(\x0c\x12\x13\n\x0bmax_results\x18\x07 \x01(\r\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x0c\n\x04type\x18\t \x01(\x0c\x12\x15\n\rcover_context\x18\n \x01(\x0c\"W\n\x0fRpbCSBucketResp\x12 \n\x07objects\x18\x01 \x03(\x0b\x32\x0f.RpbIndexObject\x12\x14\n\x0c\x63ontinuation\x18\x02 \x01(\x0c\x12\x0c\n\x04\x64one\x18\x03 \x01(\x08\":\n\x0eRpbIndexObject\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\x1b\n\x06object\x18\x02 \x02(\x0b\x32\x0b.RpbGetResp\"\xf5\x01\n\nRpbContent\x12\r\n\x05value\x18\x01 \x02(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x02 \x01(\x0c\x12\x0f\n\x07\x63harset\x18\x03 \x01(\x0c\x12\x18\n\x10\x63ontent_encoding\x18\x04 \x01(\x0c\x12\x0c\n\x04vtag\x18\x05 \x01(\x0c\x12\x17\n\x05links\x18\x06 \x03(\x0b\x32\x08.RpbLink\x12\x10\n\x08last_mod\x18\x07 \x01(\r\x12\x16\n\x0elast_mod_usecs\x18\x08 \x01(\r\x12\x1a\n\x08usermeta\x18\t \x03(\x0b\x32\x08.RpbPair\x12\x19\n\x07indexes\x18\n \x03(\x0b\x32\x08.RpbPair\x12\x0f\n\x07\x64\x65leted\x18\x0b \x01(\x08\"3\n\x07RpbLink\x12\x0e\n\x06\x62ucket\x18\x01 \x01(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0b\n\x03tag\x18\x03 \x01(\x0c\"z\n\x13RpbCounterUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x03 \x02(\x12\x12\t\n\x01w\x18\x04 \x01(\r\x12\n\n\x02\x64w\x18\x05 \x01(\r\x12\n\n\x02pw\x18\x06 \x01(\r\x12\x13\n\x0breturnvalue\x18\x07 \x01(\x08\"%\n\x14RpbCounterUpdateResp\x12\r\n\x05value\x18\x01 \x01(\x12\"q\n\x10RpbCounterGetReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\t\n\x01r\x18\x03 \x01(\r\x12\n\n\x02pr\x18\x04 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x05 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x06 \x01(\x08\"\"\n\x11RpbCounterGetResp\x12\r\n\x05value\x18\x01 \x01(\x12\"G\n\x1aRpbGetBucketKeyPreflistReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"J\n\x1bRpbGetBucketKeyPreflistResp\x12+\n\x08preflist\x18\x01 \x03(\x0b\x32\x19.RpbBucketKeyPreflistItem\"L\n\x18RpbBucketKeyPreflistItem\x12\x11\n\tpartition\x18\x01 \x02(\x03\x12\x0c\n\x04node\x18\x02 \x02(\x0c\x12\x0f\n\x07primary\x18\x03 \x02(\x08\"x\n\x0eRpbCoverageReq\x12\x0c\n\x04type\x18\x01 \x01(\x0c\x12\x0e\n\x06\x62ucket\x18\x02 
\x02(\x0c\x12\x16\n\x0emin_partitions\x18\x03 \x01(\r\x12\x15\n\rreplace_cover\x18\x04 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x05 \x03(\x0c\"5\n\x0fRpbCoverageResp\x12\"\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x11.RpbCoverageEntry\"Z\n\x10RpbCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rkeyspace_desc\x18\x03 \x01(\x0c\x12\x15\n\rcover_context\x18\x04 \x02(\x0c\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakKvPB') @@ -47,7 +39,6 @@ serialized_start=1688, serialized_end=1723, ) -_sym_db.RegisterEnumDescriptor(_RPBINDEXREQ_INDEXQUERYTYPE) _RPBGETCLIENTIDRESP = _descriptor.Descriptor( @@ -60,7 +51,7 @@ _descriptor.FieldDescriptor( name='client_id', full_name='RpbGetClientIdResp.client_id', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -73,8 +64,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=29, serialized_end=68, ) @@ -90,7 +79,7 @@ _descriptor.FieldDescriptor( name='client_id', full_name='RpbSetClientIdReq.client_id', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -103,8 +92,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=70, serialized_end=108, ) @@ -120,14 +107,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbGetReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbGetReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -162,7 +149,7 @@ _descriptor.FieldDescriptor( name='if_modified', full_name='RpbGetReq.if_modified', index=6, number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -204,7 +191,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbGetReq.type', index=12, number=13, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -217,8 +204,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=111, serialized_end=344, ) @@ -241,7 +226,7 @@ _descriptor.FieldDescriptor( name='vclock', full_name='RpbGetResp.vclock', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -261,8 +246,6 @@ options=None, 
is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=346, serialized_end=423, ) @@ -278,21 +261,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbPutReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbPutReq.key', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='vclock', full_name='RpbPutReq.vclock', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -383,7 +366,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbPutReq.type', index=15, number=16, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -396,8 +379,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=426, serialized_end=720, ) @@ -420,14 +401,14 @@ _descriptor.FieldDescriptor( name='vclock', full_name='RpbPutResp.vclock', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbPutResp.key', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -440,8 +421,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=722, serialized_end=793, ) @@ -457,14 +436,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbDelReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbDelReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -478,7 +457,7 @@ _descriptor.FieldDescriptor( name='vclock', full_name='RpbDelReq.vclock', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -541,7 +520,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbDelReq.type', index=12, number=13, 
type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -554,8 +533,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=796, serialized_end=991, ) @@ -585,7 +562,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbListBucketsReq.type', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -598,8 +575,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=993, serialized_end=1059, ) @@ -635,8 +610,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1061, serialized_end=1112, ) @@ -652,7 +625,7 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbListKeysReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -666,7 +639,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbListKeysReq.type', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -679,8 +652,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1114, serialized_end=1177, ) @@ -716,8 +687,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1179, serialized_end=1224, ) @@ -733,14 +702,14 @@ _descriptor.FieldDescriptor( name='request', full_name='RpbMapRedReq.request', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='content_type', full_name='RpbMapRedReq.content_type', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -753,8 +722,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1226, serialized_end=1279, ) @@ -777,7 +744,7 @@ _descriptor.FieldDescriptor( name='response', full_name='RpbMapRedResp.response', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -797,8 +764,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1281, serialized_end=1343, ) @@ -814,14 +779,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbIndexReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, 
default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='index', full_name='RpbIndexReq.index', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -835,21 +800,21 @@ _descriptor.FieldDescriptor( name='key', full_name='RpbIndexReq.key', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='range_min', full_name='RpbIndexReq.range_min', index=4, number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='range_max', full_name='RpbIndexReq.range_max', index=5, number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -877,7 +842,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbIndexReq.continuation', index=9, number=10, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -891,14 +856,14 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbIndexReq.type', index=11, number=12, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='term_regex', full_name='RpbIndexReq.term_regex', index=12, number=13, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -912,7 +877,7 @@ _descriptor.FieldDescriptor( name='cover_context', full_name='RpbIndexReq.cover_context', index=14, number=15, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -933,8 +898,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1346, serialized_end=1723, ) @@ -964,7 +927,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbIndexResp.continuation', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -984,8 +947,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1725, 
serialized_end=1816, ) @@ -1008,7 +969,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbIndexBodyResp.continuation', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1028,8 +989,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1818, serialized_end=1906, ) @@ -1045,21 +1004,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbCSBucketReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='start_key', full_name='RpbCSBucketReq.start_key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='end_key', full_name='RpbCSBucketReq.end_key', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1080,7 +1039,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbCSBucketReq.continuation', index=5, number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1101,14 +1060,14 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbCSBucketReq.type', index=8, number=9, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='cover_context', full_name='RpbCSBucketReq.cover_context', index=9, number=10, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1121,8 +1080,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1909, serialized_end=2125, ) @@ -1145,7 +1102,7 @@ _descriptor.FieldDescriptor( name='continuation', full_name='RpbCSBucketResp.continuation', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1165,8 +1122,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2127, serialized_end=2214, ) @@ -1182,7 +1137,7 @@ _descriptor.FieldDescriptor( name='key', full_name='RpbIndexObject.key', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", 
message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1202,8 +1157,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2216, serialized_end=2274, ) @@ -1219,35 +1172,35 @@ _descriptor.FieldDescriptor( name='value', full_name='RpbContent.value', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='content_type', full_name='RpbContent.content_type', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='charset', full_name='RpbContent.charset', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='content_encoding', full_name='RpbContent.content_encoding', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='vtag', full_name='RpbContent.vtag', index=4, number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1302,8 +1255,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2277, serialized_end=2522, ) @@ -1319,21 +1270,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbLink.bucket', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbLink.key', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='tag', full_name='RpbLink.tag', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1346,8 +1297,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2524, serialized_end=2575, ) @@ -1363,14 +1312,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbCounterUpdateReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, 
is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbCounterUpdateReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1418,8 +1367,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2577, serialized_end=2699, ) @@ -1448,8 +1395,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2701, serialized_end=2738, ) @@ -1465,14 +1410,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbCounterGetReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbCounterGetReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1513,8 +1458,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2740, serialized_end=2853, ) @@ -1543,8 +1486,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2855, serialized_end=2889, ) @@ -1560,21 +1501,21 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbGetBucketKeyPreflistReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='key', full_name='RpbGetBucketKeyPreflistReq.key', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', full_name='RpbGetBucketKeyPreflistReq.type', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1587,8 +1528,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2891, serialized_end=2962, ) @@ -1617,8 +1556,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=2964, serialized_end=3038, ) @@ -1641,7 +1578,7 @@ _descriptor.FieldDescriptor( name='node', full_name='RpbBucketKeyPreflistItem.node', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1661,8 +1598,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=3040, serialized_end=3116, ) @@ -1678,14 +1613,14 @@ _descriptor.FieldDescriptor( name='type', 
full_name='RpbCoverageReq.type', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='bucket', full_name='RpbCoverageReq.bucket', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1699,7 +1634,7 @@ _descriptor.FieldDescriptor( name='replace_cover', full_name='RpbCoverageReq.replace_cover', index=3, number=4, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1719,8 +1654,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=3118, serialized_end=3238, ) @@ -1749,8 +1682,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=3240, serialized_end=3293, ) @@ -1766,7 +1697,7 @@ _descriptor.FieldDescriptor( name='ip', full_name='RpbCoverageEntry.ip', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1780,14 +1711,14 @@ _descriptor.FieldDescriptor( name='keyspace_desc', full_name='RpbCoverageEntry.keyspace_desc', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='cover_context', full_name='RpbCoverageEntry.cover_context', index=3, number=4, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -1800,8 +1731,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=3295, serialized_end=3385, ) @@ -1810,7 +1739,7 @@ _RPBPUTREQ.fields_by_name['content'].message_type = _RPBCONTENT _RPBPUTRESP.fields_by_name['content'].message_type = _RPBCONTENT _RPBINDEXREQ.fields_by_name['qtype'].enum_type = _RPBINDEXREQ_INDEXQUERYTYPE -_RPBINDEXREQ_INDEXQUERYTYPE.containing_type = _RPBINDEXREQ +_RPBINDEXREQ_INDEXQUERYTYPE.containing_type = _RPBINDEXREQ; _RPBINDEXRESP.fields_by_name['results'].message_type = riak.pb.riak_pb2._RPBPAIR _RPBINDEXBODYRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT _RPBCSBUCKETRESP.fields_by_name['objects'].message_type = _RPBINDEXOBJECT @@ -1852,224 +1781,193 @@ DESCRIPTOR.message_types_by_name['RpbCoverageResp'] = _RPBCOVERAGERESP DESCRIPTOR.message_types_by_name['RpbCoverageEntry'] = _RPBCOVERAGEENTRY -RpbGetClientIdResp = _reflection.GeneratedProtocolMessageType('RpbGetClientIdResp', (_message.Message,), dict( - DESCRIPTOR = _RPBGETCLIENTIDRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetClientIdResp(_message.Message): + DESCRIPTOR = 
_RPBGETCLIENTIDRESP + # @@protoc_insertion_point(class_scope:RpbGetClientIdResp) - )) -_sym_db.RegisterMessage(RpbGetClientIdResp) -RpbSetClientIdReq = _reflection.GeneratedProtocolMessageType('RpbSetClientIdReq', (_message.Message,), dict( - DESCRIPTOR = _RPBSETCLIENTIDREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetClientIdReq(_message.Message): + DESCRIPTOR = _RPBSETCLIENTIDREQ + # @@protoc_insertion_point(class_scope:RpbSetClientIdReq) - )) -_sym_db.RegisterMessage(RpbSetClientIdReq) -RpbGetReq = _reflection.GeneratedProtocolMessageType('RpbGetReq', (_message.Message,), dict( - DESCRIPTOR = _RPBGETREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetReq(_message.Message): + DESCRIPTOR = _RPBGETREQ + # @@protoc_insertion_point(class_scope:RpbGetReq) - )) -_sym_db.RegisterMessage(RpbGetReq) -RpbGetResp = _reflection.GeneratedProtocolMessageType('RpbGetResp', (_message.Message,), dict( - DESCRIPTOR = _RPBGETRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetResp(_message.Message): + DESCRIPTOR = _RPBGETRESP + # @@protoc_insertion_point(class_scope:RpbGetResp) - )) -_sym_db.RegisterMessage(RpbGetResp) -RpbPutReq = _reflection.GeneratedProtocolMessageType('RpbPutReq', (_message.Message,), dict( - DESCRIPTOR = _RPBPUTREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPutReq(_message.Message): + DESCRIPTOR = _RPBPUTREQ + # @@protoc_insertion_point(class_scope:RpbPutReq) - )) -_sym_db.RegisterMessage(RpbPutReq) -RpbPutResp = _reflection.GeneratedProtocolMessageType('RpbPutResp', (_message.Message,), dict( - DESCRIPTOR = _RPBPUTRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPutResp(_message.Message): + DESCRIPTOR = _RPBPUTRESP + # @@protoc_insertion_point(class_scope:RpbPutResp) - )) -_sym_db.RegisterMessage(RpbPutResp) -RpbDelReq = _reflection.GeneratedProtocolMessageType('RpbDelReq', (_message.Message,), dict( - DESCRIPTOR = _RPBDELREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbDelReq(_message.Message): + DESCRIPTOR = _RPBDELREQ + # @@protoc_insertion_point(class_scope:RpbDelReq) - )) -_sym_db.RegisterMessage(RpbDelReq) -RpbListBucketsReq = _reflection.GeneratedProtocolMessageType('RpbListBucketsReq', (_message.Message,), dict( - DESCRIPTOR = _RPBLISTBUCKETSREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListBucketsReq(_message.Message): + DESCRIPTOR = _RPBLISTBUCKETSREQ + # @@protoc_insertion_point(class_scope:RpbListBucketsReq) - )) -_sym_db.RegisterMessage(RpbListBucketsReq) -RpbListBucketsResp = _reflection.GeneratedProtocolMessageType('RpbListBucketsResp', (_message.Message,), dict( - DESCRIPTOR = _RPBLISTBUCKETSRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListBucketsResp(_message.Message): + DESCRIPTOR = _RPBLISTBUCKETSRESP + # @@protoc_insertion_point(class_scope:RpbListBucketsResp) - )) -_sym_db.RegisterMessage(RpbListBucketsResp) -RpbListKeysReq = _reflection.GeneratedProtocolMessageType('RpbListKeysReq', (_message.Message,), dict( - DESCRIPTOR = _RPBLISTKEYSREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListKeysReq(_message.Message): + DESCRIPTOR = _RPBLISTKEYSREQ + # 
@@protoc_insertion_point(class_scope:RpbListKeysReq) - )) -_sym_db.RegisterMessage(RpbListKeysReq) -RpbListKeysResp = _reflection.GeneratedProtocolMessageType('RpbListKeysResp', (_message.Message,), dict( - DESCRIPTOR = _RPBLISTKEYSRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbListKeysResp(_message.Message): + DESCRIPTOR = _RPBLISTKEYSRESP + # @@protoc_insertion_point(class_scope:RpbListKeysResp) - )) -_sym_db.RegisterMessage(RpbListKeysResp) -RpbMapRedReq = _reflection.GeneratedProtocolMessageType('RpbMapRedReq', (_message.Message,), dict( - DESCRIPTOR = _RPBMAPREDREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbMapRedReq(_message.Message): + DESCRIPTOR = _RPBMAPREDREQ + # @@protoc_insertion_point(class_scope:RpbMapRedReq) - )) -_sym_db.RegisterMessage(RpbMapRedReq) -RpbMapRedResp = _reflection.GeneratedProtocolMessageType('RpbMapRedResp', (_message.Message,), dict( - DESCRIPTOR = _RPBMAPREDRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbMapRedResp(_message.Message): + DESCRIPTOR = _RPBMAPREDRESP + # @@protoc_insertion_point(class_scope:RpbMapRedResp) - )) -_sym_db.RegisterMessage(RpbMapRedResp) -RpbIndexReq = _reflection.GeneratedProtocolMessageType('RpbIndexReq', (_message.Message,), dict( - DESCRIPTOR = _RPBINDEXREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexReq(_message.Message): + DESCRIPTOR = _RPBINDEXREQ + # @@protoc_insertion_point(class_scope:RpbIndexReq) - )) -_sym_db.RegisterMessage(RpbIndexReq) -RpbIndexResp = _reflection.GeneratedProtocolMessageType('RpbIndexResp', (_message.Message,), dict( - DESCRIPTOR = _RPBINDEXRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexResp(_message.Message): + DESCRIPTOR = _RPBINDEXRESP + # @@protoc_insertion_point(class_scope:RpbIndexResp) - )) -_sym_db.RegisterMessage(RpbIndexResp) -RpbIndexBodyResp = _reflection.GeneratedProtocolMessageType('RpbIndexBodyResp', (_message.Message,), dict( - DESCRIPTOR = _RPBINDEXBODYRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexBodyResp(_message.Message): + DESCRIPTOR = _RPBINDEXBODYRESP + # @@protoc_insertion_point(class_scope:RpbIndexBodyResp) - )) -_sym_db.RegisterMessage(RpbIndexBodyResp) -RpbCSBucketReq = _reflection.GeneratedProtocolMessageType('RpbCSBucketReq', (_message.Message,), dict( - DESCRIPTOR = _RPBCSBUCKETREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCSBucketReq(_message.Message): + DESCRIPTOR = _RPBCSBUCKETREQ + # @@protoc_insertion_point(class_scope:RpbCSBucketReq) - )) -_sym_db.RegisterMessage(RpbCSBucketReq) -RpbCSBucketResp = _reflection.GeneratedProtocolMessageType('RpbCSBucketResp', (_message.Message,), dict( - DESCRIPTOR = _RPBCSBUCKETRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCSBucketResp(_message.Message): + DESCRIPTOR = _RPBCSBUCKETRESP + # @@protoc_insertion_point(class_scope:RpbCSBucketResp) - )) -_sym_db.RegisterMessage(RpbCSBucketResp) -RpbIndexObject = _reflection.GeneratedProtocolMessageType('RpbIndexObject', (_message.Message,), dict( - DESCRIPTOR = _RPBINDEXOBJECT, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbIndexObject(_message.Message): + DESCRIPTOR = _RPBINDEXOBJECT + # 
@@protoc_insertion_point(class_scope:RpbIndexObject) - )) -_sym_db.RegisterMessage(RpbIndexObject) -RpbContent = _reflection.GeneratedProtocolMessageType('RpbContent', (_message.Message,), dict( - DESCRIPTOR = _RPBCONTENT, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbContent(_message.Message): + DESCRIPTOR = _RPBCONTENT + # @@protoc_insertion_point(class_scope:RpbContent) - )) -_sym_db.RegisterMessage(RpbContent) -RpbLink = _reflection.GeneratedProtocolMessageType('RpbLink', (_message.Message,), dict( - DESCRIPTOR = _RPBLINK, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbLink(_message.Message): + DESCRIPTOR = _RPBLINK + # @@protoc_insertion_point(class_scope:RpbLink) - )) -_sym_db.RegisterMessage(RpbLink) -RpbCounterUpdateReq = _reflection.GeneratedProtocolMessageType('RpbCounterUpdateReq', (_message.Message,), dict( - DESCRIPTOR = _RPBCOUNTERUPDATEREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterUpdateReq(_message.Message): + DESCRIPTOR = _RPBCOUNTERUPDATEREQ + # @@protoc_insertion_point(class_scope:RpbCounterUpdateReq) - )) -_sym_db.RegisterMessage(RpbCounterUpdateReq) -RpbCounterUpdateResp = _reflection.GeneratedProtocolMessageType('RpbCounterUpdateResp', (_message.Message,), dict( - DESCRIPTOR = _RPBCOUNTERUPDATERESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterUpdateResp(_message.Message): + DESCRIPTOR = _RPBCOUNTERUPDATERESP + # @@protoc_insertion_point(class_scope:RpbCounterUpdateResp) - )) -_sym_db.RegisterMessage(RpbCounterUpdateResp) -RpbCounterGetReq = _reflection.GeneratedProtocolMessageType('RpbCounterGetReq', (_message.Message,), dict( - DESCRIPTOR = _RPBCOUNTERGETREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterGetReq(_message.Message): + DESCRIPTOR = _RPBCOUNTERGETREQ + # @@protoc_insertion_point(class_scope:RpbCounterGetReq) - )) -_sym_db.RegisterMessage(RpbCounterGetReq) -RpbCounterGetResp = _reflection.GeneratedProtocolMessageType('RpbCounterGetResp', (_message.Message,), dict( - DESCRIPTOR = _RPBCOUNTERGETRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCounterGetResp(_message.Message): + DESCRIPTOR = _RPBCOUNTERGETRESP + # @@protoc_insertion_point(class_scope:RpbCounterGetResp) - )) -_sym_db.RegisterMessage(RpbCounterGetResp) -RpbGetBucketKeyPreflistReq = _reflection.GeneratedProtocolMessageType('RpbGetBucketKeyPreflistReq', (_message.Message,), dict( - DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketKeyPreflistReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTREQ + # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistReq) - )) -_sym_db.RegisterMessage(RpbGetBucketKeyPreflistReq) -RpbGetBucketKeyPreflistResp = _reflection.GeneratedProtocolMessageType('RpbGetBucketKeyPreflistResp', (_message.Message,), dict( - DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTRESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketKeyPreflistResp(_message.Message): + DESCRIPTOR = _RPBGETBUCKETKEYPREFLISTRESP + # @@protoc_insertion_point(class_scope:RpbGetBucketKeyPreflistResp) - )) -_sym_db.RegisterMessage(RpbGetBucketKeyPreflistResp) -RpbBucketKeyPreflistItem = 
_reflection.GeneratedProtocolMessageType('RpbBucketKeyPreflistItem', (_message.Message,), dict( - DESCRIPTOR = _RPBBUCKETKEYPREFLISTITEM, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbBucketKeyPreflistItem(_message.Message): + DESCRIPTOR = _RPBBUCKETKEYPREFLISTITEM + # @@protoc_insertion_point(class_scope:RpbBucketKeyPreflistItem) - )) -_sym_db.RegisterMessage(RpbBucketKeyPreflistItem) -RpbCoverageReq = _reflection.GeneratedProtocolMessageType('RpbCoverageReq', (_message.Message,), dict( - DESCRIPTOR = _RPBCOVERAGEREQ, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCoverageReq(_message.Message): + DESCRIPTOR = _RPBCOVERAGEREQ + # @@protoc_insertion_point(class_scope:RpbCoverageReq) - )) -_sym_db.RegisterMessage(RpbCoverageReq) -RpbCoverageResp = _reflection.GeneratedProtocolMessageType('RpbCoverageResp', (_message.Message,), dict( - DESCRIPTOR = _RPBCOVERAGERESP, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCoverageResp(_message.Message): + DESCRIPTOR = _RPBCOVERAGERESP + # @@protoc_insertion_point(class_scope:RpbCoverageResp) - )) -_sym_db.RegisterMessage(RpbCoverageResp) -RpbCoverageEntry = _reflection.GeneratedProtocolMessageType('RpbCoverageEntry', (_message.Message,), dict( - DESCRIPTOR = _RPBCOVERAGEENTRY, - __module__ = 'riak_kv_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCoverageEntry(_message.Message): + DESCRIPTOR = _RPBCOVERAGEENTRY + # @@protoc_insertion_point(class_scope:RpbCoverageEntry) - )) -_sym_db.RegisterMessage(RpbCoverageEntry) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\010RiakKvPB')) +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakKvPB') # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_pb2.py b/riak/pb/riak_pb2.py index 4ce87953..8f4ac076 100644 --- a/riak/pb/riak_pb2.py +++ b/riak/pb/riak_pb2.py @@ -2,26 +2,19 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: riak.proto -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) -_sym_db = _symbol_database.Default() - DESCRIPTOR = _descriptor.FileDescriptor( name='riak.proto', package='', - serialized_pb=_b('\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 \x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xc7\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\x12\x15\n\rhll_precision\x18\x1d \x01(\r\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') -) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + serialized_pb='\n\nriak.proto\"/\n\x0cRpbErrorResp\x12\x0e\n\x06\x65rrmsg\x18\x01 \x02(\x0c\x12\x0f\n\x07\x65rrcode\x18\x02 \x02(\r\"<\n\x14RpbGetServerInfoResp\x12\x0c\n\x04node\x18\x01 
\x01(\x0c\x12\x16\n\x0eserver_version\x18\x02 \x01(\x0c\"%\n\x07RpbPair\x12\x0b\n\x03key\x18\x01 \x02(\x0c\x12\r\n\x05value\x18\x02 \x01(\x0c\"/\n\x0fRpbGetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"2\n\x10RpbGetBucketResp\x12\x1e\n\x05props\x18\x01 \x02(\x0b\x32\x0f.RpbBucketProps\"O\n\x0fRpbSetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\x12\x0c\n\x04type\x18\x03 \x01(\x0c\"1\n\x11RpbResetBucketReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0c\n\x04type\x18\x02 \x01(\x0c\"#\n\x13RpbGetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\"C\n\x13RpbSetBucketTypeReq\x12\x0c\n\x04type\x18\x01 \x02(\x0c\x12\x1e\n\x05props\x18\x02 \x02(\x0b\x32\x0f.RpbBucketProps\"-\n\tRpbModFun\x12\x0e\n\x06module\x18\x01 \x02(\x0c\x12\x10\n\x08\x66unction\x18\x02 \x02(\x0c\"9\n\rRpbCommitHook\x12\x1a\n\x06modfun\x18\x01 \x01(\x0b\x32\n.RpbModFun\x12\x0c\n\x04name\x18\x02 \x01(\x0c\"\xc7\x05\n\x0eRpbBucketProps\x12\r\n\x05n_val\x18\x01 \x01(\r\x12\x12\n\nallow_mult\x18\x02 \x01(\x08\x12\x17\n\x0flast_write_wins\x18\x03 \x01(\x08\x12!\n\tprecommit\x18\x04 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1c\n\rhas_precommit\x18\x05 \x01(\x08:\x05\x66\x61lse\x12\"\n\npostcommit\x18\x06 \x03(\x0b\x32\x0e.RpbCommitHook\x12\x1d\n\x0ehas_postcommit\x18\x07 \x01(\x08:\x05\x66\x61lse\x12 \n\x0c\x63hash_keyfun\x18\x08 \x01(\x0b\x32\n.RpbModFun\x12\x1b\n\x07linkfun\x18\t \x01(\x0b\x32\n.RpbModFun\x12\x12\n\nold_vclock\x18\n \x01(\r\x12\x14\n\x0cyoung_vclock\x18\x0b \x01(\r\x12\x12\n\nbig_vclock\x18\x0c \x01(\r\x12\x14\n\x0csmall_vclock\x18\r \x01(\r\x12\n\n\x02pr\x18\x0e \x01(\r\x12\t\n\x01r\x18\x0f \x01(\r\x12\t\n\x01w\x18\x10 \x01(\r\x12\n\n\x02pw\x18\x11 \x01(\r\x12\n\n\x02\x64w\x18\x12 \x01(\r\x12\n\n\x02rw\x18\x13 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x14 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x15 \x01(\x08\x12\x0f\n\x07\x62\x61\x63kend\x18\x16 \x01(\x0c\x12\x0e\n\x06search\x18\x17 \x01(\x08\x12)\n\x04repl\x18\x18 \x01(\x0e\x32\x1b.RpbBucketProps.RpbReplMode\x12\x14\n\x0csearch_index\x18\x19 \x01(\x0c\x12\x10\n\x08\x64\x61tatype\x18\x1a \x01(\x0c\x12\x12\n\nconsistent\x18\x1b \x01(\x08\x12\x12\n\nwrite_once\x18\x1c \x01(\x08\x12\x15\n\rhll_precision\x18\x1d \x01(\r\">\n\x0bRpbReplMode\x12\t\n\x05\x46\x41LSE\x10\x00\x12\x0c\n\x08REALTIME\x10\x01\x12\x0c\n\x08\x46ULLSYNC\x10\x02\x12\x08\n\x04TRUE\x10\x03\",\n\nRpbAuthReq\x12\x0c\n\x04user\x18\x01 \x02(\x0c\x12\x10\n\x08password\x18\x02 \x02(\x0c\x42!\n\x17\x63om.basho.riak.protobufB\x06RiakPB') @@ -53,7 +46,6 @@ serialized_start=1259, serialized_end=1321, ) -_sym_db.RegisterEnumDescriptor(_RPBBUCKETPROPS_RPBREPLMODE) _RPBERRORRESP = _descriptor.Descriptor( @@ -66,7 +58,7 @@ _descriptor.FieldDescriptor( name='errmsg', full_name='RpbErrorResp.errmsg', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -86,8 +78,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=14, serialized_end=61, ) @@ -103,14 +93,14 @@ _descriptor.FieldDescriptor( name='node', full_name='RpbGetServerInfoResp.node', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, 
extension_scope=None, options=None), _descriptor.FieldDescriptor( name='server_version', full_name='RpbGetServerInfoResp.server_version', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -123,8 +113,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=63, serialized_end=123, ) @@ -140,14 +128,14 @@ _descriptor.FieldDescriptor( name='key', full_name='RpbPair.key', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='value', full_name='RpbPair.value', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -160,8 +148,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=125, serialized_end=162, ) @@ -177,14 +163,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbGetBucketReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', full_name='RpbGetBucketReq.type', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -197,8 +183,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=164, serialized_end=211, ) @@ -227,8 +211,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=213, serialized_end=263, ) @@ -244,7 +226,7 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbSetBucketReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -258,7 +240,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbSetBucketReq.type', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -271,8 +253,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=265, serialized_end=344, ) @@ -288,14 +268,14 @@ _descriptor.FieldDescriptor( name='bucket', full_name='RpbResetBucketReq.bucket', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='type', 
full_name='RpbResetBucketReq.type', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -308,8 +288,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=346, serialized_end=395, ) @@ -325,7 +303,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbGetBucketTypeReq.type', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -338,8 +316,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=397, serialized_end=432, ) @@ -355,7 +331,7 @@ _descriptor.FieldDescriptor( name='type', full_name='RpbSetBucketTypeReq.type', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -375,8 +351,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=434, serialized_end=501, ) @@ -392,14 +366,14 @@ _descriptor.FieldDescriptor( name='module', full_name='RpbModFun.module', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='function', full_name='RpbModFun.function', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -412,8 +386,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=503, serialized_end=548, ) @@ -436,7 +408,7 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbCommitHook.name', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -449,8 +421,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=550, serialized_end=607, ) @@ -613,7 +583,7 @@ _descriptor.FieldDescriptor( name='backend', full_name='RpbBucketProps.backend', index=21, number=22, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -634,14 +604,14 @@ _descriptor.FieldDescriptor( name='search_index', full_name='RpbBucketProps.search_index', index=24, number=25, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='datatype', full_name='RpbBucketProps.datatype', index=25, 
number=26, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -676,8 +646,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=610, serialized_end=1321, ) @@ -693,14 +661,14 @@ _descriptor.FieldDescriptor( name='user', full_name='RpbAuthReq.user', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='password', full_name='RpbAuthReq.password', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -713,8 +681,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1323, serialized_end=1367, ) @@ -728,7 +694,7 @@ _RPBBUCKETPROPS.fields_by_name['chash_keyfun'].message_type = _RPBMODFUN _RPBBUCKETPROPS.fields_by_name['linkfun'].message_type = _RPBMODFUN _RPBBUCKETPROPS.fields_by_name['repl'].enum_type = _RPBBUCKETPROPS_RPBREPLMODE -_RPBBUCKETPROPS_RPBREPLMODE.containing_type = _RPBBUCKETPROPS +_RPBBUCKETPROPS_RPBREPLMODE.containing_type = _RPBBUCKETPROPS; DESCRIPTOR.message_types_by_name['RpbErrorResp'] = _RPBERRORRESP DESCRIPTOR.message_types_by_name['RpbGetServerInfoResp'] = _RPBGETSERVERINFORESP DESCRIPTOR.message_types_by_name['RpbPair'] = _RPBPAIR @@ -743,98 +709,85 @@ DESCRIPTOR.message_types_by_name['RpbBucketProps'] = _RPBBUCKETPROPS DESCRIPTOR.message_types_by_name['RpbAuthReq'] = _RPBAUTHREQ -RpbErrorResp = _reflection.GeneratedProtocolMessageType('RpbErrorResp', (_message.Message,), dict( - DESCRIPTOR = _RPBERRORRESP, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbErrorResp(_message.Message): + DESCRIPTOR = _RPBERRORRESP + # @@protoc_insertion_point(class_scope:RpbErrorResp) - )) -_sym_db.RegisterMessage(RpbErrorResp) -RpbGetServerInfoResp = _reflection.GeneratedProtocolMessageType('RpbGetServerInfoResp', (_message.Message,), dict( - DESCRIPTOR = _RPBGETSERVERINFORESP, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetServerInfoResp(_message.Message): + DESCRIPTOR = _RPBGETSERVERINFORESP + # @@protoc_insertion_point(class_scope:RpbGetServerInfoResp) - )) -_sym_db.RegisterMessage(RpbGetServerInfoResp) -RpbPair = _reflection.GeneratedProtocolMessageType('RpbPair', (_message.Message,), dict( - DESCRIPTOR = _RPBPAIR, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbPair(_message.Message): + DESCRIPTOR = _RPBPAIR + # @@protoc_insertion_point(class_scope:RpbPair) - )) -_sym_db.RegisterMessage(RpbPair) -RpbGetBucketReq = _reflection.GeneratedProtocolMessageType('RpbGetBucketReq', (_message.Message,), dict( - DESCRIPTOR = _RPBGETBUCKETREQ, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETREQ + # @@protoc_insertion_point(class_scope:RpbGetBucketReq) - )) -_sym_db.RegisterMessage(RpbGetBucketReq) -RpbGetBucketResp = 
_reflection.GeneratedProtocolMessageType('RpbGetBucketResp', (_message.Message,), dict( - DESCRIPTOR = _RPBGETBUCKETRESP, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketResp(_message.Message): + DESCRIPTOR = _RPBGETBUCKETRESP + # @@protoc_insertion_point(class_scope:RpbGetBucketResp) - )) -_sym_db.RegisterMessage(RpbGetBucketResp) -RpbSetBucketReq = _reflection.GeneratedProtocolMessageType('RpbSetBucketReq', (_message.Message,), dict( - DESCRIPTOR = _RPBSETBUCKETREQ, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetBucketReq(_message.Message): + DESCRIPTOR = _RPBSETBUCKETREQ + # @@protoc_insertion_point(class_scope:RpbSetBucketReq) - )) -_sym_db.RegisterMessage(RpbSetBucketReq) -RpbResetBucketReq = _reflection.GeneratedProtocolMessageType('RpbResetBucketReq', (_message.Message,), dict( - DESCRIPTOR = _RPBRESETBUCKETREQ, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbResetBucketReq(_message.Message): + DESCRIPTOR = _RPBRESETBUCKETREQ + # @@protoc_insertion_point(class_scope:RpbResetBucketReq) - )) -_sym_db.RegisterMessage(RpbResetBucketReq) -RpbGetBucketTypeReq = _reflection.GeneratedProtocolMessageType('RpbGetBucketTypeReq', (_message.Message,), dict( - DESCRIPTOR = _RPBGETBUCKETTYPEREQ, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbGetBucketTypeReq(_message.Message): + DESCRIPTOR = _RPBGETBUCKETTYPEREQ + # @@protoc_insertion_point(class_scope:RpbGetBucketTypeReq) - )) -_sym_db.RegisterMessage(RpbGetBucketTypeReq) -RpbSetBucketTypeReq = _reflection.GeneratedProtocolMessageType('RpbSetBucketTypeReq', (_message.Message,), dict( - DESCRIPTOR = _RPBSETBUCKETTYPEREQ, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSetBucketTypeReq(_message.Message): + DESCRIPTOR = _RPBSETBUCKETTYPEREQ + # @@protoc_insertion_point(class_scope:RpbSetBucketTypeReq) - )) -_sym_db.RegisterMessage(RpbSetBucketTypeReq) -RpbModFun = _reflection.GeneratedProtocolMessageType('RpbModFun', (_message.Message,), dict( - DESCRIPTOR = _RPBMODFUN, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbModFun(_message.Message): + DESCRIPTOR = _RPBMODFUN + # @@protoc_insertion_point(class_scope:RpbModFun) - )) -_sym_db.RegisterMessage(RpbModFun) -RpbCommitHook = _reflection.GeneratedProtocolMessageType('RpbCommitHook', (_message.Message,), dict( - DESCRIPTOR = _RPBCOMMITHOOK, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbCommitHook(_message.Message): + DESCRIPTOR = _RPBCOMMITHOOK + # @@protoc_insertion_point(class_scope:RpbCommitHook) - )) -_sym_db.RegisterMessage(RpbCommitHook) -RpbBucketProps = _reflection.GeneratedProtocolMessageType('RpbBucketProps', (_message.Message,), dict( - DESCRIPTOR = _RPBBUCKETPROPS, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbBucketProps(_message.Message): + DESCRIPTOR = _RPBBUCKETPROPS + # @@protoc_insertion_point(class_scope:RpbBucketProps) - )) -_sym_db.RegisterMessage(RpbBucketProps) -RpbAuthReq = _reflection.GeneratedProtocolMessageType('RpbAuthReq', (_message.Message,), dict( - DESCRIPTOR = _RPBAUTHREQ, - __module__ = 'riak.pb.riak_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbAuthReq(_message.Message): + DESCRIPTOR = _RPBAUTHREQ 
+ # @@protoc_insertion_point(class_scope:RpbAuthReq) - )) -_sym_db.RegisterMessage(RpbAuthReq) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\006RiakPB')) +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\006RiakPB') # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_search_pb2.py b/riak/pb/riak_search_pb2.py index f487a9bd..788b7cda 100644 --- a/riak/pb/riak_search_pb2.py +++ b/riak/pb/riak_search_pb2.py @@ -2,17 +2,12 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_search.proto -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) -_sym_db = _symbol_database.Default() - import riak.pb.riak_pb2 @@ -20,10 +15,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_search.proto', package='', - serialized_pb=_b('\n\x11riak_search.proto\x1a\nriak.proto\"(\n\x0cRpbSearchDoc\x12\x18\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x08.RpbPair\"\x9d\x01\n\x11RpbSearchQueryReq\x12\t\n\x01q\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12\x0c\n\x04rows\x18\x03 \x01(\r\x12\r\n\x05start\x18\x04 \x01(\r\x12\x0c\n\x04sort\x18\x05 \x01(\x0c\x12\x0e\n\x06\x66ilter\x18\x06 \x01(\x0c\x12\n\n\x02\x64\x66\x18\x07 \x01(\x0c\x12\n\n\x02op\x18\x08 \x01(\x0c\x12\n\n\x02\x66l\x18\t \x03(\x0c\x12\x0f\n\x07presort\x18\n \x01(\x0c\"W\n\x12RpbSearchQueryResp\x12\x1b\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\r.RpbSearchDoc\x12\x11\n\tmax_score\x18\x02 \x01(\x02\x12\x11\n\tnum_found\x18\x03 \x01(\rB\'\n\x17\x63om.basho.riak.protobufB\x0cRiakSearchPB') - , - dependencies=[riak.pb.riak_pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + serialized_pb='\n\x11riak_search.proto\x1a\nriak.proto\"(\n\x0cRpbSearchDoc\x12\x18\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x08.RpbPair\"\x9d\x01\n\x11RpbSearchQueryReq\x12\t\n\x01q\x18\x01 \x02(\x0c\x12\r\n\x05index\x18\x02 \x02(\x0c\x12\x0c\n\x04rows\x18\x03 \x01(\r\x12\r\n\x05start\x18\x04 \x01(\r\x12\x0c\n\x04sort\x18\x05 \x01(\x0c\x12\x0e\n\x06\x66ilter\x18\x06 \x01(\x0c\x12\n\n\x02\x64\x66\x18\x07 \x01(\x0c\x12\n\n\x02op\x18\x08 \x01(\x0c\x12\n\n\x02\x66l\x18\t \x03(\x0c\x12\x0f\n\x07presort\x18\n \x01(\x0c\"W\n\x12RpbSearchQueryResp\x12\x1b\n\x04\x64ocs\x18\x01 \x03(\x0b\x32\r.RpbSearchDoc\x12\x11\n\tmax_score\x18\x02 \x01(\x02\x12\x11\n\tnum_found\x18\x03 \x01(\rB\'\n\x17\x63om.basho.riak.protobufB\x0cRiakSearchPB') @@ -51,8 +43,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=33, serialized_end=73, ) @@ -68,14 +58,14 @@ _descriptor.FieldDescriptor( name='q', full_name='RpbSearchQueryReq.q', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='index', full_name='RpbSearchQueryReq.index', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, 
containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -96,28 +86,28 @@ _descriptor.FieldDescriptor( name='sort', full_name='RpbSearchQueryReq.sort', index=4, number=5, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='filter', full_name='RpbSearchQueryReq.filter', index=5, number=6, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='df', full_name='RpbSearchQueryReq.df', index=6, number=7, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='op', full_name='RpbSearchQueryReq.op', index=7, number=8, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -131,7 +121,7 @@ _descriptor.FieldDescriptor( name='presort', full_name='RpbSearchQueryReq.presort', index=9, number=10, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -144,8 +134,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=76, serialized_end=233, ) @@ -188,8 +176,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=235, serialized_end=322, ) @@ -200,28 +186,25 @@ DESCRIPTOR.message_types_by_name['RpbSearchQueryReq'] = _RPBSEARCHQUERYREQ DESCRIPTOR.message_types_by_name['RpbSearchQueryResp'] = _RPBSEARCHQUERYRESP -RpbSearchDoc = _reflection.GeneratedProtocolMessageType('RpbSearchDoc', (_message.Message,), dict( - DESCRIPTOR = _RPBSEARCHDOC, - __module__ = 'riak_search_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchDoc(_message.Message): + DESCRIPTOR = _RPBSEARCHDOC + # @@protoc_insertion_point(class_scope:RpbSearchDoc) - )) -_sym_db.RegisterMessage(RpbSearchDoc) -RpbSearchQueryReq = _reflection.GeneratedProtocolMessageType('RpbSearchQueryReq', (_message.Message,), dict( - DESCRIPTOR = _RPBSEARCHQUERYREQ, - __module__ = 'riak_search_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchQueryReq(_message.Message): + DESCRIPTOR = _RPBSEARCHQUERYREQ + # @@protoc_insertion_point(class_scope:RpbSearchQueryReq) - )) -_sym_db.RegisterMessage(RpbSearchQueryReq) -RpbSearchQueryResp = _reflection.GeneratedProtocolMessageType('RpbSearchQueryResp', (_message.Message,), dict( - DESCRIPTOR = _RPBSEARCHQUERYRESP, - __module__ = 'riak_search_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbSearchQueryResp(_message.Message): + DESCRIPTOR = _RPBSEARCHQUERYRESP + # @@protoc_insertion_point(class_scope:RpbSearchQueryResp) - )) -_sym_db.RegisterMessage(RpbSearchQueryResp) DESCRIPTOR.has_options = True -DESCRIPTOR._options = 
_descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\014RiakSearchPB')) +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\014RiakSearchPB') # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_ts_pb2.py b/riak/pb/riak_ts_pb2.py index b1031070..6e2ee149 100644 --- a/riak/pb/riak_ts_pb2.py +++ b/riak/pb/riak_ts_pb2.py @@ -2,18 +2,13 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_ts.proto -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) -_sym_db = _symbol_database.Default() - import riak.pb.riak_pb2 @@ -21,10 +16,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_ts.proto', package='', - serialized_pb=_b('\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 
\x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 \x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') - , - dependencies=[riak.pb.riak_pb2.DESCRIPTOR,]) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 \x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 \x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') _TSCOLUMNTYPE = _descriptor.EnumDescriptor( name='TsColumnType', @@ -58,7 +50,6 @@ serialized_start=1359, serialized_end=1438, ) -_sym_db.RegisterEnumDescriptor(_TSCOLUMNTYPE) TsColumnType = 
enum_type_wrapper.EnumTypeWrapper(_TSCOLUMNTYPE) VARCHAR = 0 @@ -93,7 +84,7 @@ _descriptor.FieldDescriptor( name='cover_context', full_name='TsQueryReq.cover_context', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -106,8 +97,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=29, serialized_end=120, ) @@ -150,8 +139,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=122, serialized_end=216, ) @@ -167,7 +154,7 @@ _descriptor.FieldDescriptor( name='table', full_name='TsGetReq.table', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -194,8 +181,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=218, serialized_end=282, ) @@ -231,8 +216,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=284, serialized_end=356, ) @@ -248,7 +231,7 @@ _descriptor.FieldDescriptor( name='table', full_name='TsPutReq.table', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -275,8 +258,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=358, serialized_end=444, ) @@ -298,8 +279,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=446, serialized_end=457, ) @@ -315,7 +294,7 @@ _descriptor.FieldDescriptor( name='table', full_name='TsDelReq.table', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -329,7 +308,7 @@ _descriptor.FieldDescriptor( name='vclock', full_name='TsDelReq.vclock', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -349,8 +328,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=459, serialized_end=539, ) @@ -372,8 +349,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=541, serialized_end=552, ) @@ -389,7 +364,7 @@ _descriptor.FieldDescriptor( name='base', full_name='TsInterpolation.base', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -409,8 +384,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=554, serialized_end=619, ) @@ -426,7 +399,7 @@ _descriptor.FieldDescriptor( name='name', full_name='TsColumnDescription.name', index=0, number=1, type=12, cpp_type=9, label=2, - 
has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -446,8 +419,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=621, serialized_end=685, ) @@ -476,8 +447,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=687, serialized_end=718, ) @@ -493,7 +462,7 @@ _descriptor.FieldDescriptor( name='varchar_value', full_name='TsCell.varchar_value', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -534,8 +503,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=720, serialized_end=843, ) @@ -551,7 +518,7 @@ _descriptor.FieldDescriptor( name='table', full_name='TsListKeysReq.table', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -571,8 +538,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=845, serialized_end=892, ) @@ -608,8 +573,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=894, serialized_end=946, ) @@ -632,14 +595,14 @@ _descriptor.FieldDescriptor( name='table', full_name='TsCoverageReq.table', index=1, number=2, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='replace_cover', full_name='TsCoverageReq.replace_cover', index=2, number=3, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -659,8 +622,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=948, serialized_end=1061, ) @@ -689,8 +650,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1063, serialized_end=1114, ) @@ -706,7 +665,7 @@ _descriptor.FieldDescriptor( name='ip', full_name='TsCoverageEntry.ip', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -720,7 +679,7 @@ _descriptor.FieldDescriptor( name='cover_context', full_name='TsCoverageEntry.cover_context', index=2, number=3, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -740,8 +699,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1116, serialized_end=1207, ) @@ -757,7 +714,7 @@ _descriptor.FieldDescriptor( name='field_name', full_name='TsRange.field_name', index=0, 
number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -792,7 +749,7 @@ _descriptor.FieldDescriptor( name='desc', full_name='TsRange.desc', index=5, number=6, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -805,8 +762,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=1210, serialized_end=1357, ) @@ -845,135 +800,116 @@ DESCRIPTOR.message_types_by_name['TsCoverageResp'] = _TSCOVERAGERESP DESCRIPTOR.message_types_by_name['TsCoverageEntry'] = _TSCOVERAGEENTRY DESCRIPTOR.message_types_by_name['TsRange'] = _TSRANGE -DESCRIPTOR.enum_types_by_name['TsColumnType'] = _TSCOLUMNTYPE -TsQueryReq = _reflection.GeneratedProtocolMessageType('TsQueryReq', (_message.Message,), dict( - DESCRIPTOR = _TSQUERYREQ, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsQueryReq(_message.Message): + DESCRIPTOR = _TSQUERYREQ + # @@protoc_insertion_point(class_scope:TsQueryReq) - )) -_sym_db.RegisterMessage(TsQueryReq) -TsQueryResp = _reflection.GeneratedProtocolMessageType('TsQueryResp', (_message.Message,), dict( - DESCRIPTOR = _TSQUERYRESP, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsQueryResp(_message.Message): + DESCRIPTOR = _TSQUERYRESP + # @@protoc_insertion_point(class_scope:TsQueryResp) - )) -_sym_db.RegisterMessage(TsQueryResp) -TsGetReq = _reflection.GeneratedProtocolMessageType('TsGetReq', (_message.Message,), dict( - DESCRIPTOR = _TSGETREQ, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsGetReq(_message.Message): + DESCRIPTOR = _TSGETREQ + # @@protoc_insertion_point(class_scope:TsGetReq) - )) -_sym_db.RegisterMessage(TsGetReq) -TsGetResp = _reflection.GeneratedProtocolMessageType('TsGetResp', (_message.Message,), dict( - DESCRIPTOR = _TSGETRESP, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsGetResp(_message.Message): + DESCRIPTOR = _TSGETRESP + # @@protoc_insertion_point(class_scope:TsGetResp) - )) -_sym_db.RegisterMessage(TsGetResp) -TsPutReq = _reflection.GeneratedProtocolMessageType('TsPutReq', (_message.Message,), dict( - DESCRIPTOR = _TSPUTREQ, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsPutReq(_message.Message): + DESCRIPTOR = _TSPUTREQ + # @@protoc_insertion_point(class_scope:TsPutReq) - )) -_sym_db.RegisterMessage(TsPutReq) -TsPutResp = _reflection.GeneratedProtocolMessageType('TsPutResp', (_message.Message,), dict( - DESCRIPTOR = _TSPUTRESP, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsPutResp(_message.Message): + DESCRIPTOR = _TSPUTRESP + # @@protoc_insertion_point(class_scope:TsPutResp) - )) -_sym_db.RegisterMessage(TsPutResp) -TsDelReq = _reflection.GeneratedProtocolMessageType('TsDelReq', (_message.Message,), dict( - DESCRIPTOR = _TSDELREQ, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsDelReq(_message.Message): + DESCRIPTOR = _TSDELREQ + # @@protoc_insertion_point(class_scope:TsDelReq) - )) -_sym_db.RegisterMessage(TsDelReq) 
-TsDelResp = _reflection.GeneratedProtocolMessageType('TsDelResp', (_message.Message,), dict( - DESCRIPTOR = _TSDELRESP, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsDelResp(_message.Message): + DESCRIPTOR = _TSDELRESP + # @@protoc_insertion_point(class_scope:TsDelResp) - )) -_sym_db.RegisterMessage(TsDelResp) -TsInterpolation = _reflection.GeneratedProtocolMessageType('TsInterpolation', (_message.Message,), dict( - DESCRIPTOR = _TSINTERPOLATION, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsInterpolation(_message.Message): + DESCRIPTOR = _TSINTERPOLATION + # @@protoc_insertion_point(class_scope:TsInterpolation) - )) -_sym_db.RegisterMessage(TsInterpolation) -TsColumnDescription = _reflection.GeneratedProtocolMessageType('TsColumnDescription', (_message.Message,), dict( - DESCRIPTOR = _TSCOLUMNDESCRIPTION, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsColumnDescription(_message.Message): + DESCRIPTOR = _TSCOLUMNDESCRIPTION + # @@protoc_insertion_point(class_scope:TsColumnDescription) - )) -_sym_db.RegisterMessage(TsColumnDescription) -TsRow = _reflection.GeneratedProtocolMessageType('TsRow', (_message.Message,), dict( - DESCRIPTOR = _TSROW, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsRow(_message.Message): + DESCRIPTOR = _TSROW + # @@protoc_insertion_point(class_scope:TsRow) - )) -_sym_db.RegisterMessage(TsRow) -TsCell = _reflection.GeneratedProtocolMessageType('TsCell', (_message.Message,), dict( - DESCRIPTOR = _TSCELL, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsCell(_message.Message): + DESCRIPTOR = _TSCELL + # @@protoc_insertion_point(class_scope:TsCell) - )) -_sym_db.RegisterMessage(TsCell) -TsListKeysReq = _reflection.GeneratedProtocolMessageType('TsListKeysReq', (_message.Message,), dict( - DESCRIPTOR = _TSLISTKEYSREQ, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsListKeysReq(_message.Message): + DESCRIPTOR = _TSLISTKEYSREQ + # @@protoc_insertion_point(class_scope:TsListKeysReq) - )) -_sym_db.RegisterMessage(TsListKeysReq) -TsListKeysResp = _reflection.GeneratedProtocolMessageType('TsListKeysResp', (_message.Message,), dict( - DESCRIPTOR = _TSLISTKEYSRESP, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsListKeysResp(_message.Message): + DESCRIPTOR = _TSLISTKEYSRESP + # @@protoc_insertion_point(class_scope:TsListKeysResp) - )) -_sym_db.RegisterMessage(TsListKeysResp) -TsCoverageReq = _reflection.GeneratedProtocolMessageType('TsCoverageReq', (_message.Message,), dict( - DESCRIPTOR = _TSCOVERAGEREQ, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsCoverageReq(_message.Message): + DESCRIPTOR = _TSCOVERAGEREQ + # @@protoc_insertion_point(class_scope:TsCoverageReq) - )) -_sym_db.RegisterMessage(TsCoverageReq) -TsCoverageResp = _reflection.GeneratedProtocolMessageType('TsCoverageResp', (_message.Message,), dict( - DESCRIPTOR = _TSCOVERAGERESP, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsCoverageResp(_message.Message): + DESCRIPTOR = _TSCOVERAGERESP + # @@protoc_insertion_point(class_scope:TsCoverageResp) - )) -_sym_db.RegisterMessage(TsCoverageResp) -TsCoverageEntry = _reflection.GeneratedProtocolMessageType('TsCoverageEntry', 
(_message.Message,), dict( - DESCRIPTOR = _TSCOVERAGEENTRY, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsCoverageEntry(_message.Message): + DESCRIPTOR = _TSCOVERAGEENTRY + # @@protoc_insertion_point(class_scope:TsCoverageEntry) - )) -_sym_db.RegisterMessage(TsCoverageEntry) -TsRange = _reflection.GeneratedProtocolMessageType('TsRange', (_message.Message,), dict( - DESCRIPTOR = _TSRANGE, - __module__ = 'riak_ts_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class TsRange(_message.Message): + DESCRIPTOR = _TSRANGE + # @@protoc_insertion_point(class_scope:TsRange) - )) -_sym_db.RegisterMessage(TsRange) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\010RiakTsPB')) +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\010RiakTsPB') # @@protoc_insertion_point(module_scope) diff --git a/riak/pb/riak_yokozuna_pb2.py b/riak/pb/riak_yokozuna_pb2.py index 7c9b6798..1673f538 100644 --- a/riak/pb/riak_yokozuna_pb2.py +++ b/riak/pb/riak_yokozuna_pb2.py @@ -2,26 +2,19 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_yokozuna.proto -import sys -_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1')) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database from google.protobuf import descriptor_pb2 # @@protoc_insertion_point(imports) -_sym_db = _symbol_database.Default() - DESCRIPTOR = _descriptor.FileDescriptor( name='riak_yokozuna.proto', package='', - serialized_pb=_b('\n\x13riak_yokozuna.proto\"?\n\x10RpbYokozunaIndex\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0e\n\x06schema\x18\x02 \x01(\x0c\x12\r\n\x05n_val\x18\x03 \x01(\r\"&\n\x16RpbYokozunaIndexGetReq\x12\x0c\n\x04name\x18\x01 \x01(\x0c\";\n\x17RpbYokozunaIndexGetResp\x12 \n\x05index\x18\x01 \x03(\x0b\x32\x11.RpbYokozunaIndex\"K\n\x16RpbYokozunaIndexPutReq\x12 \n\x05index\x18\x01 \x02(\x0b\x32\x11.RpbYokozunaIndex\x12\x0f\n\x07timeout\x18\x02 \x01(\r\")\n\x19RpbYokozunaIndexDeleteReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\"2\n\x11RpbYokozunaSchema\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"=\n\x17RpbYokozunaSchemaPutReq\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchema\"\'\n\x17RpbYokozunaSchemaGetReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\">\n\x18RpbYokozunaSchemaGetResp\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchemaB)\n\x17\x63om.basho.riak.protobufB\x0eRiakYokozunaPB') -) -_sym_db.RegisterFileDescriptor(DESCRIPTOR) + serialized_pb='\n\x13riak_yokozuna.proto\"?\n\x10RpbYokozunaIndex\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0e\n\x06schema\x18\x02 \x01(\x0c\x12\r\n\x05n_val\x18\x03 \x01(\r\"&\n\x16RpbYokozunaIndexGetReq\x12\x0c\n\x04name\x18\x01 \x01(\x0c\";\n\x17RpbYokozunaIndexGetResp\x12 \n\x05index\x18\x01 \x03(\x0b\x32\x11.RpbYokozunaIndex\"K\n\x16RpbYokozunaIndexPutReq\x12 \n\x05index\x18\x01 \x02(\x0b\x32\x11.RpbYokozunaIndex\x12\x0f\n\x07timeout\x18\x02 \x01(\r\")\n\x19RpbYokozunaIndexDeleteReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\"2\n\x11RpbYokozunaSchema\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"=\n\x17RpbYokozunaSchemaPutReq\x12\"\n\x06schema\x18\x01 
\x02(\x0b\x32\x12.RpbYokozunaSchema\"\'\n\x17RpbYokozunaSchemaGetReq\x12\x0c\n\x04name\x18\x01 \x02(\x0c\">\n\x18RpbYokozunaSchemaGetResp\x12\"\n\x06schema\x18\x01 \x02(\x0b\x32\x12.RpbYokozunaSchemaB)\n\x17\x63om.basho.riak.protobufB\x0eRiakYokozunaPB') @@ -36,14 +29,14 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaIndex.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='schema', full_name='RpbYokozunaIndex.schema', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -63,8 +56,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=23, serialized_end=86, ) @@ -80,7 +71,7 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaIndexGetReq.name', index=0, number=1, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -93,8 +84,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=88, serialized_end=126, ) @@ -123,8 +112,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=128, serialized_end=187, ) @@ -160,8 +147,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=189, serialized_end=264, ) @@ -177,7 +162,7 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaIndexDeleteReq.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -190,8 +175,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=266, serialized_end=307, ) @@ -207,14 +190,14 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaSchema.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), _descriptor.FieldDescriptor( name='content', full_name='RpbYokozunaSchema.content', index=1, number=2, type=12, cpp_type=9, label=1, - has_default_value=False, default_value=_b(""), + has_default_value=False, default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -227,8 +210,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=309, serialized_end=359, ) @@ -257,8 +238,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=361, serialized_end=422, ) @@ -274,7 +253,7 @@ _descriptor.FieldDescriptor( name='name', full_name='RpbYokozunaSchemaGetReq.name', index=0, number=1, type=12, cpp_type=9, label=2, - has_default_value=False, default_value=_b(""), + has_default_value=False, 
default_value="", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), @@ -287,8 +266,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=424, serialized_end=463, ) @@ -317,8 +294,6 @@ options=None, is_extendable=False, extension_ranges=[], - oneofs=[ - ], serialized_start=465, serialized_end=527, ) @@ -337,70 +312,61 @@ DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetReq'] = _RPBYOKOZUNASCHEMAGETREQ DESCRIPTOR.message_types_by_name['RpbYokozunaSchemaGetResp'] = _RPBYOKOZUNASCHEMAGETRESP -RpbYokozunaIndex = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndex', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNAINDEX, - __module__ = 'riak_yokozuna_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndex(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEX + # @@protoc_insertion_point(class_scope:RpbYokozunaIndex) - )) -_sym_db.RegisterMessage(RpbYokozunaIndex) -RpbYokozunaIndexGetReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndexGetReq', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNAINDEXGETREQ, - __module__ = 'riak_yokozuna_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexGetReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXGETREQ + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetReq) - )) -_sym_db.RegisterMessage(RpbYokozunaIndexGetReq) -RpbYokozunaIndexGetResp = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndexGetResp', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNAINDEXGETRESP, - __module__ = 'riak_yokozuna_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexGetResp(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXGETRESP + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexGetResp) - )) -_sym_db.RegisterMessage(RpbYokozunaIndexGetResp) -RpbYokozunaIndexPutReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndexPutReq', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNAINDEXPUTREQ, - __module__ = 'riak_yokozuna_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexPutReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXPUTREQ + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexPutReq) - )) -_sym_db.RegisterMessage(RpbYokozunaIndexPutReq) -RpbYokozunaIndexDeleteReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaIndexDeleteReq', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNAINDEXDELETEREQ, - __module__ = 'riak_yokozuna_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaIndexDeleteReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNAINDEXDELETEREQ + # @@protoc_insertion_point(class_scope:RpbYokozunaIndexDeleteReq) - )) -_sym_db.RegisterMessage(RpbYokozunaIndexDeleteReq) -RpbYokozunaSchema = _reflection.GeneratedProtocolMessageType('RpbYokozunaSchema', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNASCHEMA, - __module__ = 'riak_yokozuna_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchema(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMA + # @@protoc_insertion_point(class_scope:RpbYokozunaSchema) - )) -_sym_db.RegisterMessage(RpbYokozunaSchema) -RpbYokozunaSchemaPutReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaSchemaPutReq', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNASCHEMAPUTREQ, - __module__ = 'riak_yokozuna_pb2' 
+@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaPutReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAPUTREQ + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaPutReq) - )) -_sym_db.RegisterMessage(RpbYokozunaSchemaPutReq) -RpbYokozunaSchemaGetReq = _reflection.GeneratedProtocolMessageType('RpbYokozunaSchemaGetReq', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNASCHEMAGETREQ, - __module__ = 'riak_yokozuna_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaGetReq(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAGETREQ + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetReq) - )) -_sym_db.RegisterMessage(RpbYokozunaSchemaGetReq) -RpbYokozunaSchemaGetResp = _reflection.GeneratedProtocolMessageType('RpbYokozunaSchemaGetResp', (_message.Message,), dict( - DESCRIPTOR = _RPBYOKOZUNASCHEMAGETRESP, - __module__ = 'riak_yokozuna_pb2' +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class RpbYokozunaSchemaGetResp(_message.Message): + DESCRIPTOR = _RPBYOKOZUNASCHEMAGETRESP + # @@protoc_insertion_point(class_scope:RpbYokozunaSchemaGetResp) - )) -_sym_db.RegisterMessage(RpbYokozunaSchemaGetResp) DESCRIPTOR.has_options = True -DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\027com.basho.riak.protobufB\016RiakYokozunaPB')) +DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\027com.basho.riak.protobufB\016RiakYokozunaPB') # @@protoc_insertion_point(module_scope) diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index b4e44684..a9d63764 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -107,7 +107,7 @@ def test_removes_require_context(self): self.assertTrue(dtype.modified) -class HllUnitTests(DatatypeUnitTestBase, unittest.TestCase): +class HllUnitTests(DatatypeUnitTestBase, unittest.TestCase, Comparison): dtype = datatypes.Hll def op(self, dtype): From c1936f300c19fda49b506d43648b9a9918545fab Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 7 Sep 2016 11:58:45 -0700 Subject: [PATCH 245/324] Add tests for HLL precision --- riak/client/operations.py | 8 ++++++++ riak/codecs/pbuf.py | 5 +++-- riak/tests/__init__.py | 1 + riak/tests/test_datatypes.py | 20 +++++++++++++++++++- tools | 2 +- 5 files changed, 32 insertions(+), 4 deletions(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index 8bf2b9c2..b5c6b8f0 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -382,6 +382,7 @@ def set_bucket_props(self, transport, bucket, props): :param props: the properties to set :type props: dict """ + _validate_bucket_props(props) return transport.set_bucket_props(bucket, props) @retryable @@ -430,6 +431,7 @@ def set_bucket_type_props(self, transport, bucket_type, props): :param props: the properties to set :type props: dict """ + _validate_bucket_props(props) return transport.set_bucket_type_props(bucket_type, props) @retryable @@ -1227,6 +1229,12 @@ def _fetch_datatype(self, transport, bucket, key, r=None, pr=None, include_context=include_context) +def _validate_bucket_props(props): + if 'hll_precision' in props: + precision = props['hll_precision'] + if precision < 4 or precision > 16: + raise ValueError('hll_precision must be between 4 and 16, inclusive') + def _validate_timeout(timeout): """ Raises an exception if the given timeout is an invalid value. 
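[Editor's note: illustrative sketch, not part of the patch series. It shows the effect of the _validate_bucket_props() check added in the operations.py hunk above: an out-of-range hll_precision is now rejected client-side with a ValueError before any request reaches Riak. The local node address and the 'hlls' bucket type are assumptions borrowed from the integration tests later in this series.]

    import riak

    client = riak.RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)  # assumed local node
    hlls = client.bucket_type('hlls')  # assumed bucket type, as in the tests

    # Valid precisions are 4..16 inclusive; this passes the client-side check
    # and is sent to Riak via set_bucket_type_props.
    hlls.set_property('hll_precision', 14)

    try:
        hlls.set_property('hll_precision', 3)  # rejected locally by _validate_bucket_props
    except ValueError as err:
        print(err)  # hll_precision must be between 4 and 16, inclusive
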
diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index b78b585e..9934cfdc 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -50,7 +50,7 @@ def _invert(d): NORMAL_PROPS = ['n_val', 'allow_mult', 'last_write_wins', 'old_vclock', 'young_vclock', 'big_vclock', 'small_vclock', 'basic_quorum', 'notfound_ok', 'search', 'backend', 'search_index', 'datatype', - 'write_once'] + 'write_once', 'hll_precision'] COMMIT_HOOK_PROPS = ['precommit', 'postcommit'] MODFUN_PROPS = ['chash_keyfun', 'linkfun'] QUORUM_PROPS = ['r', 'pr', 'w', 'pw', 'dw', 'rw'] @@ -71,7 +71,8 @@ def _invert(d): DT_FETCH_TYPES = { riak.pb.riak_dt_pb2.DtFetchResp.COUNTER: 'counter', riak.pb.riak_dt_pb2.DtFetchResp.SET: 'set', - riak.pb.riak_dt_pb2.DtFetchResp.MAP: 'map' + riak.pb.riak_dt_pb2.DtFetchResp.MAP: 'map', + riak.pb.riak_dt_pb2.DtFetchResp.HLL: 'hll' } diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index aa475c9c..4237a780 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -52,6 +52,7 @@ def hostname_resolves(hostname): RUN_BTYPES = int(os.environ.get('RUN_BTYPES', '0')) RUN_DATATYPES = int(os.environ.get('RUN_DATATYPES', '0')) +RUN_DATATYPE_HLL = int(os.environ.get('RUN_DATATYPE_HLL', '0')) RUN_CLIENT = int(os.environ.get('RUN_CLIENT', '0')) RUN_INDEXES = int(os.environ.get('RUN_INDEXES', '0')) RUN_KV = int(os.environ.get('RUN_KV', '0')) diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index a9d63764..2c46fe3d 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -3,7 +3,7 @@ import riak.datatypes as datatypes from riak import RiakBucket, BucketType, RiakObject -from riak.tests import RUN_DATATYPES +from riak.tests import RUN_DATATYPES, RUN_DATATYPE_HLL from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -158,6 +158,24 @@ def test_removes_require_context(self): self.assertTrue(dtype.modified) +@unittest.skipUnless(RUN_DATATYPE_HLL, 'RUN_DATATYPE_HLL is 0') +class HllDatatypeIntegrationTests(IntegrationTestBase, + unittest.TestCase): + def test_fetch_bucket_type_props(self): + btype = self.client.bucket_type('hlls') + props = btype.get_properties() + self.assertEqual(14, props['hll_precision']) + + def test_set_invalid_hll_precision(self): + btype = self.client.bucket_type('hlls') + with self.assertRaises(ValueError): + btype.set_property('hll_precision', 3) + with self.assertRaises(ValueError): + btype.set_property('hll_precision', 17) + with self.assertRaises(ValueError): + btype.set_property('hll_precision', 0) + + @unittest.skipUnless(RUN_DATATYPES, 'RUN_DATATYPES is 0') class DatatypeIntegrationTests(IntegrationTestBase, unittest.TestCase, diff --git a/tools b/tools index 4dae68dd..8d1b6e50 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 4dae68ddca2d405090d64a97c7e99b4607263892 +Subproject commit 8d1b6e502a4082693e935774a77a73d4896e4bf5 From 795d653b8edb7f45a8596c3665581843a2c43425 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 7 Sep 2016 12:24:19 -0700 Subject: [PATCH 246/324] fix flake errors --- riak/client/operations.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index b5c6b8f0..1d75cede 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -1233,7 +1233,9 @@ def _validate_bucket_props(props): if 'hll_precision' in props: precision = props['hll_precision'] if precision < 4 or precision > 16: - raise ValueError('hll_precision must be between 4 and 16, 
inclusive') + raise ValueError( + 'hll_precision must be between 4 and 16, inclusive') + def _validate_timeout(timeout): """ From 7b094ba580d06e61852ee3e32d97a1cb33160996 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 8 Sep 2016 10:36:13 -0700 Subject: [PATCH 247/324] Add encode/decode for HLL CRDT with integration tests --- riak/codecs/http.py | 7 +++++-- riak/codecs/pbuf.py | 19 ++++++++++++++++++ riak/datatypes/datatype.py | 2 +- riak/datatypes/set.py | 19 +----------------- riak/tests/test_datatypes.py | 33 ++++++++++++++++++++++++++++++- riak/transports/http/transport.py | 18 ++++++++--------- 6 files changed, 66 insertions(+), 32 deletions(-) diff --git a/riak/codecs/http.py b/riak/codecs/http.py index 1078dd43..5b74158a 100644 --- a/riak/codecs/http.py +++ b/riak/codecs/http.py @@ -259,7 +259,6 @@ def _parse_content_type(self, value): def _decode_datatype(self, dtype, value): if not dtype == 'map': return value - map = {} for key in value: field = self._map_key_to_pair(key) @@ -281,13 +280,17 @@ def _encode_dt_op(self, dtype, op): elif dtype == 'flag': return op elif dtype == 'set': - # self._encode_set_op(msg, op) set_op = {} if 'adds' in op: set_op['add_all'] = op['adds'] if 'removes' in op: set_op['remove_all'] = op['removes'] return set_op + elif dtype == 'hll': + hll_op = {} + if 'adds' in op: + hll_op['add_all'] = op['adds'] + return hll_op elif dtype == 'map': map_op = {} for fop in op: diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 9934cfdc..2b364403 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -578,6 +578,8 @@ def decode_dt_value(self, dtype, msg): return msg.counter_value elif dtype == 'set': return self.decode_set_value(msg.set_value) + elif dtype == 'hll': + return self.decode_hll_value(msg.hll_value) elif dtype == 'map': return self.decode_map_value(msg.map_value) @@ -606,17 +608,26 @@ def decode_map_value(self, entries): value = entry.flag_value elif dtype == 'map': value = self.decode_map_value(entry.map_value) + else: + raise ValueError( + 'Map may not contain datatype: {}' + .format(dtype)) out[(name, dtype)] = value return out def decode_set_value(self, set_value): return [bytes_to_str(string[:]) for string in set_value] + def decode_hll_value(self, hll_value): + return int(hll_value) + def encode_dt_op(self, dtype, req, op): if dtype == 'counter': req.op.counter_op.increment = op[1] elif dtype == 'set': self.encode_set_op(req.op, op) + elif dtype == 'hll': + self.encode_hll_op(req.op, op) elif dtype == 'map': self.encode_map_op(req.op.map_op, op) else: @@ -629,6 +640,10 @@ def encode_set_op(self, msg, op): if 'removes' in op: msg.set_op.removes.extend(str_to_bytes(op['removes'])) + def encode_hll_op(self, msg, op): + if 'adds' in op: + msg.hll_op.adds.extend(str_to_bytes(op['adds'])) + def encode_map_op(self, msg, ops): for op in ops: name, dtype = op[1] @@ -663,6 +678,10 @@ def encode_map_update(self, dtype, msg, op): msg.flag_op = riak.pb.riak_dt_pb2.MapUpdate.ENABLE else: msg.flag_op = riak.pb.riak_dt_pb2.MapUpdate.DISABLE + else: + raise ValueError( + 'Map may not contain datatype: {}' + .format(dtype)) def encode_to_ts_cell(self, cell, ts_cell): if cell is not None: diff --git a/riak/datatypes/datatype.py b/riak/datatypes/datatype.py index a28d11cd..192b815e 100644 --- a/riak/datatypes/datatype.py +++ b/riak/datatypes/datatype.py @@ -54,7 +54,7 @@ def value(self): which is unique for each datatype. **NB**: Do not use this property to mutate data, as it will not - have any effect. 
Use the methods of the individual type to effect + have any effect. Use the methods of the individual type to affect changes. This value is guaranteed to be independent of any internal data representation. """ diff --git a/riak/datatypes/set.py b/riak/datatypes/set.py index a055020a..e0797ab8 100644 --- a/riak/datatypes/set.py +++ b/riak/datatypes/set.py @@ -1,22 +1,5 @@ -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import collections + from .datatype import Datatype from six import string_types from riak.datatypes import TYPES diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 2c46fe3d..73552a47 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -2,7 +2,7 @@ import unittest import riak.datatypes as datatypes -from riak import RiakBucket, BucketType, RiakObject +from riak import RiakError, RiakBucket, BucketType, RiakObject from riak.tests import RUN_DATATYPES, RUN_DATATYPE_HLL from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -166,6 +166,17 @@ def test_fetch_bucket_type_props(self): props = btype.get_properties() self.assertEqual(14, props['hll_precision']) + def test_set_same_hll_precision(self): + btype = self.client.bucket_type('hlls') + btype.set_property('hll_precision', 14) + props = btype.get_properties() + self.assertEqual(14, props['hll_precision']) + + def test_set_larger_hll_precision(self): + btype = self.client.bucket_type('hlls') + with self.assertRaises(RiakError): + btype.set_property('hll_precision', 15) + def test_set_invalid_hll_precision(self): btype = self.client.bucket_type('hlls') with self.assertRaises(ValueError): @@ -175,6 +186,26 @@ def test_set_invalid_hll_precision(self): with self.assertRaises(ValueError): btype.set_property('hll_precision', 0) + def test_dt_hll(self): + btype = self.client.bucket_type('hlls') + props = btype.get_properties() + self.assertEqual(14, props['hll_precision']) + bucket = btype.bucket(self.bucket_name) + myhll = datatypes.Hll(bucket, self.key_name) + myhll.add('user1') + myhll.add('user2') + myhll.add('foo') + myhll.add('bar') + myhll.add('baz') + myhll.add('user1') + self.assertEqual(5, len(myhll._adds)) + + myhll.store() + self.assertEqual(5, myhll.value) + + otherhll = bucket.get(self.key_name) + self.assertEqual(5, otherhll.value) + @unittest.skipUnless(RUN_DATATYPES, 'RUN_DATATYPES is 0') class DatatypeIntegrationTests(IntegrationTestBase, diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index 10238d32..bfa9eb96 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -696,12 +696,11 @@ def update_counter(self, bucket, key, amount, **options): self.check_http_code(status, [200, 204]) def fetch_datatype(self, bucket, key, **options): - if bucket.bucket_type.is_default(): - raise NotImplementedError("Datatypes cannot be used in the default" - " bucket-type.") - if not self.datatypes(): raise 
NotImplementedError("Datatypes are not supported.") + if bucket.bucket_type.is_default(): + raise NotImplementedError( + 'Datatypes cannot be used in the default bucket-type.') url = self.datatypes_path(bucket.bucket_type.name, bucket.name, key, **options) @@ -717,12 +716,11 @@ def fetch_datatype(self, bucket, key, **options): response.get('context')) def update_datatype(self, datatype, **options): - if datatype.bucket.bucket_type.is_default(): - raise NotImplementedError("Datatypes cannot be used in the default" - " bucket-type.") - if not self.datatypes(): - raise NotImplementedError("Datatypes are not supported.") + raise NotImplementedError('Datatypes are not supported.') + if datatype.bucket.bucket_type.is_default(): + raise NotImplementedError( + 'Datatypes cannot be used in the default bucket-type.') op = datatype.to_op() context = datatype.context @@ -731,7 +729,7 @@ def update_datatype(self, datatype, **options): raise ValueError("No operation to send on datatype {!r}". format(datatype)) - if type_name not in ('counter', 'set', 'map'): + if type_name not in ('counter', 'set', 'hll', 'map'): raise TypeError("Cannot send operation on datatype {!r}". format(type_name)) From a9b87564d5e5cae45fcf84263e0202a3d2d16f99 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 8 Sep 2016 10:54:08 -0700 Subject: [PATCH 248/324] Add buildbot Makefile target to run HLL DT tests --- buildbot/Makefile | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/buildbot/Makefile b/buildbot/Makefile index cd561d48..74144c2e 100644 --- a/buildbot/Makefile +++ b/buildbot/Makefile @@ -57,5 +57,11 @@ test_timeseries: @$(RIAK_ADMIN) security disable @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_KV=1 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. +test_hll: + @echo "Testing Riak Python Client (HLL datatype)" + @$(RIAK_ADMIN) security disable + @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_DATATYPES=1 RUN_DATATYPE_HLL=1 ./tox_runner.sh .. + @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_DATATYPES=1 RUN_DATATYPE_HLL=1 ./tox_runner.sh .. 
+ setup: ./tox_setup.sh From 2a1d51a3f2222a7661ae93651de2670b86a7579a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 8 Sep 2016 11:44:56 -0700 Subject: [PATCH 249/324] Improve timeout validation code --- riak/client/operations.py | 48 ++++++++++++++++++++++----------------- riak/tests/test_misc.py | 28 +++++++++++++++++++++++ 2 files changed, 55 insertions(+), 21 deletions(-) create mode 100644 riak/tests/test_misc.py diff --git a/riak/client/operations.py b/riak/client/operations.py index 8bf2b9c2..b52d14c9 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -1,3 +1,5 @@ +import six + import riak.client.multi from riak.client.transport import RiakClientTransport, \ @@ -6,7 +8,6 @@ from riak.datatypes import TYPES from riak.table import Table from riak.util import bytes_to_str -from six import string_types, PY2 class RiakClientOperations(RiakClientTransport): @@ -156,8 +157,7 @@ def get_index(self, transport, bucket, index, startkey, endkey=None, :type term_regex: string :rtype: :class:`~riak.client.index_page.IndexPage` """ - if timeout != 'infinity': - _validate_timeout(timeout) + _validate_timeout(timeout, infinity_ok=True) page = IndexPage(self, bucket, index, startkey, endkey, return_terms, max_results, term_regex) @@ -266,8 +266,7 @@ def stream_index(self, bucket, index, startkey, endkey=None, :rtype: :class:`~riak.client.index_page.IndexPage` """ - if timeout != 'infinity': - _validate_timeout(timeout) + _validate_timeout(timeout, infinity_ok=True) page = IndexPage(self, bucket, index, startkey, endkey, return_terms, max_results, term_regex) @@ -495,7 +494,7 @@ def stream_keys(self, bucket, timeout=None): try: for keylist in stream: if len(keylist) > 0: - if PY2: + if six.PY2: yield keylist else: yield [bytes_to_str(item) for item in keylist] @@ -552,7 +551,7 @@ def ts_describe(self, transport, table): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, string_types): + if isinstance(t, six.six.string_types): t = Table(self, table) return transport.ts_describe(t) @@ -573,7 +572,7 @@ def ts_get(self, transport, table, key): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, string_types): + if isinstance(t, six.six.string_types): t = Table(self, table) return transport.ts_get(t, key) @@ -610,7 +609,7 @@ def ts_delete(self, transport, table, key): :rtype: boolean """ t = table - if isinstance(t, string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_delete(t, key) @@ -631,7 +630,7 @@ def ts_query(self, transport, table, query, interpolations=None): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_query(t, query, interpolations) @@ -703,7 +702,7 @@ def get(self, transport, robj, r=None, pr=None, timeout=None, :type notfound_ok: bool """ _validate_timeout(timeout) - if not isinstance(robj.key, string_types): + if not isinstance(robj.key, six.string_types): raise TypeError( 'key must be a string, instead got {0}'.format(repr(robj.key))) @@ -1055,11 +1054,7 @@ def update_counter(self, bucket, key, value, w=None, dw=None, pw=None, :param returnvalue: whether to return the updated value of the counter :type returnvalue: bool """ - if PY2: - valid_types = (int, long) # noqa - else: - valid_types = (int,) - if type(value) not in valid_types: + if not isinstance(value, six.integer_types): raise TypeError("Counter update amount must be an integer") if value == 0: raise ValueError("Cannot increment 
counter by 0") @@ -1227,11 +1222,22 @@ def _fetch_datatype(self, transport, bucket, key, r=None, pr=None, include_context=include_context) -def _validate_timeout(timeout): +def _validate_timeout(timeout, infinity_ok=False): """ Raises an exception if the given timeout is an invalid value. """ - if not (timeout is None or - ((type(timeout) == int or - (PY2 and type(timeout) == long)) and timeout > 0)): # noqa - raise ValueError("timeout must be a positive integer") + if timeout is None: + return + + if timeout == 'infinity': + if infinity_ok: + return + else: + raise ValueError( + 'timeout must be a positive integer ' + '("infinity" is not valid)') + + if isinstance(timeout, six.integer_types) and timeout > 0: + return + + raise ValueError('timeout must be a positive integer') diff --git a/riak/tests/test_misc.py b/riak/tests/test_misc.py new file mode 100644 index 00000000..15605114 --- /dev/null +++ b/riak/tests/test_misc.py @@ -0,0 +1,28 @@ +import unittest + + +class MiscTests(unittest.TestCase): + def test_timeout_validation(self): + from riak.client.operations import _validate_timeout + # valid cases + try: + _validate_timeout(None) + _validate_timeout(None, infinity_ok=True) + _validate_timeout('infinity', infinity_ok=True) + _validate_timeout(1234) + _validate_timeout(1234567898765432123456789) + except ValueError: + self.fail('_validate_timeout() unexpectedly raised ValueError') + # invalid cases + with self.assertRaises(ValueError): + _validate_timeout('infinity') + with self.assertRaises(ValueError): + _validate_timeout('infinity-foo') + with self.assertRaises(ValueError): + _validate_timeout('foobarbaz') + with self.assertRaises(ValueError): + _validate_timeout('1234') + with self.assertRaises(ValueError): + _validate_timeout(0) + with self.assertRaises(ValueError): + _validate_timeout(12.34) From 978d08a377c40076b4ee19501894cc748f5265f2 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 8 Sep 2016 12:18:23 -0700 Subject: [PATCH 250/324] Removed duplicated namespace --- riak/client/operations.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index c8812ccf..c143cfbc 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -553,7 +553,7 @@ def ts_describe(self, transport, table): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, six.six.string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_describe(t) @@ -574,7 +574,7 @@ def ts_get(self, transport, table, key): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, six.six.string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_get(t, key) From 962e451c5c5216d6bc5c7323b300f505f9d1524f Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 8 Sep 2016 13:22:07 -0700 Subject: [PATCH 251/324] Removed duplicated namespace --- riak/client/operations.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index b52d14c9..1dfd2646 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -551,7 +551,7 @@ def ts_describe(self, transport, table): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, six.six.string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_describe(t) @@ -572,7 +572,7 @@ def ts_get(self, transport, table, key): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, six.six.string_types): + if 
isinstance(t, six.string_types): t = Table(self, table) return transport.ts_get(t, key) From f2c8aa829c06fa1f32117d8cf3feee92b7691f30 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 8 Sep 2016 11:44:56 -0700 Subject: [PATCH 252/324] Improve timeout validation code --- riak/client/operations.py | 48 ++++++++++++++++++++++----------------- riak/tests/test_misc.py | 28 +++++++++++++++++++++++ 2 files changed, 55 insertions(+), 21 deletions(-) create mode 100644 riak/tests/test_misc.py diff --git a/riak/client/operations.py b/riak/client/operations.py index 8bf2b9c2..b52d14c9 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -1,3 +1,5 @@ +import six + import riak.client.multi from riak.client.transport import RiakClientTransport, \ @@ -6,7 +8,6 @@ from riak.datatypes import TYPES from riak.table import Table from riak.util import bytes_to_str -from six import string_types, PY2 class RiakClientOperations(RiakClientTransport): @@ -156,8 +157,7 @@ def get_index(self, transport, bucket, index, startkey, endkey=None, :type term_regex: string :rtype: :class:`~riak.client.index_page.IndexPage` """ - if timeout != 'infinity': - _validate_timeout(timeout) + _validate_timeout(timeout, infinity_ok=True) page = IndexPage(self, bucket, index, startkey, endkey, return_terms, max_results, term_regex) @@ -266,8 +266,7 @@ def stream_index(self, bucket, index, startkey, endkey=None, :rtype: :class:`~riak.client.index_page.IndexPage` """ - if timeout != 'infinity': - _validate_timeout(timeout) + _validate_timeout(timeout, infinity_ok=True) page = IndexPage(self, bucket, index, startkey, endkey, return_terms, max_results, term_regex) @@ -495,7 +494,7 @@ def stream_keys(self, bucket, timeout=None): try: for keylist in stream: if len(keylist) > 0: - if PY2: + if six.PY2: yield keylist else: yield [bytes_to_str(item) for item in keylist] @@ -552,7 +551,7 @@ def ts_describe(self, transport, table): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, string_types): + if isinstance(t, six.six.string_types): t = Table(self, table) return transport.ts_describe(t) @@ -573,7 +572,7 @@ def ts_get(self, transport, table, key): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, string_types): + if isinstance(t, six.six.string_types): t = Table(self, table) return transport.ts_get(t, key) @@ -610,7 +609,7 @@ def ts_delete(self, transport, table, key): :rtype: boolean """ t = table - if isinstance(t, string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_delete(t, key) @@ -631,7 +630,7 @@ def ts_query(self, transport, table, query, interpolations=None): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_query(t, query, interpolations) @@ -703,7 +702,7 @@ def get(self, transport, robj, r=None, pr=None, timeout=None, :type notfound_ok: bool """ _validate_timeout(timeout) - if not isinstance(robj.key, string_types): + if not isinstance(robj.key, six.string_types): raise TypeError( 'key must be a string, instead got {0}'.format(repr(robj.key))) @@ -1055,11 +1054,7 @@ def update_counter(self, bucket, key, value, w=None, dw=None, pw=None, :param returnvalue: whether to return the updated value of the counter :type returnvalue: bool """ - if PY2: - valid_types = (int, long) # noqa - else: - valid_types = (int,) - if type(value) not in valid_types: + if not isinstance(value, six.integer_types): raise TypeError("Counter update amount must be an 
integer") if value == 0: raise ValueError("Cannot increment counter by 0") @@ -1227,11 +1222,22 @@ def _fetch_datatype(self, transport, bucket, key, r=None, pr=None, include_context=include_context) -def _validate_timeout(timeout): +def _validate_timeout(timeout, infinity_ok=False): """ Raises an exception if the given timeout is an invalid value. """ - if not (timeout is None or - ((type(timeout) == int or - (PY2 and type(timeout) == long)) and timeout > 0)): # noqa - raise ValueError("timeout must be a positive integer") + if timeout is None: + return + + if timeout == 'infinity': + if infinity_ok: + return + else: + raise ValueError( + 'timeout must be a positive integer ' + '("infinity" is not valid)') + + if isinstance(timeout, six.integer_types) and timeout > 0: + return + + raise ValueError('timeout must be a positive integer') diff --git a/riak/tests/test_misc.py b/riak/tests/test_misc.py new file mode 100644 index 00000000..15605114 --- /dev/null +++ b/riak/tests/test_misc.py @@ -0,0 +1,28 @@ +import unittest + + +class MiscTests(unittest.TestCase): + def test_timeout_validation(self): + from riak.client.operations import _validate_timeout + # valid cases + try: + _validate_timeout(None) + _validate_timeout(None, infinity_ok=True) + _validate_timeout('infinity', infinity_ok=True) + _validate_timeout(1234) + _validate_timeout(1234567898765432123456789) + except ValueError: + self.fail('_validate_timeout() unexpectedly raised ValueError') + # invalid cases + with self.assertRaises(ValueError): + _validate_timeout('infinity') + with self.assertRaises(ValueError): + _validate_timeout('infinity-foo') + with self.assertRaises(ValueError): + _validate_timeout('foobarbaz') + with self.assertRaises(ValueError): + _validate_timeout('1234') + with self.assertRaises(ValueError): + _validate_timeout(0) + with self.assertRaises(ValueError): + _validate_timeout(12.34) From 581ff6384f54f2dbcbc267d4b3a17e3695b725cc Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 8 Sep 2016 13:22:07 -0700 Subject: [PATCH 253/324] Removed duplicated namespace --- riak/client/operations.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index b52d14c9..1dfd2646 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -551,7 +551,7 @@ def ts_describe(self, transport, table): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, six.six.string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_describe(t) @@ -572,7 +572,7 @@ def ts_get(self, transport, table, key): :rtype: :class:`TsObject ` """ t = table - if isinstance(t, six.six.string_types): + if isinstance(t, six.string_types): t = Table(self, table) return transport.ts_get(t, key) From e1ce06ade276e6504ba57fc67e52befc7ce8ec5e Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 9 Sep 2016 09:54:31 -0700 Subject: [PATCH 254/324] Restore test lines --- riak/tests/test_datatypes.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 73552a47..fb270ac8 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -97,6 +97,8 @@ def op(self, dtype): def check_op_output(self, op): self.assertIn('adds', op) self.assertItemsEqual(op['adds'], ['bar', 'foo']) + self.assertIn('removes', op) + self.assertIn('foo', op['removes']) def test_removes_require_context(self): dtype = self.dtype(self.bucket, 'key') From 
969930fd4b5acdb7083c5f7a8eca63f5107e600c Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 23 Sep 2016 07:26:29 -0700 Subject: [PATCH 255/324] Add steps to Travis CI build to install Riak --- .travis.yml | 46 +++++++++++++++++++++++++++++++++++++--------- tools | 2 +- 2 files changed, 38 insertions(+), 10 deletions(-) diff --git a/.travis.yml b/.travis.yml index 928a8cd1..c110b436 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,15 +1,43 @@ +sudo: required + +dist: trusty + language: python + python: -- '2.7' -- '3.3' -- '3.4' -- '3.5' -- 3.5-dev + - '2.7' + - '3.3' + - '3.4' + - '3.5' + - 'nightly' + install: -- pip install --upgrade flake8 + - pip install --upgrade flake8 + +before_script: + - sudo ./tools/travis-ci/riak-install + - sudo ./tools/devrel/riak-cluster-config "$(which riak-admin)" 8098 false false + +env: + matrix: + - RIAK_TEST_PROTOCOL=pbc + - RIAK_TEST_PROTOCOL=http + global: + - RIAK_TEST_PB_PORT=8087 + - RIAK_TEST_HTTP_PORT=8098 + - RUN_BTYPES=1 + - RUN_CLIENT=1 + - RUN_MAPREDUCE=1 + - RUN_KV=1 + - RUN_RESOLVE=1 + - RUN_YZ=1 + - RUN_DATATYPES=1 + - RUN_INDEXES=1 + script: -- python setup.py test -- flake8 --exclude=riak/pb riak *.py + - flake8 --exclude=riak/pb riak *.py + - python setup.py test + notifications: slack: - secure: M2DFhniND+ZJAXmN0LgUWrqUPkvxL+kompUww/lj0n0jTrPFEUWDJ+VAhQzg/1Aw7h/Wx0w19/DMwn5oc1KTHI3uY+9eGZHt5ohM0AANuRD8pIjWKa8OU4/kt2yxUPadUFsF+id5gmugxVfOkNnKQkvEy6Nj7WxWqeuN+N+RGgU= + secure: CQHpKSbvzvGOKfQ8GdYdK0Huaz0y2xLbwhUWNH/xkrxO/OuqvYvZn7SlJRCLIYobj+zSiwsQRrz9G19gXGZMaDCwtdVVgEnddzm15bLjsUsrWU0FgRufJuATre+AVFByngvhAmdDIvcxVVhobIYo+F6m/8/OWRGhnQvvWOVtnMA= diff --git a/tools b/tools index 8d1b6e50..e4ee33d3 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 8d1b6e502a4082693e935774a77a73d4896e4bf5 +Subproject commit e4ee33d3947145a4a7d9a3b4dd7f53dbfb604bb8 From 9800401df5ba19270b34499a2c97b0205a56e040 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 23 Sep 2016 07:33:26 -0700 Subject: [PATCH 256/324] Update tools/ submodule --- tools | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools b/tools index e4ee33d3..16a6c5f6 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit e4ee33d3947145a4a7d9a3b4dd7f53dbfb604bb8 +Subproject commit 16a6c5f6bc57f3ab3f8a6f83198edbe3ae97de3c From dc6d55d672a740ec6c822a6d5f9c4c0091d3ed64 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 23 Sep 2016 08:28:12 -0700 Subject: [PATCH 257/324] Update to encrypted Travis CI notification token --- .travis.yml | 54 +++++++++++++++++++++++------------------------------ 1 file changed, 23 insertions(+), 31 deletions(-) diff --git a/.travis.yml b/.travis.yml index c110b436..fb19f21a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,43 +1,35 @@ sudo: required - dist: trusty - language: python - python: - - '2.7' - - '3.3' - - '3.4' - - '3.5' - - 'nightly' - +- '2.7' +- '3.3' +- '3.4' +- '3.5' +- nightly install: - - pip install --upgrade flake8 - +- pip install --upgrade flake8 before_script: - - sudo ./tools/travis-ci/riak-install - - sudo ./tools/devrel/riak-cluster-config "$(which riak-admin)" 8098 false false - +- sudo ./tools/travis-ci/riak-install +- sudo ./tools/devrel/riak-cluster-config "$(which riak-admin)" 8098 false false env: matrix: - - RIAK_TEST_PROTOCOL=pbc - - RIAK_TEST_PROTOCOL=http + - RIAK_TEST_PROTOCOL=pbc + - RIAK_TEST_PROTOCOL=http global: - - RIAK_TEST_PB_PORT=8087 - - RIAK_TEST_HTTP_PORT=8098 - - RUN_BTYPES=1 - - RUN_CLIENT=1 - - RUN_MAPREDUCE=1 - - RUN_KV=1 - - RUN_RESOLVE=1 - - RUN_YZ=1 - - RUN_DATATYPES=1 - 
- RUN_INDEXES=1 - + - RIAK_TEST_PB_PORT=8087 + - RIAK_TEST_HTTP_PORT=8098 + - RUN_BTYPES=1 + - RUN_CLIENT=1 + - RUN_MAPREDUCE=1 + - RUN_KV=1 + - RUN_RESOLVE=1 + - RUN_YZ=1 + - RUN_DATATYPES=1 + - RUN_INDEXES=1 script: - - flake8 --exclude=riak/pb riak *.py - - python setup.py test - +- flake8 --exclude=riak/pb riak *.py +- python setup.py test notifications: slack: - secure: CQHpKSbvzvGOKfQ8GdYdK0Huaz0y2xLbwhUWNH/xkrxO/OuqvYvZn7SlJRCLIYobj+zSiwsQRrz9G19gXGZMaDCwtdVVgEnddzm15bLjsUsrWU0FgRufJuATre+AVFByngvhAmdDIvcxVVhobIYo+F6m/8/OWRGhnQvvWOVtnMA= + secure: kU1XcvTAliCWKuYpMWEMbD4qkbmlnWGLAIKbBQjtIh5ZRzISgjdUFzGcC31eHoQFv12LQdp5KAFj0Y1FyEvLxi0W8VeWKpsBGc06ntuECaN9MNHRBzKKclrTMGTfpBWZ5IO17XSUu2lKaNz6GDGRkiZA+sxYAVPfZSXY3u86IuY= From 52127c48b2bfa7aa515842787fe444e4d8304068 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 27 Sep 2016 18:25:56 -0700 Subject: [PATCH 258/324] Switch to using dpkg for installing Riak, use 2.0.7 and 2.1.4 Skip preflist test if not supported update to latest tools Skip write once test if bucket type does not exist --- .travis.yml | 46 ++++++++++++++++--------------- riak/tests/test_btypes.py | 5 +++- riak/tests/test_kv.py | 17 +++++++----- riak/transports/http/transport.py | 2 ++ riak/transports/tcp/transport.py | 2 ++ tools | 2 +- 6 files changed, 43 insertions(+), 31 deletions(-) diff --git a/.travis.yml b/.travis.yml index fb19f21a..7c2cd9a8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,34 +2,36 @@ sudo: required dist: trusty language: python python: -- '2.7' -- '3.3' -- '3.4' -- '3.5' -- nightly + - '2.7' + - '3.3' + - '3.4' + - '3.5' + - nightly install: -- pip install --upgrade flake8 + - pip install --upgrade flake8 before_script: -- sudo ./tools/travis-ci/riak-install -- sudo ./tools/devrel/riak-cluster-config "$(which riak-admin)" 8098 false false + - sudo ./tools/travis-ci/riak-install -d "$RIAK_DOWNLOAD_URL" + - sudo ./tools/devrel/riak-cluster-config "$(which riak-admin)" 8098 false false env: matrix: - - RIAK_TEST_PROTOCOL=pbc - - RIAK_TEST_PROTOCOL=http + - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.0/2.0.7/ubuntu/trusty/riak_2.0.7-1_amd64.deb + - RIAK_TEST_PROTOCOL=http RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.0/2.0.7/ubuntu/trusty/riak_2.0.7-1_amd64.deb + - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.1/2.1.4/ubuntu/trusty/riak_2.1.4-1_amd64.deb + - RIAK_TEST_PROTOCOL=http RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.1/2.1.4/ubuntu/trusty/riak_2.1.4-1_amd64.deb global: - - RIAK_TEST_PB_PORT=8087 - - RIAK_TEST_HTTP_PORT=8098 - - RUN_BTYPES=1 - - RUN_CLIENT=1 - - RUN_MAPREDUCE=1 - - RUN_KV=1 - - RUN_RESOLVE=1 - - RUN_YZ=1 - - RUN_DATATYPES=1 - - RUN_INDEXES=1 + - RIAK_TEST_PB_PORT=8087 + - RIAK_TEST_HTTP_PORT=8098 + - RUN_BTYPES=1 + - RUN_CLIENT=1 + - RUN_MAPREDUCE=1 + - RUN_KV=1 + - RUN_RESOLVE=1 + - RUN_YZ=1 + - RUN_DATATYPES=1 + - RUN_INDEXES=1 script: -- flake8 --exclude=riak/pb riak *.py -- python setup.py test + - flake8 --exclude=riak/pb riak *.py + - python setup.py test notifications: slack: secure: kU1XcvTAliCWKuYpMWEMbD4qkbmlnWGLAIKbBQjtIh5ZRzISgjdUFzGcC31eHoQFv12LQdp5KAFj0Y1FyEvLxi0W8VeWKpsBGc06ntuECaN9MNHRBzKKclrTMGTfpBWZ5IO17XSUu2lKaNz6GDGRkiZA+sxYAVPfZSXY3u86IuY= diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index 97d1b1a6..ea427c4b 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -155,7 +155,10 @@ def test_write_once_bucket_type(self): 
skey = 'write_once-init' btype = self.client.bucket_type(bt) bucket = btype.bucket(bt) - sobj = bucket.get(skey) + try: + sobj = bucket.get(skey) + except RiakError as e: + raise unittest.SkipTest(e) if not sobj.exists: for i in range(100): o = bucket.new(self.key_name + str(i)) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index aec403e2..67dd901f 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -616,16 +616,19 @@ def test_get_params(self): self.assertFalse(missing.exists) def test_preflist(self): + nodes = ['riak@127.0.0.1', 'dev1@127.0.0.1'] bucket = self.client.bucket(self.bucket_name) bucket.new(self.key_name, data={"foo": "one", "bar": "baz"}).store() - preflist = bucket.get_preflist(self.key_name) - preflist2 = self.client.get_preflist(bucket, self.key_name) - nodes = ['riak@127.0.0.1', 'dev1@127.0.0.1'] - for pref in (preflist, preflist2): - self.assertEqual(len(pref), 3) - self.assertIn(pref[0]['node'], nodes) - [self.assertTrue(node['primary']) for node in pref] + try: + preflist = bucket.get_preflist(self.key_name) + preflist2 = self.client.get_preflist(bucket, self.key_name) + for pref in (preflist, preflist2): + self.assertEqual(len(pref), 3) + self.assertIn(pref[0]['node'], nodes) + [self.assertTrue(node['primary']) for node in pref] + except NotImplementedError as e: + raise unittest.SkipTest(e) def generate_siblings(self, original, count=5, delay=None): vals = [] diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index bfa9eb96..73d96149 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -770,6 +770,8 @@ def get_preflist(self, bucket, key): :type key: string :rtype: list of dicts """ + if not self.preflists(): + raise NotImplementedError("fetching preflists is not supported.") bucket_type = self._get_bucket_type(bucket.bucket_type) url = self.preflist_path(bucket.name, key, bucket_type=bucket_type) status, headers, body = self._request('GET', url) diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 0d845875..99415093 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -526,6 +526,8 @@ def get_preflist(self, bucket, key): :type key: string :rtype: list of dicts """ + if not self.preflists(): + raise NotImplementedError("fetching preflists is not supported.") msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_KEY_PREFLIST_REQ codec = self._get_codec(msg_code) msg = codec.encode_get_preflist(bucket, key) diff --git a/tools b/tools index 16a6c5f6..f00acf11 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 16a6c5f6bc57f3ab3f8a6f83198edbe3ae97de3c +Subproject commit f00acf1152e1dc493737939ac338a02215177345 From a0b4d397ec68be652919e8d02c11ba50427662e5 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 12 Oct 2016 06:37:22 -0700 Subject: [PATCH 259/324] Remove buildbot directory and migrate what it did to main Makefile simplifying readme --- .gitignore | 1 + .runner | 132 +++++++++++++++++++++++++++++++++++ .travis.yml | 4 +- Makefile | 47 +++++++++++-- README.md | 96 ++++++------------------- buildbot/Makefile | 67 ------------------ buildbot/clean-env | 100 -------------------------- buildbot/tox_runner.sh | 17 ----- buildbot/tox_setup.sh | 124 -------------------------------- riak/tests/__init__.py | 1 - riak/tests/test_datatypes.py | 39 +++++++---- tools | 2 +- tox.ini | 8 +-- 13 files changed, 230 insertions(+), 408 deletions(-) create mode 100755 .runner delete mode 100644 buildbot/Makefile 
delete mode 100755 buildbot/clean-env delete mode 100755 buildbot/tox_runner.sh delete mode 100755 buildbot/tox_setup.sh diff --git a/.gitignore b/.gitignore index de4adbc9..280d6cc9 100644 --- a/.gitignore +++ b/.gitignore @@ -16,3 +16,4 @@ riak.egg-info/ #*# *~ .idea/ +.pyenv/ diff --git a/.runner b/.runner new file mode 100755 index 00000000..900df035 --- /dev/null +++ b/.runner @@ -0,0 +1,132 @@ +#!/usr/bin/env bash + +set -o errexit +set -o nounset + +have_tox='false' +if hash tox 2>/dev/null +then + echo '[INFO] tox command present, will use that to run tests' + have_tox='true' +fi + +have_py2='false' +if hash python2 2>/dev/null +then + have_py2='true' +fi + +have_py3='false' +if hash python3 2>/dev/null +then + have_py3='true' +fi + +have_riak_admin='false' +if hash riak-admin 2>/dev/null +then + have_riak_admin='true' + $riak_admin='riak-admin' +else + set +o nounset + + if [[ -x $RIAK_ADMIN ]] + then + have_riak_admin='true' + riak_admin="$RIAK_ADMIN" + elif [[ -x $RIAK_DIR/bin/riak-admin ]] + then + have_riak_admin='true' + riak_admin="$RIAK_DIR/bin/riak-admin" + fi + + set -o nounset +fi + +function lint +{ + if ! hash flake8 2>/dev/null + then + pip install --upgrade flake8 + fi + flake8 --exclude=riak/pb riak *.py +} + +function run_tests +{ + if [[ $have_tox == 'true' ]] + then + tox + else + if [[ $have_py2 == 'true' ]] + then + python2 setup.py test + fi + if [[ $have_py3 == 'true' ]] + then + python3 setup.py test + fi + fi +} + +function run_tests_each_protocol +{ + for protocol in pbc http + do + export RIAK_TEST_PROTOCOL="$protocol" + run_tests + done +} + +function export_test_environment_vars +{ + local riak_test_host="${RIAK_TEST_HOST:-localhost}" + local -i riak_test_pb_port="${RIAK_TEST_PB_PORT:-8087}" + local -i riak_test_http_port="${RIAK_TEST_HTTP_PORT:-8098}" + export RUN_BTYPES=1 + export RUN_CLIENT=1 + export RUN_MAPREDUCE=1 + export RUN_KV=1 + export RUN_RESOLVE=1 + export RUN_YZ=1 + export RUN_DATATYPES=1 + export RUN_INDEXES=1 + export RIAK_TEST_HOST="$riak_test_host" + export RIAK_TEST_PB_PORT="$riak_test_pb_port" + export RIAK_TEST_HTTP_PORT="$riak_test_http_port" +} + +function security_test +{ + if [[ $have_riak_admin == 'true' ]] + then + export_test_environment_vars + export RUN_SECURITY=1 + $riak_admin security enable + run_tests_each_protocol + else + echo '[ERROR] riak-admin must be in PATH, RIAK_ADMIN var set to path, or RIAK_DIR set.' 
1>&2 + exit 1 + fi +} + +function integration_test +{ + export_test_environment_vars + run_tests_each_protocol +} + +arg="${1:-lint}" +case "$arg" in + 'lint') + lint;; + 'unit-test') + run_tests;; + 'integration-test') + integration_test;; + 'security-test') + security_test;; + *) + echo "[ERROR] unknown argument: '$arg'" 1>&2 + exit 1;; +esac diff --git a/.travis.yml b/.travis.yml index 7c2cd9a8..dfeeda6d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,10 +8,10 @@ python: - '3.5' - nightly install: - - pip install --upgrade flake8 + - pip install --upgrade pip setuptools flake8 before_script: - sudo ./tools/travis-ci/riak-install -d "$RIAK_DOWNLOAD_URL" - - sudo ./tools/devrel/riak-cluster-config "$(which riak-admin)" 8098 false false + - sudo ./tools/setup-riak env: matrix: - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.0/2.0.7/ubuntu/trusty/riak_2.0.7-1_amd64.deb diff --git a/Makefile b/Makefile index f3c32efd..ed4eaa94 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,3 @@ -.PHONY: pb_clean pb_compile pb_build release release_sdist test_sdist - unexport LANG unexport LC_ADDRESS unexport LC_COLLATE @@ -17,12 +15,20 @@ unexport LC_TIME PANDOC_VERSION := $(shell pandoc --version) PROTOC_VERSION := $(shell protoc --version) -clean: pb_clean +PROJDIR = $(realpath $(CURDIR)) +TOOLS_DIR = $(PROJDIR)/tools/devrel +CA_DIR = $(PROJDIR)/tools/test-ca + +.PHONY: lint +lint: + ./.runner lint +.PHONY: pb_clean pb_clean: @echo "==> Python (clean)" @rm -rf riak/pb/*_pb2.py riak/pb/*.pyc riak/pb/__pycache__ __pycache__ py-build +.PHONY: pb_compile pb_compile: pb_clean ifeq ($(PROTOC_VERSION),) $(error The protoc command is required to parse proto files) @@ -34,9 +40,11 @@ endif @protoc -Iriak_pb/src --python_out=riak/pb riak_pb/src/*.proto @python setup.py build_messages +.PHONY: test_sdist test_sdist: @python setup.py sdist +.PHONY: release_sdist release_sdist: ifeq ($(VERSION),) $(error VERSION must be set to build a release and deploy this package) @@ -51,13 +59,14 @@ endif @echo "==> Python tagging version $(VERSION)" # NB: Python client version strings do NOT start with 'v'. Le Sigh. 
# validate VERSION and allow pre-releases - @bash ./build/publish $(VERSION) validate + @./build/publish $(VERSION) validate @git tag --sign -a "$(VERSION)" -m "riak-python-client $(VERSION)" --local-user "$(RELEASE_GPG_KEYNAME)" @git push --tags @echo "==> Python (sdist release)" @python setup.py sdist upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) - @bash ./build/publish $(VERSION) + @./build/publish $(VERSION) +.PHONY: release release: release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) @@ -70,3 +79,31 @@ endif @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.5 (release)" @python3.5 setup.py build --build-base=py-build/3.5 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) + +.PHONY: unit-test +unit-test: + @./.runner unit-test + +.PHONY: integration-test +integration-test: + @./.runner integration-test + +.PHONY: security-test +security-test: + @./.runner security-test + +.PHONY: test +test: integration-test + +.PHONY: help +help: + @echo '' + @echo ' Targets: + @echo ' ------------------------------------------------------------' + @echo ' lint - Run linter (flake8) ' + @echo ' test - Run all tests ' + @echo ' unit-test - Run unit tests ' + @echo ' integration-test - Run integration tests ' + @echo ' security-test - Run integration tests (security enabled) ' + @echo ' ------------------------------------------------------------' + @echo '' diff --git a/README.md b/README.md index 6ccb0aea..900cd74c 100644 --- a/README.md +++ b/README.md @@ -48,106 +48,56 @@ To install from [PyPI](https://pypi.python.org/pypi/riak) directly you can use ` pip install riak ``` -Testing -======= - -To setup the default test configuration build a test Riak node (from a `riak` directory) - -```sh -make rel -``` - -See [Basic Cluster Setup](http://docs.basho.com/riak/latest/ops/building/basic-cluster-setup/) for more details. +# Testing -For all of the simple default values, set the `RIAK_DIR` environment variable to the root of your Riak installation. Then from the `riak-python-client` directory +## Unit Tests -```sh -make -C buildbot preconfigure -``` - -Start your Riak node with `riak start` from the the Riak directory, then +Unit tests will be executed via `tox` if it is in your `PATH`, otherwise by the `python2` and (if available), `python3` executables: ```sh -make -C buildbot configure -make -C buildbot test +make unit-test ``` -That will run the test suite twice: once with security enabled and once without. - -Connections to Riak in Tests ----------------------------- - -If your Riak server isn't running on localhost or you have built a Riak devrel from source, use the environment variables `RIAK_TEST_HOST`, `RIAK_TEST_HTTP_PORT` and `RIAK_TEST_PB_PORT` to specify where to find the Riak server. `RIAK_TEST_PROTOCOL` to specify which protocol to test. Can be either `pbc` or `http`. - -Some of the connection tests need port numbers that are NOT in use. If ports 1023 and 1022 are in use on your test system, set the environment variables `DUMMY_HTTP_PORT` and `DUMMY_PB_PORT` to unused port numbers. - -Testing Search --------------- +## Integration Tests -If you don't have [Riak Search](http://docs.basho.com/riak/latest/dev/using/search/) enabled, you can set the `RUN_SEARCH` environment variable to 0 skip those tests. 
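The `RUN_*` switches referred to throughout this README rework are read by `riak/tests/__init__.py` as plain environment flags. A minimal sketch of that gating pattern follows; `RUN_KV` is one of the real flags, while the test class itself is hypothetical and only illustrates how a suite is skipped when its flag is not exported:

```python
# Sketch of the RUN_* gating pattern used by the integration suite:
# each flag defaults to 0, and a group of tests runs only when its
# flag is exported as 1 (e.g. RUN_KV=1).
import os
import unittest

RUN_KV = int(os.environ.get('RUN_KV', '0'))


@unittest.skipUnless(RUN_KV, 'RUN_KV is 0')
class KVSmokeTests(unittest.TestCase):
    def test_flag_enabled(self):
        # Only reached when RUN_KV=1 is present in the environment.
        self.assertEqual(RUN_KV, 1)


if __name__ == '__main__':
    unittest.main()
```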
+You have two options to run Riak locally - either build from source, or use a pre-installed Riak package. -If you don't have [Search 2.0](https://github.com/basho/yokozuna) enabled, you can set the `RUN_YZ` environment variable to 0 to skip those tests. +### Source -Testing Bucket Types (Riak 2+) ------------------------------- - -To test bucket-types, you must run the `create_bucket_types` setup command, which will create the bucket-types used in testing, or create them manually yourself. It can be run like so (substituting `$RIAK` with the root of your Riak install) +To setup the default test configuration, build a Riak node from a clone of `github.com/basho/riak`: ```sh -./setup.py create_bucket_types --riak-admin=$RIAK/bin/riak-admin +# check out latest release tag +git checkout riak-2.1.4 +make locked-deps +make rel ``` -You may alternately add these lines to `setup.cfg` - -```ini -[create_bucket_types] -riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin -``` +[Source build documentation](http://docs.basho.com/riak/kv/latest/setup/installing/source/). -To skip the bucket-type tests, set the `RUN_BTYPES` environment variable to `0`. +When building a from source, the protocol buffers port will be `8087` and HTTP will be `8098`. -Testing Data Types (Riak 2+) ----------------------------- +### Package -To test data types, you must set up bucket types (see above.) +Install using your platform's package manager ([docs](http://docs.basho.com/riak/kv/latest/setup/installing/)) -To skip the data type tests, set the `RUN_DATATYPES` environment variable to `0`. +When installing from a package, the protocol buffers port will be `8087` and HTTP will be `8098`. -Testing Timeseries (Riak 2.1+) ------------------------------- +### Running Integration Tests -To test timeseries data, you must run the `setup_timeseries` command, which will create the bucket-types used in testing, or create them manually yourself. It can be run like so (substituting `$RIAK` with the root of your Riak install) +* Ensure you've initialized this repo's submodules: ```sh -./setup.py setup_timeseries --riak-admin=$RIAK/bin/riak-admin +git submodule update --init ``` -You may alternately add these lines to `setup.cfg` +* Run the following: ```sh -[setup_timeseries] -riak-admin=/Users/sean/dev/riak/rel/riak/bin/riak-admin +./tools/setup-riak +make integration-test ``` -To enable the timeseries tests, set the `RUN_TIMESERIES` environment variable to `1`. - -Testing Secondary Indexes -------------------------- - -To test [Secondary Indexes](http://docs.basho.com/riak/latest/dev/using/2i/), the `RUN_INDEXES` environment variable must be set to 1 (or 0 to skip them.) - -Testing Security (Riak 2+) --------------------------- - -Ensure that the hostname `riak-test` resolves to your Riak host (most likely `localhost`). This is so the SSL host verification can succeed. - -By default [Security](http://docs.basho.com/riak/latest/ops/running/authz/) is not enabled on Riak. Once `security = on` is configured in the `riak.conf` file it can be enabled with `riak-admin`. 
- -To run the tests - -```sh -RUN_SECURITY=1 RIAK_TEST_HTTP_PORT=18098 python setup.py test -``` Contributors -------------------------- diff --git a/buildbot/Makefile b/buildbot/Makefile deleted file mode 100644 index 74144c2e..00000000 --- a/buildbot/Makefile +++ /dev/null @@ -1,67 +0,0 @@ -ifndef RIAK_DIR -$(error RIAK_DIR is not set) -endif - -unexport PYENV_VERSION -unexport LANG -unexport LC_ADDRESS -unexport LC_COLLATE -unexport LC_CTYPE -unexport LC_IDENTIFICATION -unexport LC_MEASUREMENT -unexport LC_MESSAGES -unexport LC_MONETARY -unexport LC_NAME -unexport LC_NUMERIC -unexport LC_PAPER -unexport LC_TELEPHONE -unexport LC_TIME - -PROJDIR = $(realpath $(CURDIR)/..) -TOOLS_DIR = $(PROJDIR)/tools/devrel -CA_DIR = $(PROJDIR)/tools/test-ca -RIAK_CONF = $(RIAK_DIR)/etc/riak.conf -ADV_CONF = $(RIAK_DIR)/etc/advanced.config -RIAK_ADMIN = $(RIAK_DIR)/bin/riak-admin - -preconfigure: - $(TOOLS_DIR)/gen-riak-conf $(RIAK_CONF) 8098 8087 18098 $(CA_DIR)/certs/cacert.pem $(CA_DIR)/certs/riak-test-cert.pem $(CA_DIR)/private/riak-test-key.pem - $(TOOLS_DIR)/gen-adv-conf $(ADV_CONF) - -configure: - $(TOOLS_DIR)/riak-cluster-config $(RIAK_ADMIN) 8098 true true - -compile: - @echo NO-OP - -lint: - @pip install --upgrade pep8 flake8 - @cd ..; flake8 --exclude=riak/pb riak *.py - -test: setup test_normal test_security - -test_normal: - @echo "Testing Riak Python Client (without security)" - @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_BTYPES=1 RUN_CLIENT=1 RUN_MAPREDUCE=1 RUN_KV=1 RUN_RESOLVE=1 RUN_YZ=1 RUN_DATATYPES=1 RUN_INDEXES=1 ./tox_runner.sh .. - -test_security: - @echo "Testing Riak Python Client (with security)" - @$(RIAK_ADMIN) security enable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_BTYPES=1 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=18098 RUN_BTYPES=1 RUN_CLIENT=1 RUN_KV=1 RUN_YZ=1 RUN_INDEXES=1 RUN_SECURITY=1 ./tox_runner.sh .. - -test_timeseries: - @echo "Testing Riak Python Client (timeseries)" - @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_CLIENT=1 RUN_KV=1 RUN_INDEXES=1 RUN_TIMESERIES=1 ./tox_runner.sh .. - -test_hll: - @echo "Testing Riak Python Client (HLL datatype)" - @$(RIAK_ADMIN) security disable - @RIAK_TEST_PROTOCOL='pbc' RIAK_TEST_PB_PORT=8087 RUN_DATATYPES=1 RUN_DATATYPE_HLL=1 ./tox_runner.sh .. - @RIAK_TEST_PROTOCOL='http' RIAK_TEST_HTTP_PORT=8098 RUN_DATATYPES=1 RUN_DATATYPE_HLL=1 ./tox_runner.sh .. 
- -setup: - ./tox_setup.sh diff --git a/buildbot/clean-env b/buildbot/clean-env deleted file mode 100755 index da865250..00000000 --- a/buildbot/clean-env +++ /dev/null @@ -1,100 +0,0 @@ -#!/usr/bin/env bash - -set -o nounset -set -o errexit - -function now -{ - date '+%Y-%m-%d %H:%M:%S' -} - -function perr -{ - echo "$(now) [error]: $@" 1>&2 -} - -function pinfo -{ - echo "$(now) [info]: $@" -} - -function errexit -{ - perr "$@" - exit 1 -} - -function clean_venvs -{ - for VENV in $(pyenv virtualenvs | awk '/^[[:space:]]*riak-/ { print $1 }') - do - pinfo Uninstalling virtualenv "$VENV" - pyenv uninstall --force "$VENV" - done -} - -function clean_pythons -{ - for RPY in $(pyenv versions | awk '/^[[:space:]]*riak_/ { print $1 }') - do - pinfo Uninstalling python "$RPY" - pyenv uninstall --force "$RPY" - done -} - -function clean_tox -{ - if [[ -d ./.tox ]] - then - pinfo Removing ./.tox - rm -rf ./.tox - fi -} - -function usage -{ - echo " -clean-env: Clean up your pyenv - -Usage: - -clean-env [-p] [-t] [-v] - --p Clean up Riak-specific Python versions --t Clean up tox --v Clean up Riak-specific virtualenvs -" - exit 0 -} - -opt_clean_pythons='false' -opt_clean_venvs='false' -opt_clean_tox='false' - -while getopts 'ptv' opt; do - case $opt in - p) - opt_clean_pythons='true';; - t) - opt_clean_tox='true';; - v) - opt_clean_venvs='true';; - *) - usage;; - esac -done - -if [[ $opt_clean_venvs == 'true' ]] -then - clean_venvs -fi - -if [[ $opt_clean_pythons == 'true' ]] -then - clean_pythons -fi - -if [[ $opt_clean_tox == 'true' ]] -then - clean_tox -fi diff --git a/buildbot/tox_runner.sh b/buildbot/tox_runner.sh deleted file mode 100755 index 2a1ec737..00000000 --- a/buildbot/tox_runner.sh +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env bash -# pyenv root -export PYENV_ROOT="$HOME/.pyenv" - -# Add pyenv root to PATH -# and initialize pyenv -PATH="$PYENV_ROOT/bin:$PATH" -# initialize pyenv -eval "$(pyenv init -)" -# initialize pyenv virtualenv -eval "$(pyenv virtualenv-init -)" - -# Change directory if an argument is passed in -if [[ ! -z "$1" ]]; then - cd "$1" -fi -tox diff --git a/buildbot/tox_setup.sh b/buildbot/tox_setup.sh deleted file mode 100755 index 2b45963f..00000000 --- a/buildbot/tox_setup.sh +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/env bash - -unset PYENV_VERSION - -if [[ ! -d $PYENV_ROOT ]] -then - export PYENV_ROOT="$HOME/.pyenv" -fi - -declare -r PROJDIR="$PWD/.." -if [[ ! -s $PROJDIR/riak/__init__.py ]] -then - echo "[ERROR] script must be run from the buildbot/ dir in github.com/basho/riak-python-client" 1>&2 - exit 1 -fi - -rm -f $PROJDIR/.python-version - -# Install pyenv if it's missing -if [[ ! -d $PYENV_ROOT ]] -then - git clone 'https://github.com/yyuu/pyenv.git' $PYENV_ROOT -else - (cd $PYENV_ROOT && git fetch --all) -fi - -(cd $PYENV_ROOT && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) - -declare -r pyenv_virtualenv_dir="$PYENV_ROOT/plugins/pyenv-virtualenv" -if [[ ! -d $pyenv_virtualenv_dir ]] -then - git clone 'https://github.com/yyuu/pyenv-virtualenv.git' $pyenv_virtualenv_dir -else - (cd $pyenv_virtualenv_dir && git fetch --all) -fi - -(cd $pyenv_virtualenv_dir && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) - -declare -r pyenv_alias_dir="$PYENV_ROOT/plugins/pyenv-alias" -if [[ ! 
-d $pyenv_alias_dir ]] -then - git clone 'https://github.com/s1341/pyenv-alias.git' $pyenv_alias_dir -else - (cd $pyenv_alias_dir && git pull origin master) -fi - -# Add pyenv root to PATH -# and initialize pyenv -if [[ $PATH != */.pyenv* ]] -then - echo "[INFO] adding $PYENV_ROOT/bin to PATH" - export PATH="$PYENV_ROOT/bin:$PATH" -fi - -if [[ $(type -t pyenv) != 'function' ]] -then - echo "[INFO] init pyenv" - eval "$(pyenv init -)" - eval "$(pyenv virtualenv-init -)" -fi - -do_pip_upgrades='false' - -# NB: 2.7.8 is special-cased -for pyver in 2.7 3.3 3.4 3.5 -do - riak_py_alias="riak_$pyver" - if ! pyenv versions | fgrep -v 'riak_2.7.8' | fgrep -q "$riak_py_alias" - then - # Need to install it - do_pip_upgrades='true' - - declare -i pymaj="${pyver%.*}" - declare -i pymin="${pyver#*.}" - pyver_latest="$(pyenv install --list | grep -E "^[[:space:]]+$pymaj\\.$pymin\\.[[:digit:]]+\$" | tail -n1 | sed -e 's/[[:space:]]//g')" - - echo "[INFO] installing Python $pyver_latest" - VERSION_ALIAS="$riak_py_alias" pyenv install "$pyver_latest" - pyenv virtualenv "$riak_py_alias" "riak-py$pymaj$pymin" - fi -done - -if ! pyenv versions | fgrep -q 'riak_2.7.8' -then - # Need to install it - do_pip_upgrades='true' - - echo "[INFO] installing Python 2.7.8" - VERSION_ALIAS='riak_2.7.8' pyenv install '2.7.8' - pyenv virtualenv 'riak_2.7.8' 'riak-py278' -fi - -pushd $PROJDIR -pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278 - -pyenv rehash - -if [[ $do_pip_upgrades == 'true' ]] -then - for PY in $(pyenv versions --bare --skip-aliases | grep '^riak_') - do - echo "[INFO] $PY - upgrading pip / setuptools" - PYENV_VERSION="$PY" pip install --upgrade pip setuptools - done -fi - -python_version="$(python --version)" -if [[ $python_version == Python\ 3* ]] -then - pip install --ignore-installed tox - if ! 
pip show --quiet tox - then - echo "[ERROR] install of 'tox' failed" 1>&2 - popd - exit 1 - fi - pyenv rehash -else - echo "[ERROR] expected Python 3 to be 'python' at this point" 1>&2 - popd - exit 1 -fi - -popd diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 4237a780..aa475c9c 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -52,7 +52,6 @@ def hostname_resolves(hostname): RUN_BTYPES = int(os.environ.get('RUN_BTYPES', '0')) RUN_DATATYPES = int(os.environ.get('RUN_DATATYPES', '0')) -RUN_DATATYPE_HLL = int(os.environ.get('RUN_DATATYPE_HLL', '0')) RUN_CLIENT = int(os.environ.get('RUN_CLIENT', '0')) RUN_INDEXES = int(os.environ.get('RUN_INDEXES', '0')) RUN_KV = int(os.environ.get('RUN_KV', '0')) diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index fb270ac8..1d79fdf2 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -3,7 +3,7 @@ import riak.datatypes as datatypes from riak import RiakError, RiakBucket, BucketType, RiakObject -from riak.tests import RUN_DATATYPES, RUN_DATATYPE_HLL +from riak.tests import RUN_DATATYPES from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -160,27 +160,39 @@ def test_removes_require_context(self): self.assertTrue(dtype.modified) -@unittest.skipUnless(RUN_DATATYPE_HLL, 'RUN_DATATYPE_HLL is 0') +@unittest.skipUnless(RUN_DATATYPES, 'RUN_DATATYPES is 0') class HllDatatypeIntegrationTests(IntegrationTestBase, unittest.TestCase): def test_fetch_bucket_type_props(self): - btype = self.client.bucket_type('hlls') - props = btype.get_properties() + try: + btype = self.client.bucket_type('hlls') + props = btype.get_properties() + except RiakError as e: + raise unittest.SkipTest(e) self.assertEqual(14, props['hll_precision']) def test_set_same_hll_precision(self): - btype = self.client.bucket_type('hlls') - btype.set_property('hll_precision', 14) - props = btype.get_properties() - self.assertEqual(14, props['hll_precision']) + try: + btype = self.client.bucket_type('hlls') + btype.set_property('hll_precision', 14) + props = btype.get_properties() + self.assertEqual(14, props['hll_precision']) + except RiakError as e: + raise unittest.SkipTest(e) def test_set_larger_hll_precision(self): - btype = self.client.bucket_type('hlls') + try: + btype = self.client.bucket_type('hlls') + except RiakError as e: + raise unittest.SkipTest(e) with self.assertRaises(RiakError): btype.set_property('hll_precision', 15) def test_set_invalid_hll_precision(self): - btype = self.client.bucket_type('hlls') + try: + btype = self.client.bucket_type('hlls') + except RiakError as e: + raise unittest.SkipTest(e) with self.assertRaises(ValueError): btype.set_property('hll_precision', 3) with self.assertRaises(ValueError): @@ -189,8 +201,11 @@ def test_set_invalid_hll_precision(self): btype.set_property('hll_precision', 0) def test_dt_hll(self): - btype = self.client.bucket_type('hlls') - props = btype.get_properties() + try: + btype = self.client.bucket_type('hlls') + props = btype.get_properties() + except RiakError as e: + raise unittest.SkipTest(e) self.assertEqual(14, props['hll_precision']) bucket = btype.bucket(self.bucket_name) myhll = datatypes.Hll(bucket, self.key_name) diff --git a/tools b/tools index f00acf11..e6bc53e8 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit f00acf1152e1dc493737939ac338a02215177345 +Subproject commit e6bc53e856f0ada5f0e810989d706e997b5d7efe diff --git a/tox.ini b/tox.ini index 10387e42..f411b799 100644 --- a/tox.ini +++ 
b/tox.ini @@ -1,13 +1,9 @@ # Tox (http://tox.testrun.org/) is a tool for running tests # in multiple virtualenvs. This configuration file will run the -# test suite on all supported python versions. To use it, "pip install tox" -# and then run "tox" from this directory. +# test suite on all supported python versions. [tox] -envlist = riak-py278, riak-py27, riak-py33, riak-py34, riak-py35 - -[testenv:riak-py278] -basepython = {env:HOME}/.pyenv/versions/riak-py278/bin/python2.7 +envlist = py2, py3 [testenv] install_command = pip install --upgrade {packages} From daab21b46c6dc5d5857258a7c59085e0b71686d8 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 18 Oct 2016 10:20:22 -0700 Subject: [PATCH 260/324] detect timeseries buckets --- .runner | 7 ++--- riak/tests/test_datatypes.py | 41 ++++++++++++++---------------- riak/tests/test_timeseries_pbuf.py | 39 ++++++++++++++++++---------- riak/tests/test_timeseries_ttb.py | 34 +++++++++++++++++++------ 4 files changed, 74 insertions(+), 47 deletions(-) diff --git a/.runner b/.runner index 900df035..e17f4735 100755 --- a/.runner +++ b/.runner @@ -85,12 +85,13 @@ function export_test_environment_vars local -i riak_test_http_port="${RIAK_TEST_HTTP_PORT:-8098}" export RUN_BTYPES=1 export RUN_CLIENT=1 - export RUN_MAPREDUCE=1 + export RUN_DATATYPES=1 + export RUN_INDEXES=1 export RUN_KV=1 + export RUN_MAPREDUCE=1 export RUN_RESOLVE=1 + export RUN_TIMESERIES=1 export RUN_YZ=1 - export RUN_DATATYPES=1 - export RUN_INDEXES=1 export RIAK_TEST_HOST="$riak_test_host" export RIAK_TEST_PB_PORT="$riak_test_pb_port" export RIAK_TEST_HTTP_PORT="$riak_test_http_port" diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 1d79fdf2..11061854 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -163,36 +163,36 @@ def test_removes_require_context(self): @unittest.skipUnless(RUN_DATATYPES, 'RUN_DATATYPES is 0') class HllDatatypeIntegrationTests(IntegrationTestBase, unittest.TestCase): - def test_fetch_bucket_type_props(self): + @classmethod + def setUpClass(cls): + super(HllDatatypeIntegrationTests, cls).setUpClass() + client = cls.create_client() try: - btype = self.client.bucket_type('hlls') + btype = client.bucket_type('hlls') props = btype.get_properties() except RiakError as e: raise unittest.SkipTest(e) + finally: + client.close() + + def test_fetch_bucket_type_props(self): + btype = self.client.bucket_type('hlls') + props = btype.get_properties() self.assertEqual(14, props['hll_precision']) def test_set_same_hll_precision(self): - try: - btype = self.client.bucket_type('hlls') - btype.set_property('hll_precision', 14) - props = btype.get_properties() - self.assertEqual(14, props['hll_precision']) - except RiakError as e: - raise unittest.SkipTest(e) + btype = self.client.bucket_type('hlls') + btype.set_property('hll_precision', 14) + props = btype.get_properties() + self.assertEqual(14, props['hll_precision']) def test_set_larger_hll_precision(self): - try: - btype = self.client.bucket_type('hlls') - except RiakError as e: - raise unittest.SkipTest(e) + btype = self.client.bucket_type('hlls') with self.assertRaises(RiakError): btype.set_property('hll_precision', 15) def test_set_invalid_hll_precision(self): - try: - btype = self.client.bucket_type('hlls') - except RiakError as e: - raise unittest.SkipTest(e) + btype = self.client.bucket_type('hlls') with self.assertRaises(ValueError): btype.set_property('hll_precision', 3) with self.assertRaises(ValueError): @@ -201,11 +201,8 @@ def 
test_set_invalid_hll_precision(self): btype.set_property('hll_precision', 0) def test_dt_hll(self): - try: - btype = self.client.bucket_type('hlls') - props = btype.get_properties() - except RiakError as e: - raise unittest.SkipTest(e) + btype = self.client.bucket_type('hlls') + props = btype.get_properties() self.assertEqual(14, props['hll_precision']) bucket = btype.bucket(self.bucket_name) myhll = datatypes.Hll(bucket, self.key_name) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 4588d01d..e404eb05 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -30,7 +30,8 @@ ex1ms = 1420113900987 -@unittest.skipUnless(is_timeseries_supported(), "Timeseries not supported") +@unittest.skipUnless(is_timeseries_supported(), + 'Timeseries not supported by this Python version') class TimeseriesPbufUnitTests(unittest.TestCase): @classmethod def setUpClass(cls): @@ -194,7 +195,8 @@ def test_decode_data_from_query(self): @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, - 'Timeseries not supported or RUN_TIMESERIES is 0') + 'Timeseries not supported by this Python version' + ' or RUN_TIMESERIES is 0') class TimeseriesPbufTests(IntegrationTestBase, unittest.TestCase): client_options = {'transport_options': {'use_ttb': False, 'ts_convert_timestamp': True}} @@ -218,11 +220,15 @@ def setUpClass(cls): ['hash1', 'user2', fiveMinsAgo, 'wind', None], ['hash1', 'user2', cls.now, 'snow', 20.1] ] - ts_obj = table.new(rows) - result = ts_obj.store() + try: + ts_obj = table.new(rows) + result = ts_obj.store() + except (RiakError, NotImplementedError) as e: + raise unittest.SkipTest(e) + finally: + client.close() if result is not True: raise AssertionError("expected success") - client.close() cls.nowMsec = unix_time_millis(cls.now) cls.fiveMinsAgo = fiveMinsAgo @@ -246,10 +252,15 @@ def setUpClass(cls): ] cls.encoded_rows = encoded_rows - def validate_len(self, ts_obj, expected_len): - self.assertEqual(len(ts_obj.columns.names), expected_len) - self.assertEqual(len(ts_obj.columns.types), expected_len) - self.assertEqual(len(ts_obj.rows), expected_len) + def validate_len(self, ts_obj, elen): + if isinstance(elen, tuple): + self.assertIn(len(ts_obj.columns.names), elen) + self.assertIn(len(ts_obj.columns.types), elen) + self.assertIn(len(ts_obj.rows), elen) + else: + self.assertEqual(len(ts_obj.columns.names), elen) + self.assertEqual(len(ts_obj.columns.types), elen) + self.assertEqual(len(ts_obj.rows), elen) def validate_data(self, ts_obj): if ts_obj.columns is not None: @@ -295,31 +306,31 @@ def test_query_that_returns_table_description(self): query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, 5) + self.validate_len(ts_obj, (5, 7)) def test_query_that_returns_table_description_using_interpolation(self): query = 'Describe {table}' ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, 5) + self.validate_len(ts_obj, (5, 7)) def test_query_description_via_table(self): query = 'describe {table}' table = Table(self.client, table_name) ts_obj = table.query(query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, 5) + self.validate_len(ts_obj, (5, 7)) def test_get_description(self): ts_obj = self.client.ts_describe(table_name) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, 5) + self.validate_len(ts_obj, (5, 7)) def test_get_description_via_table(self): table = 
Table(self.client, table_name) ts_obj = table.describe() self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, 5) + self.validate_len(ts_obj, (5, 7)) def test_query_that_returns_no_data(self): fmt = """ diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index aed4879d..61d417a5 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -9,11 +9,11 @@ from riak import RiakError from riak.table import Table +from riak.tests import RUN_TIMESERIES from riak.ts_object import TsObject from riak.codecs.ttb import TtbCodec from riak.util import str_to_bytes, bytes_to_str, \ unix_time_millis, is_timeseries_supported -from riak.tests import RUN_TIMESERIES from riak.tests.base import IntegrationTestBase rpberrorresp_a = Atom('rpberrorresp') @@ -41,7 +41,8 @@ ts1 = ts0 + fiveMins -@unittest.skipUnless(is_timeseries_supported(), "Timeseries not supported") +@unittest.skipUnless(is_timeseries_supported(), + 'Timeseries not supported by this Python version') class TimeseriesTtbUnitTests(unittest.TestCase): def setUp(self): self.table = Table(None, table_name) @@ -121,7 +122,8 @@ def test_encode_data_for_put(self): @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, - 'Timeseries not supported or RUN_TIMESERIES is 0') + 'Timeseries not supported by this Python version' + ' or RUN_TIMESERIES is 0') class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): client_options = {'transport_options': {'use_ttb': True, 'ts_convert_timestamp': True}} @@ -129,6 +131,26 @@ class TimeseriesTtbTests(IntegrationTestBase, unittest.TestCase): @classmethod def setUpClass(cls): super(TimeseriesTtbTests, cls).setUpClass() + client = cls.create_client() + skey = 'test-key' + btype = client.bucket_type(table_name) + bucket = btype.bucket(table_name) + try: + sobj = bucket.get(skey) + except (RiakError, NotImplementedError) as e: + raise unittest.SkipTest(e) + finally: + client.close() + + def validate_len(self, ts_obj, elen): + if isinstance(elen, tuple): + self.assertIn(len(ts_obj.columns.names), elen) + self.assertIn(len(ts_obj.columns.types), elen) + self.assertIn(len(ts_obj.rows), elen) + else: + self.assertEqual(len(ts_obj.columns.names), elen) + self.assertEqual(len(ts_obj.columns.types), elen) + self.assertEqual(len(ts_obj.rows), elen) def test_insert_data_via_sql(self): query = """ @@ -162,11 +184,7 @@ def test_query_that_returns_table_description(self): query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - ts_cols = ts_obj.columns - self.assertEqual(len(ts_cols.names), 5) - self.assertEqual(len(ts_cols.types), 5) - row = ts_obj.rows[0] - self.assertEqual(len(row), 5) + self.validate_len(ts_obj, (5, 7)) def test_store_and_fetch_gh_483(self): now = datetime.datetime(2015, 1, 1, 12, 0, 0) From c7c00213565c1411d2b380ac24c0cbad76a2e5ca Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 18 Oct 2016 10:32:59 -0700 Subject: [PATCH 261/324] make linter happy --- riak/tests/test_datatypes.py | 2 +- riak/tests/test_timeseries_ttb.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 11061854..2642837c 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -169,7 +169,7 @@ def setUpClass(cls): client = cls.create_client() try: btype = client.bucket_type('hlls') - props = btype.get_properties() + btype.get_properties() except RiakError as e: raise unittest.SkipTest(e) 
finally: diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 61d417a5..3ac37c57 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -136,7 +136,7 @@ def setUpClass(cls): btype = client.bucket_type(table_name) bucket = btype.bucket(table_name) try: - sobj = bucket.get(skey) + bucket.get(skey) except (RiakError, NotImplementedError) as e: raise unittest.SkipTest(e) finally: From e2221cac506822ce4df154c270434370a8701fdb Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 18 Oct 2016 13:36:14 -0700 Subject: [PATCH 262/324] readme formatting --- README.md | 26 +++++++++----------------- 1 file changed, 9 insertions(+), 17 deletions(-) diff --git a/README.md b/README.md index 900cd74c..3bd350bf 100644 --- a/README.md +++ b/README.md @@ -1,35 +1,28 @@ -Python Client for Riak -====================== +# Python Client for Riak -Build Status -============ +## Build Status [![Build Status](https://travis-ci.org/basho/riak-python-client.svg?branch=master)](https://travis-ci.org/basho/riak-python-client) -Documentation -============= +## Documentation [Documentation for the Riak Python Client Library](http://basho.github.io/riak-python-client/index.html) is available [here](http://basho.github.io/riak-python-client/index.html). Documentation for Riak is available [here](http://docs.basho.com/riak/latest). -Repository Cloning -================== +## Repository Cloning *NOTE*: please clone this repository using the `--recursive` argument to `git clone` or follow the clone with `git submodule update --init`. This repository uses two submodules. -Install -======= +# Installation The recommended versions of Python for use with this client are Python `2.7.8` (or greater, `2.7.11` as of `2016-06-21`), `3.3.x`, `3.4.x` and `3.5.x`. The latest version from each series should be preferred. Older versions of the Python `2.7.X` and `3.X` series should be used with caution as they are not covered by integration tests. -Riak TS (Timeseries) -=================== +## Riak TS (Timeseries) You must use version `2.7.11`, `3.4.4` or `3.5.1` (or greater within a version series). Otherwise you will be affected by [this Python bug](https://bugs.python.org/issue23517). -From Source ------------ +## From Source ```sh python setup.py install @@ -37,8 +30,7 @@ python setup.py install There are additional dependencies on Python packages `setuptools` and `protobuf`. -From PyPI ---------- +## From PyPI Official packages are signed and published to [PyPI](https://pypi.python.org/pypi/riak). @@ -75,7 +67,7 @@ make rel [Source build documentation](http://docs.basho.com/riak/kv/latest/setup/installing/source/). -When building a from source, the protocol buffers port will be `8087` and HTTP will be `8098`. +When building from source, the protocol buffers port will be `8087` and HTTP will be `8098`. 
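A quick way to confirm those ports are serving requests before running the test targets is a small round trip with the client itself. This is only a sketch: it assumes a node listening locally on the default PB port `8087`, and the bucket and key names are made up for the example:

```python
# Store and read back one object over protocol buffers as a smoke test
# (assumes a local node with PB on the default port 8087).
from riak import RiakClient

client = RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)
try:
    bucket = client.bucket('smoke-test')
    bucket.new('hello', data={'ok': True}).store()
    print(bucket.get('hello').data)   # expected: {'ok': True}
finally:
    client.close()
```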
### Package From 13c4f91a4869a50b7bb19369eb62183500d1bc79 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 18 Oct 2016 13:38:01 -0700 Subject: [PATCH 263/324] update to latest tools --- tools | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools b/tools index e6bc53e8..6ac6fb7c 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit e6bc53e856f0ada5f0e810989d706e997b5d7efe +Subproject commit 6ac6fb7cf92bac7ed94d8f33c18932456d487492 From 78523c28377fcb8d08a7f5006de1f73b02c3fe85 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 20 Oct 2016 17:43:28 -0700 Subject: [PATCH 264/324] Add security tests to Travis CI update tools/ to 1.0.2 add riak-test host alias for Travis CI Update to tools/ 1.0.3, run security on PB only --- .runner | 36 ++++++++++++++++++++++++++++-------- .travis.sh | 18 ++++++++++++++++++ .travis.yml | 9 ++++++--- riak/tests/__init__.py | 4 ++-- tools | 2 +- 5 files changed, 55 insertions(+), 14 deletions(-) create mode 100755 .travis.sh diff --git a/.runner b/.runner index e17f4735..af51efc6 100755 --- a/.runner +++ b/.runner @@ -54,6 +54,8 @@ function lint function run_tests { + local protocol="${1:-pbc}" + export RIAK_TEST_PROTOCOL="$protocol" if [[ $have_tox == 'true' ]] then tox @@ -73,16 +75,22 @@ function run_tests_each_protocol { for protocol in pbc http do - export RIAK_TEST_PROTOCOL="$protocol" - run_tests + run_tests "$protocol" done } -function export_test_environment_vars +function export_host_environment_vars { local riak_test_host="${RIAK_TEST_HOST:-localhost}" local -i riak_test_pb_port="${RIAK_TEST_PB_PORT:-8087}" local -i riak_test_http_port="${RIAK_TEST_HTTP_PORT:-8098}" + export RIAK_TEST_HOST="$riak_test_host" + export RIAK_TEST_PB_PORT="$riak_test_pb_port" + export RIAK_TEST_HTTP_PORT="$riak_test_http_port" +} + +function export_test_environment_vars +{ export RUN_BTYPES=1 export RUN_CLIENT=1 export RUN_DATATYPES=1 @@ -92,19 +100,30 @@ function export_test_environment_vars export RUN_RESOLVE=1 export RUN_TIMESERIES=1 export RUN_YZ=1 - export RIAK_TEST_HOST="$riak_test_host" - export RIAK_TEST_PB_PORT="$riak_test_pb_port" - export RIAK_TEST_HTTP_PORT="$riak_test_http_port" +} + +function unexport_test_environment_vars +{ + export RUN_BTYPES=0 + export RUN_CLIENT=0 + export RUN_DATATYPES=0 + export RUN_INDEXES=0 + export RUN_KV=0 + export RUN_MAPREDUCE=0 + export RUN_RESOLVE=0 + export RUN_TIMESERIES=0 + export RUN_YZ=0 } function security_test { if [[ $have_riak_admin == 'true' ]] then - export_test_environment_vars + export_host_environment_vars + unexport_test_environment_vars export RUN_SECURITY=1 $riak_admin security enable - run_tests_each_protocol + run_tests 'pbc' else echo '[ERROR] riak-admin must be in PATH, RIAK_ADMIN var set to path, or RIAK_DIR set.' 
1>&2 exit 1 @@ -113,6 +132,7 @@ function security_test function integration_test { + export_host_environment_vars export_test_environment_vars run_tests_each_protocol } diff --git a/.travis.sh b/.travis.sh new file mode 100755 index 00000000..497de259 --- /dev/null +++ b/.travis.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env bash +set -o errexit + +flake8 --exclude=riak/pb riak *.py + +sudo riak-admin security disable + +python setup.py test + +sudo riak-admin security enable + +if [[ $RIAK_TEST_PROTOCOL == 'pbc' ]] +then + export RUN_SECURITY=1 + python setup.py test --test-suite riak.tests.test_security +else + echo '[INFO]: security tests run on PB protocol only' +fi diff --git a/.travis.yml b/.travis.yml index dfeeda6d..29b72d67 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,11 +7,14 @@ python: - '3.4' - '3.5' - nightly +addons: + hosts: + - riak-test install: - pip install --upgrade pip setuptools flake8 before_script: - sudo ./tools/travis-ci/riak-install -d "$RIAK_DOWNLOAD_URL" - - sudo ./tools/setup-riak + - sudo ./tools/setup-riak -s env: matrix: - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.0/2.0.7/ubuntu/trusty/riak_2.0.7-1_amd64.deb @@ -29,9 +32,9 @@ env: - RUN_YZ=1 - RUN_DATATYPES=1 - RUN_INDEXES=1 + - RUN_SECURITY=0 script: - - flake8 --exclude=riak/pb riak *.py - - python setup.py test + - ./.travis.sh notifications: slack: secure: kU1XcvTAliCWKuYpMWEMbD4qkbmlnWGLAIKbBQjtIh5ZRzISgjdUFzGcC31eHoQFv12LQdp5KAFj0Y1FyEvLxi0W8VeWKpsBGc06ntuECaN9MNHRBzKKclrTMGTfpBWZ5IO17XSUu2lKaNz6GDGRkiZA+sxYAVPfZSXY3u86IuY= diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index aa475c9c..237892da 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -40,10 +40,10 @@ def hostname_resolves(hostname): PROTOCOL = os.environ.get('RIAK_TEST_PROTOCOL', 'pbc') PB_HOST = os.environ.get('RIAK_TEST_PB_HOST', HOST) -PB_PORT = int(os.environ.get('RIAK_TEST_PB_PORT', '10017')) +PB_PORT = int(os.environ.get('RIAK_TEST_PB_PORT', '8087')) HTTP_HOST = os.environ.get('RIAK_TEST_HTTP_HOST', HOST) -HTTP_PORT = int(os.environ.get('RIAK_TEST_HTTP_PORT', '10018')) +HTTP_PORT = int(os.environ.get('RIAK_TEST_HTTP_PORT', '8098')) # these ports are used to simulate errors, there shouldn't # be anything listening on either port. 
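The `riak/tests/__init__.py` hunk above is where the CI environment meets the test suite. As a rough illustration (the real wiring lives in that module and the test base classes, and a reachable node is assumed), the exported `RIAK_TEST_*` variables translate into a client roughly like this:

```python
# Build a client from the same environment variables .runner and
# .travis.yml export, falling back to the defaults shown above.
import os
from riak import RiakClient

protocol = os.environ.get('RIAK_TEST_PROTOCOL', 'pbc')
host = os.environ.get('RIAK_TEST_HOST', 'localhost')
pb_port = int(os.environ.get('RIAK_TEST_PB_PORT', '8087'))
http_port = int(os.environ.get('RIAK_TEST_HTTP_PORT', '8098'))

client = RiakClient(protocol=protocol, host=host,
                    pb_port=pb_port, http_port=http_port)
try:
    print('%s ping: %s' % (protocol, client.ping()))
finally:
    client.close()
```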
diff --git a/tools b/tools index 6ac6fb7c..7b4b423d 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 6ac6fb7cf92bac7ed94d8f33c18932456d487492 +Subproject commit 7b4b423d5276e698399f9030835a5de53b95e55e From 6b0ffc8f356e5c29f327d10a58c6ebbcfe655c2a Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 4 Nov 2016 08:52:33 -0700 Subject: [PATCH 265/324] Update tools/ to 1.2.0 --- tools | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools b/tools index 7b4b423d..bf0c48ce 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 7b4b423d5276e698399f9030835a5de53b95e55e +Subproject commit bf0c48cef336c5c88968cd4c30660e4e177afa88 From 52c3c2ac3df9312cbf9600415beff23f90500f95 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 7 Nov 2016 11:55:34 -0800 Subject: [PATCH 266/324] Update timeseries tests, add exception if client is closed and pools are None --- .gitignore | 2 +- .runner | 9 +++++++++ Makefile | 1 + riak/client/__init__.py | 4 ++++ riak/client/transport.py | 3 +++ riak/tests/test_client.py | 6 ++++++ riak/tests/test_timeseries_pbuf.py | 10 +++++----- riak/tests/test_timeseries_ttb.py | 2 +- 8 files changed, 30 insertions(+), 7 deletions(-) diff --git a/.gitignore b/.gitignore index 280d6cc9..5cc03116 100644 --- a/.gitignore +++ b/.gitignore @@ -16,4 +16,4 @@ riak.egg-info/ #*# *~ .idea/ -.pyenv/ +envs/ diff --git a/.runner b/.runner index af51efc6..91b20b5c 100755 --- a/.runner +++ b/.runner @@ -137,6 +137,13 @@ function integration_test run_tests_each_protocol } +function timeseries_test +{ + unexport_test_environment_vars + export RUN_TIMESERIES=1 + run_tests_each_protocol +} + arg="${1:-lint}" case "$arg" in 'lint') @@ -147,6 +154,8 @@ case "$arg" in integration_test;; 'security-test') security_test;; + 'timeseries-test') + timeseries_test;; *) echo "[ERROR] unknown argument: '$arg'" 1>&2 exit 1;; diff --git a/Makefile b/Makefile index ed4eaa94..1630a76e 100644 --- a/Makefile +++ b/Makefile @@ -105,5 +105,6 @@ help: @echo ' unit-test - Run unit tests ' @echo ' integration-test - Run integration tests ' @echo ' security-test - Run integration tests (security enabled) ' + @echo ' timeseries-test - Run timeseries integration tests ' @echo ' ------------------------------------------------------------' @echo '' diff --git a/riak/client/__init__.py b/riak/client/__init__.py index dc97ad52..f7e61446 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -313,14 +313,18 @@ def close(self): self._stop_multi_pools() if self._http_pool is not None: self._http_pool.clear() + self._http_pool = None if self._tcp_pool is not None: self._tcp_pool.clear() + self._tcp_pool = None def _stop_multi_pools(self): if self._multiget_pool: self._multiget_pool.stop() + self._multiget_pool = None if self._multiput_pool: self._multiput_pool.stop() + self._multiput_pool = None def _create_node(self, n): if isinstance(n, RiakNode): diff --git a/riak/client/transport.py b/riak/client/transport.py index bb2aaef9..5ba83731 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -150,6 +150,9 @@ def _choose_pool(self, protocol=None): pool = self._tcp_pool else: raise ValueError("invalid protocol %s" % protocol) + if pool is None: + # NB: GH-500, this can happen if client is closed + raise RuntimeError("Client is closed.") return pool diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index 97bbbed7..fa506c29 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -120,6 +120,12 @@ def test_timeout_validation(self): 
'val4', timeout=bad): pass + def test_close_stops_operation_requests(self): + c = self.create_client() + c.ping() + c.close() + self.assertRaises(RuntimeError, c.ping) + def test_multiget_bucket(self): """ Multiget operations can be invoked on buckets. diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index e404eb05..527296be 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -306,31 +306,31 @@ def test_query_that_returns_table_description(self): query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 7)) + self.validate_len(ts_obj, (5, 8)) def test_query_that_returns_table_description_using_interpolation(self): query = 'Describe {table}' ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 7)) + self.validate_len(ts_obj, (5, 8)) def test_query_description_via_table(self): query = 'describe {table}' table = Table(self.client, table_name) ts_obj = table.query(query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 7)) + self.validate_len(ts_obj, (5, 8)) def test_get_description(self): ts_obj = self.client.ts_describe(table_name) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 7)) + self.validate_len(ts_obj, (5, 8)) def test_get_description_via_table(self): table = Table(self.client, table_name) ts_obj = table.describe() self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 7)) + self.validate_len(ts_obj, (5, 8)) def test_query_that_returns_no_data(self): fmt = """ diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 3ac37c57..8f9a990c 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -184,7 +184,7 @@ def test_query_that_returns_table_description(self): query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 7)) + self.validate_len(ts_obj, (5, 8)) def test_store_and_fetch_gh_483(self): now = datetime.datetime(2015, 1, 1, 12, 0, 0) From 9c7806f262bd1d354879107b95108e6f5b1e391c Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 7 Nov 2016 13:06:54 -0800 Subject: [PATCH 267/324] check _closed --- riak/client/transport.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/client/transport.py b/riak/client/transport.py index 5ba83731..92dd3eff 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -150,7 +150,7 @@ def _choose_pool(self, protocol=None): pool = self._tcp_pool else: raise ValueError("invalid protocol %s" % protocol) - if pool is None: + if pool is None or self._closed: # NB: GH-500, this can happen if client is closed raise RuntimeError("Client is closed.") return pool From 943058ce8ab742167c9a08e8c54fe31f7d204124 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 7 Nov 2016 15:10:53 -0800 Subject: [PATCH 268/324] Workaround for stats version that starts with riak_ts- --- Makefile | 4 ++++ riak/tests/__init__.py | 3 +++ riak/tests/test_timeseries_pbuf.py | 10 +++++----- riak/tests/test_timeseries_ttb.py | 2 +- riak/transports/http/transport.py | 7 ++++++- 5 files changed, 19 insertions(+), 7 deletions(-) diff --git a/Makefile b/Makefile index 1630a76e..c5e46a05 100644 --- a/Makefile +++ b/Makefile @@ -92,6 +92,10 @@ integration-test: security-test: @./.runner security-test +.PHONY: timeseries-test +timeseries-test: + 
@./.runner timeseries-test + .PHONY: test test: integration-test diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 237892da..62318fba 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -62,6 +62,9 @@ def hostname_resolves(hostname): RUN_TIMESERIES = int(os.environ.get('RUN_TIMESERIES', '0')) RUN_YZ = int(os.environ.get('RUN_YZ', '0')) +if PROTOCOL != 'pbc': + RUN_TIMESERIES = 0 + RUN_SECURITY = int(os.environ.get('RUN_SECURITY', '0')) if RUN_SECURITY: h = 'riak-test' diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 527296be..d069686a 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -306,31 +306,31 @@ def test_query_that_returns_table_description(self): query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 8)) + self.validate_len(ts_obj, (5, 7, 8)) def test_query_that_returns_table_description_using_interpolation(self): query = 'Describe {table}' ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 8)) + self.validate_len(ts_obj, (5, 7, 8)) def test_query_description_via_table(self): query = 'describe {table}' table = Table(self.client, table_name) ts_obj = table.query(query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 8)) + self.validate_len(ts_obj, (5, 7, 8)) def test_get_description(self): ts_obj = self.client.ts_describe(table_name) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 8)) + self.validate_len(ts_obj, (5, 7, 8)) def test_get_description_via_table(self): table = Table(self.client, table_name) ts_obj = table.describe() self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 8)) + self.validate_len(ts_obj, (5, 7, 8)) def test_query_that_returns_no_data(self): fmt = """ diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 8f9a990c..5616282a 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -184,7 +184,7 @@ def test_query_that_returns_table_description(self): query = fmt.format(table=table_name) ts_obj = self.client.ts_query(table_name, query) self.assertIsNotNone(ts_obj) - self.validate_len(ts_obj, (5, 8)) + self.validate_len(ts_obj, (5, 7, 8)) def test_store_and_fetch_gh_483(self): now = datetime.datetime(2015, 1, 1, 12, 0, 0) diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index 73d96149..61eb7eec 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -73,7 +73,12 @@ def stats(self): def _server_version(self): stats = self.stats() if stats is not None: - return stats['riak_kv_version'] + import re + s = stats['riak_kv_version'] + if s.startswith('riak_ts-'): + return stats['riak_pb_version'] + else: + return s # If stats is disabled, we can't assume the Riak version # is >= 1.1. 
However, we can assume the new URL scheme is # at least version 1.0 From 71c68acc7f3afe4b0911474f5370e9f23e4e7c86 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 7 Nov 2016 15:51:22 -0800 Subject: [PATCH 269/324] lint error --- riak/transports/http/transport.py | 1 - 1 file changed, 1 deletion(-) diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index 61eb7eec..1599d368 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -73,7 +73,6 @@ def stats(self): def _server_version(self): stats = self.stats() if stats is not None: - import re s = stats['riak_kv_version'] if s.startswith('riak_ts-'): return stats['riak_pb_version'] From b74f131a0a7fb444e1c3538cf8cd71f43dce8cfb Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 8 Nov 2016 16:31:42 -0800 Subject: [PATCH 270/324] update tests for 2.2.0 --- riak/tests/test_yokozuna.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index a4f325f1..a065cb3f 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -155,7 +155,8 @@ def test_yz_search_queries(self): wait_for_yz_index(bucket, "H") # multiterm results = bucket.search("username_s:(F OR H)") - self.assertEqual(2, len(results['docs'])) + l = len(results['docs']) + self.assertTrue(l == 2 or l == 3) # boolean results = bucket.search("username_s:Z AND name_s:ryan") self.assertEqual(1, len(results['docs'])) @@ -170,7 +171,8 @@ def test_yz_search_queries(self): self.assertEqual(2, len(results['docs'])) # regexp results = bucket.search('name_s:/br.*/') - self.assertEqual(2, len(results['docs'])) + l = len(results['docs']) + self.assertTrue(l == 2 or l == 3) # Parameters: # limit results = bucket.search('username_s:*', rows=2) From 6a2a250a7240c2586140996d3969392a18ba4bdf Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 10:43:05 -0800 Subject: [PATCH 271/324] 2.6.0 release notes --- RELNOTES.md | 84 +++++++++++++++++++++++++++-------------------------- 1 file changed, 43 insertions(+), 41 deletions(-) diff --git a/RELNOTES.md b/RELNOTES.md index 796142d0..726219e5 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,52 +1,54 @@ # Riak Python Client Release Notes -## [2.5.5 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.5) +## [`2.6.0` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.6.0) + +## [`2.5.5` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.5) * [Stop all pools when client shuts down](https://github.com/basho/riak-python-client/pull/488) * [Calling `close` on client closes pools, remove global multi pools](https://github.com/basho/riak-python-client/pull/490). *NOTE*: if you use the multi get or put features of the client, you *MUST* call `close()` on your `RiakClient` instance to correctly clean up the thread pools used for these multi-operations. 
-## [2.5.4 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.4) +## [`2.5.4` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.4) * [When converting `datetime` objects to send to Riak TS, `tzinfo` will be used if present](https://github.com/basho/riak-python-client/pull/486) * [Workaround for incorrect version returned by Riak TS OSS](https://github.com/basho/riak-python-client/pull/472) -## [2.5.3 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.3) +## [`2.5.3` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.3) * [Bug fix for raising `BadResource`](https://github.com/basho/riak-python-client/pull/481) -## [2.5.2 Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.2) +## [`2.5.2` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.2) * *NOTE*: for Riak TS data, automatic conversion from epoch values *to* Python `datetime` objects has been removed. If you would like to have automatic conversion, use `RiakClient(transport_options={'ts_convert_timestamp': True})` * Miscellaneous fixes for term-to-binary encoding of messages for Riak TS. * [Ensure `six` is not required during installation](https://github.com/basho/riak-python-client/pull/459) -## [2.5.0 Release - Deprecated](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.0) +## [`2.5.0` Release - Deprecated](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.0) -* *NOTE*: due to the `basho-erlastic` dependency, this version will not install correctly. Please use `2.5.2`. +* *NOTE*: due to the `basho-erlastic` dependency, this version will not install correctly. Please use ``2.5.2``. * *NOTE*: for Riak TS data, automatic conversion from epoch values *to* Python `datetime` objects has been removed. If you would like to have automatic conversion, use `RiakClient(transport_options={'ts_convert_timestamp': True})` * [Socket Enhancements](https://github.com/basho/riak-python-client/pull/453) - Resolves [#399](https://github.com/basho/riak-python-client/issues/399) * [Add multi-put](https://github.com/basho/riak-python-client/pull/452) -* [Add support for term-to-binary encoding](https://github.com/basho/riak-python-client/pull/448) *Note:* This requires at least version `1.3.0` of Riak TS. +* [Add support for term-to-binary encoding](https://github.com/basho/riak-python-client/pull/448) *Note:* This requires at least version ``1.3.0`` of Riak TS. -## 2.4.2 Patch Release - 2016-02-20 +## `2.4.2` Patch Release - 2016-02-20 * [Fix SSL host name](https://github.com/basho/riak-python-client/pull/436) * [Use `riak-client-tools`](https://github.com/basho/riak-python-client/issues/434) -## 2.4.1 Patch Release - 2016-02-03 +## `2.4.1` Patch Release - 2016-02-03 * [Riak TS: Millisecond precision](https://github.com/basho/riak-python-client/issues/430) * [Fix release process](https://github.com/basho/riak-python-client/issues/429) -## 2.4.0 Feature Release - 2016-01-13 +## `2.4.0` Feature Release - 2016-01-13 This release enhances Riak Time Series functionality. 
* [Encapsulate table description](https://github.com/basho/riak-python-client/pull/422) -## 2.3.0 Feature Release - 2015-12-14 +## `2.3.0` Feature Release - 2015-12-14 -Release 2.3.0 features support for new +Release `2.3.0` features support for new [time series](https://github.com/basho/riak-python-client/pull/416) functionality. @@ -59,15 +61,15 @@ There are also many bugfixes and new enhancements: (https://github.com/basho/riak-python-client/pull/418) * [Support for Preflists and Write-Once bucket types] (https://github.com/basho/riak-python-client/pull/414) -* [Support Riak 2.1.1] +* [Support Riak `2.1.1`] (https://github.com/basho/riak-python-client/pull/407) -* [Native SSL support for Python 2.7.9+] +* [Native SSL support for Python `2.7.9`+] (https://github.com/basho/riak-python-client/pull/397) -## 2.2.0 Feature Release - 2014-12-18 +## `2.2.0` Feature Release - 2014-12-18 -Release 2.2.0 features support for +Release `2.2.0` features support for [Python 3](https://github.com/basho/riak-python-client/pull/379), specifically 3.3 and 3.4. This version uses the native SSL security instead of [pyOpenSSL](http://pypi.python.org/pypi/pyOpenSSL) which is required @@ -92,9 +94,9 @@ notably: (https://github.com/basho/riak-python-client/pull/388) -## 2.1.0 Feature Release - 2014-09-03 +## `2.1.0` Feature Release - 2014-09-03 -Release 2.1.0 features support for Riak 2.0 capabilities including: +Release `2.1.0` features support for Riak 2.0 capabilities including: * Bucket Types * Riak Data Types (CRDTs) @@ -119,9 +121,9 @@ notably: * The additional request options `basic_quorum` and `notfound_ok` are now supported. -## 2.0.3 Patch Release - 2014-03-06 +## `2.0.3` Patch Release - 2014-03-06 -Release 2.0.3 includes support for 1.4.4's 2I regexp feature and fixes +Release `2.0.3` includes support for 1.4.4's 2I regexp feature and fixes a few bugs: * Docs generation now uses the version from the top-level package. @@ -129,17 +131,17 @@ a few bugs: * More errors will be caught and propagated properly from multiget requests, preventing deadlocks on the caller side. -## 2.0.2 Patch release - 2013-11-18 +## `2.0.2` Patch release - 2013-11-18 -Release 2.0.2 includes support for the 1.4.1+ "timeout" option on +Release `2.0.2` includes support for the 1.4.1+ "timeout" option on secondary index queries. -## 2.0.1 Patch release - 2013-08-28 +## `2.0.1` Patch release - 2013-08-28 -Release 2.0.1 includes a minor compatibility fix for Python 2.6 and an +Release `2.0.1` includes a minor compatibility fix for Python 2.6 and an updated README. -## 2.0.0 Feature Release - 2013-07-30 +## `2.0.0` Feature Release - 2013-07-30 Release 2.0 is the culmination of many months of rearchitecting the client. Highlights: @@ -182,9 +184,9 @@ Other bugfixes: * Enabling and disabling search indexing on a bucket now uses the `search` bucket property. -## 1.5.2 Patch Release - 2013-01-31 +## `1.5.2` Patch Release - 2013-01-31 -Release 1.5.2 fixes some bugs and adds HTTPS/SSL support. +Release `1.5.2` fixes some bugs and adds HTTPS/SSL support. * Added support for HTTPS. * Fixed writing of the `app.config` for the `TestServer`. @@ -195,24 +197,24 @@ Release 1.5.2 fixes some bugs and adds HTTPS/SSL support. * Prevent fetching the `protobuf` package from Google Code. * Prefer `simplejson` over `json` when present. -## 1.5.1 Patch Release - 2012-10-24 +## `1.5.1` Patch Release - 2012-10-24 -Release 1.5.1 fixes one bug and some documentation errors. +Release `1.5.1` fixes one bug and some documentation errors. 
* Fix bug where `http_status` is used instead of `http_code`. * Fix documentation of `RiakMapReduce.index` method. * Fix documentation of `RiakClient.__init__` method. -## 1.5.0 Feature Release - 2012-08-29 +## `1.5.0` Feature Release - 2012-08-29 -Release 1.5.0 is a feature release that supports Riak 1.2. +Release `1.5.0` is a feature release that supports Riak 1.2. Noteworthy features: * Riak 1.2 features are now supported, including Search and 2I queries over Protocol Buffers transport. The Protocol Buffers message definitions now exist as a separate package, available on - [PyPi](http://pypi.python.org/pypi/riak_pb/1.2.0). + [PyPi](http://pypi.python.org/pypi/riak_pb/`1.2.0`). **NOTE:** The return value of search queries over HTTP and MapReduce were changed to be compatible with the results returned from the @@ -231,7 +233,7 @@ Noteworthy bugfixes: * Various fixes were made to the TestServer and it will throw an exception when it fails to start. -## 1.4.1 Patch Release - 2012-06-19 +## `1.4.1` Patch Release - 2012-06-19 Noteworthy features: @@ -241,16 +243,16 @@ Noteworthy bugfixes: * Map Reduce queries now use "application/json" as the Content-Type -## 1.4.0 Feature Release - 2012-03-30 +## `1.4.0` Feature Release - 2012-03-30 -Release 1.4.0 is a feature release comprising over 117 individual +Release `1.4.0` is a feature release comprising over 117 individual commits. Noteworthy features: * Python 2.6 and 2.7 are supported. On 2.6, the unittest2 package is required to run the test suite. -* Google's official protobuf package (2.4.1 or later) is now a +* Google's official protobuf package (`2.4.1` or later) is now a dependency. The package from downloads.basho.com/support is no longer necessary. * Travis-CI is enabled on the client. Go to @@ -284,11 +286,11 @@ Noteworthy bugfixes: be handled properly when no results are returned. There are lots of other great fixes from our wonderful -community. [Check them out!](https://github.com/basho/riak-python-client/compare/1.3.0...1.4.0) +community. [Check them out!](https://github.com/basho/riak-python-client/compare/`1.3.0`...1.4.0) -## 1.3.0 Feature Release - 2011-08-04 +## `1.3.0` Feature Release - 2011-08-04 -Release 1.3.0 is a feature release bringing a slew of updates. +Release `1.3.0` is a feature release bringing a slew of updates. Noteworthy features: @@ -314,9 +316,9 @@ Fixes: pool. (Reid Draper) * #42: Reset protocol buffer connection up on connection error (Brett Hoerner) -## 1.2.2 Patch Release - 2011-06-22 +## `1.2.2` Patch Release - 2011-06-22 -Release 1.2.2 is a minor patch release. +Release `1.2.2` is a minor patch release. 
Noteworthy fixes and improvements: From a0970adb5057b40862671ca5071ae759c5f26bec Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 10:58:41 -0800 Subject: [PATCH 272/324] fix linter errors --- commands.py | 1 + riak/codecs/pbuf.py | 1 + riak/tests/__init__.py | 1 + riak/tests/pool-grinder.py | 1 + riak/tests/test_comparison.py | 1 + riak/tests/test_feature_detection.py | 1 + riak/tests/test_pool.py | 1 + riak/tests/test_yokozuna.py | 1 + riak/transports/tcp/__init__.py | 1 + riak/tz.py | 1 + 10 files changed, 10 insertions(+) diff --git a/commands.py b/commands.py index ac55b4a8..3d1e13c0 100644 --- a/commands.py +++ b/commands.py @@ -65,6 +65,7 @@ def check_output(*popenargs, **kwargs): raise CalledProcessError(retcode, cmd, output=output) return output + try: import simplejson as json except ImportError: diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 2b364403..0bd792e5 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -26,6 +26,7 @@ def _invert(d): out[value] = key return out + REPL_TO_PY = { riak.pb.riak_pb2.RpbBucketProps.FALSE: False, riak.pb.riak_pb2.RpbBucketProps.TRUE: True, diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 62318fba..65aef57a 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -29,6 +29,7 @@ def hostname_resolves(hostname): except socket.error: return 0 + distutils_debug = os.environ.get('DISTUTILS_DEBUG', '0') if distutils_debug == '1': logger = logging.getLogger() diff --git a/riak/tests/pool-grinder.py b/riak/tests/pool-grinder.py index 6bf0f2d4..4e39ead3 100755 --- a/riak/tests/pool-grinder.py +++ b/riak/tests/pool-grinder.py @@ -88,6 +88,7 @@ def _run(): else: return True + ret = True count = 0 while ret: diff --git a/riak/tests/test_comparison.py b/riak/tests/test_comparison.py index 86fb9f8b..b73d4bf5 100644 --- a/riak/tests/test_comparison.py +++ b/riak/tests/test_comparison.py @@ -163,5 +163,6 @@ def test_client_hash(self): b.close() c.close() + if __name__ == '__main__': unittest.main() diff --git a/riak/tests/test_feature_detection.py b/riak/tests/test_feature_detection.py index 894c8b17..b0ac63a6 100644 --- a/riak/tests/test_feature_detection.py +++ b/riak/tests/test_feature_detection.py @@ -194,5 +194,6 @@ def test_21(self): self.assertTrue(t.preflists()) self.assertTrue(t.write_once()) + if __name__ == '__main__': unittest.main() diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index eb6e0d54..3825c23c 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -349,5 +349,6 @@ def _run(): for th in threads: th.join() + if __name__ == '__main__': unittest.main() diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index a065cb3f..5d1adf53 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -19,6 +19,7 @@ def wait_for_yz_index(bucket, key, index=None): while len(bucket.search('_yz_rk:' + key, index=index)['docs']) == 0: pass + # YZ index on bucket of the same name testrun_yz = {'btype': None, 'bucket': 'yzbucket', 'index': 'yzbucket'} # YZ index on bucket of a different name diff --git a/riak/transports/tcp/__init__.py b/riak/transports/tcp/__init__.py index 2634af0a..eddeefe1 100644 --- a/riak/transports/tcp/__init__.py +++ b/riak/transports/tcp/__init__.py @@ -23,6 +23,7 @@ def create_resource(self): def destroy_resource(self, tcp): tcp.close() + # These are a specific set of socket errors # that could be raised on send/recv that indicate # that the socket is closed or reset, and is not diff --git a/riak/tz.py 
b/riak/tz.py index 30544b9f..b20054ee 100644 --- a/riak/tz.py +++ b/riak/tz.py @@ -15,4 +15,5 @@ def tzname(self, dt): def dst(self, dt): return ZERO + utc = UTC() From d3b43e62014246239299cf0e7a3601efe37620eb Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 11:29:20 -0800 Subject: [PATCH 273/324] remove comments --- Makefile | 2 -- 1 file changed, 2 deletions(-) diff --git a/Makefile b/Makefile index c5e46a05..f380952b 100644 --- a/Makefile +++ b/Makefile @@ -57,8 +57,6 @@ ifeq ($(RELEASE_GPG_KEYNAME),) endif @python -c 'import pypandoc' @echo "==> Python tagging version $(VERSION)" - # NB: Python client version strings do NOT start with 'v'. Le Sigh. - # validate VERSION and allow pre-releases @./build/publish $(VERSION) validate @git tag --sign -a "$(VERSION)" -m "riak-python-client $(VERSION)" --local-user "$(RELEASE_GPG_KEYNAME)" @git push --tags From cf965ab045d7bd9906d9f9c03fc4c1b0157b5384 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 11:54:36 -0800 Subject: [PATCH 274/324] Allow setting PYPI_REPOSITORY for testing purposes --- Makefile | 26 ++++++++++++++------------ 1 file changed, 14 insertions(+), 12 deletions(-) diff --git a/Makefile b/Makefile index f380952b..b3d4825d 100644 --- a/Makefile +++ b/Makefile @@ -19,6 +19,8 @@ PROJDIR = $(realpath $(CURDIR)) TOOLS_DIR = $(PROJDIR)/tools/devrel CA_DIR = $(PROJDIR)/tools/test-ca +PYPI_REPOSITORY ?= pypi + .PHONY: lint lint: ./.runner lint @@ -58,10 +60,9 @@ endif @python -c 'import pypandoc' @echo "==> Python tagging version $(VERSION)" @./build/publish $(VERSION) validate - @git tag --sign -a "$(VERSION)" -m "riak-python-client $(VERSION)" --local-user "$(RELEASE_GPG_KEYNAME)" - @git push --tags - @echo "==> Python (sdist release)" - @python setup.py sdist upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) + @echo "==> pypi repository: $(PYPI_REPOSITORY)" + @echo "==> Python (sdist)" + @python setup.py sdist upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @./build/publish $(VERSION) .PHONY: release @@ -69,14 +70,15 @@ release: release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif - @echo "==> Python 2.7 (release)" - @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) - @echo "==> Python 3.3 (release)" - @python3.3 setup.py build --build-base=py-build/3.3 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) - @echo "==> Python 3.4 (release)" - @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) - @echo "==> Python 3.5 (release)" - @python3.5 setup.py build --build-base=py-build/3.5 bdist_egg upload --show-response --sign --identity $(RELEASE_GPG_KEYNAME) + @echo "==> pypi repository: $(PYPI_REPOSITORY)" + @echo "==> Python 2.7 (bdist_egg)" + @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) + @echo "==> Python 3.3 (bdist_egg)" + @python3.3 setup.py build --build-base=py-build/3.3 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) + @echo "==> Python 3.4 (bdist_egg)" + @python3.4 setup.py build --build-base=py-build/3.4 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) + @echo 
"==> Python 3.5 (bdist_egg)" + @python3.5 setup.py build --build-base=py-build/3.5 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) .PHONY: unit-test unit-test: From 5ba2dd5f0a25bedc8719afade8275032832e7683 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 12:13:18 -0800 Subject: [PATCH 275/324] restore git tagging --- Makefile | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/Makefile b/Makefile index b3d4825d..4ef5e92c 100644 --- a/Makefile +++ b/Makefile @@ -16,14 +16,12 @@ PANDOC_VERSION := $(shell pandoc --version) PROTOC_VERSION := $(shell protoc --version) PROJDIR = $(realpath $(CURDIR)) -TOOLS_DIR = $(PROJDIR)/tools/devrel -CA_DIR = $(PROJDIR)/tools/test-ca PYPI_REPOSITORY ?= pypi .PHONY: lint lint: - ./.runner lint + $(PROJDIR)/.runner lint .PHONY: pb_clean pb_clean: @@ -59,14 +57,16 @@ ifeq ($(RELEASE_GPG_KEYNAME),) endif @python -c 'import pypandoc' @echo "==> Python tagging version $(VERSION)" - @./build/publish $(VERSION) validate + @$(PROJDIR)/build/publish $(VERSION) validate + @git tag --sign -a "$(VERSION)" -m "riak-python-client $(VERSION)" --local-user "$(RELEASE_GPG_KEYNAME)" + @git push --tags @echo "==> pypi repository: $(PYPI_REPOSITORY)" @echo "==> Python (sdist)" @python setup.py sdist upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) - @./build/publish $(VERSION) + @$(PROJDIR)/build/publish $(VERSION) .PHONY: release -release: release_sdist +release: # release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif @@ -82,19 +82,19 @@ endif .PHONY: unit-test unit-test: - @./.runner unit-test + @$(PROJDIR)/runner unit-test .PHONY: integration-test integration-test: - @./.runner integration-test + @$(PROJDIR)/runner integration-test .PHONY: security-test security-test: - @./.runner security-test + @$(PROJDIR)/runner security-test .PHONY: timeseries-test timeseries-test: - @./.runner timeseries-test + @$(PROJDIR)/runner timeseries-test .PHONY: test test: integration-test From d4a24760e7830569dc64edd91e685ede5934fd07 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 12:31:00 -0800 Subject: [PATCH 276/324] Remove commented out target --- Makefile | 2 +- build/pyenv-setup | 124 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 125 insertions(+), 1 deletion(-) create mode 100755 build/pyenv-setup diff --git a/Makefile b/Makefile index 4ef5e92c..e5c80093 100644 --- a/Makefile +++ b/Makefile @@ -66,7 +66,7 @@ endif @$(PROJDIR)/build/publish $(VERSION) .PHONY: release -release: # release_sdist +release: release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif diff --git a/build/pyenv-setup b/build/pyenv-setup new file mode 100755 index 00000000..102a66a4 --- /dev/null +++ b/build/pyenv-setup @@ -0,0 +1,124 @@ +#!/usr/bin/env bash + +unset PYENV_VERSION + +if [[ ! -d $PYENV_ROOT ]] +then + export PYENV_ROOT="$HOME/.pyenv" +fi + +declare -r PROJDIR="$PWD" +if [[ ! -s $PROJDIR/riak/__init__.py ]] +then + echo "[ERROR] script must be run from the clone of github.com/basho/riak-python-client" 1>&2 + exit 1 +fi + +rm -f $PROJDIR/.python-version + +# Install pyenv if it's missing +if [[ ! 
-d $PYENV_ROOT ]] +then + git clone 'https://github.com/yyuu/pyenv.git' $PYENV_ROOT +else + (cd $PYENV_ROOT && git fetch --all) +fi + +(cd $PYENV_ROOT && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) + +declare -r pyenv_virtualenv_dir="$PYENV_ROOT/plugins/pyenv-virtualenv" +if [[ ! -d $pyenv_virtualenv_dir ]] +then + git clone 'https://github.com/yyuu/pyenv-virtualenv.git' $pyenv_virtualenv_dir +else + (cd $pyenv_virtualenv_dir && git fetch --all) +fi + +(cd $pyenv_virtualenv_dir && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) + +declare -r pyenv_alias_dir="$PYENV_ROOT/plugins/pyenv-alias" +if [[ ! -d $pyenv_alias_dir ]] +then + git clone 'https://github.com/s1341/pyenv-alias.git' $pyenv_alias_dir +else + (cd $pyenv_alias_dir && git pull origin master) +fi + +# Add pyenv root to PATH +# and initialize pyenv +if [[ $PATH != */.pyenv* ]] +then + echo "[INFO] adding $PYENV_ROOT/bin to PATH" + export PATH="$PYENV_ROOT/bin:$PATH" +fi + +if [[ $(type -t pyenv) != 'function' ]] +then + echo "[INFO] init pyenv" + eval "$(pyenv init -)" + eval "$(pyenv virtualenv-init -)" +fi + +do_pip_upgrades='false' + +# NB: 2.7.8 is special-cased +for pyver in 2.7 3.3 3.4 3.5 +do + riak_py_alias="riak_$pyver" + if ! pyenv versions | fgrep -v 'riak_2.7.8' | fgrep -q "$riak_py_alias" + then + # Need to install it + do_pip_upgrades='true' + + declare -i pymaj="${pyver%.*}" + declare -i pymin="${pyver#*.}" + pyver_latest="$(pyenv install --list | grep -E "^[[:space:]]+$pymaj\\.$pymin\\.[[:digit:]]+\$" | tail -n1 | sed -e 's/[[:space:]]//g')" + + echo "[INFO] installing Python $pyver_latest" + VERSION_ALIAS="$riak_py_alias" pyenv install "$pyver_latest" + pyenv virtualenv "$riak_py_alias" "riak-py$pymaj$pymin" + fi +done + +if ! pyenv versions | fgrep -q 'riak_2.7.8' +then + # Need to install it + do_pip_upgrades='true' + + echo "[INFO] installing Python 2.7.8" + VERSION_ALIAS='riak_2.7.8' pyenv install '2.7.8' + pyenv virtualenv 'riak_2.7.8' 'riak-py278' +fi + +pushd $PROJDIR +pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278 + +pyenv rehash + +if [[ $do_pip_upgrades == 'true' ]] +then + for PY in $(pyenv versions --bare --skip-aliases | grep '^riak_') + do + echo "[INFO] $PY - upgrading pip / setuptools" + PYENV_VERSION="$PY" pip install --upgrade pip setuptools + done +fi + +python_version="$(python --version)" +if [[ $python_version == Python\ 3* ]] +then + pip install --ignore-installed tox + if ! 
pip show --quiet tox + then + echo "[ERROR] install of 'tox' failed" 1>&2 + popd + exit 1 + fi + pyenv rehash +else + echo "[ERROR] expected Python 3 to be 'python' at this point" 1>&2 + popd + exit 1 +fi + +popd From f3d21622ed72875516b5a078b76a1693f9487624 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 12:47:47 -0800 Subject: [PATCH 277/324] remove virtualenv from pyenv setup --- Makefile | 4 ++-- build/pyenv-setup | 15 +-------------- 2 files changed, 3 insertions(+), 16 deletions(-) diff --git a/Makefile b/Makefile index e5c80093..08b4885a 100644 --- a/Makefile +++ b/Makefile @@ -66,13 +66,13 @@ endif @$(PROJDIR)/build/publish $(VERSION) .PHONY: release -release: release_sdist +release: # release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif @echo "==> pypi repository: $(PYPI_REPOSITORY)" @echo "==> Python 2.7 (bdist_egg)" - @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) + # @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.3 (bdist_egg)" @python3.3 setup.py build --build-base=py-build/3.3 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.4 (bdist_egg)" diff --git a/build/pyenv-setup b/build/pyenv-setup index 102a66a4..84657872 100755 --- a/build/pyenv-setup +++ b/build/pyenv-setup @@ -26,16 +26,6 @@ fi (cd $PYENV_ROOT && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) -declare -r pyenv_virtualenv_dir="$PYENV_ROOT/plugins/pyenv-virtualenv" -if [[ ! -d $pyenv_virtualenv_dir ]] -then - git clone 'https://github.com/yyuu/pyenv-virtualenv.git' $pyenv_virtualenv_dir -else - (cd $pyenv_virtualenv_dir && git fetch --all) -fi - -(cd $pyenv_virtualenv_dir && git checkout $(git describe --tags $(git rev-list --tags --max-count=1))) - declare -r pyenv_alias_dir="$PYENV_ROOT/plugins/pyenv-alias" if [[ ! 
-d $pyenv_alias_dir ]] then @@ -56,7 +46,6 @@ if [[ $(type -t pyenv) != 'function' ]] then echo "[INFO] init pyenv" eval "$(pyenv init -)" - eval "$(pyenv virtualenv-init -)" fi do_pip_upgrades='false' @@ -76,7 +65,6 @@ do echo "[INFO] installing Python $pyver_latest" VERSION_ALIAS="$riak_py_alias" pyenv install "$pyver_latest" - pyenv virtualenv "$riak_py_alias" "riak-py$pymaj$pymin" fi done @@ -87,11 +75,10 @@ then echo "[INFO] installing Python 2.7.8" VERSION_ALIAS='riak_2.7.8' pyenv install '2.7.8' - pyenv virtualenv 'riak_2.7.8' 'riak-py278' fi pushd $PROJDIR -pyenv local riak-py35 riak-py34 riak-py33 riak-py27 riak-py278 +pyenv local 'riak_3.5' 'riak_3.4' 'riak_3.3' 'riak_2.7' 'riak_2.7.8' pyenv rehash From 95f9dd1598f40ebda510b611782b5e8760351dab Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 12:48:48 -0800 Subject: [PATCH 278/324] restore line --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 08b4885a..4ef5e92c 100644 --- a/Makefile +++ b/Makefile @@ -72,7 +72,7 @@ ifeq ($(RELEASE_GPG_KEYNAME),) endif @echo "==> pypi repository: $(PYPI_REPOSITORY)" @echo "==> Python 2.7 (bdist_egg)" - # @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) + @python2.7 setup.py build --build-base=py-build/2.7 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.3 (bdist_egg)" @python3.3 setup.py build --build-base=py-build/3.3 bdist_egg upload --repository $(PYPI_REPOSITORY) --show-response --sign --identity $(RELEASE_GPG_KEYNAME) @echo "==> Python 3.4 (bdist_egg)" From d4747316347a6c274a54dbe325d3fec2787beb12 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 12:49:40 -0800 Subject: [PATCH 279/324] restore line --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 4ef5e92c..e5c80093 100644 --- a/Makefile +++ b/Makefile @@ -66,7 +66,7 @@ endif @$(PROJDIR)/build/publish $(VERSION) .PHONY: release -release: # release_sdist +release: release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) endif From 4b2e350cdf9b66727b1039daec7bc22a137ae360 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 17 Nov 2016 13:00:49 -0800 Subject: [PATCH 280/324] 2.6.1 release notes, check for .python-version --- Makefile | 6 ++++++ RELNOTES.md | 4 ++++ 2 files changed, 10 insertions(+) diff --git a/Makefile b/Makefile index e5c80093..6b97d1dd 100644 --- a/Makefile +++ b/Makefile @@ -54,6 +54,9 @@ ifeq ($(PANDOC_VERSION),) endif ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) +endif +ifeq ("$(wildcard $(PROJDIR)/.python-version)","") + $(error expected $(PROJDIR)/.python-version to exist. Run $(PROJDIR)/build/pyenv-setup) endif @python -c 'import pypandoc' @echo "==> Python tagging version $(VERSION)" @@ -69,6 +72,9 @@ endif release: release_sdist ifeq ($(RELEASE_GPG_KEYNAME),) $(error RELEASE_GPG_KEYNAME must be set to build a release and deploy this package) +endif +ifeq ("$(wildcard $(PROJDIR)/.python-version)","") + $(error expected $(PROJDIR)/.python-version to exist. 
Run $(PROJDIR)/build/pyenv-setup) endif @echo "==> pypi repository: $(PYPI_REPOSITORY)" @echo "==> Python 2.7 (bdist_egg)" diff --git a/RELNOTES.md b/RELNOTES.md index 726219e5..9d21f8dd 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,6 +1,10 @@ # Riak Python Client Release Notes +## [`2.6.1` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.6.0) + * NOTE: Due to pypi upload errors, `2.6.1` takes the place of `2.6.0`. + ## [`2.6.0` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.6.0) + * NOTE: Due to pypi upload errors, `2.6.1` takes the place of `2.6.0`. ## [`2.5.5` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.5.5) From e874a141dbc5d9561d3ad46fa5d0feef80c91f36 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 19 Nov 2016 09:00:18 -0800 Subject: [PATCH 281/324] Fix Makefile --- Makefile | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Makefile b/Makefile index 6b97d1dd..735bacfe 100644 --- a/Makefile +++ b/Makefile @@ -88,19 +88,19 @@ endif .PHONY: unit-test unit-test: - @$(PROJDIR)/runner unit-test + @$(PROJDIR)/.runner unit-test .PHONY: integration-test integration-test: - @$(PROJDIR)/runner integration-test + @$(PROJDIR)/.runner integration-test .PHONY: security-test security-test: - @$(PROJDIR)/runner security-test + @$(PROJDIR)/.runner security-test .PHONY: timeseries-test timeseries-test: - @$(PROJDIR)/runner timeseries-test + @$(PROJDIR)/.runner timeseries-test .PHONY: test test: integration-test From 9e549324fac30ac75ed2ae27d42d92989e73a9ab Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sat, 19 Nov 2016 18:48:57 -0800 Subject: [PATCH 282/324] do at least one-retry --- docs/advanced.rst | 13 ++++++++++--- riak/client/transport.py | 24 +++++++++++++++++------- riak/exceptions.py | 16 ++++++++++++++++ riak/tests/test_pool.py | 3 ++- riak/transports/pool.py | 14 +++----------- riak/transports/tcp/__init__.py | 5 ++++- riak/transports/tcp/connection.py | 8 +++++--- 7 files changed, 57 insertions(+), 26 deletions(-) create mode 100644 riak/exceptions.py diff --git a/docs/advanced.rst b/docs/advanced.rst index d77d2bb3..2ddfb8a5 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -10,16 +10,23 @@ understanding how it works and development purposes. Connection pool --------------- -.. currentmodule:: riak.transports.pool - -.. autoexception:: BadResource .. autoclass:: Resource :members: + .. autoclass:: Pool :members: .. autoclass:: PoolIterator +--------------- +Exceptions +--------------- + +.. currentmodule:: riak.exceptions + +.. autoexception:: BadResource +.. 
autoexception:: ConnectionClosed + ----------- Retry logic ----------- diff --git a/riak/client/transport.py b/riak/client/transport.py index 92dd3eff..0f17eb5d 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -1,5 +1,5 @@ from contextlib import contextmanager -from riak.transports.pool import BadResource +from riak.exceptions import BadResource, ConnectionClosed from riak.transports.tcp import is_retryable as is_tcp_retryable from riak.transports.http import is_retryable as is_http_retryable import threading @@ -112,22 +112,31 @@ def _with_retries(self, pool, fn): def _skip_bad_nodes(transport): return transport._node not in skip_nodes - retry_count = self.retries - - for retry in range(retry_count): + retry_count = self.retries - 1 + first_try = True + current_try = 0 + while True: try: with pool.transaction(_filter=_skip_bad_nodes) as transport: try: return fn(transport) - except (IOError, HTTPException) as e: + except (IOError, HTTPException, ConnectionClosed) as e: if _is_retryable(e): transport._node.error_rate.incr(1) skip_nodes.append(transport._node) - raise BadResource(e) + if first_try: + first_try = False + continue + else: + raise BadResource(e) else: raise + # NB: no exceptions if made it here + break except BadResource as e: - if retry < (retry_count - 1): + first_try = False + if current_try < retry_count: + current_try += 1 continue else: # Re-raise the inner exception @@ -168,6 +177,7 @@ def _is_retryable(error): return is_tcp_retryable(error) or is_http_retryable(error) +# http://thecodeship.com/patterns/guide-to-python-function-decorators/ def retryable(fn, protocol=None): """ Wraps a client operation that can be retried according to the set diff --git a/riak/exceptions.py b/riak/exceptions.py new file mode 100644 index 00000000..fb657a6d --- /dev/null +++ b/riak/exceptions.py @@ -0,0 +1,16 @@ +class BadResource(Exception): + """ + Users of a :class:`Pool` should raise this error when the pool + resource currently in-use is bad and should be removed from the + pool. + """ + pass + + +class ConnectionClosed(Exception): + """ + Users of a :class:`Pool` should raise this error when the pool + resource currently in-use has been closed and should be removed + from the pool. + """ + pass diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index 3825c23c..bc2496e5 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -7,9 +7,10 @@ from time import sleep from riak import RiakError +from riak.exceptions import BadResource from riak.tests import RUN_POOL from riak.tests.comparison import Comparison -from riak.transports.pool import Pool, BadResource +from riak.transports.pool import Pool if PY2: from Queue import Queue diff --git a/riak/transports/pool.py b/riak/transports/pool.py index d0a9ee7f..06bc3308 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -2,19 +2,10 @@ import threading +from riak.exceptions import BadResource from contextlib import contextmanager -# This file is a rough port of the Innertube Ruby library -class BadResource(Exception): - """ - Users of a :class:`Pool` should raise this error when the pool - resource currently in-use is bad and should be removed from the - pool. 
- """ - pass - - class Resource(object): """ A member of the :class:`Pool`, a container for the actual resource @@ -60,7 +51,8 @@ class Pool(object): Example:: - from riak.Pool import Pool, BadResource + from riak.Pool import Pool + from riak.exceptions import BadResource class ListPool(Pool): def create_resource(self): return [] diff --git a/riak/transports/tcp/__init__.py b/riak/transports/tcp/__init__.py index eddeefe1..c60bdd34 100644 --- a/riak/transports/tcp/__init__.py +++ b/riak/transports/tcp/__init__.py @@ -1,6 +1,7 @@ import errno import socket +from riak.exceptions import ConnectionClosed from riak.transports.pool import Pool from riak.transports.tcp.transport import TcpTransport @@ -48,7 +49,9 @@ def is_retryable(err): :rtype: boolean """ - if isinstance(err, socket.error): + if isinstance(err, ConnectionClosed): + return True + elif isinstance(err, socket.error): code = err.args[0] return code in CONN_CLOSED_ERRORS else: diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index da2eef19..86f18918 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -8,7 +8,7 @@ from riak import RiakError from riak.codecs.pbuf import PbufCodec from riak.security import SecurityError, USE_STDLIB_SSL -from riak.transports.pool import BadResource +from riak.exceptions import BadResource, ConnectionClosed if USE_STDLIB_SSL: import ssl @@ -206,8 +206,10 @@ def _recv(self, msglen): # https://docs.python.org/2/howto/sockets.html#using-a-socket # https://github.com/basho/riak-python-client/issues/399 if nbytes == 0: - ex = RiakError('recv_into returned zero bytes unexpectedly') - raise BadResource(ex) + ex = RiakError( + 'recv_into returned zero bytes unexpectedly, ' + 'expected {}'.format(toread)) + raise ConnectionClosed(ex) view = view[nbytes:] # slicing views is cheap toread -= nbytes nread += nbytes From 4d440d84f8fec4fe54276fd4a1ab409ec217b840 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Sun, 20 Nov 2016 14:52:14 -0800 Subject: [PATCH 283/324] add lots of debugging --- riak/client/transport.py | 9 ++++++--- riak/transports/pool.py | 8 ++++++++ riak/transports/tcp/connection.py | 8 +++++--- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/riak/client/transport.py b/riak/client/transport.py index 0f17eb5d..68a6807b 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -2,8 +2,11 @@ from riak.exceptions import BadResource, ConnectionClosed from riak.transports.tcp import is_retryable as is_tcp_retryable from riak.transports.http import is_retryable as is_http_retryable -import threading from six import PY2 + +import threading +import sys + if PY2: from httplib import HTTPException else: @@ -121,6 +124,7 @@ def _skip_bad_nodes(transport): try: return fn(transport) except (IOError, HTTPException, ConnectionClosed) as e: + # TODO FIXME delete resource if _is_retryable(e): transport._node.error_rate.incr(1) skip_nodes.append(transport._node) @@ -131,9 +135,8 @@ def _skip_bad_nodes(transport): raise BadResource(e) else: raise - # NB: no exceptions if made it here - break except BadResource as e: + # TODO FIXME delete resource first_try = False if current_try < retry_count: current_try += 1 diff --git a/riak/transports/pool.py b/riak/transports/pool.py index 06bc3308..f92851ae 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -1,6 +1,7 @@ from __future__ import print_function import threading +import sys from riak.exceptions import BadResource from contextlib import contextmanager 
@@ -34,6 +35,7 @@ def release(self): """ Releases this resource back to the pool it came from. """ + sys.stderr.write('Resource.release: {}\n'.format(self)) self.pool.release(self) @@ -111,6 +113,7 @@ def _filter(obj): resource = Resource(default, self) else: resource = Resource(self.create_resource(), self) + sys.stderr.write('created resource: {}\n'.format(resource)) self.resources.append(resource) resource.claimed = True return resource @@ -126,6 +129,7 @@ def release(self, resource): :param resource: Resource """ with self.releaser: + sys.stderr.write('Pool.release: {}\n'.format(self)) resource.claimed = False self.releaser.notify_all() @@ -148,10 +152,13 @@ def transaction(self, _filter=None, default=None): resource = self.acquire(_filter=_filter, default=default) try: yield resource.object + sys.stderr.write('Pool.transaction after yield statement: {}\n'.format(resource.object)) except BadResource: + sys.stderr.write('Pool.transaction handling BadResource\n') self.delete_resource(resource) raise finally: + sys.stderr.write('Pool.transaction finally statement\n') self.release(resource) def delete_resource(self, resource): @@ -163,6 +170,7 @@ def delete_resource(self, resource): :param resource: the resource to remove :type resource: Resource """ + sys.stderr.write('deleting resource: {}\n'.format(resource)) with self.lock: self.resources.remove(resource) self.destroy_resource(resource.object) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 86f18918..547c4843 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,6 +1,7 @@ import logging import socket import struct +import sys import riak.pb.riak_pb2 import riak.pb.messages @@ -164,6 +165,7 @@ def _recv_msg(self): # it might still receive the data later and mix up with a # subsequent request. 
# https://github.com/basho/riak-python-client/issues/425 + sys.stderr.write('socket recv timed out reading first four bytes\n') raise BadResource(e) mv = memoryview(msgbuf) mcb = mv[0:1] @@ -206,9 +208,9 @@ def _recv(self, msglen): # https://docs.python.org/2/howto/sockets.html#using-a-socket # https://github.com/basho/riak-python-client/issues/399 if nbytes == 0: - ex = RiakError( - 'recv_into returned zero bytes unexpectedly, ' - 'expected {}'.format(toread)) + msg = 'socket recv returned zero bytes unexpectedly, expected {}'.format(toread) + sys.stderr.write(msg) + ex = RiakError(msg) raise ConnectionClosed(ex) view = view[nbytes:] # slicing views is cheap toread -= nbytes From b19872940966ae4989b2df166e4cdfebfc824bb7 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 21 Nov 2016 15:37:13 -0800 Subject: [PATCH 284/324] make lint happy --- riak/client/transport.py | 10 ++++++---- riak/transports/pool.py | 25 +++++++++++++++++++------ riak/transports/tcp/connection.py | 7 +++++-- 3 files changed, 30 insertions(+), 12 deletions(-) diff --git a/riak/client/transport.py b/riak/client/transport.py index 68a6807b..fd1bb877 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -5,7 +5,6 @@ from six import PY2 import threading -import sys if PY2: from httplib import HTTPException @@ -120,11 +119,14 @@ def _skip_bad_nodes(transport): current_try = 0 while True: try: - with pool.transaction(_filter=_skip_bad_nodes) as transport: + with pool.transaction( + _filter=_skip_bad_nodes, + yield_resource=True) as resource: + transport = resource.object try: return fn(transport) except (IOError, HTTPException, ConnectionClosed) as e: - # TODO FIXME delete resource + resource.errored = True if _is_retryable(e): transport._node.error_rate.incr(1) skip_nodes.append(transport._node) @@ -136,9 +138,9 @@ def _skip_bad_nodes(transport): else: raise except BadResource as e: - # TODO FIXME delete resource first_try = False if current_try < retry_count: + resource.errored = True current_try += 1 continue else: diff --git a/riak/transports/pool.py b/riak/transports/pool.py index f92851ae..9713ba6b 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -22,14 +22,17 @@ def __init__(self, obj, pool): :type obj: object """ - self.object = obj """The wrapped pool resource.""" + self.object = obj - self.claimed = False """Whether the resource is currently in use.""" + self.claimed = False - self.pool = pool """The pool that this resource belongs to.""" + self.pool = pool + + """True if this Resource errored.""" + self.errored = False def release(self): """ @@ -134,7 +137,7 @@ def release(self, resource): self.releaser.notify_all() @contextmanager - def transaction(self, _filter=None, default=None): + def transaction(self, _filter=None, default=None, yield_resource=False): """ transaction(_filter=None, default=None) @@ -148,11 +151,21 @@ def transaction(self, _filter=None, default=None): :type _filter: callable :param default: a value that will be used instead of calling :meth:`create_resource` if a new resource needs to be created + :param yield_resource: set to True to yield the Resource object + itself + :type yield_resource: boolean """ resource = self.acquire(_filter=_filter, default=default) try: - yield resource.object - sys.stderr.write('Pool.transaction after yield statement: {}\n'.format(resource.object)) + if yield_resource: + yield resource + else: + yield resource.object + sys.stderr.write( + 'Pool.transaction after yield statement: {}\n' + .format(resource.object)) + if 
resource.errored: + self.delete_resource(resource) except BadResource: sys.stderr.write('Pool.transaction handling BadResource\n') self.delete_resource(resource) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 547c4843..93f96b51 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -165,7 +165,9 @@ def _recv_msg(self): # it might still receive the data later and mix up with a # subsequent request. # https://github.com/basho/riak-python-client/issues/425 - sys.stderr.write('socket recv timed out reading first four bytes\n') + sys.stderr.write( + 'socket recv timed out ' + 'reading first four bytes\n') raise BadResource(e) mv = memoryview(msgbuf) mcb = mv[0:1] @@ -208,7 +210,8 @@ def _recv(self, msglen): # https://docs.python.org/2/howto/sockets.html#using-a-socket # https://github.com/basho/riak-python-client/issues/399 if nbytes == 0: - msg = 'socket recv returned zero bytes unexpectedly, expected {}'.format(toread) + msg = 'socket recv returned ' + 'zero bytes unexpectedly, expected {}'.format(toread) sys.stderr.write(msg) ex = RiakError(msg) raise ConnectionClosed(ex) From 107a3e2329c4231f128f9b4c3994ddbc678c2f26 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 21 Nov 2016 15:59:12 -0800 Subject: [PATCH 285/324] clean up resources when retries in effect --- riak/transports/pool.py | 4 ++-- riak/transports/tcp/connection.py | 7 +++---- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/riak/transports/pool.py b/riak/transports/pool.py index 9713ba6b..1385f206 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -132,7 +132,7 @@ def release(self, resource): :param resource: Resource """ with self.releaser: - sys.stderr.write('Pool.release: {}\n'.format(self)) + sys.stderr.write('Pool.release: {}\n'.format(resource)) resource.claimed = False self.releaser.notify_all() @@ -163,7 +163,7 @@ def transaction(self, _filter=None, default=None, yield_resource=False): yield resource.object sys.stderr.write( 'Pool.transaction after yield statement: {}\n' - .format(resource.object)) + .format(resource)) if resource.errored: self.delete_resource(resource) except BadResource: diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 93f96b51..2f8c49a3 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -166,8 +166,7 @@ def _recv_msg(self): # subsequent request. 
# https://github.com/basho/riak-python-client/issues/425 sys.stderr.write( - 'socket recv timed out ' - 'reading first four bytes\n') + 'socket recv timed out reading first four bytes\n') raise BadResource(e) mv = memoryview(msgbuf) mcb = mv[0:1] @@ -210,8 +209,8 @@ def _recv(self, msglen): # https://docs.python.org/2/howto/sockets.html#using-a-socket # https://github.com/basho/riak-python-client/issues/399 if nbytes == 0: - msg = 'socket recv returned ' - 'zero bytes unexpectedly, expected {}'.format(toread) + msg = 'socket recv returned zero bytes unexpectedly, ' \ + 'expected {}'.format(toread) sys.stderr.write(msg) ex = RiakError(msg) raise ConnectionClosed(ex) From 666d5dc958772d334e9d37ce4840b3f75a8e5978 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 22 Nov 2016 08:27:05 -0800 Subject: [PATCH 286/324] remove some debugging --- riak/exceptions.py | 2 +- riak/transports/pool.py | 10 ---------- riak/transports/tcp/connection.py | 8 ++------ riak/transports/tcp/stream.py | 2 +- riak/transports/tcp/transport.py | 8 +++++++- 5 files changed, 11 insertions(+), 19 deletions(-) diff --git a/riak/exceptions.py b/riak/exceptions.py index fb657a6d..5646d7f7 100644 --- a/riak/exceptions.py +++ b/riak/exceptions.py @@ -7,7 +7,7 @@ class BadResource(Exception): pass -class ConnectionClosed(Exception): +class ConnectionClosed(BadResource): """ Users of a :class:`Pool` should raise this error when the pool resource currently in-use has been closed and should be removed diff --git a/riak/transports/pool.py b/riak/transports/pool.py index 1385f206..a35e65e8 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -1,7 +1,6 @@ from __future__ import print_function import threading -import sys from riak.exceptions import BadResource from contextlib import contextmanager @@ -38,7 +37,6 @@ def release(self): """ Releases this resource back to the pool it came from. 
""" - sys.stderr.write('Resource.release: {}\n'.format(self)) self.pool.release(self) @@ -116,7 +114,6 @@ def _filter(obj): resource = Resource(default, self) else: resource = Resource(self.create_resource(), self) - sys.stderr.write('created resource: {}\n'.format(resource)) self.resources.append(resource) resource.claimed = True return resource @@ -132,7 +129,6 @@ def release(self, resource): :param resource: Resource """ with self.releaser: - sys.stderr.write('Pool.release: {}\n'.format(resource)) resource.claimed = False self.releaser.notify_all() @@ -161,17 +157,12 @@ def transaction(self, _filter=None, default=None, yield_resource=False): yield resource else: yield resource.object - sys.stderr.write( - 'Pool.transaction after yield statement: {}\n' - .format(resource)) if resource.errored: self.delete_resource(resource) except BadResource: - sys.stderr.write('Pool.transaction handling BadResource\n') self.delete_resource(resource) raise finally: - sys.stderr.write('Pool.transaction finally statement\n') self.release(resource) def delete_resource(self, resource): @@ -183,7 +174,6 @@ def delete_resource(self, resource): :param resource: the resource to remove :type resource: Resource """ - sys.stderr.write('deleting resource: {}\n'.format(resource)) with self.lock: self.resources.remove(resource) self.destroy_resource(resource.object) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 2f8c49a3..393b93a2 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,7 +1,6 @@ import logging import socket import struct -import sys import riak.pb.riak_pb2 import riak.pb.messages @@ -157,7 +156,7 @@ def _ssl_handshake(self): # fail if *any* exceptions are thrown during SSL handshake raise SecurityError(e) - def _recv_msg(self): + def _recv_msg(self, stream=False): try: msgbuf = self._recv_pkt() except socket.timeout as e: @@ -165,8 +164,6 @@ def _recv_msg(self): # it might still receive the data later and mix up with a # subsequent request. 
# https://github.com/basho/riak-python-client/issues/425 - sys.stderr.write( - 'socket recv timed out reading first four bytes\n') raise BadResource(e) mv = memoryview(msgbuf) mcb = mv[0:1] @@ -210,8 +207,7 @@ def _recv(self, msglen): # https://github.com/basho/riak-python-client/issues/399 if nbytes == 0: msg = 'socket recv returned zero bytes unexpectedly, ' \ - 'expected {}'.format(toread) - sys.stderr.write(msg) + 'expected {}'.format(toread) ex = RiakError(msg) raise ConnectionClosed(ex) view = view[nbytes:] # slicing views is cheap diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index 1e913bda..dbf6e869 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -30,7 +30,7 @@ def next(self): raise StopIteration try: - resp_code, data = self.transport._recv_msg() + resp_code, data = self.transport._recv_msg(stream=True) self.codec.maybe_riak_error(resp_code, data) expect = self._expect self.codec.maybe_incorrect_code(resp_code, expect) diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 99415093..9bb45751 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -1,4 +1,5 @@ import six +import sys import riak.pb.messages @@ -6,6 +7,7 @@ from riak.codecs import Codec, Msg from riak.codecs.pbuf import PbufCodec from riak.codecs.ttb import TtbCodec +from riak.exceptions import BadResource from riak.pb.messages import MSG_CODE_TS_TTB_MSG from riak.transports.transport import Transport from riak.ts_object import TsObject @@ -96,7 +98,9 @@ def ping(self): msg_code = riak.pb.messages.MSG_CODE_PING_REQ codec = self._get_codec(msg_code) msg = codec.encode_ping() + sys.stderr.write('sending ping request\n') resp_code, _ = self._request(msg, codec) + sys.stderr.write('ping response: {}\n'.format(resp_code)) if resp_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: @@ -553,5 +557,7 @@ def _request(self, msg, codec=None): resp_code in riak.pb.messages.MESSAGE_CLASSES: msg = codec.parse_msg(resp_code, data) else: - raise Exception("unknown msg code %s" % resp_code) + # NB: raise a BadResource to ensure this connection is + # closed and not re-used + raise BadResource('unknown msg code {}'.format(resp_code)) return resp_code, msg From ee97b562e1d067700d5908ddced59bfe030b9ef4 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 22 Nov 2016 10:15:16 -0800 Subject: [PATCH 287/324] Make some streaming operations "first time" re-tryable --- docs/advanced.rst | 8 +--- riak/client/operations.py | 61 +++++++++++++------------------ riak/client/transport.py | 28 +++++++++++++- riak/exceptions.py | 16 -------- riak/tests/test_pool.py | 3 +- riak/transports/pool.py | 37 ++++++++++++++++--- riak/transports/tcp/__init__.py | 9 +++-- riak/transports/tcp/connection.py | 18 +++++++-- riak/transports/tcp/stream.py | 6 ++- riak/transports/tcp/transport.py | 5 +-- 10 files changed, 113 insertions(+), 78 deletions(-) delete mode 100644 riak/exceptions.py diff --git a/docs/advanced.rst b/docs/advanced.rst index 2ddfb8a5..fe355f88 100644 --- a/docs/advanced.rst +++ b/docs/advanced.rst @@ -10,6 +10,8 @@ understanding how it works and development purposes. Connection pool --------------- +.. currentmodule:: riak.transports.pool + .. autoclass:: Resource :members: @@ -18,12 +20,6 @@ Connection pool .. autoclass:: PoolIterator ---------------- -Exceptions ---------------- - -.. currentmodule:: riak.exceptions - .. autoexception:: BadResource .. 
autoexception:: ConnectionClosed diff --git a/riak/client/operations.py b/riak/client/operations.py index c143cfbc..1174193f 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -87,24 +87,21 @@ def stream_buckets(self, bucket_type=None, timeout=None): """ _validate_timeout(timeout) + if bucket_type: bucketfn = self._bucket_type_bucket_builder else: bucketfn = self._default_type_bucket_builder - resource = self._acquire() - transport = resource.object - stream = transport.stream_buckets(bucket_type=bucket_type, - timeout=timeout) - stream.attach(resource) - try: - for bucket_list in stream: - bucket_list = [bucketfn(bytes_to_str(name), bucket_type) - for name in bucket_list] - if len(bucket_list) > 0: - yield bucket_list - finally: - stream.close() + def make_op(transport): + return transport.stream_buckets( + bucket_type=bucket_type, timeout=timeout) + + for bucket_list in self._stream_with_retry(make_op): + bucket_list = [bucketfn(bytes_to_str(name), bucket_type) + for name in bucket_list] + if len(bucket_list) > 0: + yield bucket_list @retryable def ping(self, transport): @@ -489,19 +486,16 @@ def stream_keys(self, bucket, timeout=None): :rtype: iterator """ _validate_timeout(timeout) - resource = self._acquire() - transport = resource.object - stream = transport.stream_keys(bucket, timeout=timeout) - stream.attach(resource) - try: - for keylist in stream: - if len(keylist) > 0: - if six.PY2: - yield keylist - else: - yield [bytes_to_str(item) for item in keylist] - finally: - stream.close() + + def make_op(transport): + return transport.stream_keys(bucket, timeout=timeout) + + for keylist in self._stream_with_retry(make_op): + if len(keylist) > 0: + if six.PY2: + yield keylist + else: + yield [bytes_to_str(item) for item in keylist] @retryable def put(self, transport, robj, w=None, dw=None, pw=None, return_body=None, @@ -799,15 +793,12 @@ def stream_mapred(self, inputs, query, timeout): :rtype: iterator """ _validate_timeout(timeout) - resource = self._acquire() - transport = resource.object - stream = transport.stream_mapred(inputs, query, timeout) - stream.attach(resource) - try: - for phase, data in stream: - yield phase, data - finally: - stream.close() + + def make_op(transport): + return transport.stream_mapred(inputs, query, timeout) + + for phase, data in self._stream_with_retry(make_op): + yield phase, data @retryable def create_search_index(self, transport, index, schema=None, n_val=None, diff --git a/riak/client/transport.py b/riak/client/transport.py index fd1bb877..7ee81e0f 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -1,5 +1,5 @@ from contextlib import contextmanager -from riak.exceptions import BadResource, ConnectionClosed +from riak.transports.pool import BadResource, ConnectionClosed from riak.transports.tcp import is_retryable as is_tcp_retryable from riak.transports.http import is_retryable as is_http_retryable from six import PY2 @@ -86,7 +86,8 @@ def _transport(self): _transport() Yields a single transport to the caller from the default pool, - without retries. + without retries. NB: no need to re-try as this method is only + used by CRDT operations that should never be re-tried. 
""" pool = self._choose_pool() with pool.transaction() as transport: @@ -100,6 +101,29 @@ def _acquire(self): """ return self._choose_pool().acquire() + def _stream_with_retry(self, make_op): + first_try = True + while True: + resource = self._acquire() + transport = resource.object + streaming_op = make_op(transport) + streaming_op.attach(resource) + try: + for item in streaming_op: + yield item + break + except BadResource as e: + resource.errored = True + # NB: *only* re-try if connection closed happened + # at the start of the streaming op + if first_try and not e.mid_stream: + first_try = False + continue + else: + raise + finally: + streaming_op.close() + def _with_retries(self, pool, fn): """ Performs the passed function with retries against the given pool. diff --git a/riak/exceptions.py b/riak/exceptions.py deleted file mode 100644 index 5646d7f7..00000000 --- a/riak/exceptions.py +++ /dev/null @@ -1,16 +0,0 @@ -class BadResource(Exception): - """ - Users of a :class:`Pool` should raise this error when the pool - resource currently in-use is bad and should be removed from the - pool. - """ - pass - - -class ConnectionClosed(BadResource): - """ - Users of a :class:`Pool` should raise this error when the pool - resource currently in-use has been closed and should be removed - from the pool. - """ - pass diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index bc2496e5..3825c23c 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -7,10 +7,9 @@ from time import sleep from riak import RiakError -from riak.exceptions import BadResource from riak.tests import RUN_POOL from riak.tests.comparison import Comparison -from riak.transports.pool import Pool +from riak.transports.pool import Pool, BadResource if PY2: from Queue import Queue diff --git a/riak/transports/pool.py b/riak/transports/pool.py index a35e65e8..63b9aa2d 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -2,10 +2,36 @@ import threading -from riak.exceptions import BadResource from contextlib import contextmanager +class BadResource(Exception): + """ + Users of a :class:`Pool` should raise this error when the pool + resource currently in-use is bad and should be removed from the + pool. + + :param mid_stream: did this exception happen mid-streaming op? + :type mid_stream: boolean + """ + def __init__(self, ex, mid_stream=False): + super(BadResource, self).__init__(ex) + self.mid_stream = mid_stream + + +class ConnectionClosed(BadResource): + """ + Users of a :class:`Pool` should raise this error when the pool + resource currently in-use has been closed and should be removed + from the pool. + + :param mid_stream: did this exception happen mid-streaming op? + :type mid_stream: boolean + """ + def __init__(self, ex, mid_stream=False): + super(ConnectionClosed, self).__init__(ex, mid_stream) + + class Resource(object): """ A member of the :class:`Pool`, a container for the actual resource @@ -37,7 +63,10 @@ def release(self): """ Releases this resource back to the pool it came from. 
""" - self.pool.release(self) + if self.errored: + self.pool.delete_resource(self) + else: + self.pool.release(self) class Pool(object): @@ -54,8 +83,7 @@ class Pool(object): Example:: - from riak.Pool import Pool - from riak.exceptions import BadResource + from riak.transports.pool import Pool class ListPool(Pool): def create_resource(self): return [] @@ -69,7 +97,6 @@ def destroy_resource(self): resource.append(1) with pool.transaction() as resource2: print(repr(resource2)) # should be [1] - """ def __init__(self): diff --git a/riak/transports/tcp/__init__.py b/riak/transports/tcp/__init__.py index c60bdd34..32b0dbee 100644 --- a/riak/transports/tcp/__init__.py +++ b/riak/transports/tcp/__init__.py @@ -1,8 +1,7 @@ import errno import socket -from riak.exceptions import ConnectionClosed -from riak.transports.pool import Pool +from riak.transports.pool import Pool, ConnectionClosed from riak.transports.tcp.transport import TcpTransport @@ -50,7 +49,11 @@ def is_retryable(err): :rtype: boolean """ if isinstance(err, ConnectionClosed): - return True + # NB: only retryable if we're not mid-streaming + if err.mid_stream: + return False + else: + return True elif isinstance(err, socket.error): code = err.args[0] return code in CONN_CLOSED_ERRORS diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 393b93a2..75193e19 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -8,7 +8,7 @@ from riak import RiakError from riak.codecs.pbuf import PbufCodec from riak.security import SecurityError, USE_STDLIB_SSL -from riak.exceptions import BadResource, ConnectionClosed +from riak.transports.pool import BadResource, ConnectionClosed if USE_STDLIB_SSL: import ssl @@ -52,7 +52,10 @@ def _non_connect_send_msg(self, msg_code, data): Similar to self._send, but doesn't try to initiate a connection, thus preventing an infinite loop. """ - self._socket.sendall(self._encode_msg(msg_code, data)) + try: + self._socket.sendall(self._encode_msg(msg_code, data)) + except BrokenPipeError as e: + raise ConnectionClosed(e) def _send_msg(self, msg_code, data): self._connect() @@ -156,15 +159,22 @@ def _ssl_handshake(self): # fail if *any* exceptions are thrown during SSL handshake raise SecurityError(e) - def _recv_msg(self, stream=False): + def _recv_msg(self, mid_stream=False): + """ + :param mid_stream: are we receiving in a streaming operation? + :type mid_stream: boolean + """ try: msgbuf = self._recv_pkt() + except BadResource as e: + e.mid_stream = mid_stream + raise except socket.timeout as e: # A timeout can leave the socket in an inconsistent state because # it might still receive the data later and mix up with a # subsequent request. 
# https://github.com/basho/riak-python-client/issues/425 - raise BadResource(e) + raise BadResource(e, mid_stream) mv = memoryview(msgbuf) mcb = mv[0:1] if self.bytes_required: diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index dbf6e869..c73778bf 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -21,6 +21,7 @@ def __init__(self, transport, codec): self.transport = transport self.codec = codec self.resource = None + self._mid_stream = False def __iter__(self): return self @@ -30,7 +31,8 @@ def next(self): raise StopIteration try: - resp_code, data = self.transport._recv_msg(stream=True) + resp_code, data = self.transport._recv_msg( + mid_stream=self._mid_stream) self.codec.maybe_riak_error(resp_code, data) expect = self._expect self.codec.maybe_incorrect_code(resp_code, expect) @@ -38,6 +40,8 @@ def next(self): except: self.finished = True raise + finally: + self._mid_stream = True if self._is_done(resp): self.finished = True diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 9bb45751..d8173402 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -1,5 +1,4 @@ import six -import sys import riak.pb.messages @@ -7,8 +6,8 @@ from riak.codecs import Codec, Msg from riak.codecs.pbuf import PbufCodec from riak.codecs.ttb import TtbCodec -from riak.exceptions import BadResource from riak.pb.messages import MSG_CODE_TS_TTB_MSG +from riak.transports.pool import BadResource from riak.transports.transport import Transport from riak.ts_object import TsObject @@ -98,9 +97,7 @@ def ping(self): msg_code = riak.pb.messages.MSG_CODE_PING_REQ codec = self._get_codec(msg_code) msg = codec.encode_ping() - sys.stderr.write('sending ping request\n') resp_code, _ = self._request(msg, codec) - sys.stderr.write('ping response: {}\n'.format(resp_code)) if resp_code == riak.pb.messages.MSG_CODE_PING_RESP: return True else: From 2e075050b7fc28f9822fb3dc71a2d35e342f96b5 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 22 Nov 2016 10:20:00 -0800 Subject: [PATCH 288/324] better to set first_try in a finally: --- riak/client/transport.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/riak/client/transport.py b/riak/client/transport.py index 7ee81e0f..02094278 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -117,11 +117,11 @@ def _stream_with_retry(self, make_op): # NB: *only* re-try if connection closed happened # at the start of the streaming op if first_try and not e.mid_stream: - first_try = False continue else: raise finally: + first_try = False streaming_op.close() def _with_retries(self, pool, fn): @@ -155,14 +155,12 @@ def _skip_bad_nodes(transport): transport._node.error_rate.incr(1) skip_nodes.append(transport._node) if first_try: - first_try = False continue else: raise BadResource(e) else: raise except BadResource as e: - first_try = False if current_try < retry_count: resource.errored = True current_try += 1 @@ -170,6 +168,8 @@ def _skip_bad_nodes(transport): else: # Re-raise the inner exception raise e.args[0] + finally: + first_try = False def _choose_pool(self, protocol=None): """ From 1502977092e82b1e8ccbf184a6bda2cfe8363397 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 23 Nov 2016 09:30:49 -0800 Subject: [PATCH 289/324] update tools to 1.3.0 and Riak to 2.2.0 --- .travis.yml | 4 ++-- tools | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 29b72d67..4421cfbe 100644 
--- a/.travis.yml +++ b/.travis.yml @@ -19,8 +19,8 @@ env: matrix: - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.0/2.0.7/ubuntu/trusty/riak_2.0.7-1_amd64.deb - RIAK_TEST_PROTOCOL=http RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.0/2.0.7/ubuntu/trusty/riak_2.0.7-1_amd64.deb - - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.1/2.1.4/ubuntu/trusty/riak_2.1.4-1_amd64.deb - - RIAK_TEST_PROTOCOL=http RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.1/2.1.4/ubuntu/trusty/riak_2.1.4-1_amd64.deb + - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.2/2.2.0/ubuntu/trusty/riak_2.2.0-1_amd64.deb + - RIAK_TEST_PROTOCOL=http RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.2/2.2.0/ubuntu/trusty/riak_2.2.0-1_amd64.deb global: - RIAK_TEST_PB_PORT=8087 - RIAK_TEST_HTTP_PORT=8098 diff --git a/tools b/tools index bf0c48ce..72939314 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit bf0c48cef336c5c88968cd4c30660e4e177afa88 +Subproject commit 72939314ab3151db776fcc01c92c26f6ee3dc499 From c1510071447baf947abe636e0a385efd5a0a4ce4 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 23 Nov 2016 10:55:06 -0800 Subject: [PATCH 290/324] Make execption handler Python 2 compatible --- riak/transports/tcp/connection.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 75193e19..04e0c3c4 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,3 +1,4 @@ +import errno import logging import socket import struct @@ -54,8 +55,11 @@ def _non_connect_send_msg(self, msg_code, data): """ try: self._socket.sendall(self._encode_msg(msg_code, data)) - except BrokenPipeError as e: - raise ConnectionClosed(e) + except (IOError, socket.error) as e: + if e.errno == errno.EPIPE: + raise ConnectionClosed(e) + else: + raise def _send_msg(self, msg_code, data): self._connect() From 6cdafc680f83e6d823e7569521beca087e61b539 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 23 Nov 2016 11:34:58 -0800 Subject: [PATCH 291/324] add TODO FUTURE notes --- riak/client/operations.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/riak/client/operations.py b/riak/client/operations.py index 1174193f..87c7f0b9 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -263,6 +263,8 @@ def stream_index(self, bucket, index, startkey, endkey=None, :rtype: :class:`~riak.client.index_page.IndexPage` """ + # TODO FUTURE: implement "retry on connection closed" + # as in stream_mapred _validate_timeout(timeout, infinity_ok=True) page = IndexPage(self, bucket, index, startkey, endkey, @@ -335,6 +337,8 @@ def paginate_stream_index(self, bucket, index, startkey, endkey=None, :class:`~riak.client.index_page.IndexPage` """ + # TODO FUTURE: implement "retry on connection closed" + # as in stream_mapred page = self.stream_index(bucket, index, startkey, endkey=endkey, max_results=max_results, From 2098840b3a9c96b76265c7fa3fb9c79eab24e06d Mon Sep 17 00:00:00 2001 From: Tin Tvrtkovic Date: Thu, 24 Nov 2016 13:49:44 +0100 Subject: [PATCH 292/324] Python3 fix for TCP socket options. 
--- riak/transports/tcp/connection.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index da2eef19..74787653 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -2,6 +2,8 @@ import socket import struct +import six + import riak.pb.riak_pb2 import riak.pb.messages @@ -225,7 +227,7 @@ def _connect(self): self._socket = socket.create_connection(self._address) if self._socket_tcp_options: ka_opts = self._socket_tcp_options - for k, v in ka_opts.iteritems(): + for k, v in six.iteritems(ka_opts): self._socket.setsockopt(socket.SOL_TCP, k, v) if self._socket_keepalive: self._socket.setsockopt( From 770c8474f8b693d5947540807416bce393adb6ca Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 24 Nov 2016 08:45:55 -0800 Subject: [PATCH 293/324] use six.iteritems in another place --- riak/codecs/http.py | 8 ++++---- riak/transports/tcp/connection.py | 2 -- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/riak/codecs/http.py b/riak/codecs/http.py index 5b74158a..84fed2a7 100644 --- a/riak/codecs/http.py +++ b/riak/codecs/http.py @@ -1,7 +1,7 @@ import re import csv +import six -from six import PY2, PY3 from cgi import parse_header from email import message_from_string from email.utils import parsedate_tz, mktime_tz @@ -13,7 +13,7 @@ from riak.transports.http.search import XMLSearchResult from riak.util import decode_index_value, bytes_to_str -if PY2: +if six.PY2: from urllib import unquote_plus else: from urllib.parse import unquote_plus @@ -63,7 +63,7 @@ def _parse_body(self, robj, response, expected_statuses): elif status == 300: ctype, params = parse_header(headers['content-type']) if ctype == 'multipart/mixed': - if PY3: + if six.PY3: data = bytes_to_str(data) boundary = re.compile('\r?\n--%s(?:--)?\r?\n' % re.escape(params['boundary'])) @@ -225,7 +225,7 @@ def _normalize_json_search_response(self, json): # Riak Search 1.0 Legacy assumptions about format resdoc[u'id'] = doc[u'id'] if u'fields' in doc: - for k, v in doc[u'fields'].iteritems(): + for k, v in six.iteritems(doc[u'fields']): resdoc[k] = v docs.append(resdoc) result['docs'] = docs diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 74787653..47d8133e 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,9 +1,7 @@ import logging import socket import struct - import six - import riak.pb.riak_pb2 import riak.pb.messages From be8e21f4271325a6578d87b6a47cfabb27dc102d Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 24 Nov 2016 08:48:08 -0800 Subject: [PATCH 294/324] add contributor --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 3bd350bf..bd57bc81 100644 --- a/README.md +++ b/README.md @@ -142,6 +142,7 @@ Contributors * Soren Hansen * Sreejith Kesavan * Timothée Peignier +* [Tin Tvrtković](https://github.com/Tinche) * [Vitaly Shestovskiy](https://github.com/lamp0chka) * William Kral * [Yasser Souri](https://github.com/yassersouri) From 7885fb9936c8689f8c48784dd3ebdf4a0ff309f8 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 28 Nov 2016 14:47:52 -0800 Subject: [PATCH 295/324] Update tools/ to 1.3.0, update Riak to 2.2.0 --- .travis.yml | 4 ++-- tools | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 29b72d67..4421cfbe 100644 --- a/.travis.yml +++ b/.travis.yml @@ -19,8 +19,8 @@ env: matrix: - RIAK_TEST_PROTOCOL=pbc 
RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.0/2.0.7/ubuntu/trusty/riak_2.0.7-1_amd64.deb - RIAK_TEST_PROTOCOL=http RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.0/2.0.7/ubuntu/trusty/riak_2.0.7-1_amd64.deb - - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.1/2.1.4/ubuntu/trusty/riak_2.1.4-1_amd64.deb - - RIAK_TEST_PROTOCOL=http RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.1/2.1.4/ubuntu/trusty/riak_2.1.4-1_amd64.deb + - RIAK_TEST_PROTOCOL=pbc RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.2/2.2.0/ubuntu/trusty/riak_2.2.0-1_amd64.deb + - RIAK_TEST_PROTOCOL=http RIAK_DOWNLOAD_URL=http://s3.amazonaws.com/downloads.basho.com/riak/2.2/2.2.0/ubuntu/trusty/riak_2.2.0-1_amd64.deb global: - RIAK_TEST_PB_PORT=8087 - RIAK_TEST_HTTP_PORT=8098 diff --git a/tools b/tools index bf0c48ce..72939314 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit bf0c48cef336c5c88968cd4c30660e4e177afa88 +Subproject commit 72939314ab3151db776fcc01c92c26f6ee3dc499 From ff9a371161aaaedc2fdc0589ae3fa0bd19aab009 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 29 Nov 2016 11:57:08 -0800 Subject: [PATCH 296/324] Move code into try block --- riak/client/transport.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/client/transport.py b/riak/client/transport.py index 02094278..f2a09610 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -106,9 +106,9 @@ def _stream_with_retry(self, make_op): while True: resource = self._acquire() transport = resource.object - streaming_op = make_op(transport) - streaming_op.attach(resource) try: + streaming_op = make_op(transport) + streaming_op.attach(resource) for item in streaming_op: yield item break From 144da4ecd5e6ce1ad421329eb87058f646fdb197 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Wed, 30 Nov 2016 09:09:57 -0800 Subject: [PATCH 297/324] streaming_op may not be initialized if make_op throws an exception --- riak/client/transport.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/riak/client/transport.py b/riak/client/transport.py index f2a09610..4010f165 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -106,6 +106,7 @@ def _stream_with_retry(self, make_op): while True: resource = self._acquire() transport = resource.object + streaming_op = None try: streaming_op = make_op(transport) streaming_op.attach(resource) @@ -122,7 +123,8 @@ def _stream_with_retry(self, make_op): raise finally: first_try = False - streaming_op.close() + if streaming_op: + streaming_op.close() def _with_retries(self, pool, fn): """ From 21039a52e1482d4f6c92afb212c8f2d117e422cc Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 5 Dec 2016 18:40:27 -0800 Subject: [PATCH 298/324] Update riak_pb and add encode / decode tests for BLOB column --- riak/codecs/pbuf.py | 7 +- riak/pb/riak_dt_pb2.py | 109 ++++++++++++++++++++++------- riak/pb/riak_ts_pb2.py | 9 ++- riak/tests/test_timeseries_pbuf.py | 19 ++++- riak_pb | 2 +- tools | 2 +- 6 files changed, 116 insertions(+), 32 deletions(-) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 0bd792e5..04515f9f 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -824,6 +824,8 @@ def decode_timeseries_col_type(self, col_type): return 'timestamp' elif col_type == TsColumnType.Value('BOOLEAN'): return 'boolean' + elif col_type == TsColumnType.Value('BLOB'): + return 'blob' else: msg = 'could not decode column 
type: {}'.format(col_type) raise RiakError(msg) @@ -845,8 +847,9 @@ def decode_timeseries_row(self, tsrow, tscols=None, if tscols is not None: col = tscols[i] if cell.HasField('varchar_value'): - if col and col.type != TsColumnType.Value('VARCHAR'): - raise TypeError('expected VARCHAR column') + if col and not (col.type == TsColumnType.Value('VARCHAR') or + col.type == TsColumnType.Value('BLOB')): + raise TypeError('expected VARCHAR or BLOB column') else: row.append(cell.varchar_value) elif cell.HasField('sint64_value'): diff --git a/riak/pb/riak_dt_pb2.py b/riak/pb/riak_dt_pb2.py index 5a4a2f86..1b640499 100644 --- a/riak/pb/riak_dt_pb2.py +++ b/riak/pb/riak_dt_pb2.py @@ -14,7 +14,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_dt.proto', package='', - serialized_pb='\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"d\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x04 \x01(\x04\"\x90\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\"2\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\x12\x07\n\x03HLL\x10\x04\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\x15\n\x05HllOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"n\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\x12\x16\n\x06hll_op\x18\x04 \x01(\x0b\x32\x06.HllOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 
\x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"\x87\x01\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 \x03(\x0c\x12\x1c\n\tmap_value\x18\x05 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x06 \x01(\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') + serialized_pb='\n\rriak_dt.proto\"\x85\x01\n\x08MapField\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12$\n\x04type\x18\x02 \x02(\x0e\x32\x16.MapField.MapFieldType\"E\n\x0cMapFieldType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x0c\n\x08REGISTER\x10\x03\x12\x08\n\x04\x46LAG\x10\x04\x12\x07\n\x03MAP\x10\x05\"\x98\x01\n\x08MapEntry\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x15\n\rcounter_value\x18\x02 \x01(\x12\x12\x11\n\tset_value\x18\x03 \x03(\x0c\x12\x16\n\x0eregister_value\x18\x04 \x01(\x0c\x12\x12\n\nflag_value\x18\x05 \x01(\x08\x12\x1c\n\tmap_value\x18\x06 \x03(\x0b\x32\t.MapEntry\"\xcf\x01\n\nDtFetchReq\x12\x0e\n\x06\x62ucket\x18\x01 \x02(\x0c\x12\x0b\n\x03key\x18\x02 \x02(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\t\n\x01r\x18\x04 \x01(\r\x12\n\n\x02pr\x18\x05 \x01(\r\x12\x14\n\x0c\x62\x61sic_quorum\x18\x06 \x01(\x08\x12\x13\n\x0bnotfound_ok\x18\x07 \x01(\x08\x12\x0f\n\x07timeout\x18\x08 \x01(\r\x12\x15\n\rsloppy_quorum\x18\t \x01(\x08\x12\r\n\x05n_val\x18\n \x01(\r\x12\x1d\n\x0finclude_context\x18\x0b \x01(\x08:\x04true\"x\n\x07\x44tValue\x12\x15\n\rcounter_value\x18\x01 \x01(\x12\x12\x11\n\tset_value\x18\x02 \x03(\x0c\x12\x1c\n\tmap_value\x18\x03 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x04 \x01(\x04\x12\x12\n\ngset_value\x18\x05 \x03(\x0c\"\x9a\x01\n\x0b\x44tFetchResp\x12\x0f\n\x07\x63ontext\x18\x01 \x01(\x0c\x12#\n\x04type\x18\x02 \x02(\x0e\x32\x15.DtFetchResp.DataType\x12\x17\n\x05value\x18\x03 \x01(\x0b\x32\x08.DtValue\"<\n\x08\x44\x61taType\x12\x0b\n\x07\x43OUNTER\x10\x01\x12\x07\n\x03SET\x10\x02\x12\x07\n\x03MAP\x10\x03\x12\x07\n\x03HLL\x10\x04\x12\x08\n\x04GSET\x10\x05\"\x1e\n\tCounterOp\x12\x11\n\tincrement\x18\x01 \x01(\x12\"&\n\x05SetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\x12\x0f\n\x07removes\x18\x02 \x03(\x0c\"\x16\n\x06GSetOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\"\x15\n\x05HllOp\x12\x0c\n\x04\x61\x64\x64s\x18\x01 \x03(\x0c\"\xd1\x01\n\tMapUpdate\x12\x18\n\x05\x66ield\x18\x01 \x02(\x0b\x32\t.MapField\x12\x1e\n\ncounter_op\x18\x02 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x03 \x01(\x0b\x32\x06.SetOp\x12\x13\n\x0bregister_op\x18\x04 \x01(\x0c\x12\"\n\x07\x66lag_op\x18\x05 \x01(\x0e\x32\x11.MapUpdate.FlagOp\x12\x16\n\x06map_op\x18\x06 \x01(\x0b\x32\x06.MapOp\"!\n\x06\x46lagOp\x12\n\n\x06\x45NABLE\x10\x01\x12\x0b\n\x07\x44ISABLE\x10\x02\"@\n\x05MapOp\x12\x1a\n\x07removes\x18\x01 \x03(\x0b\x32\t.MapField\x12\x1b\n\x07updates\x18\x02 \x03(\x0b\x32\n.MapUpdate\"\x88\x01\n\x04\x44tOp\x12\x1e\n\ncounter_op\x18\x01 \x01(\x0b\x32\n.CounterOp\x12\x16\n\x06set_op\x18\x02 \x01(\x0b\x32\x06.SetOp\x12\x16\n\x06map_op\x18\x03 \x01(\x0b\x32\x06.MapOp\x12\x16\n\x06hll_op\x18\x04 \x01(\x0b\x32\x06.HllOp\x12\x18\n\x07gset_op\x18\x05 \x01(\x0b\x32\x07.GSetOp\"\xf1\x01\n\x0b\x44tUpdateReq\x12\x0e\n\x06\x62ucket\x18\x01 
\x02(\x0c\x12\x0b\n\x03key\x18\x02 \x01(\x0c\x12\x0c\n\x04type\x18\x03 \x02(\x0c\x12\x0f\n\x07\x63ontext\x18\x04 \x01(\x0c\x12\x11\n\x02op\x18\x05 \x02(\x0b\x32\x05.DtOp\x12\t\n\x01w\x18\x06 \x01(\r\x12\n\n\x02\x64w\x18\x07 \x01(\r\x12\n\n\x02pw\x18\x08 \x01(\r\x12\x1a\n\x0breturn_body\x18\t \x01(\x08:\x05\x66\x61lse\x12\x0f\n\x07timeout\x18\n \x01(\r\x12\x15\n\rsloppy_quorum\x18\x0b \x01(\x08\x12\r\n\x05n_val\x18\x0c \x01(\r\x12\x1d\n\x0finclude_context\x18\r \x01(\x08:\x04true\"\x9b\x01\n\x0c\x44tUpdateResp\x12\x0b\n\x03key\x18\x01 \x01(\x0c\x12\x0f\n\x07\x63ontext\x18\x02 \x01(\x0c\x12\x15\n\rcounter_value\x18\x03 \x01(\x12\x12\x11\n\tset_value\x18\x04 \x03(\x0c\x12\x1c\n\tmap_value\x18\x05 \x03(\x0b\x32\t.MapEntry\x12\x11\n\thll_value\x18\x06 \x01(\x04\x12\x12\n\ngset_value\x18\x07 \x03(\x0c\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakDtPB') @@ -73,11 +73,15 @@ name='HLL', index=3, number=4, options=None, type=None), + _descriptor.EnumValueDescriptor( + name='GSET', index=4, number=5, + options=None, + type=None), ], containing_type=None, options=None, - serialized_start=715, - serialized_end=765, + serialized_start=735, + serialized_end=795, ) _MAPUPDATE_FLAGOP = _descriptor.EnumDescriptor( @@ -97,8 +101,8 @@ ], containing_type=None, options=None, - serialized_start=1039, - serialized_end=1072, + serialized_start=1093, + serialized_end=1126, ) @@ -334,6 +338,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='gset_value', full_name='DtValue.gset_value', index=4, + number=5, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -344,7 +355,7 @@ is_extendable=False, extension_ranges=[], serialized_start=518, - serialized_end=618, + serialized_end=638, ) @@ -386,8 +397,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=621, - serialized_end=765, + serialized_start=641, + serialized_end=795, ) @@ -414,8 +425,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=767, - serialized_end=797, + serialized_start=797, + serialized_end=827, ) @@ -449,8 +460,36 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=799, - serialized_end=837, + serialized_start=829, + serialized_end=867, +) + + +_GSETOP = _descriptor.Descriptor( + name='GSetOp', + full_name='GSetOp', + filename=None, + file=DESCRIPTOR, + containing_type=None, + fields=[ + _descriptor.FieldDescriptor( + name='adds', full_name='GSetOp.adds', index=0, + number=1, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), + ], + extensions=[ + ], + nested_types=[], + enum_types=[ + ], + options=None, + is_extendable=False, + extension_ranges=[], + serialized_start=869, + serialized_end=891, ) @@ -477,8 +516,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=839, - serialized_end=860, + serialized_start=893, + serialized_end=914, ) @@ -541,8 +580,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=863, - serialized_end=1072, + serialized_start=917, + serialized_end=1126, ) @@ -576,8 +615,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1074, - serialized_end=1138, + 
serialized_start=1128, + serialized_end=1192, ) @@ -616,6 +655,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='gset_op', full_name='DtOp.gset_op', index=4, + number=5, type=11, cpp_type=10, label=1, + has_default_value=False, default_value=None, + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -625,8 +671,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1140, - serialized_end=1250, + serialized_start=1195, + serialized_end=1331, ) @@ -737,8 +783,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1253, - serialized_end=1494, + serialized_start=1334, + serialized_end=1575, ) @@ -791,6 +837,13 @@ message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, options=None), + _descriptor.FieldDescriptor( + name='gset_value', full_name='DtUpdateResp.gset_value', index=6, + number=7, type=12, cpp_type=9, label=3, + has_default_value=False, default_value=[], + message_type=None, enum_type=None, containing_type=None, + is_extension=False, extension_scope=None, + options=None), ], extensions=[ ], @@ -800,8 +853,8 @@ options=None, is_extendable=False, extension_ranges=[], - serialized_start=1497, - serialized_end=1632, + serialized_start=1578, + serialized_end=1733, ) _MAPFIELD.fields_by_name['type'].enum_type = _MAPFIELD_MAPFIELDTYPE @@ -824,6 +877,7 @@ _DTOP.fields_by_name['set_op'].message_type = _SETOP _DTOP.fields_by_name['map_op'].message_type = _MAPOP _DTOP.fields_by_name['hll_op'].message_type = _HLLOP +_DTOP.fields_by_name['gset_op'].message_type = _GSETOP _DTUPDATEREQ.fields_by_name['op'].message_type = _DTOP _DTUPDATERESP.fields_by_name['map_value'].message_type = _MAPENTRY DESCRIPTOR.message_types_by_name['MapField'] = _MAPFIELD @@ -833,6 +887,7 @@ DESCRIPTOR.message_types_by_name['DtFetchResp'] = _DTFETCHRESP DESCRIPTOR.message_types_by_name['CounterOp'] = _COUNTEROP DESCRIPTOR.message_types_by_name['SetOp'] = _SETOP +DESCRIPTOR.message_types_by_name['GSetOp'] = _GSETOP DESCRIPTOR.message_types_by_name['HllOp'] = _HLLOP DESCRIPTOR.message_types_by_name['MapUpdate'] = _MAPUPDATE DESCRIPTOR.message_types_by_name['MapOp'] = _MAPOP @@ -882,6 +937,12 @@ class SetOp(_message.Message): # @@protoc_insertion_point(class_scope:SetOp) +@add_metaclass(_reflection.GeneratedProtocolMessageType) +class GSetOp(_message.Message): + DESCRIPTOR = _GSETOP + + # @@protoc_insertion_point(class_scope:GSetOp) + @add_metaclass(_reflection.GeneratedProtocolMessageType) class HllOp(_message.Message): DESCRIPTOR = _HLLOP diff --git a/riak/pb/riak_ts_pb2.py b/riak/pb/riak_ts_pb2.py index 6e2ee149..13a06a14 100644 --- a/riak/pb/riak_ts_pb2.py +++ b/riak/pb/riak_ts_pb2.py @@ -16,7 +16,7 @@ DESCRIPTOR = _descriptor.FileDescriptor( name='riak_ts.proto', package='', - serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 
\x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 \x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 \x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 \x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*O\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') + serialized_pb='\n\rriak_ts.proto\x1a\nriak.proto\"[\n\nTsQueryReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\x15\n\x06stream\x18\x02 \x01(\x08:\x05\x66\x61lse\x12\x15\n\rcover_context\x18\x03 \x01(\x0c\"^\n\x0bTsQueryResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\x12\x12\n\x04\x64one\x18\x03 \x01(\x08:\x04true\"@\n\x08TsGetReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0f\n\x07timeout\x18\x03 \x01(\r\"H\n\tTsGetResp\x12%\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x02 \x03(\x0b\x32\x06.TsRow\"V\n\x08TsPutReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12%\n\x07\x63olumns\x18\x02 \x03(\x0b\x32\x14.TsColumnDescription\x12\x14\n\x04rows\x18\x03 \x03(\x0b\x32\x06.TsRow\"\x0b\n\tTsPutResp\"P\n\x08TsDelReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x14\n\x03key\x18\x02 \x03(\x0b\x32\x07.TsCell\x12\x0e\n\x06vclock\x18\x03 \x01(\x0c\x12\x0f\n\x07timeout\x18\x04 \x01(\r\"\x0b\n\tTsDelResp\"A\n\x0fTsInterpolation\x12\x0c\n\x04\x62\x61se\x18\x01 \x02(\x0c\x12 \n\x0einterpolations\x18\x02 \x03(\x0b\x32\x08.RpbPair\"@\n\x13TsColumnDescription\x12\x0c\n\x04name\x18\x01 
\x02(\x0c\x12\x1b\n\x04type\x18\x02 \x02(\x0e\x32\r.TsColumnType\"\x1f\n\x05TsRow\x12\x16\n\x05\x63\x65lls\x18\x01 \x03(\x0b\x32\x07.TsCell\"{\n\x06TsCell\x12\x15\n\rvarchar_value\x18\x01 \x01(\x0c\x12\x14\n\x0csint64_value\x18\x02 \x01(\x12\x12\x17\n\x0ftimestamp_value\x18\x03 \x01(\x12\x12\x15\n\rboolean_value\x18\x04 \x01(\x08\x12\x14\n\x0c\x64ouble_value\x18\x05 \x01(\x01\"/\n\rTsListKeysReq\x12\r\n\x05table\x18\x01 \x02(\x0c\x12\x0f\n\x07timeout\x18\x02 \x01(\r\"4\n\x0eTsListKeysResp\x12\x14\n\x04keys\x18\x01 \x03(\x0b\x32\x06.TsRow\x12\x0c\n\x04\x64one\x18\x02 \x01(\x08\"q\n\rTsCoverageReq\x12\x1f\n\x05query\x18\x01 \x01(\x0b\x32\x10.TsInterpolation\x12\r\n\x05table\x18\x02 \x02(\x0c\x12\x15\n\rreplace_cover\x18\x03 \x01(\x0c\x12\x19\n\x11unavailable_cover\x18\x04 \x03(\x0c\"3\n\x0eTsCoverageResp\x12!\n\x07\x65ntries\x18\x01 \x03(\x0b\x32\x10.TsCoverageEntry\"[\n\x0fTsCoverageEntry\x12\n\n\x02ip\x18\x01 \x02(\x0c\x12\x0c\n\x04port\x18\x02 \x02(\r\x12\x15\n\rcover_context\x18\x03 \x02(\x0c\x12\x17\n\x05range\x18\x04 \x01(\x0b\x32\x08.TsRange\"\x93\x01\n\x07TsRange\x12\x12\n\nfield_name\x18\x01 \x02(\x0c\x12\x13\n\x0blower_bound\x18\x02 \x02(\x12\x12\x1d\n\x15lower_bound_inclusive\x18\x03 \x02(\x08\x12\x13\n\x0bupper_bound\x18\x04 \x02(\x12\x12\x1d\n\x15upper_bound_inclusive\x18\x05 \x02(\x08\x12\x0c\n\x04\x64\x65sc\x18\x06 \x02(\x0c*Y\n\x0cTsColumnType\x12\x0b\n\x07VARCHAR\x10\x00\x12\n\n\x06SINT64\x10\x01\x12\n\n\x06\x44OUBLE\x10\x02\x12\r\n\tTIMESTAMP\x10\x03\x12\x0b\n\x07\x42OOLEAN\x10\x04\x12\x08\n\x04\x42LOB\x10\x05\x42#\n\x17\x63om.basho.riak.protobufB\x08RiakTsPB') _TSCOLUMNTYPE = _descriptor.EnumDescriptor( name='TsColumnType', @@ -44,11 +44,15 @@ name='BOOLEAN', index=4, number=4, options=None, type=None), + _descriptor.EnumValueDescriptor( + name='BLOB', index=5, number=5, + options=None, + type=None), ], containing_type=None, options=None, serialized_start=1359, - serialized_end=1438, + serialized_end=1448, ) TsColumnType = enum_type_wrapper.EnumTypeWrapper(_TSCOLUMNTYPE) @@ -57,6 +61,7 @@ DOUBLE = 2 TIMESTAMP = 3 BOOLEAN = 4 +BLOB = 5 diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index d069686a..ade2afe1 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -46,8 +46,8 @@ def setUpClass(cls): 'expected {:d} to equal {:d}'.format(cls.ts1ms, ex1ms)) cls.rows = [ - [bd0, 0, 1.2, ts0, True], - [bd1, 3, 4.5, ts1, False] + [bd0, 0, 1.2, ts0, True, bd0], + [bd1, 3, 4.5, ts1, False, bd1] ] cls.test_key = ['hash1', 'user2', ts0] cls.table = Table(None, table_name) @@ -99,6 +99,8 @@ def test_encode_data_for_put(self): self.assertEqual(r0.cells[2].double_value, self.rows[0][2]) self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) + self.assertEqual(bytes_to_str(r0.cells[5].varchar_value), + self.rows[0][5]) r1 = req.rows[1] self.assertEqual(bytes_to_str(r1.cells[0].varchar_value), @@ -107,6 +109,8 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) + self.assertEqual(bytes_to_str(r1.cells[5].varchar_value), + self.rows[1][5]) def test_encode_data_for_listkeys(self): c = PbufCodec(client_timeouts=True) @@ -134,6 +138,9 @@ def test_decode_data_from_query(self): c4 = tqr.columns.add() c4.name = str_to_bytes('col_boolean') c4.type = TsColumnType.Value('BOOLEAN') + c5 = 
tqr.columns.add() + c5.name = str_to_bytes('col_blob') + c5.type = TsColumnType.Value('BLOB') r0 = tqr.rows.add() r0c0 = r0.cells.add() @@ -146,6 +153,8 @@ def test_decode_data_from_query(self): r0c3.timestamp_value = self.ts0ms r0c4 = r0.cells.add() r0c4.boolean_value = self.rows[0][4] + r0c5 = r0.cells.add() + r0c5.varchar_value = str_to_bytes(self.rows[0][5]) r1 = tqr.rows.add() r1c0 = r1.cells.add() @@ -158,6 +167,8 @@ def test_decode_data_from_query(self): r1c3.timestamp_value = self.ts1ms r1c4 = r1.cells.add() r1c4.boolean_value = self.rows[1][4] + r1c5 = r1.cells.add() + r1c5.varchar_value = str_to_bytes(self.rows[1][5]) tsobj = TsObject(None, self.table) c = PbufCodec() @@ -178,6 +189,8 @@ def test_decode_data_from_query(self): self.assertEqual(ct[3], 'timestamp') self.assertEqual(cn[4], 'col_boolean') self.assertEqual(ct[4], 'boolean') + self.assertEqual(cn[5], 'col_blob') + self.assertEqual(ct[5], 'blob') r0 = tsobj.rows[0] self.assertEqual(bytes_to_str(r0[0]), self.rows[0][0]) @@ -185,6 +198,7 @@ def test_decode_data_from_query(self): self.assertEqual(r0[2], self.rows[0][2]) self.assertEqual(r0[3], ts0) self.assertEqual(r0[4], self.rows[0][4]) + self.assertEqual(bytes_to_str(r0[5]), self.rows[0][5]) r1 = tsobj.rows[1] self.assertEqual(bytes_to_str(r1[0]), self.rows[1][0]) @@ -192,6 +206,7 @@ def test_decode_data_from_query(self): self.assertEqual(r1[2], self.rows[1][2]) self.assertEqual(r1[3], ts1) self.assertEqual(r1[4], self.rows[1][4]) + self.assertEqual(bytes_to_str(r1[5]), self.rows[1][5]) @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, diff --git a/riak_pb b/riak_pb index f5af9ffe..cb15cc47 160000 --- a/riak_pb +++ b/riak_pb @@ -1 +1 @@ -Subproject commit f5af9ffe04eb27689d483509de26574bdf70343f +Subproject commit cb15cc4770f3748289ba56245d62b1c0d07c33f7 diff --git a/tools b/tools index 72939314..1fa90702 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 72939314ab3151db776fcc01c92c26f6ee3dc499 +Subproject commit 1fa9070257c3443f04337e7e37ef083ed7ca2fe4 From e66ec0d9cb4875d386d20c32e150d8081bf425cb Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 6 Dec 2016 16:02:34 -0800 Subject: [PATCH 299/324] Use jdk_switcher to fix Travis CI issues --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index 4421cfbe..2013cbe3 100644 --- a/.travis.yml +++ b/.travis.yml @@ -13,6 +13,7 @@ addons: install: - pip install --upgrade pip setuptools flake8 before_script: + - jdk_switcher use oraclejdk8 - sudo ./tools/travis-ci/riak-install -d "$RIAK_DOWNLOAD_URL" - sudo ./tools/setup-riak -s env: From a72eb5cc5a11d7f9f7d224bea7ef018bc6cb39de Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 6 Dec 2016 16:44:14 -0800 Subject: [PATCH 300/324] fix mapreduce test --- riak/tests/test_mapreduce.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index 5a9d2c7a..bae404e3 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -175,7 +175,9 @@ def test_erlang_source_map_reduce_bucket_type(self): if e.value.startswith('May have tried'): strfun_allowed = False if strfun_allowed: - self.assertEqual(result, ['2', '3', '4']) + self.assertIn('2', result) + self.assertIn('3', result) + self.assertIn('4', result) def test_client_exceptional_paths(self): bucket = self.client.bucket(self.bucket_name) From 1ef59356d187e09855e40c7dff5ca5e76ecb1631 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 6 Dec 2016 17:02:07 -0800 Subject: 
[PATCH 301/324] fix another m/r test --- riak/tests/test_mapreduce.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index bae404e3..4b531368 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -151,7 +151,9 @@ def test_erlang_source_map_reduce(self): else: print("test_erlang_source_map_reduce {}".format(e.value)) if strfun_allowed: - self.assertEqual(result, ['2', '3', '4']) + self.assertIn('2', result) + self.assertIn('3', result) + self.assertIn('4', result) def test_erlang_source_map_reduce_bucket_type(self): # Create the object... From aba3c4971f324a6a1c4e890d3bf2e1877b80cf12 Mon Sep 17 00:00:00 2001 From: Alex Moore Date: Thu, 8 Dec 2016 18:14:50 -0500 Subject: [PATCH 302/324] Add some blob columns to the tests --- riak/tests/test_timeseries_pbuf.py | 19 +++++++++---------- riak/tests/test_timeseries_ttb.py | 15 +++++++++------ 2 files changed, 18 insertions(+), 16 deletions(-) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index ade2afe1..36320bf8 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -21,6 +21,8 @@ bd0 = '时间序列' bd1 = 'временные ряды' +blob0 = b'\x00\x01\x02\x03\x04\x05\x06\x07' + fiveMins = datetime.timedelta(0, 300) # NB: last arg is microseconds, 987ms expressed ts0 = datetime.datetime(2015, 1, 1, 12, 0, 0, 987000) @@ -46,8 +48,8 @@ def setUpClass(cls): 'expected {:d} to equal {:d}'.format(cls.ts1ms, ex1ms)) cls.rows = [ - [bd0, 0, 1.2, ts0, True, bd0], - [bd1, 3, 4.5, ts1, False, bd1] + [bd0, 0, 1.2, ts0, True, None], + [bd1, 3, 4.5, ts1, False, blob0] ] cls.test_key = ['hash1', 'user2', ts0] cls.table = Table(None, table_name) @@ -99,8 +101,7 @@ def test_encode_data_for_put(self): self.assertEqual(r0.cells[2].double_value, self.rows[0][2]) self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) - self.assertEqual(bytes_to_str(r0.cells[5].varchar_value), - self.rows[0][5]) + self.assertEqual(r0.cells[5].varchar_value, self.rows[0][5]) r1 = req.rows[1] self.assertEqual(bytes_to_str(r1.cells[0].varchar_value), @@ -109,8 +110,7 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) - self.assertEqual(bytes_to_str(r1.cells[5].varchar_value), - self.rows[1][5]) + self.assertEqual(r1.cells[5].varchar_value, self.rows[1][5]) def test_encode_data_for_listkeys(self): c = PbufCodec(client_timeouts=True) @@ -154,7 +154,6 @@ def test_decode_data_from_query(self): r0c4 = r0.cells.add() r0c4.boolean_value = self.rows[0][4] r0c5 = r0.cells.add() - r0c5.varchar_value = str_to_bytes(self.rows[0][5]) r1 = tqr.rows.add() r1c0 = r1.cells.add() @@ -168,7 +167,7 @@ def test_decode_data_from_query(self): r1c4 = r1.cells.add() r1c4.boolean_value = self.rows[1][4] r1c5 = r1.cells.add() - r1c5.varchar_value = str_to_bytes(self.rows[1][5]) + r1c5.varchar_value = self.rows[1][5] tsobj = TsObject(None, self.table) c = PbufCodec() @@ -198,7 +197,7 @@ def test_decode_data_from_query(self): self.assertEqual(r0[2], self.rows[0][2]) self.assertEqual(r0[3], ts0) self.assertEqual(r0[4], self.rows[0][4]) - self.assertEqual(bytes_to_str(r0[5]), self.rows[0][5]) + self.assertEqual(r0[5], self.rows[0][5]) r1 = tsobj.rows[1] self.assertEqual(bytes_to_str(r1[0]), self.rows[1][0]) @@ -206,7 +205,7 @@ 
def test_decode_data_from_query(self): self.assertEqual(r1[2], self.rows[1][2]) self.assertEqual(r1[3], ts1) self.assertEqual(r1[4], self.rows[1][4]) - self.assertEqual(bytes_to_str(r1[5]), self.rows[1][5]) + self.assertEqual(r1[5], self.rows[1][5]) @unittest.skipUnless(is_timeseries_supported() and RUN_TIMESERIES, diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 5616282a..10e919a6 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -36,6 +36,8 @@ bd0 = six.u('时间序列') bd1 = six.u('временные ряды') +blob0 = b'\x00\x01\x02\x03\x04\x05\x06\x07' + fiveMins = datetime.timedelta(0, 300) ts0 = datetime.datetime(2015, 1, 1, 12, 1, 2, 987000) ts1 = ts0 + fiveMins @@ -62,20 +64,20 @@ def test_encode_data_for_get(self): # {tsgetresp, # { # [<<"geohash">>, <<"user">>, <<"time">>, - # <<"weather">>, <<"temperature">>], - # [varchar, varchar, timestamp, varchar, double], - # [(<<"hash1">>, <<"user2">>, 144378190987, <<"typhoon">>, 90.3)] + # <<"weather">>, <<"temperature">>, <<"blob">>], + # [varchar, varchar, timestamp, varchar, double, blob], + # [(<<"hash1">>, <<"user2">>, 144378190987, <<"typhoon">>, 90.3, <<0,1,2,3,4,5,6,7>>)] # } # } def test_decode_data_from_get(self): colnames = ["varchar", "sint64", "double", "timestamp", - "boolean", "varchar", "varchar"] + "boolean", "varchar", "varchar", "blob"] coltypes = [varchar_a, sint64_a, double_a, timestamp_a, boolean_a, varchar_a, varchar_a] r0 = (bd0, 0, 1.2, unix_time_millis(ts0), True, - [], str1, None) + [], str1, None, None) r1 = (bd1, 3, 4.5, unix_time_millis(ts1), False, - [], str1, None) + [], str1, None, blob0) rows = [r0, r1] # { tsgetresp, { [colnames], [coltypes], [rows] } } data_t = colnames, coltypes, rows @@ -102,6 +104,7 @@ def test_decode_data_from_get(self): self.assertEqual(r[5], None) self.assertEqual(r[6], dr[6].encode('ascii')) self.assertEqual(r[7], None) + self.assertEqual(r[8], dr[8]) def test_encode_data_for_put(self): r0 = (bd0, 0, 1.2, unix_time_millis(ts0), True, []) From 116da1a8ba35e8d14c0d905d2fc9ebad2954847b Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 8 Dec 2016 15:52:44 -0800 Subject: [PATCH 303/324] fix test to use HasField to detect a null value --- riak/tests/test_timeseries_pbuf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 36320bf8..cd3f343e 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -101,7 +101,7 @@ def test_encode_data_for_put(self): self.assertEqual(r0.cells[2].double_value, self.rows[0][2]) self.assertEqual(r0.cells[3].timestamp_value, self.ts0ms) self.assertEqual(r0.cells[4].boolean_value, self.rows[0][4]) - self.assertEqual(r0.cells[5].varchar_value, self.rows[0][5]) + self.assertFalse(r0.cells[5].HasField('varchar_value')) r1 = req.rows[1] self.assertEqual(bytes_to_str(r1.cells[0].varchar_value), @@ -110,7 +110,7 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) - self.assertEqual(r1.cells[5].varchar_value, self.rows[1][5]) + self.assertTrue(r1.cells[5].HasField('varchar_value')) def test_encode_data_for_listkeys(self): c = PbufCodec(client_timeouts=True) From 72b5538f9cf1b9a01c38b80cc4bc3d9444c6909e Mon Sep 17 00:00:00 2001 From: Alex Moore Date: Thu, 8 Dec 2016 18:55:56 -0500 Subject: [PATCH 304/324] 
Check value in TS PBuf test --- riak/tests/test_timeseries_pbuf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index cd3f343e..bc347600 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -110,7 +110,7 @@ def test_encode_data_for_put(self): self.assertEqual(r1.cells[2].double_value, self.rows[1][2]) self.assertEqual(r1.cells[3].timestamp_value, self.ts1ms) self.assertEqual(r1.cells[4].boolean_value, self.rows[1][4]) - self.assertTrue(r1.cells[5].HasField('varchar_value')) + self.assertEqual(r1.cells[5].varchar_value, self.rows[1][5]) def test_encode_data_for_listkeys(self): c = PbufCodec(client_timeouts=True) From f3f63a70fc084c616f8338d4b9ea35d7f48303de Mon Sep 17 00:00:00 2001 From: Alex Moore Date: Thu, 8 Dec 2016 19:05:34 -0500 Subject: [PATCH 305/324] Fix some formatting errors --- riak/tests/test_timeseries_pbuf.py | 2 +- riak/tests/test_timeseries_ttb.py | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index bc347600..51257a0b 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -153,7 +153,7 @@ def test_decode_data_from_query(self): r0c3.timestamp_value = self.ts0ms r0c4 = r0.cells.add() r0c4.boolean_value = self.rows[0][4] - r0c5 = r0.cells.add() + r0.cells.add() r1 = tqr.rows.add() r1c0 = r1.cells.add() diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 10e919a6..765eba67 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -66,7 +66,8 @@ def test_encode_data_for_get(self): # [<<"geohash">>, <<"user">>, <<"time">>, # <<"weather">>, <<"temperature">>, <<"blob">>], # [varchar, varchar, timestamp, varchar, double, blob], - # [(<<"hash1">>, <<"user2">>, 144378190987, <<"typhoon">>, 90.3, <<0,1,2,3,4,5,6,7>>)] + # [(<<"hash1">>, <<"user2">>, 144378190987, <<"typhoon">>, + # 90.3, <<0,1,2,3,4,5,6,7>>)] # } # } def test_decode_data_from_get(self): From b67485e99bfb90f13671ab663decd49fb13ae8e5 Mon Sep 17 00:00:00 2001 From: Steffen Oschatz Date: Fri, 18 Nov 2016 13:41:15 +0100 Subject: [PATCH 306/324] added head_only parameter for pbc transport prevent http test for pbc only py3 combat added head parameter also to http to not break combat support head only request on multiget call --- riak/bucket.py | 17 +++++++++++++---- riak/client/operations.py | 8 ++++++-- riak/codecs/pbuf.py | 4 +++- riak/content.py | 2 ++ riak/riak_object.py | 7 +++++-- riak/tests/test_kv.py | 15 ++++++++++++++- riak/transports/http/transport.py | 2 +- riak/transports/tcp/transport.py | 4 ++-- riak/transports/transport.py | 2 +- 9 files changed, 47 insertions(+), 14 deletions(-) diff --git a/riak/bucket.py b/riak/bucket.py index f6dd3863..70340f08 100644 --- a/riak/bucket.py +++ b/riak/bucket.py @@ -194,7 +194,7 @@ def new(self, key=None, data=None, content_type='application/json', return obj def get(self, key, r=None, pr=None, timeout=None, include_context=None, - basic_quorum=None, notfound_ok=None): + basic_quorum=None, notfound_ok=None, head_only=False): """ Retrieve a :class:`~riak.riak_object.RiakObject` or :class:`~riak.datatypes.Datatype`, based on the presence and value @@ -216,6 +216,9 @@ def get(self, key, r=None, pr=None, timeout=None, include_context=None, :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type 
notfound_ok: bool + :param head_only: whether to fetch without value, so only metadata + (only available on PB transport) + :type head_only: bool :rtype: :class:`RiakObject ` or :class:`~riak.datatypes.Datatype` @@ -231,10 +234,12 @@ def get(self, key, r=None, pr=None, timeout=None, include_context=None, obj = RiakObject(self._client, self, key) return obj.reload(r=r, pr=pr, timeout=timeout, basic_quorum=basic_quorum, - notfound_ok=notfound_ok) + notfound_ok=notfound_ok, + head_only=head_only) def multiget(self, keys, r=None, pr=None, timeout=None, - basic_quorum=None, notfound_ok=None): + basic_quorum=None, notfound_ok=None, + head_only=False): """ Retrieves a list of keys belonging to this bucket in parallel. @@ -251,6 +256,9 @@ def multiget(self, keys, r=None, pr=None, timeout=None, :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool + :param head_only: whether to fetch without value, so only metadata + (only available on PB transport) + :type head_only: bool :rtype: list of :class:`RiakObjects `, :class:`Datatypes `, or tuples of bucket_type, bucket, key, and the exception raised on fetch @@ -258,7 +266,8 @@ def multiget(self, keys, r=None, pr=None, timeout=None, bkeys = [(self.bucket_type.name, self.name, key) for key in keys] return self._client.multiget(bkeys, r=r, pr=pr, timeout=timeout, basic_quorum=basic_quorum, - notfound_ok=notfound_ok) + notfound_ok=notfound_ok, + head_only=head_only) def _get_resolver(self): if callable(self._resolver): diff --git a/riak/client/operations.py b/riak/client/operations.py index 87c7f0b9..bc6d4568 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -678,7 +678,7 @@ def ts_stream_keys(self, table, timeout=None): @retryable def get(self, transport, robj, r=None, pr=None, timeout=None, - basic_quorum=None, notfound_ok=None): + basic_quorum=None, notfound_ok=None, head_only=False): """ get(robj, r=None, pr=None, timeout=None) @@ -700,6 +700,9 @@ def get(self, transport, robj, r=None, pr=None, timeout=None, :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool + :param head_only: whether to fetch without value, so only metadata + (only available on PB transport) + :type head_only: bool """ _validate_timeout(timeout) if not isinstance(robj.key, six.string_types): @@ -708,7 +711,8 @@ def get(self, transport, robj, r=None, pr=None, timeout=None, return transport.get(robj, r=r, pr=pr, timeout=timeout, basic_quorum=basic_quorum, - notfound_ok=notfound_ok) + notfound_ok=notfound_ok, + head_only=head_only) @retryable def delete(self, transport, robj, rw=None, r=None, w=None, dw=None, diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 04515f9f..9a21aaa5 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -895,7 +895,8 @@ def decode_preflist(self, item): return result def encode_get(self, robj, r=None, pr=None, timeout=None, - basic_quorum=None, notfound_ok=None): + basic_quorum=None, notfound_ok=None, + head_only=False): bucket = robj.bucket req = riak.pb.riak_kv_pb2.RpbGetReq() if r: @@ -914,6 +915,7 @@ def encode_get(self, robj, r=None, pr=None, timeout=None, req.bucket = str_to_bytes(bucket.name) self._add_bucket_type(req, bucket.bucket_type) req.key = str_to_bytes(robj.key) + req.head = head_only mc = riak.pb.messages.MSG_CODE_GET_REQ rc = riak.pb.messages.MSG_CODE_GET_RESP return Msg(mc, req.SerializeToString(), rc) diff --git a/riak/content.py b/riak/content.py index 
d885827b..6b3f080d 100644 --- a/riak/content.py +++ b/riak/content.py @@ -90,6 +90,8 @@ def _serialize(self, value): format(self.content_type)) def _deserialize(self, value): + if not value: + return value decoder = self._robject.bucket.get_decoder(self.content_type) if decoder: return decoder(value) diff --git a/riak/riak_object.py b/riak/riak_object.py index ab7dd375..7657e2a4 100644 --- a/riak/riak_object.py +++ b/riak/riak_object.py @@ -269,7 +269,7 @@ def store(self, w=None, dw=None, pw=None, return_body=True, return self def reload(self, r=None, pr=None, timeout=None, basic_quorum=None, - notfound_ok=None): + notfound_ok=None, head_only=False): """ Reload the object from Riak. When this operation completes, the object could contain new metadata and a new value, if the object @@ -293,10 +293,13 @@ def reload(self, r=None, pr=None, timeout=None, basic_quorum=None, :type basic_quorum: bool :param notfound_ok: whether to treat not-found responses as successful :type notfound_ok: bool + :param head_only: whether to fetch without value, so only metadata + (only available on PB transport) + :type head_only: bool :rtype: :class:`RiakObject` """ - self.client.get(self, r=r, pr=pr, timeout=timeout) + self.client.get(self, r=r, pr=pr, timeout=timeout, head_only=head_only) return self def delete(self, r=None, w=None, dw=None, pr=None, pw=None, diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 67dd901f..7db30a08 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -8,7 +8,7 @@ from time import sleep from riak import ConflictError, RiakBucket, RiakError from riak.resolver import default_resolver, last_written_resolver -from riak.tests import RUN_KV, RUN_RESOLVE +from riak.tests import RUN_KV, RUN_RESOLVE, PROTOCOL from riak.tests.base import IntegrationTestBase from riak.tests.comparison import Comparison @@ -79,6 +79,19 @@ def test_no_returnbody(self): o = bucket.new(self.key_name, "bar").store(return_body=False) self.assertEqual(o.vclock, None) + @unittest.skipUnless(PROTOCOL == 'pbc', 'Only available on pbc') + def test_get_no_returnbody(self): + bucket = self.client.bucket(self.bucket_name) + o = bucket.new(self.key_name, "Ain't no body") + o.store() + + stored_object = bucket.get(self.key_name, head_only=True) + self.assertFalse(stored_object.data) + + list_of_objects = bucket.multiget([self.key_name], head_only=True) + for stored_object in list_of_objects: + self.assertFalse(stored_object.data) + def test_many_link_headers_should_work_fine(self): bucket = self.client.bucket(self.bucket_name) o = bucket.new("lots_of_links", "My god, it's full of links!") diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index 1599d368..c2d7c6dc 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -107,7 +107,7 @@ def get_resources(self): return {} def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, - notfound_ok=None): + notfound_ok=None, head_only=False): """ Get a bucket/key from the server """ diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index d8173402..39c0b456 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -133,7 +133,7 @@ def _set_client_id(self, client_id): doc="""the client ID for this connection""") def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, - notfound_ok=None): + notfound_ok=None, head_only=False): """ Serialize get request and deserialize response """ @@ -141,7 +141,7 @@ def 
get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, codec = self._get_codec(msg_code) msg = codec.encode_get(robj, r, pr, timeout, basic_quorum, - notfound_ok) + notfound_ok, head_only) resp_code, resp = self._request(msg, codec) return codec.decode_get(robj, resp) diff --git a/riak/transports/transport.py b/riak/transports/transport.py index bda18e35..ba413865 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -54,7 +54,7 @@ def ping(self): raise NotImplementedError def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, - notfound_ok=None): + notfound_ok=None, head_only=False): """ Fetches an object. """ From a663b9205b66d10443ea7ce37b0aff53796a66df Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 12 Dec 2016 10:26:17 -0800 Subject: [PATCH 307/324] Add contributor, 2.7.0 release notes --- README.md | 1 + RELNOTES.md | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/README.md b/README.md index bd57bc81..9a15864d 100644 --- a/README.md +++ b/README.md @@ -142,6 +142,7 @@ Contributors * Soren Hansen * Sreejith Kesavan * Timothée Peignier +* [`tobixx`](https://github.com/tobixx) * [Tin Tvrtković](https://github.com/Tinche) * [Vitaly Shestovskiy](https://github.com/lamp0chka) * William Kral diff --git a/RELNOTES.md b/RELNOTES.md index 9d21f8dd..bd567fde 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,5 +1,9 @@ # Riak Python Client Release Notes +## [`2.7.0` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.7.0) + * Riak TS 1.5 support + * Support for `head` parameter + ## [`2.6.1` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.6.0) * NOTE: Due to pypi upload errors, `2.6.1` takes the place of `2.6.0`. 
From 50c93b0b65901bd60e3eba10db7daf7e9fe97735 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 16 Dec 2016 16:50:12 -0800 Subject: [PATCH 308/324] Make docs a submodule of the gh-pages branch, add docs target in main Makefile to generate docs there --- .gitignore | 29 +++++++++++++------------ .gitmodules | 4 ++++ Makefile | 10 ++++++++- docs | 1 + {docs => docsrc}/Makefile | 0 {docs => docsrc}/_templates/layout.html | 0 {docs => docsrc}/advanced.rst | 0 {docs => docsrc}/bucket.rst | 0 {docs => docsrc}/client.rst | 0 {docs => docsrc}/conf.py | 0 {docs => docsrc}/datatypes.rst | 0 {docs => docsrc}/index.rst | 0 {docs => docsrc}/make.bat | 0 {docs => docsrc}/object.rst | 0 {docs => docsrc}/query.rst | 0 {docs => docsrc}/security.rst | 0 16 files changed, 29 insertions(+), 15 deletions(-) create mode 160000 docs rename {docs => docsrc}/Makefile (100%) rename {docs => docsrc}/_templates/layout.html (100%) rename {docs => docsrc}/advanced.rst (100%) rename {docs => docsrc}/bucket.rst (100%) rename {docs => docsrc}/client.rst (100%) rename {docs => docsrc}/conf.py (100%) rename {docs => docsrc}/datatypes.rst (100%) rename {docs => docsrc}/index.rst (100%) rename {docs => docsrc}/make.bat (100%) rename {docs => docsrc}/object.rst (100%) rename {docs => docsrc}/query.rst (100%) rename {docs => docsrc}/security.rst (100%) diff --git a/.gitignore b/.gitignore index 5cc03116..8d2441d7 100644 --- a/.gitignore +++ b/.gitignore @@ -1,19 +1,20 @@ -README.rst -*.pyc -.python-version -__pycache__/ -.tox/ -docs/_build -.*.swp -.coverage -riak-*/ -py-build/ +*~ +#*# +_build/ build/ +.coverage dist/ -riak.egg-info/ +docsrc/doctrees/ *.egg .eggs/ -#*# -*~ -.idea/ envs/ +.idea/ +py-build/ +*.pyc +__pycache__/ +.python-version +README.rst +riak-*/ +riak.egg-info/ +.*.swp +.tox/ diff --git a/.gitmodules b/.gitmodules index df75a761..510fba6e 100644 --- a/.gitmodules +++ b/.gitmodules @@ -4,3 +4,7 @@ [submodule "tools"] path = tools url = git://github.com/basho/riak-client-tools.git +[submodule "docs"] + path = docs + url = https://github.com/basho/riak-python-client.git + branch = gh-pages diff --git a/Makefile b/Makefile index 735bacfe..316389e4 100644 --- a/Makefile +++ b/Makefile @@ -15,7 +15,10 @@ unexport LC_TIME PANDOC_VERSION := $(shell pandoc --version) PROTOC_VERSION := $(shell protoc --version) -PROJDIR = $(realpath $(CURDIR)) +PROJDIR := $(realpath $(CURDIR)) +DOCSRC := $(PROJDIR)/docsrc +DOCTREES := $(DOCSRC)/doctrees +DOCSDIR := $(PROJDIR)/docs PYPI_REPOSITORY ?= pypi @@ -23,6 +26,11 @@ PYPI_REPOSITORY ?= pypi lint: $(PROJDIR)/.runner lint +.PHONY: docs +docs: + sphinx-build -b html -d $(DOCTREES) $(DOCSRC) $(DOCSDIR) + @echo "The HTML pages are in $(DOCSDIR)" + .PHONY: pb_clean pb_clean: @echo "==> Python (clean)" diff --git a/docs b/docs new file mode 160000 index 00000000..f8f1ae3b --- /dev/null +++ b/docs @@ -0,0 +1 @@ +Subproject commit f8f1ae3b2b8258ed494dec9530683fe29b381cf9 diff --git a/docs/Makefile b/docsrc/Makefile similarity index 100% rename from docs/Makefile rename to docsrc/Makefile diff --git a/docs/_templates/layout.html b/docsrc/_templates/layout.html similarity index 100% rename from docs/_templates/layout.html rename to docsrc/_templates/layout.html diff --git a/docs/advanced.rst b/docsrc/advanced.rst similarity index 100% rename from docs/advanced.rst rename to docsrc/advanced.rst diff --git a/docs/bucket.rst b/docsrc/bucket.rst similarity index 100% rename from docs/bucket.rst rename to docsrc/bucket.rst diff --git a/docs/client.rst b/docsrc/client.rst similarity index 
100% rename from docs/client.rst rename to docsrc/client.rst diff --git a/docs/conf.py b/docsrc/conf.py similarity index 100% rename from docs/conf.py rename to docsrc/conf.py diff --git a/docs/datatypes.rst b/docsrc/datatypes.rst similarity index 100% rename from docs/datatypes.rst rename to docsrc/datatypes.rst diff --git a/docs/index.rst b/docsrc/index.rst similarity index 100% rename from docs/index.rst rename to docsrc/index.rst diff --git a/docs/make.bat b/docsrc/make.bat similarity index 100% rename from docs/make.bat rename to docsrc/make.bat diff --git a/docs/object.rst b/docsrc/object.rst similarity index 100% rename from docs/object.rst rename to docsrc/object.rst diff --git a/docs/query.rst b/docsrc/query.rst similarity index 100% rename from docs/query.rst rename to docsrc/query.rst diff --git a/docs/security.rst b/docsrc/security.rst similarity index 100% rename from docs/security.rst rename to docsrc/security.rst From f370367f11ff46b3aba9ebce1aa8f7a5803eda8d Mon Sep 17 00:00:00 2001 From: Alex Moore Date: Sat, 17 Dec 2016 23:34:56 -0500 Subject: [PATCH 309/324] Fix ts listkeys input of either Table object or table name as string --- riak/client/operations.py | 7 +++++-- riak/tests/test_timeseries_pbuf.py | 25 +++++++++++++++++-------- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/riak/client/operations.py b/riak/client/operations.py index bc6d4568..3bc9e06d 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -659,15 +659,18 @@ def ts_stream_keys(self, table, timeout=None): stream.close() :param table: the table from which to stream keys - :type table: Table + :type table: string or :class:`Table ` :param timeout: a timeout value in milliseconds :type timeout: int :rtype: iterator """ + t = table + if isinstance(t, six.string_types): + t = Table(self, table) _validate_timeout(timeout) resource = self._acquire() transport = resource.object - stream = transport.ts_stream_keys(table, timeout) + stream = transport.ts_stream_keys(t, timeout) stream.attach(resource) try: for keylist in stream: diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 51257a0b..8956dd98 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -436,16 +436,25 @@ def test_stream_keys(self): table = Table(self.client, table_name) streamed_keys = [] for keylist in table.stream_keys(): - self.assertNotEqual([], keylist) - streamed_keys += keylist - for key in keylist: - self.assertIsInstance(key, list) - self.assertEqual(len(key), 3) - self.assertEqual(bytes_to_str(key[0]), 'hash1') - self.assertEqual(bytes_to_str(key[1]), 'user2') - self.assertIsInstance(key[2], datetime.datetime) + self.validate_keylist(streamed_keys, keylist) self.assertGreater(len(streamed_keys), 0) + def test_stream_keys_from_string_table(self): + streamed_keys = [] + for keylist in self.client.ts_stream_keys(table_name): + self.validate_keylist(streamed_keys, keylist) + self.assertGreater(len(streamed_keys), 0) + + def validate_keylist(self, streamed_keys, keylist): + self.assertNotEqual([], keylist) + streamed_keys += keylist + for key in keylist: + self.assertIsInstance(key, list) + self.assertEqual(len(key), 3) + self.assertEqual(bytes_to_str(key[0]), 'hash1') + self.assertEqual(bytes_to_str(key[1]), 'user2') + self.assertIsInstance(key[2], datetime.datetime) + def test_delete_single_value(self): key = ['hash1', 'user2', self.twentyFiveMinsAgo] rslt = self.client.ts_delete(table_name, key) From 
b403e2c86108e7e2214100f0251e455471e5f9a6 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 10 Jan 2017 11:12:33 -0800 Subject: [PATCH 310/324] Add per-file headers --- NOTICE | 2 ++ commands.py | 14 ++++++++++++++ docsrc/conf.py | 14 ++++++++++++++ riak/__init__.py | 14 ++++++++++++++ riak/benchmark.py | 14 ++++++++++++++ riak/benchmarks/multiget.py | 14 ++++++++++++++ riak/benchmarks/timeseries.py | 14 ++++++++++++++ riak/bucket.py | 14 ++++++++++++++ riak/client/__init__.py | 14 ++++++++++++++ riak/client/index_page.py | 14 ++++++++++++++ riak/client/multi.py | 14 ++++++++++++++ riak/client/operations.py | 14 ++++++++++++++ riak/client/transport.py | 14 ++++++++++++++ riak/codecs/__init__.py | 14 ++++++++++++++ riak/codecs/http.py | 14 ++++++++++++++ riak/codecs/pbuf.py | 14 ++++++++++++++ riak/codecs/ttb.py | 14 ++++++++++++++ riak/codecs/util.py | 14 ++++++++++++++ riak/content.py | 14 ++++++++++++++ riak/datatypes/__init__.py | 14 ++++++++++++++ riak/datatypes/counter.py | 14 ++++++++++++++ riak/datatypes/datatype.py | 14 ++++++++++++++ riak/datatypes/errors.py | 14 ++++++++++++++ riak/datatypes/flag.py | 14 ++++++++++++++ riak/datatypes/hll.py | 14 ++++++++++++++ riak/datatypes/map.py | 14 ++++++++++++++ riak/datatypes/register.py | 14 ++++++++++++++ riak/datatypes/set.py | 14 ++++++++++++++ riak/datatypes/types.py | 14 ++++++++++++++ riak/mapreduce.py | 14 ++++++++++++++ riak/multidict.py | 14 ++++++++++++++ riak/node.py | 14 ++++++++++++++ riak/pb/__init__.py | 14 ++++++++++++++ riak/pb/messages.py | 14 ++++++++++++++ riak/pb/riak_dt_pb2.py | 14 ++++++++++++++ riak/pb/riak_kv_pb2.py | 14 ++++++++++++++ riak/pb/riak_pb2.py | 14 ++++++++++++++ riak/pb/riak_search_pb2.py | 14 ++++++++++++++ riak/pb/riak_ts_pb2.py | 14 ++++++++++++++ riak/pb/riak_yokozuna_pb2.py | 14 ++++++++++++++ riak/resolver.py | 14 ++++++++++++++ riak/riak_error.py | 14 ++++++++++++++ riak/riak_object.py | 14 ++++++++++++++ riak/security.py | 14 ++++++++++++++ riak/table.py | 14 ++++++++++++++ riak/test_server.py | 14 ++++++++++++++ riak/tests/__init__.py | 14 ++++++++++++++ riak/tests/base.py | 14 ++++++++++++++ riak/tests/comparison.py | 14 ++++++++++++++ riak/tests/suite.py | 14 ++++++++++++++ riak/tests/test_2i.py | 14 ++++++++++++++ riak/tests/test_btypes.py | 14 ++++++++++++++ riak/tests/test_client.py | 14 ++++++++++++++ riak/tests/test_comparison.py | 14 ++++++++++++++ riak/tests/test_datatypes.py | 14 ++++++++++++++ riak/tests/test_datetime.py | 14 ++++++++++++++ riak/tests/test_feature_detection.py | 14 ++++++++++++++ riak/tests/test_filters.py | 14 ++++++++++++++ riak/tests/test_kv.py | 14 ++++++++++++++ riak/tests/test_mapreduce.py | 14 ++++++++++++++ riak/tests/test_misc.py | 14 ++++++++++++++ riak/tests/test_pool.py | 14 ++++++++++++++ riak/tests/test_search.py | 14 ++++++++++++++ riak/tests/test_security.py | 14 ++++++++++++++ riak/tests/test_server_test.py | 14 ++++++++++++++ riak/tests/test_timeseries_pbuf.py | 14 ++++++++++++++ riak/tests/test_timeseries_ttb.py | 14 ++++++++++++++ riak/tests/test_util.py | 14 ++++++++++++++ riak/tests/test_yokozuna.py | 14 ++++++++++++++ riak/tests/yz_setup.py | 14 ++++++++++++++ riak/transports/__init__.py | 13 +++++++++++++ riak/transports/feature_detect.py | 14 ++++++++++++++ riak/transports/http/__init__.py | 14 ++++++++++++++ riak/transports/http/connection.py | 14 ++++++++++++++ riak/transports/http/resources.py | 14 ++++++++++++++ riak/transports/http/search.py | 15 +++++++++++++++ riak/transports/http/stream.py | 14 ++++++++++++++ 
riak/transports/http/transport.py | 14 ++++++++++++++ riak/transports/pool.py | 14 ++++++++++++++ riak/transports/security.py | 14 ++++++++++++++ riak/transports/tcp/__init__.py | 14 ++++++++++++++ riak/transports/tcp/connection.py | 14 ++++++++++++++ riak/transports/tcp/stream.py | 14 ++++++++++++++ riak/transports/tcp/transport.py | 14 ++++++++++++++ riak/transports/transport.py | 14 ++++++++++++++ riak/ts_object.py | 14 ++++++++++++++ riak/tz.py | 14 ++++++++++++++ riak/util.py | 14 ++++++++++++++ version.py | 14 +++++++++++++- 89 files changed, 1233 insertions(+), 1 deletion(-) create mode 100644 NOTICE diff --git a/NOTICE b/NOTICE new file mode 100644 index 00000000..37c556ce --- /dev/null +++ b/NOTICE @@ -0,0 +1,2 @@ +Riak Python Client +Copyright 2010-present Basho Technologies, Inc. diff --git a/commands.py b/commands.py index 3d1e13c0..a20557ab 100644 --- a/commands.py +++ b/commands.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import csv import os import os.path diff --git a/docsrc/conf.py b/docsrc/conf.py index 57d39b2e..7b5cb000 100644 --- a/docsrc/conf.py +++ b/docsrc/conf.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- # # Riak (Python binding) documentation build configuration file, created by diff --git a/riak/__init__.py b/riak/__init__.py index 9e761a91..de68354a 100644 --- a/riak/__init__.py +++ b/riak/__init__.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ The Riak API for Python allows you to connect to a Riak instance, create, modify, and delete Riak objects, add and remove links from diff --git a/riak/benchmark.py b/riak/benchmark.py index cfb220c1..e1f3e55c 100644 --- a/riak/benchmark.py +++ b/riak/benchmark.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from __future__ import print_function import os diff --git a/riak/benchmarks/multiget.py b/riak/benchmarks/multiget.py index 22c01c8b..87a97a6a 100644 --- a/riak/benchmarks/multiget.py +++ b/riak/benchmarks/multiget.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import binascii import os diff --git a/riak/benchmarks/timeseries.py b/riak/benchmarks/timeseries.py index 84960962..5d0f89c3 100644 --- a/riak/benchmarks/timeseries.py +++ b/riak/benchmarks/timeseries.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import datetime import random import sys diff --git a/riak/bucket.py b/riak/bucket.py index 70340f08..3379fa8e 100644 --- a/riak/bucket.py +++ b/riak/bucket.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2010 Rusty Klophaus Copyright 2010 Justin Sheehy diff --git a/riak/client/__init__.py b/riak/client/__init__.py index f7e61446..ac33f8f2 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + try: import simplejson as json except ImportError: diff --git a/riak/client/index_page.py b/riak/client/index_page.py index 3d273e4b..9014d6c4 100644 --- a/riak/client/index_page.py +++ b/riak/client/index_page.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2013 Basho Technologies, Inc. diff --git a/riak/client/multi.py b/riak/client/multi.py index b7a1e11a..681d3ec3 100644 --- a/riak/client/multi.py +++ b/riak/client/multi.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from __future__ import print_function from collections import namedtuple from threading import Thread, Lock, Event diff --git a/riak/client/operations.py b/riak/client/operations.py index 3bc9e06d..1acf06e1 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import six import riak.client.multi diff --git a/riak/client/transport.py b/riak/client/transport.py index 4010f165..ffc705e4 100644 --- a/riak/client/transport.py +++ b/riak/client/transport.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from contextlib import contextmanager from riak.transports.pool import BadResource, ConnectionClosed from riak.transports.tcp import is_retryable as is_tcp_retryable diff --git a/riak/codecs/__init__.py b/riak/codecs/__init__.py index b155bdff..00324f14 100644 --- a/riak/codecs/__init__.py +++ b/riak/codecs/__init__.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import collections import riak.pb.messages diff --git a/riak/codecs/http.py b/riak/codecs/http.py index 84fed2a7..bfc91f2c 100644 --- a/riak/codecs/http.py +++ b/riak/codecs/http.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import re import csv import six diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index 9a21aaa5..fe34ee15 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import datetime import six diff --git a/riak/codecs/ttb.py b/riak/codecs/ttb.py index 5cec7038..70a8b6fc 100644 --- a/riak/codecs/ttb.py +++ b/riak/codecs/ttb.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import datetime import six diff --git a/riak/codecs/util.py b/riak/codecs/util.py index 52aecb9f..1fa492bb 100644 --- a/riak/codecs/util.py +++ b/riak/codecs/util.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import riak.pb.messages diff --git a/riak/content.py b/riak/content.py index 6b3f080d..87b35594 100644 --- a/riak/content.py +++ b/riak/content.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2013 Basho Technologies, Inc. diff --git a/riak/datatypes/__init__.py b/riak/datatypes/__init__.py index 90d114e9..87062a74 100644 --- a/riak/datatypes/__init__.py +++ b/riak/datatypes/__init__.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2015 Basho Technologies, Inc. diff --git a/riak/datatypes/counter.py b/riak/datatypes/counter.py index 33c69f81..d8c8fd24 100644 --- a/riak/datatypes/counter.py +++ b/riak/datatypes/counter.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + import six from riak.datatypes.datatype import Datatype diff --git a/riak/datatypes/datatype.py b/riak/datatypes/datatype.py index 192b815e..bad20d2c 100644 --- a/riak/datatypes/datatype.py +++ b/riak/datatypes/datatype.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2015 Basho Technologies, Inc. diff --git a/riak/datatypes/errors.py b/riak/datatypes/errors.py index 71353f8f..16be5589 100644 --- a/riak/datatypes/errors.py +++ b/riak/datatypes/errors.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from riak import RiakError diff --git a/riak/datatypes/flag.py b/riak/datatypes/flag.py index 494dd799..3ce78c81 100644 --- a/riak/datatypes/flag.py +++ b/riak/datatypes/flag.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2015 Basho Technologies, Inc. diff --git a/riak/datatypes/hll.py b/riak/datatypes/hll.py index 16d006f0..1d962731 100644 --- a/riak/datatypes/hll.py +++ b/riak/datatypes/hll.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import six from .datatype import Datatype diff --git a/riak/datatypes/map.py b/riak/datatypes/map.py index a2bbdf2b..31d6aa22 100644 --- a/riak/datatypes/map.py +++ b/riak/datatypes/map.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2015 Basho Technologies, Inc. diff --git a/riak/datatypes/register.py b/riak/datatypes/register.py index 1d6813b8..c8402743 100644 --- a/riak/datatypes/register.py +++ b/riak/datatypes/register.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2015 Basho Technologies, Inc. diff --git a/riak/datatypes/set.py b/riak/datatypes/set.py index e0797ab8..19829cf3 100644 --- a/riak/datatypes/set.py +++ b/riak/datatypes/set.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import collections from .datatype import Datatype diff --git a/riak/datatypes/types.py b/riak/datatypes/types.py index f349de25..79b969fb 100644 --- a/riak/datatypes/types.py +++ b/riak/datatypes/types.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2015 Basho Technologies, Inc. diff --git a/riak/mapreduce.py b/riak/mapreduce.py index fe8cd6e4..5e4e6cfc 100644 --- a/riak/mapreduce.py +++ b/riak/mapreduce.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2010 Rusty Klophaus Copyright 2010 Justin Sheehy diff --git a/riak/multidict.py b/riak/multidict.py index b13a65b2..0df28f80 100644 --- a/riak/multidict.py +++ b/riak/multidict.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # (c) 2005 Ian Bicking and contributors; written for Paste # (http://pythonpaste.org) Licensed under the MIT license: # http://www.opensource.org/licenses/mit-license.php diff --git a/riak/node.py b/riak/node.py index 9a999ece..eecffe69 100644 --- a/riak/node.py +++ b/riak/node.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import math import time diff --git a/riak/pb/__init__.py b/riak/pb/__init__.py index e69de29b..9b867bc5 100644 --- a/riak/pb/__init__.py +++ b/riak/pb/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/riak/pb/messages.py b/riak/pb/messages.py index 76c25e82..b8f1e91e 100644 --- a/riak/pb/messages.py +++ b/riak/pb/messages.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # This is a generated file. DO NOT EDIT. """ diff --git a/riak/pb/riak_dt_pb2.py b/riak/pb/riak_dt_pb2.py index 1b640499..ba9a590d 100644 --- a/riak/pb/riak_dt_pb2.py +++ b/riak/pb/riak_dt_pb2.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from six import * # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_dt.proto diff --git a/riak/pb/riak_kv_pb2.py b/riak/pb/riak_kv_pb2.py index 09797052..f1832df6 100644 --- a/riak/pb/riak_kv_pb2.py +++ b/riak/pb/riak_kv_pb2.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from six import * # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_kv.proto diff --git a/riak/pb/riak_pb2.py b/riak/pb/riak_pb2.py index 8f4ac076..72dba122 100644 --- a/riak/pb/riak_pb2.py +++ b/riak/pb/riak_pb2.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from six import * # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak.proto diff --git a/riak/pb/riak_search_pb2.py b/riak/pb/riak_search_pb2.py index 788b7cda..b20adbfc 100644 --- a/riak/pb/riak_search_pb2.py +++ b/riak/pb/riak_search_pb2.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from six import * # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_search.proto diff --git a/riak/pb/riak_ts_pb2.py b/riak/pb/riak_ts_pb2.py index 13a06a14..5033db67 100644 --- a/riak/pb/riak_ts_pb2.py +++ b/riak/pb/riak_ts_pb2.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from six import * # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_ts.proto diff --git a/riak/pb/riak_yokozuna_pb2.py b/riak/pb/riak_yokozuna_pb2.py index 1673f538..6cc20395 100644 --- a/riak/pb/riak_yokozuna_pb2.py +++ b/riak/pb/riak_yokozuna_pb2.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from six import * # Generated by the protocol buffer compiler. DO NOT EDIT! # source: riak_yokozuna.proto diff --git a/riak/resolver.py b/riak/resolver.py index d56ae5f5..260740d6 100644 --- a/riak/resolver.py +++ b/riak/resolver.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2013 Basho Technologies, Inc. diff --git a/riak/riak_error.py b/riak/riak_error.py index b99eb7fe..ec70d54f 100644 --- a/riak/riak_error.py +++ b/riak/riak_error.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2015 Basho Technologies, Inc. diff --git a/riak/riak_object.py b/riak/riak_object.py index 7657e2a4..ab9650ca 100644 --- a/riak/riak_object.py +++ b/riak/riak_object.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from riak import ConflictError from riak.content import RiakContent import base64 diff --git a/riak/security.py b/riak/security.py index d6adee91..d4dfdd1a 100644 --- a/riak/security.py +++ b/riak/security.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2014 Basho Technologies, Inc. diff --git a/riak/table.py b/riak/table.py index d026bf18..d4006503 100644 --- a/riak/table.py +++ b/riak/table.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from six import string_types, PY2 diff --git a/riak/test_server.py b/riak/test_server.py index 545f7e5e..99d68742 100644 --- a/riak/test_server.py +++ b/riak/test_server.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + from __future__ import print_function import os.path import threading diff --git a/riak/tests/__init__.py b/riak/tests/__init__.py index 65aef57a..be547be6 100644 --- a/riak/tests/__init__.py +++ b/riak/tests/__init__.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import logging import os import socket diff --git a/riak/tests/base.py b/riak/tests/base.py index b2891b54..aa81c0da 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import logging import random diff --git a/riak/tests/comparison.py b/riak/tests/comparison.py index 30cde091..aa1d21cc 100644 --- a/riak/tests/comparison.py +++ b/riak/tests/comparison.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- from six import PY2, PY3 import collections diff --git a/riak/tests/suite.py b/riak/tests/suite.py index e317213a..782be4a0 100644 --- a/riak/tests/suite.py +++ b/riak/tests/suite.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import os.path import unittest diff --git a/riak/tests/test_2i.py b/riak/tests/test_2i.py index 6db10602..01f02aee 100644 --- a/riak/tests/test_2i.py +++ b/riak/tests/test_2i.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import unittest diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index ea427c4b..67cd6568 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import unittest from riak import RiakError, RiakObject diff --git a/riak/tests/test_client.py b/riak/tests/test_client.py index fa506c29..001520d2 100644 --- a/riak/tests/test_client.py +++ b/riak/tests/test_client.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import unittest from six import PY2 diff --git a/riak/tests/test_comparison.py b/riak/tests/test_comparison.py index b73d4bf5..8aac4ef8 100644 --- a/riak/tests/test_comparison.py +++ b/riak/tests/test_comparison.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ # -*- coding: utf-8 -*- import unittest diff --git a/riak/tests/test_datatypes.py b/riak/tests/test_datatypes.py index 2642837c..17aa4bf2 100644 --- a/riak/tests/test_datatypes.py +++ b/riak/tests/test_datatypes.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import unittest import riak.datatypes as datatypes diff --git a/riak/tests/test_datetime.py b/riak/tests/test_datetime.py index a3640105..f3367179 100644 --- a/riak/tests/test_datetime.py +++ b/riak/tests/test_datetime.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import datetime import unittest diff --git a/riak/tests/test_feature_detection.py b/riak/tests/test_feature_detection.py index b0ac63a6..bf0c0c7b 100644 --- a/riak/tests/test_feature_detection.py +++ b/riak/tests/test_feature_detection.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import unittest diff --git a/riak/tests/test_filters.py b/riak/tests/test_filters.py index e41eea6c..f4a77db0 100644 --- a/riak/tests/test_filters.py +++ b/riak/tests/test_filters.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ # -*- coding: utf-8 -*- import unittest diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 7db30a08..f8ffb76a 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import copy import os diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index 4b531368..5f5c5ed0 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- from __future__ import print_function diff --git a/riak/tests/test_misc.py b/riak/tests/test_misc.py index 15605114..3660720e 100644 --- a/riak/tests/test_misc.py +++ b/riak/tests/test_misc.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import unittest diff --git a/riak/tests/test_pool.py b/riak/tests/test_pool.py index 3825c23c..346b2645 100644 --- a/riak/tests/test_pool.py +++ b/riak/tests/test_pool.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ # -*- coding: utf-8 -*- import unittest diff --git a/riak/tests/test_search.py b/riak/tests/test_search.py index 17e2ea6a..efc5aa65 100644 --- a/riak/tests/test_search.py +++ b/riak/tests/test_search.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- from __future__ import print_function diff --git a/riak/tests/test_security.py b/riak/tests/test_security.py index 8a3db8f7..d9e1ee10 100644 --- a/riak/tests/test_security.py +++ b/riak/tests/test_security.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import sys import unittest diff --git a/riak/tests/test_server_test.py b/riak/tests/test_server_test.py index 45dcbd55..2b5cfc48 100644 --- a/riak/tests/test_server_test.py +++ b/riak/tests/test_server_test.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import sys import unittest diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 8956dd98..862911c2 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ # -*- coding: utf-8 -*- import datetime import six diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 765eba67..985ae3c5 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import datetime import logging diff --git a/riak/tests/test_util.py b/riak/tests/test_util.py index f3415557..766c82fa 100644 --- a/riak/tests/test_util.py +++ b/riak/tests/test_util.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import datetime import unittest diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index 5d1adf53..08a891e6 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # -*- coding: utf-8 -*- import unittest diff --git a/riak/tests/yz_setup.py b/riak/tests/yz_setup.py index 668e2ba8..78c44755 100644 --- a/riak/tests/yz_setup.py +++ b/riak/tests/yz_setup.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import logging from riak import RiakError diff --git a/riak/transports/__init__.py b/riak/transports/__init__.py index e69de29b..056d414e 100644 --- a/riak/transports/__init__.py +++ b/riak/transports/__init__.py @@ -0,0 +1,13 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. diff --git a/riak/transports/feature_detect.py b/riak/transports/feature_detect.py index 87e30fbf..a133655e 100644 --- a/riak/transports/feature_detect.py +++ b/riak/transports/feature_detect.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from distutils.version import LooseVersion from riak.util import lazy_property diff --git a/riak/transports/http/__init__.py b/riak/transports/http/__init__.py index 69c7de8c..68797b7b 100644 --- a/riak/transports/http/__init__.py +++ b/riak/transports/http/__init__.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import socket import select diff --git a/riak/transports/http/connection.py b/riak/transports/http/connection.py index c6b3b9c5..d1c16281 100644 --- a/riak/transports/http/connection.py +++ b/riak/transports/http/connection.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import base64 from six import PY2 diff --git a/riak/transports/http/resources.py b/riak/transports/http/resources.py index 2017f420..2b53ae7c 100644 --- a/riak/transports/http/resources.py +++ b/riak/transports/http/resources.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import re from six import PY2 diff --git a/riak/transports/http/search.py b/riak/transports/http/search.py index 4e6c69e6..d43688f6 100644 --- a/riak/transports/http/search.py +++ b/riak/transports/http/search.py @@ -1,3 +1,18 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + class XMLSearchResult(object): # Match tags that are document fields fieldtags = ['str', 'int', 'date'] diff --git a/riak/transports/http/stream.py b/riak/transports/http/stream.py index b5ec00d7..590565f2 100644 --- a/riak/transports/http/stream.py +++ b/riak/transports/http/stream.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import json import re diff --git a/riak/transports/http/transport.py b/riak/transports/http/transport.py index c2d7c6dc..7cba2681 100644 --- a/riak/transports/http/transport.py +++ b/riak/transports/http/transport.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ try: import simplejson as json except ImportError: diff --git a/riak/transports/pool.py b/riak/transports/pool.py index 63b9aa2d..38a87b43 100644 --- a/riak/transports/pool.py +++ b/riak/transports/pool.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from __future__ import print_function import threading diff --git a/riak/transports/security.py b/riak/transports/security.py index a2ac46d5..d7828b72 100644 --- a/riak/transports/security.py +++ b/riak/transports/security.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + """ Copyright 2015 Basho Technologies, Inc. diff --git a/riak/transports/tcp/__init__.py b/riak/transports/tcp/__init__.py index 32b0dbee..d58add2e 100644 --- a/riak/transports/tcp/__init__.py +++ b/riak/transports/tcp/__init__.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import errno import socket diff --git a/riak/transports/tcp/connection.py b/riak/transports/tcp/connection.py index 949999d3..13c02cf4 100644 --- a/riak/transports/tcp/connection.py +++ b/riak/transports/tcp/connection.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ import errno import logging import socket diff --git a/riak/transports/tcp/stream.py b/riak/transports/tcp/stream.py index c73778bf..95436825 100644 --- a/riak/transports/tcp/stream.py +++ b/riak/transports/tcp/stream.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import json import riak.pb.messages diff --git a/riak/transports/tcp/transport.py b/riak/transports/tcp/transport.py index 39c0b456..5d3a1599 100644 --- a/riak/transports/tcp/transport.py +++ b/riak/transports/tcp/transport.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import six import riak.pb.messages diff --git a/riak/transports/transport.py b/riak/transports/transport.py index ba413865..258d24e8 100644 --- a/riak/transports/transport.py +++ b/riak/transports/transport.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import base64 import random import threading diff --git a/riak/ts_object.py b/riak/ts_object.py index 24eccbe1..2c7fddf5 100644 --- a/riak/ts_object.py +++ b/riak/ts_object.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + import collections from riak import RiakError diff --git a/riak/tz.py b/riak/tz.py index b20054ee..fc44e32d 100644 --- a/riak/tz.py +++ b/riak/tz.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from datetime import tzinfo, timedelta ZERO = timedelta(0) diff --git a/riak/util.py b/riak/util.py index e3124612..9101275b 100644 --- a/riak/util.py +++ b/riak/util.py @@ -1,3 +1,17 @@ +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + from __future__ import print_function import datetime diff --git a/version.py b/version.py index 6c802c58..ca6a019c 100644 --- a/version.py +++ b/version.py @@ -1,4 +1,16 @@ -# This program is placed into the public domain. +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. """ Gets the current version number. From f78cc64221e6a1dae475cc211fc036bf0b010902 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 10 Jan 2017 13:28:02 -0800 Subject: [PATCH 311/324] fix py2 --- riak/tests/test_kv.py | 2 +- riak/tests/test_mapreduce.py | 2 +- riak/tests/test_timeseries_pbuf.py | 2 +- riak/tests/test_timeseries_ttb.py | 2 +- riak/tests/test_yokozuna.py | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index f8ffb76a..56f55844 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright 2010-present Basho Technologies, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# -*- coding: utf-8 -*- import copy import os import sys diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index 5f5c5ed0..f8b90e98 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright 2010-present Basho Technologies, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-# -*- coding: utf-8 -*- from __future__ import print_function import unittest diff --git a/riak/tests/test_timeseries_pbuf.py b/riak/tests/test_timeseries_pbuf.py index 862911c2..8cffa1c7 100644 --- a/riak/tests/test_timeseries_pbuf.py +++ b/riak/tests/test_timeseries_pbuf.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright 2010-present Basho Technologies, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# -*- coding: utf-8 -*- import datetime import six import unittest diff --git a/riak/tests/test_timeseries_ttb.py b/riak/tests/test_timeseries_ttb.py index 985ae3c5..d2434799 100644 --- a/riak/tests/test_timeseries_ttb.py +++ b/riak/tests/test_timeseries_ttb.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright 2010-present Basho Technologies, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# -*- coding: utf-8 -*- import datetime import logging import six diff --git a/riak/tests/test_yokozuna.py b/riak/tests/test_yokozuna.py index 08a891e6..a1823774 100644 --- a/riak/tests/test_yokozuna.py +++ b/riak/tests/test_yokozuna.py @@ -1,3 +1,4 @@ +# -*- coding: utf-8 -*- # Copyright 2010-present Basho Technologies, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,7 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -# -*- coding: utf-8 -*- import unittest from riak.tests import RUN_YZ From c096c4e573e1dc084ada42fb2a5ab74e01a01ca4 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Tue, 10 Jan 2017 14:54:48 -0800 Subject: [PATCH 312/324] clean up headers --- riak/bucket.py | 22 +++------------------- riak/client/index_page.py | 18 ------------------ riak/content.py | 17 ----------------- riak/datatypes/__init__.py | 18 ------------------ riak/datatypes/datatype.py | 19 ------------------- riak/datatypes/flag.py | 18 ------------------ riak/datatypes/map.py | 18 ------------------ riak/datatypes/register.py | 18 ------------------ riak/datatypes/types.py | 18 ------------------ riak/mapreduce.py | 23 +++-------------------- riak/resolver.py | 18 ------------------ riak/riak_error.py | 18 ------------------ riak/security.py | 18 ------------------ riak/tests/pool-grinder.py | 30 +++++++++++++----------------- riak/transports/security.py | 18 ------------------ 15 files changed, 19 insertions(+), 272 deletions(-) diff --git a/riak/bucket.py b/riak/bucket.py index 3379fa8e..7dde7351 100644 --- a/riak/bucket.py +++ b/riak/bucket.py @@ -1,3 +1,6 @@ +# Copyright 2010 Rusty Klophaus +# Copyright 2010 Justin Sheehy +# Copyright 2009 Jay Baird # Copyright 2010-present Basho Technologies, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,25 +15,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. 
You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" from six import string_types, PY2 import mimetypes from riak.util import lazy_property diff --git a/riak/client/index_page.py b/riak/client/index_page.py index 9014d6c4..8e094a66 100644 --- a/riak/client/index_page.py +++ b/riak/client/index_page.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2013 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from collections import namedtuple, Sequence diff --git a/riak/content.py b/riak/content.py index 87b35594..6eb9e7df 100644 --- a/riak/content.py +++ b/riak/content.py @@ -12,23 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2013 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" from riak import RiakError from six import string_types diff --git a/riak/datatypes/__init__.py b/riak/datatypes/__init__.py index 87062a74..87575a33 100644 --- a/riak/datatypes/__init__.py +++ b/riak/datatypes/__init__.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. 
-""" - from .types import TYPES from .datatype import Datatype from .counter import Counter diff --git a/riak/datatypes/datatype.py b/riak/datatypes/datatype.py index bad20d2c..2303dba7 100644 --- a/riak/datatypes/datatype.py +++ b/riak/datatypes/datatype.py @@ -12,25 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - - from .errors import ContextRequired from . import TYPES diff --git a/riak/datatypes/flag.py b/riak/datatypes/flag.py index 3ce78c81..bfb869a2 100644 --- a/riak/datatypes/flag.py +++ b/riak/datatypes/flag.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from riak.datatypes.datatype import Datatype from riak.datatypes import TYPES diff --git a/riak/datatypes/map.py b/riak/datatypes/map.py index 31d6aa22..b5b790bf 100644 --- a/riak/datatypes/map.py +++ b/riak/datatypes/map.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from collections import Mapping from riak.util import lazy_property from .datatype import Datatype diff --git a/riak/datatypes/register.py b/riak/datatypes/register.py index c8402743..247a2a52 100644 --- a/riak/datatypes/register.py +++ b/riak/datatypes/register.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. 
You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from collections import Sized from riak.datatypes.datatype import Datatype from six import string_types diff --git a/riak/datatypes/types.py b/riak/datatypes/types.py index 79b969fb..b9761294 100644 --- a/riak/datatypes/types.py +++ b/riak/datatypes/types.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - #: A dict from :attr:`type names ` to the #: class that implements them. This is used inside :class:`Map` to #: initialize new values. diff --git a/riak/mapreduce.py b/riak/mapreduce.py index 5e4e6cfc..7d2f690e 100644 --- a/riak/mapreduce.py +++ b/riak/mapreduce.py @@ -1,3 +1,6 @@ +# Copyright 2010 Rusty Klophaus +# Copyright 2010 Justin Sheehy +# Copyright 2009 Jay Baird # Copyright 2010-present Basho Technologies, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -12,26 +15,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2010 Rusty Klophaus -Copyright 2010 Justin Sheehy -Copyright 2009 Jay Baird - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - from __future__ import print_function from collections import Iterable, namedtuple from riak import RiakError diff --git a/riak/resolver.py b/riak/resolver.py index 260740d6..6e245ff3 100644 --- a/riak/resolver.py +++ b/riak/resolver.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2013 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. 
See the License for the -specific language governing permissions and limitations -under the License. -""" - def default_resolver(riak_object): """ diff --git a/riak/riak_error.py b/riak/riak_error.py index ec70d54f..97d0878c 100644 --- a/riak/riak_error.py +++ b/riak/riak_error.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - class RiakError(Exception): """ diff --git a/riak/security.py b/riak/security.py index d4dfdd1a..d048f008 100644 --- a/riak/security.py +++ b/riak/security.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2014 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import ssl import warnings from riak import RiakError diff --git a/riak/tests/pool-grinder.py b/riak/tests/pool-grinder.py index 4e39ead3..19cb71d7 100755 --- a/riak/tests/pool-grinder.py +++ b/riak/tests/pool-grinder.py @@ -1,21 +1,17 @@ #!/usr/bin/env python -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" +# Copyright 2010-present Basho Technologies, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
from __future__ import print_function from six import PY2 diff --git a/riak/transports/security.py b/riak/transports/security.py index d7828b72..01cf6315 100644 --- a/riak/transports/security.py +++ b/riak/transports/security.py @@ -12,24 +12,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -""" -Copyright 2015 Basho Technologies, Inc. - -This file is provided to you under the Apache License, -Version 2.0 (the "License"); you may not use this file -except in compliance with the License. You may obtain -a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, -software distributed under the License is distributed on an -"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -KIND, either express or implied. See the License for the -specific language governing permissions and limitations -under the License. -""" - import socket from riak.security import SecurityError, USE_STDLIB_SSL if USE_STDLIB_SSL: From 0f360924fc9260571d10c42f02d21f630973bbda Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 19 Jan 2017 08:41:39 -0800 Subject: [PATCH 313/324] Add Python 3.6 to the mix --- .travis.yml | 1 + build/pyenv-setup | 4 ++-- tools | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2013cbe3..90f86d41 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,6 +6,7 @@ python: - '3.3' - '3.4' - '3.5' + - '3.6' - nightly addons: hosts: diff --git a/build/pyenv-setup b/build/pyenv-setup index 84657872..7759d45a 100755 --- a/build/pyenv-setup +++ b/build/pyenv-setup @@ -51,7 +51,7 @@ fi do_pip_upgrades='false' # NB: 2.7.8 is special-cased -for pyver in 2.7 3.3 3.4 3.5 +for pyver in 2.7 3.3 3.4 3.5 3.6 do riak_py_alias="riak_$pyver" if ! pyenv versions | fgrep -v 'riak_2.7.8' | fgrep -q "$riak_py_alias" @@ -78,7 +78,7 @@ then fi pushd $PROJDIR -pyenv local 'riak_3.5' 'riak_3.4' 'riak_3.3' 'riak_2.7' 'riak_2.7.8' +pyenv local 'riak_3.6' 'riak_3.5' 'riak_3.4' 'riak_3.3' 'riak_2.7' 'riak_2.7.8' pyenv rehash diff --git a/tools b/tools index 1fa90702..1f54803c 160000 --- a/tools +++ b/tools @@ -1 +1 @@ -Subproject commit 1fa9070257c3443f04337e7e37ef083ed7ca2fe4 +Subproject commit 1f54803ca7912a41a0ec47c0028c259b97475e1f From 488c22b9492cda4870999acedd2c28d9341e5c3b Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Fri, 27 Jan 2017 08:25:58 -0800 Subject: [PATCH 314/324] Add global disable_list_exceptions variable to disable exceptions thrown during expensive operations. Raise ListError if mapreduce over a bucket is attempted --- Makefile | 2 ++ riak/__init__.py | 12 +++++++++--- riak/client/operations.py | 23 +++++++++++++++++++++-- riak/mapreduce.py | 17 ++++++++++------- riak/riak_error.py | 11 +++++++++++ riak/tests/base.py | 3 +++ riak/tests/test_kv.py | 34 ++++++++++++++++++++++++++++++++-- riak/tests/test_mapreduce.py | 9 ++++++++- riak/tests/yz_setup.py | 4 ++++ 9 files changed, 100 insertions(+), 15 deletions(-) diff --git a/Makefile b/Makefile index 316389e4..166e4007 100644 --- a/Makefile +++ b/Makefile @@ -22,6 +22,8 @@ DOCSDIR := $(PROJDIR)/docs PYPI_REPOSITORY ?= pypi +all: lint test + .PHONY: lint lint: $(PROJDIR)/.runner lint diff --git a/riak/__init__.py b/riak/__init__.py index de68354a..306cf7a0 100644 --- a/riak/__init__.py +++ b/riak/__init__.py @@ -19,7 +19,7 @@ operations, and run Linkwalking operations. 
""" -from riak.riak_error import RiakError, ConflictError +from riak.riak_error import RiakError, ConflictError, ListError from riak.client import RiakClient from riak.bucket import RiakBucket, BucketType from riak.table import Table @@ -30,11 +30,17 @@ __all__ = ['RiakBucket', 'Table', 'BucketType', 'RiakNode', 'RiakObject', 'RiakClient', 'RiakMapReduce', 'RiakKeyFilter', - 'RiakLink', 'RiakError', 'ConflictError', - 'ONE', 'ALL', 'QUORUM', 'key_filter'] + 'RiakLink', 'RiakError', 'ConflictError', 'ListError', + 'ONE', 'ALL', 'QUORUM', 'key_filter', + 'disable_list_exceptions'] ONE = "one" ALL = "all" QUORUM = "quorum" key_filter = RiakKeyFilter() + +""" +Set to true to allow listing operations +""" +disable_list_exceptions = False diff --git a/riak/client/operations.py b/riak/client/operations.py index 1acf06e1..0d507f12 100644 --- a/riak/client/operations.py +++ b/riak/client/operations.py @@ -13,11 +13,11 @@ # limitations under the License. import six - import riak.client.multi +from riak import ListError from riak.client.transport import RiakClientTransport, \ - retryable, retryableHttpOnly + retryable, retryableHttpOnly from riak.client.index_page import IndexPage from riak.datatypes import TYPES from riak.table import Table @@ -55,7 +55,11 @@ def get_buckets(self, transport, bucket_type=None, timeout=None): :rtype: list of :class:`RiakBucket ` instances """ + if not riak.disable_list_exceptions: + raise ListError() + _validate_timeout(timeout) + if bucket_type: bucketfn = self._bucket_type_bucket_builder else: @@ -100,6 +104,9 @@ def stream_buckets(self, bucket_type=None, timeout=None): ` instances """ + if not riak.disable_list_exceptions: + raise ListError() + _validate_timeout(timeout) if bucket_type: @@ -467,7 +474,11 @@ def get_keys(self, transport, bucket, timeout=None): :type timeout: int :rtype: list """ + if not riak.disable_list_exceptions: + raise ListError() + _validate_timeout(timeout) + return transport.get_keys(bucket, timeout=timeout) def stream_keys(self, bucket, timeout=None): @@ -503,6 +514,9 @@ def stream_keys(self, bucket, timeout=None): :type timeout: int :rtype: iterator """ + if not riak.disable_list_exceptions: + raise ListError() + _validate_timeout(timeout) def make_op(transport): @@ -678,10 +692,15 @@ def ts_stream_keys(self, table, timeout=None): :type timeout: int :rtype: iterator """ + if not riak.disable_list_exceptions: + raise ListError() + t = table if isinstance(t, six.string_types): t = Table(self, table) + _validate_timeout(timeout) + resource = self._acquire() transport = resource.object stream = transport.ts_stream_keys(t, timeout) diff --git a/riak/mapreduce.py b/riak/mapreduce.py index 7d2f690e..1b604663 100644 --- a/riak/mapreduce.py +++ b/riak/mapreduce.py @@ -17,9 +17,9 @@ from __future__ import print_function from collections import Iterable, namedtuple -from riak import RiakError from six import string_types, PY2 -from riak.bucket import RiakBucket + +import riak #: Links are just bucket/key/tag tuples, this class provides a @@ -128,8 +128,10 @@ def add_bucket(self, bucket, bucket_type=None): :type bucket_type: string, None :rtype: :class:`RiakMapReduce` """ + if not riak.disable_list_exceptions: + raise riak.ListError() self._input_mode = 'bucket' - if isinstance(bucket, RiakBucket): + if isinstance(bucket, riak.RiakBucket): if bucket.bucket_type.is_default(): self._inputs = {'bucket': bucket.name} else: @@ -308,14 +310,15 @@ def run(self, timeout=None): try: result = self._client.mapred(self._inputs, query, timeout) - except RiakError 
as e: + except riak.RiakError as e: if 'worker_startup_failed' in e.value: for phase in self._phases: if phase._language == 'erlang': if type(phase._function) is str: - raise RiakError('May have tried erlang strfun ' - 'when not allowed\n' - 'original error: ' + e.value) + raise riak.RiakError( + 'May have tried erlang strfun ' + 'when not allowed\n' + 'original error: ' + e.value) raise e # If the last phase is NOT a link phase, then return the result. diff --git a/riak/riak_error.py b/riak/riak_error.py index 97d0878c..4fe0ce05 100644 --- a/riak/riak_error.py +++ b/riak/riak_error.py @@ -36,3 +36,14 @@ class ConflictError(RiakError): """ def __init__(self, message='Object in conflict'): super(ConflictError, self).__init__(message) + + +class ListError(RiakError): + """ + Raised when a list operation is attempted and + riak.disable_list_exceptions is false. + """ + def __init__(self, message='Bucket and key list operations ' + 'are expensive and should not be ' + 'used in production.'): + super(ListError, self).__init__(message) diff --git a/riak/tests/base.py b/riak/tests/base.py index aa81c0da..9aaf4e69 100644 --- a/riak/tests/base.py +++ b/riak/tests/base.py @@ -15,6 +15,7 @@ # -*- coding: utf-8 -*- import logging import random +import riak from riak.client import RiakClient from riak.tests import HOST, PROTOCOL, PB_PORT, HTTP_PORT, SECURITY_CREDS @@ -70,9 +71,11 @@ def create_client(cls, host=None, http_port=None, pb_port=None, **kwargs) def setUp(self): + riak.disable_list_exceptions = True self.bucket_name = self.randname() self.key_name = self.randname() self.client = self.create_client() def tearDown(self): + riak.disable_list_exceptions = False self.client.close() diff --git a/riak/tests/test_kv.py b/riak/tests/test_kv.py index 56f55844..63206c95 100644 --- a/riak/tests/test_kv.py +++ b/riak/tests/test_kv.py @@ -20,7 +20,8 @@ from six import string_types, PY2, PY3 from time import sleep -from riak import ConflictError, RiakBucket, RiakError +from riak import ConflictError, RiakError, ListError +from riak import RiakClient, RiakBucket, BucketType from riak.resolver import default_resolver, last_written_resolver from riak.tests import RUN_KV, RUN_RESOLVE, PROTOCOL from riak.tests.base import IntegrationTestBase @@ -63,7 +64,6 @@ def tearDownModule(): class NotJsonSerializable(object): - def __init__(self, *args, **kwargs): self.args = list(args) self.kwargs = kwargs @@ -86,6 +86,36 @@ def __eq__(self, other): return True +class KVUnitTests(unittest.TestCase): + def test_list_keys_exception(self): + c = RiakClient() + bt = BucketType(c, 'test') + b = RiakBucket(c, 'test', bt) + with self.assertRaises(ListError): + b.get_keys() + + def test_stream_buckets_exception(self): + c = RiakClient() + with self.assertRaises(ListError): + bs = [] + for bl in c.stream_buckets(): + bs.extend(bl) + + def test_stream_keys_exception(self): + c = RiakClient() + with self.assertRaises(ListError): + ks = [] + for kl in c.stream_keys('test'): + ks.extend(kl) + + def test_ts_stream_keys_exception(self): + c = RiakClient() + with self.assertRaises(ListError): + ks = [] + for kl in c.ts_stream_keys('test'): + ks.extend(kl) + + @unittest.skipUnless(RUN_KV, 'RUN_KV is 0') class BasicKVTests(IntegrationTestBase, unittest.TestCase, Comparison): def test_no_returnbody(self): diff --git a/riak/tests/test_mapreduce.py b/riak/tests/test_mapreduce.py index f8b90e98..bfdfc7dd 100644 --- a/riak/tests/test_mapreduce.py +++ b/riak/tests/test_mapreduce.py @@ -19,7 +19,7 @@ from six import PY2 from riak.mapreduce import 
RiakMapReduce -from riak import key_filter, RiakError +from riak import key_filter, RiakClient, RiakError, ListError from riak.tests import RUN_MAPREDUCE, RUN_SECURITY, RUN_YZ from riak.tests.base import IntegrationTestBase from riak.tests.test_yokozuna import wait_for_yz_index @@ -39,6 +39,13 @@ def tearDownModule(): yzTearDown(testrun_yz_mr) +class MapReduceUnitTests(unittest.TestCase): + def test_mapred_bucket_exception(self): + c = RiakClient() + with self.assertRaises(ListError): + c.add('bucket') + + @unittest.skipUnless(RUN_MAPREDUCE, 'RUN_MAPREDUCE is 0') class LinkTests(IntegrationTestBase, unittest.TestCase): def test_store_and_get_links(self): diff --git a/riak/tests/yz_setup.py b/riak/tests/yz_setup.py index 78c44755..88a7daee 100644 --- a/riak/tests/yz_setup.py +++ b/riak/tests/yz_setup.py @@ -13,6 +13,7 @@ # limitations under the License. import logging +import riak from riak import RiakError from riak.tests import RUN_YZ @@ -21,6 +22,7 @@ def yzSetUp(*yzdata): if RUN_YZ: + riak.disable_list_exceptions = True c = IntegrationTestBase.create_client() for yz in yzdata: logging.debug("yzSetUp: %s", yz) @@ -43,6 +45,7 @@ def yzSetUp(*yzdata): def yzTearDown(c, *yzdata): if RUN_YZ: + riak.disable_list_exceptions = True c = IntegrationTestBase.create_client() for yz in yzdata: logging.debug("yzTearDown: %s", yz) @@ -57,3 +60,4 @@ def yzTearDown(c, *yzdata): for key in keys: b.delete(key) c.close() + riak.disable_list_exceptions = False From 4fee03a6c0fd51a368c0364adcc5408dafe9c003 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Thu, 9 Feb 2017 08:43:56 -0800 Subject: [PATCH 315/324] Add note about exceptions being raised for list operations --- RELNOTES.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/RELNOTES.md b/RELNOTES.md index bd567fde..0cc58993 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,5 +1,9 @@ # Riak Python Client Release Notes +## [`2.8.0` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.8.0) + +* [Running expensive operations *now raise exceptions*](https://github.com/basho/riak-python-client/pull/518). You can disable these exceptions for development purposes but should not do so in production. 
+ ## [`2.7.0` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.7.0) * Riak TS 1.5 support * Support for `head` parameter From b27163d4526fd6c8894e225e61f8d9de27223034 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 27 Feb 2017 12:54:35 -0800 Subject: [PATCH 316/324] Add a workaround for Python bug 19542 --- riak/client/__init__.py | 24 ++++++++++++++++-------- 1 file changed, 16 insertions(+), 8 deletions(-) diff --git a/riak/client/__init__.py b/riak/client/__init__.py index ac33f8f2..7015b48f 100644 --- a/riak/client/__init__.py +++ b/riak/client/__init__.py @@ -275,8 +275,9 @@ def bucket(self, name, bucket_type='default'): raise TypeError('bucket_type must be a string ' 'or riak.bucket.BucketType') - return self._buckets.setdefault((bucket_type, name), - RiakBucket(self, name, bucket_type)) + b = RiakBucket(self, name, bucket_type) + return self._setdefault_handle_none( + self._buckets, (bucket_type, name), b) def bucket_type(self, name): """ @@ -291,12 +292,9 @@ def bucket_type(self, name): if not isinstance(name, string_types): raise TypeError('BucketType name must be a string') - if name in self._bucket_types: - return self._bucket_types[name] - else: - btype = BucketType(self, name) - self._bucket_types[name] = btype - return btype + btype = BucketType(self, name) + return self._setdefault_handle_none( + self._bucket_types, name, btype) def table(self, name): """ @@ -390,6 +388,16 @@ def _error_rate(node): else: return random.choice(good) + def _setdefault_handle_none(self, wvdict, key, value): + # TODO FIXME FUTURE + # This is a workaround for Python issue 19542 + # http://bugs.python.org/issue19542 + rv = wvdict.setdefault(key, value) + if rv is None: + return value + else: + return rv + @lazy_property def _multiget_pool(self): if self._multiget_pool_size: From b0ee21aec32a584f07d2c377d904352e884b5cfe Mon Sep 17 00:00:00 2001 From: Ali Riza Keles Date: Tue, 21 Mar 2017 16:52:57 +0300 Subject: [PATCH 317/324] fix missing str to byte operation of fl params in encode_search_query method of codecs.pbuf module --- riak/codecs/pbuf.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/riak/codecs/pbuf.py b/riak/codecs/pbuf.py index fe34ee15..0b4de2a6 100644 --- a/riak/codecs/pbuf.py +++ b/riak/codecs/pbuf.py @@ -556,9 +556,9 @@ def encode_search_query(self, req, **kwargs): req.op = kwargs['q.op'] if 'fl' in kwargs: if isinstance(kwargs['fl'], list): - req.fl.extend(kwargs['fl']) + req.fl.extend([str_to_bytes(fl) for fl in kwargs['fl']]) else: - req.fl.append(kwargs['fl']) + req.fl.append(str_to_bytes(kwargs['fl'])) if 'presort' in kwargs: req.presort = kwargs['presort'] From a190e871eabf2f42df40c62e888282848b9b8505 Mon Sep 17 00:00:00 2001 From: Luke Bakken Date: Mon, 27 Mar 2017 16:29:29 -0700 Subject: [PATCH 318/324] 2.8.0 release is actually 3.0.0 --- RELNOTES.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/RELNOTES.md b/RELNOTES.md index 0cc58993..6722c3ec 100644 --- a/RELNOTES.md +++ b/RELNOTES.md @@ -1,6 +1,6 @@ # Riak Python Client Release Notes -## [`2.8.0` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-2.8.0) +## [`3.0.0` Release](https://github.com/basho/riak-python-client/issues?q=milestone%3Ariak-python-client-3.0.0) * [Running expensive operations *now raise exceptions*](https://github.com/basho/riak-python-client/pull/518). You can disable these exceptions for development purposes but should not do so in production. 
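The release note just above is the user-visible side of the `ListError` guard added earlier in this series (riak/riak_error.py plus the test changes): key and bucket listings now fail fast on the client unless `riak.disable_list_exceptions` is set. Below is a minimal sketch of what that looks like from application code; the bucket name and the fallback handling are illustrative, not taken from the patches.

```python
import riak
from riak import RiakClient, ListError

client = RiakClient()             # construction alone does not contact the cluster
bucket = client.bucket('users')   # hypothetical bucket name

try:
    keys = bucket.get_keys()      # full key listing: raises ListError by default
except ListError:
    keys = []                     # expensive listing refused; fall back gracefully

# Development/test escape hatch, mirroring riak/tests/base.py setUp():
riak.disable_list_exceptions = True
```

The test base class flips the flag back to `False` in `tearDown`, so the guard stays on for anything that has not explicitly opted out.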
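The `_setdefault_handle_none` helper above works around Python issue 19542, in which `setdefault()` on a weak-value dictionary can return `None` if the stored referent is garbage-collected at just the wrong moment. Here is a standalone sketch of the same pattern; the `WeakValueDictionary` cache and the `CacheEntry` class are assumptions made for illustration, inferred from the referenced bug rather than copied from the client code.

```python
import weakref


class CacheEntry(object):
    """Stand-in for the bucket/bucket-type objects the client caches."""


def setdefault_handle_none(wvdict, key, value):
    # http://bugs.python.org/issue19542: WeakValueDictionary.setdefault()
    # may return None when the existing referent dies mid-call, so fall
    # back to the value we just tried to insert.
    rv = wvdict.setdefault(key, value)
    return value if rv is None else rv


cache = weakref.WeakValueDictionary()
entry = setdefault_handle_none(cache, ('default', 'users'), CacheEntry())
assert entry is not None
```

Returning the freshly created object instead of re-reading the dictionary keeps the caller correct even when the cached entry has already been collected.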
From 2c01ebb72c76b8f27ec4d2965f59a7a7165f2bf2 Mon Sep 17 00:00:00 2001 From: Steven Joseph Date: Sat, 28 Oct 2017 11:53:33 +1100 Subject: [PATCH 319/324] Expose facet_counts in riak http results --- riak/codecs/http.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/riak/codecs/http.py b/riak/codecs/http.py index bfc91f2c..5da99961 100644 --- a/riak/codecs/http.py +++ b/riak/codecs/http.py @@ -229,6 +229,8 @@ def _normalize_json_search_response(self, json): if u'response' in json: result['num_found'] = json[u'response'][u'numFound'] result['max_score'] = float(json[u'response'][u'maxScore']) + if 'facet_counts' in json: + result['facet_counts'] = json[u'facet_counts'] docs = [] for doc in json[u'response'][u'docs']: resdoc = {} From 648f9056d47332df890232e85d497623608e2964 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?R=C3=B4mulo=20A=2E=20Ceccon?= Date: Mon, 19 Mar 2018 15:05:41 +0100 Subject: [PATCH 320/324] Do not use deprecated argument `verbose` in namedtuple Argument `verbose` to nametuple constructor was deprecated in Python 3.3, and was removed in Python 3.7. --- riak/codecs/__init__.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/riak/codecs/__init__.py b/riak/codecs/__init__.py index 00324f14..b824fcc0 100644 --- a/riak/codecs/__init__.py +++ b/riak/codecs/__init__.py @@ -21,8 +21,7 @@ from riak.util import bytes_to_str Msg = collections.namedtuple('Msg', - ['msg_code', 'data', 'resp_code'], - verbose=False) + ['msg_code', 'data', 'resp_code']) class Codec(object): From 95e61a03ce6e51289aa184a618f7462ff7fd602f Mon Sep 17 00:00:00 2001 From: bryanhuntesl <31992054+bryanhuntesl@users.noreply.github.com> Date: Mon, 19 Mar 2018 18:25:02 +0000 Subject: [PATCH 321/324] disable spurious flake8 python style warnings --- .travis.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.sh b/.travis.sh index 497de259..40518170 100755 --- a/.travis.sh +++ b/.travis.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -o errexit -flake8 --exclude=riak/pb riak *.py +flake8 --ignore E722,E741 --exclude=riak/pb riak *.py sudo riak-admin security disable From c52b745a96115426205d7c8a6245902cf2441b3b Mon Sep 17 00:00:00 2001 From: bryanhuntesl <31992054+bryanhuntesl@users.noreply.github.com> Date: Mon, 19 Mar 2018 18:43:41 +0000 Subject: [PATCH 322/324] and again... add more exclusions.. --- .travis.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.sh b/.travis.sh index 40518170..739c66cd 100755 --- a/.travis.sh +++ b/.travis.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash set -o errexit -flake8 --ignore E722,E741 --exclude=riak/pb riak *.py +flake8 --ignore E123,E126,E226,E722,E741 --exclude=riak/pb riak *.py sudo riak-admin security disable From b3a7d2d2c2621efc3cbf7a339a2341ff4da882bd Mon Sep 17 00:00:00 2001 From: bryanhuntesl <31992054+bryanhuntesl@users.noreply.github.com> Date: Mon, 19 Mar 2018 18:54:49 +0000 Subject: [PATCH 323/324] Reduce test time - test only 2.7, 3.6, and nightly test only 2.7 (stable), 3.6 (3 series stable) and nightly - the matrix was too big - 3.2, 3.3, 3.4, and 3.5 are the least used. 
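Looking back at the `Msg` namedtuple change (PATCH 320/324): `verbose=False` was already the default, so dropping the keyword is behaviour-neutral while keeping the codec importable on Python 3.7 and later, where the argument was removed outright. A quick illustrative check (the field values below are made up):

```python
import collections

# Same definition as riak/codecs/__init__.py after the change; passing
# verbose=False here would raise TypeError on Python 3.7 and later.
Msg = collections.namedtuple('Msg', ['msg_code', 'data', 'resp_code'])

m = Msg(msg_code=12, data=b'\x0a\x00', resp_code=None)
print(m.msg_code, len(m.data), m.resp_code)
```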
--- .travis.yml | 3 --- 1 file changed, 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 90f86d41..7c46a5cd 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,9 +3,6 @@ dist: trusty language: python python: - '2.7' - - '3.3' - - '3.4' - - '3.5' - '3.6' - nightly addons: From 3a2bf990db0792a3ba00719db5fa1651c89ce758 Mon Sep 17 00:00:00 2001 From: Steven Joseph Date: Tue, 20 Mar 2018 10:11:43 +1100 Subject: [PATCH 324/324] Expose more result types from solr results --- riak/codecs/http.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/riak/codecs/http.py b/riak/codecs/http.py index 5da99961..b981b77a 100644 --- a/riak/codecs/http.py +++ b/riak/codecs/http.py @@ -226,11 +226,15 @@ def _normalize_json_search_response(self, json): same return value """ result = {} + if 'facet_counts' in json: + result['facet_counts'] = json[u'facet_counts'] + if 'grouped' in json: + result['grouped'] = json[u'grouped'] + if 'stats' in json: + result['stats'] = json[u'stats'] if u'response' in json: result['num_found'] = json[u'response'][u'numFound'] result['max_score'] = float(json[u'response'][u'maxScore']) - if 'facet_counts' in json: - result['facet_counts'] = json[u'facet_counts'] docs = [] for doc in json[u'response'][u'docs']: resdoc = {}
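PATCH 319/324 and PATCH 324/324 make the HTTP codec pass `facet_counts`, `grouped`, and `stats` through from the Solr JSON payload into the normalized result dict, next to the existing `num_found`, `max_score`, and `docs` keys. A sketch of how a caller might read them, assuming an HTTP-configured client; the index name, query, and faceting parameters are hypothetical, and the extra keys only appear when Solr actually returns those sections:

```python
from riak import RiakClient

client = RiakClient(protocol='http')   # the passthrough lives in the HTTP codec
results = client.fulltext_search(
    'famous',                          # hypothetical Yokozuna index
    'name_s:*',
    **{'facet': 'true', 'facet.field': 'state_s'})

print(results['num_found'], results['max_score'])
for doc in results['docs']:
    print(doc)

# Added by these patches; use .get() since Solr omits them unless requested:
facets = results.get('facet_counts', {})
grouped = results.get('grouped', {})
stats = results.get('stats', {})
```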