From 31b67792fa4b949f4cb96b6d610ab527adb4e218 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 27 Oct 2015 11:05:59 -0700 Subject: [PATCH 001/829] Return with proper exception set on pipeline violation This was missed in some other tests, but I just got a crash for this. Change-Id: Ib1cad45822ec2c3bc792a189d5a782447d07c2af Reviewed-on: http://review.couchbase.org/56458 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Dave Rigby <daver@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- src/views.c | 1 + 1 file changed, 1 insertion(+) diff --git a/src/views.c b/src/views.c index 85eff9bc2..83127ec6c 100644 --- a/src/views.c +++ b/src/views.c @@ -248,6 +248,7 @@ pycbc_Bucket__view_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) PYCBC_EXC_WRAP(PYCBC_EXC_PIPELINE, 0, "HTTP/View Requests cannot be executed in " "pipeline context"); + goto GT_DONE; } mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); From d8880760099eb0e324ebb5b194d842c4d32b3394 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 27 Oct 2015 11:10:04 -0700 Subject: [PATCH 002/829] Fix PYCBC-295 test for Py3 Fake transcoder was returning a str value when it should be returning a bytes value. This is not an issue in Py2 where str==byte. Change-Id: I97adb4beac2676a12196f7e978c990c39e6bfa90 Reviewed-on: http://review.couchbase.org/56459 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Dave Rigby <daver@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- couchbase/tests/cases/transcoder_t.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/couchbase/tests/cases/transcoder_t.py b/couchbase/tests/cases/transcoder_t.py index 9a9d3b8c6..45e284046 100644 --- a/couchbase/tests/cases/transcoder_t.py +++ b/couchbase/tests/cases/transcoder_t.py @@ -215,11 +215,11 @@ def test_pycbc295(self): c = self.make_connection() c.transcoder = custom_tc custom_tc._op_next['encode_value'] = ( - json.dumps({'Hello': 'World'}), + json.dumps({'Hello': 'World'}).encode('utf8'), 0x02000001 ) key = self.gen_key('pycbc295') c.upsert(key, 'whatevs') c.transcoder = orig_tc rv = c.get(key) - self.assertIsInstance(rv.value, (dict,)) \ No newline at end of file + self.assertIsInstance(rv.value, (dict,)) From 4f4216b56bef952fda37fb2ef35765bb53cba54a Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 26 Oct 2015 12:06:15 -0700 Subject: [PATCH 003/829] PYCBC-300: Result.__repr__ should indicate properties in proper case This is per normal Python convention of showing the fields as-is within the output of __repr__. Change-Id: Ia7d4ad0763fa36f7f66be9d43833ec68b522b5c9 Reviewed-on: http://review.couchbase.org/56412 Reviewed-by: Dave Rigby <daver@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/_bootstrap.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/couchbase/_bootstrap.py b/couchbase/_bootstrap.py index 72351dfcb..8bb5e823f 100644 --- a/couchbase/_bootstrap.py +++ b/couchbase/_bootstrap.py @@ -44,29 +44,29 @@ def _result__repr__(self): details = [] flags = self.__class__._fldprops - rcstr = "RC=0x{0:X}".format(self.rc) + rcstr = "rc=0x{0:X}".format(self.rc) if self.rc != 0: rcstr += "[{0}]".format(self.errstr) details.append(rcstr) if flags & C.PYCBC_RESFLD_KEY and hasattr(self, 'key'): - details.append("Key={0}".format(repr(self.key))) + details.append("key={0}".format(repr(self.key))) if flags & C.PYCBC_RESFLD_VALUE and hasattr(self, 'value'): - details.append("Value={0}".format(repr(self.value))) + details.append("value={0}".format(repr(self.value))) if flags & C.PYCBC_RESFLD_CAS and hasattr(self, 'cas'): - details.append("CAS=0x{cas:x}".format(cas=self.cas)) + details.append("cas=0x{cas:x}".format(cas=self.cas)) if flags & C.PYCBC_RESFLD_CAS and hasattr(self, 'flags'): - details.append("Flags=0x{flags:x}".format(flags=self.flags)) + details.append("flags=0x{flags:x}".format(flags=self.flags)) if flags & C.PYCBC_RESFLD_HTCODE and hasattr(self, "http_status"): - details.append("HTTP={0}".format(self.http_status)) + details.append("http_status={0}".format(self.http_status)) if flags & C.PYCBC_RESFLD_URL and hasattr(self, "url"): - details.append("URL={0}".format(self.url)) + details.append("url={0}".format(self.url)) ret = "{0}<{1}>".format(self.__class__.__name__, ', '.join(details)) return ret From 3deaca5e778f511f244dd9a0cb1e4cc92a0aba80 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 23 Oct 2015 11:47:33 -0700 Subject: [PATCH 004/829] PYCBC-305: Add is_ssl property to determine SSL connection state Change-Id: I2a331796699fe711bceb765b8a87223270ca09e4 Reviewed-on: http://review.couchbase.org/56394 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Dave Rigby <daver@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- couchbase/bucket.py | 5 +++++ src/constants.c | 2 ++ 2 files changed, 7 insertions(+) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 3bb5f6257..3beb9ea24 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1300,6 +1300,11 @@ def views_timeout(self): def views_timeout(self, value): self._set_timeout_common(_LCB.LCB_CNTL_VIEW_TIMEOUT, value) + @property + def is_ssl(self): + mode = self._cntl(op=_LCB.LCB_CNTL_SSL_MODE, value_type='int') + return mode & _LCB.LCB_SSL_ENABLED != 0 + _OLDOPS = { 'set': 'upsert', 'add': 'insert', 'delete': 'remove'} for o, n in _OLDOPS.items(): for variant in ('', '_multi'): diff --git a/src/constants.c b/src/constants.c index 5256790da..062e18c23 100644 --- a/src/constants.c +++ b/src/constants.c @@ -159,6 +159,8 @@ do_all_constants(PyObject *module, void (*handler)(PyObject*, const char*, long) /* For CNTL constants */ ADD_MACRO(LCB_CNTL_OP_TIMEOUT); ADD_MACRO(LCB_CNTL_VIEW_TIMEOUT); + ADD_MACRO(LCB_CNTL_SSL_MODE); + ADD_MACRO(LCB_SSL_ENABLED); /* View options */ ADD_MACRO(LCB_CMDVIEWQUERY_F_INCLUDE_DOCS); From ebf62058513bdaa4dae3dfde041492da2d0da6db Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 26 Oct 2015 12:02:39 -0700 Subject: [PATCH 005/829] Additional view/n1ql documentation fixes This makes the n1ql methods first class citizens in terms of documentations. More examples have been added. The 'itercls' kwarg has been undocumented and made non-discreet, as this is an internal parameter as far as the API is concerned at the sync bucket level. The consistent_with* properties on the N1QLQuery class have been undocumented as they do not work in Couchbase 4.0 (they might work in a later release, so they have not been removed). Change-Id: I999521cbd88909dd9622e32f35d42582f5331d7f Reviewed-on: http://review.couchbase.org/56411 Reviewed-by: Dave Rigby <daver@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 32 ++++++++++++++++++++++++++------ couchbase/n1ql.py | 7 ++++++- docs/source/api/couchbase.rst | 9 +++++++++ docs/source/api/n1ql.rst | 2 -- docs/source/api/txcouchbase.rst | 2 ++ txcouchbase/bucket.py | 24 ++++++++++++++++++++++++ 6 files changed, 67 insertions(+), 9 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 3beb9ea24..f93ff2ec8 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1164,7 +1164,7 @@ def bucket_manager(self): """ return BucketManager(self) - def query(self, design, view, use_devmode=False, itercls=View, **kwargs): + def query(self, design, view, use_devmode=False, **kwargs): """ Query a pre-defined MapReduce view, passing parameters. @@ -1181,8 +1181,6 @@ def query(self, design, view, use_devmode=False, itercls=View, **kwargs): explanation. :param kwargs: Extra arguments passed to the :class:`~.View` object constructor. - :param itercls: Subclass of 'view' to use. This parameter is - mainly used by async modules :param kwargs: Additional parameters passed to the :class:`~.View` constructor. See that class' documentation for accepted parameters. @@ -1207,22 +1205,44 @@ def query(self, design, view, use_devmode=False, itercls=View, **kwargs): """ design = self._mk_devmode(design, use_devmode) + itercls = kwargs.pop('itercls', View) return itercls(self, design, view, **kwargs) - def n1ql_query(self, query, itercls=N1QLRequest, **kwargs): + def n1ql_query(self, query, *args, **kwargs): """ Execute a N1QL query. + This method is mainly a wrapper around the :class:`~.N1QLQuery` + and :class:`~.N1QLRequest` objects, which contain the inputs + and outputs of the query. + + Using an explicit :class:`~.N1QLQuery`:: + + query = N1QLQuery( + 'SELECT airportname FROM `travel-sample` WHERE city=$1', "Reno") + # Use this option for often-repeated queries + query.adhoc = False + for row in cb.n1ql_query(query): + print 'Name: {0}'.format(row['airportname']) + + Using an implicit :class:`~.N1QLQuery`:: + + for row in cb.n1ql_query( + 'SELECT airportname, FROM `travel-sample` WHERE city=$1', "Reno"): + print 'Name: {0}'.format(row['airportname']) + :param query: The query to execute. This may either be a :class:`.N1QLQuery` object, or a string (which will be implicitly converted to one). :param kwargs: Arguments for :class:`.N1QLRequest`. - :return: An iterator which yields rows. The returned + :return: An iterator which yields rows. Each row is a dictionary + representing a single result """ if not isinstance(query, N1QLQuery): query = N1QLQuery(query) - return itercls(query, self, **kwargs) + itercls = kwargs.pop('itercls', N1QLRequest) + return itercls(query, self, *args, **kwargs) def __repr__(self): return ('<{modname}.{cls} bucket={bucket}, nodes={nodes} at 0x{oid:x}>' diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 68cc1f61e..103cfc326 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -59,11 +59,16 @@ def __init__(self, query, *args, **kwargs): 'WHERE type=$1 AND id=$2', 'airline', 0) + for row in cb.n1ql_query(q): + print 'Got', row + Use named parameters:: q = N1QLQuery('SELECT * FROM `travel-sample` ' 'WHERE type=$type AND id=$id', type='airline', id=0) + for row in cb.n1ql_query(q): + print 'Got', row """ @@ -255,7 +260,7 @@ def __init__(self, params, parent, row_factory=lambda x: x): method (or one of its async derivatives). :param params: An :class:`N1QLQuery` object. - :param parent: The parent :class:`.Bucket` object + :param parent: The parent :class:`~.couchbase.bucket.Bucket` object :param row_factory: Callable which accepts the raw dictionary of each row, and can wrap them in a customized class. The default is simply to return the dictionary itself. diff --git a/docs/source/api/couchbase.rst b/docs/source/api/couchbase.rst index 7d7df48e7..587b86cf6 100644 --- a/docs/source/api/couchbase.rst +++ b/docs/source/api/couchbase.rst @@ -274,6 +274,15 @@ MapReduce/View Methods .. automethod:: query +N1QL Query Methods +================== + +.. currentmodule:: couchbase.bucket +.. class:: Bucket + + .. automethod:: n1ql_query + + Design Document Management ========================== diff --git a/docs/source/api/n1ql.rst b/docs/source/api/n1ql.rst index bfe1832dd..527f94cbb 100644 --- a/docs/source/api/n1ql.rst +++ b/docs/source/api/n1ql.rst @@ -16,8 +16,6 @@ N1QL Queries .. autoattribute:: consistency .. autoattribute:: encoded .. autoattribute:: adhoc - .. automethod:: consistent_with_ops - .. automethod:: consistent_with_all .. autodata:: CONSISTENCY_NONE .. autodata:: CONSISTENCY_REQUEST diff --git a/docs/source/api/txcouchbase.rst b/docs/source/api/txcouchbase.rst index 2eab73bc6..587a989ba 100644 --- a/docs/source/api/txcouchbase.rst +++ b/docs/source/api/txcouchbase.rst @@ -39,6 +39,8 @@ reasons (Deferreds result in a 3x performance slowdown). .. automethod:: __init__ .. automethod:: queryAll .. automethod:: queryEx + .. automethod:: n1qlQueryAll + .. automethod:: n1qlQueryEx .. class:: BatchedView diff --git a/txcouchbase/bucket.py b/txcouchbase/bucket.py index 7afeb4bf4..1575e9d36 100644 --- a/txcouchbase/bucket.py +++ b/txcouchbase/bucket.py @@ -304,6 +304,30 @@ def n1qlQueryEx(self, cls, *args, **kwargs): return o def n1qlQueryAll(self, *args, **kwargs): + """ + Execute a N1QL query, retrieving all rows. + + This method returns a :class:`Deferred` object which is executed + with a :class:`~.N1QLRequest` object. The object may be iterated + over to yield the rows in the result set. + + This method is similar to :meth:`~couchbase.bucket.Bucket.n1ql_query` + in its arguments. + + Example:: + + def handler(req): + for row in req: + # ... handle row + + d = cb.n1qlQueryAll('SELECT * from `travel-sample` WHERE city=$1`, + 'Reno') + d.addCallback(handler) + + :return: A :class:`Deferred` + + .. seealso:: :meth:`~couchbase.bucket.Bucket.n1ql_query` + """ if not self.connected: cb = lambda x: self.n1qlQueryAll(*args, **kwargs) return self.connect().addCallback(cb) From bc712ad328285259c50c5dcb750855fed3e2fce1 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 28 Oct 2015 11:58:38 -0700 Subject: [PATCH 006/829] PYCBC-307: Add n1ql timeout properties This adds n1ql timeout properties on both a per-query and global level. Change-Id: I88c8efd2039ca8e5875e2779329f258025ecf086 Reviewed-on: http://review.couchbase.org/56495 Reviewed-by: Dave Rigby <daver@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 8 ++++++++ couchbase/n1ql.py | 26 ++++++++++++++++++++++++++ couchbase/tests/cases/n1qlstrings_t.py | 15 +++++++++++++++ docs/source/api/n1ql.rst | 1 + src/constants.c | 1 + 5 files changed, 51 insertions(+) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index f93ff2ec8..83bcb0999 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1320,6 +1320,14 @@ def views_timeout(self): def views_timeout(self, value): self._set_timeout_common(_LCB.LCB_CNTL_VIEW_TIMEOUT, value) + @property + def n1ql_timeout(self): + return self._get_timeout_common(_LCB.LCB_CNTL_N1QL_TIMEOUT) + + @n1ql_timeout.setter + def n1ql_timeout(self, value): + self._set_timeout_common(_LCB.LCB_CNTL_N1QL_TIMEOUT, value) + @property def is_ssl(self): mode = self._cntl(op=_LCB.LCB_CNTL_SSL_MODE, value_type='int') diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 103cfc326..058bb8ac2 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -230,6 +230,32 @@ def adhoc(self): def adhoc(self, arg): self._adhoc = arg + @property + def timeout(self): + """ + Optional per-query timeout. If set, this will limit the amount + of time in which the query can be executed and waited for. + + .. note:: + + The effective timeout for the query will be either this property + or the value of :attr:`couchbase.bucket.Bucket.n1ql_timeout` + property, whichever is *lower*. + + .. seealso:: couchbase.bucket.Bucket.n1ql_timeout + """ + value = self._body.get('timeout', '0s') + value = value[:-1] + return float(value) + + @timeout.setter + def timeout(self, value): + if not value: + self._body.pop('timeout', 0) + else: + value = float(value) + self._body['timeout'] = '{0}s'.format(value) + @property def encoded(self): """ diff --git a/couchbase/tests/cases/n1qlstrings_t.py b/couchbase/tests/cases/n1qlstrings_t.py index e02ad16b8..4ecceb52e 100644 --- a/couchbase/tests/cases/n1qlstrings_t.py +++ b/couchbase/tests/cases/n1qlstrings_t.py @@ -89,3 +89,18 @@ def test_encode_scanvec(self): dval = json.loads(q.encoded) sv_exp['91'] = {'guard': '7779', 'value': 23} self.assertEqual(sv_exp, dval['scan_vector']) + + def test_timeout(self): + q = N1QLQuery('SELECT foo') + q.timeout = 3.75 + self.assertEqual('3.75s', q._body['timeout']) + self.assertEqual(3.75, q.timeout) + + def setfn(): + q.timeout = "blah" + + self.assertRaises(ValueError, setfn) + + # Unset the timeout + q.timeout = 0 + self.assertFalse('timeout' in q._body) \ No newline at end of file diff --git a/docs/source/api/n1ql.rst b/docs/source/api/n1ql.rst index 527f94cbb..e63141d4a 100644 --- a/docs/source/api/n1ql.rst +++ b/docs/source/api/n1ql.rst @@ -16,6 +16,7 @@ N1QL Queries .. autoattribute:: consistency .. autoattribute:: encoded .. autoattribute:: adhoc + .. autoattribute:: timeout .. autodata:: CONSISTENCY_NONE .. autodata:: CONSISTENCY_REQUEST diff --git a/src/constants.c b/src/constants.c index 062e18c23..d3c827676 100644 --- a/src/constants.c +++ b/src/constants.c @@ -161,6 +161,7 @@ do_all_constants(PyObject *module, void (*handler)(PyObject*, const char*, long) ADD_MACRO(LCB_CNTL_VIEW_TIMEOUT); ADD_MACRO(LCB_CNTL_SSL_MODE); ADD_MACRO(LCB_SSL_ENABLED); + ADD_MACRO(LCB_CNTL_N1QL_TIMEOUT); /* View options */ ADD_MACRO(LCB_CMDVIEWQUERY_F_INCLUDE_DOCS); From 1762d1df17dcafdde2e2e33b31891a17d02f6502 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 28 Oct 2015 12:06:28 -0700 Subject: [PATCH 007/829] Document some additional properties Change-Id: If2c03ba423c7f13e39a12bf17951a62bac5c4029 Reviewed-on: http://review.couchbase.org/56496 Reviewed-by: Dave Rigby <daver@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 38 +++++++++++++++++++++++++++++++++++ docs/source/api/couchbase.rst | 4 ++++ 2 files changed, 42 insertions(+) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 83bcb0999..448c87778 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1306,6 +1306,18 @@ def _set_timeout_common(self, op, value): @property def timeout(self): + """ + The timeout for key-value operations, in fractions of a second. + This timeout affects the :meth:`get` and :meth:`upsert` family + of methods. + + :: + + # Set timeout to 3.75 seconds + cb.timeout = 3.75 + + .. seealso:: :attr:`views_timeout`, :attr:`n1ql_timeout` + """ return self._get_timeout_common(_LCB.LCB_CNTL_OP_TIMEOUT) @timeout.setter @@ -1314,6 +1326,13 @@ def timeout(self, value): @property def views_timeout(self): + """ + The timeout for view query operations. This affects the + :meth:`query` method. + + Timeout may be specified in fractions of a second. + .. seealso:: :attr:`timeout` + """ return self._get_timeout_common(_LCB.LCB_CNTL_VIEW_TIMEOUT) @views_timeout.setter @@ -1322,6 +1341,16 @@ def views_timeout(self, value): @property def n1ql_timeout(self): + """ + The timeout for N1QL query operations. This affects the + :meth:`n1ql_query` method. + + Timeouts may also be adjusted on a per-query basis by setting the + :attr:`couchbase.n1ql.N1QLQuery.timeout` property. The effective + timeout is either the per-query timeout or the global timeout, + whichever is lower. + """ + return self._get_timeout_common(_LCB.LCB_CNTL_N1QL_TIMEOUT) @n1ql_timeout.setter @@ -1330,6 +1359,15 @@ def n1ql_timeout(self, value): @property def is_ssl(self): + """ + Read-only boolean property indicating whether SSL is used for + this connection. + + If this property is true, then all communication between this + object and the Couchbase cluster is encrypted using SSL. + + See :meth:`__init__` for more information on connection options. + """ mode = self._cntl(op=_LCB.LCB_CNTL_SSL_MODE, value_type='int') return mode & _LCB.LCB_SSL_ENABLED != 0 diff --git a/docs/source/api/couchbase.rst b/docs/source/api/couchbase.rst index 587b86cf6..d3db66f2b 100644 --- a/docs/source/api/couchbase.rst +++ b/docs/source/api/couchbase.rst @@ -405,10 +405,14 @@ Attributes .. autoattribute:: views_timeout + .. autoattribute:: n1ql_timeout + .. autoattribute:: bucket .. autoattribute:: server_nodes + .. autoattribute:: is_ssl + .. attribute:: default_format Specify the default format (default: :const:`~couchbase.FMT_JSON`) From c5dfc70353661cc9d6c0db07b1a681358c0f1fe5 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 29 Oct 2015 13:43:16 -0700 Subject: [PATCH 008/829] PYCBC-296: Fix constant truncation Make constants be PY_LONG_LONG rather than 'long'. Also force format flags as lcb_U32 before converting to long. Change-Id: I894ffd48762dd42e9fc9a92444cb3d3545f0dd6b Reviewed-on: http://review.couchbase.org/56538 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- src/constants.c | 24 +++++++++++++----------- 1 file changed, 13 insertions(+), 11 deletions(-) diff --git a/src/constants.c b/src/constants.c index d3c827676..69ef8ffeb 100644 --- a/src/constants.c +++ b/src/constants.c @@ -72,7 +72,8 @@ X(PREPEND) static void -do_all_constants(PyObject *module, void (*handler)(PyObject*, const char*, long)) +do_all_constants(PyObject *module, + void (*handler)(PyObject*, const char*, PY_LONG_LONG)) { #define ADD_MACRO(sym) handler(module, #sym, sym) #define ADD_CONSTANT(name, val) handler(module, name, val) @@ -110,12 +111,12 @@ do_all_constants(PyObject *module, void (*handler)(PyObject*, const char*, long) ADD_MACRO(PYCBC_RESFLD_HTCODE); ADD_MACRO(PYCBC_RESFLD_URL); - ADD_CONSTANT("FMT_JSON", PYCBC_FMT_JSON); - ADD_CONSTANT("FMT_BYTES", PYCBC_FMT_BYTES); - ADD_CONSTANT("FMT_UTF8", PYCBC_FMT_UTF8); - ADD_CONSTANT("FMT_PICKLE", PYCBC_FMT_PICKLE); - ADD_CONSTANT("FMT_LEGACY_MASK", PYCBC_FMT_LEGACY_MASK); - ADD_CONSTANT("FMT_COMMON_MASK", PYCBC_FMT_COMMON_MASK); + ADD_CONSTANT("FMT_JSON", (lcb_U32)PYCBC_FMT_JSON); + ADD_CONSTANT("FMT_BYTES", (lcb_U32)PYCBC_FMT_BYTES); + ADD_CONSTANT("FMT_UTF8", (lcb_U32)PYCBC_FMT_UTF8); + ADD_CONSTANT("FMT_PICKLE", (lcb_U32)PYCBC_FMT_PICKLE); + ADD_CONSTANT("FMT_LEGACY_MASK", (lcb_U32)PYCBC_FMT_LEGACY_MASK); + ADD_CONSTANT("FMT_COMMON_MASK", (lcb_U32)PYCBC_FMT_COMMON_MASK); ADD_CONSTANT("OBS_PERSISTED", LCB_OBSERVE_PERSISTED); ADD_CONSTANT("OBS_FOUND", LCB_OBSERVE_FOUND); @@ -169,8 +170,9 @@ do_all_constants(PyObject *module, void (*handler)(PyObject*, const char*, long) } static void -do_constmod(PyObject *module, const char *name, long value) { - PyModule_AddIntConstant(module, name, value); +do_constmod(PyObject *module, const char *name, PY_LONG_LONG value) { + PyObject *o = PyLong_FromLongLong(value); + PyModule_AddObject(module, name, o); } void @@ -194,9 +196,9 @@ pycbc_lcb_errstr(lcb_t instance, lcb_error_t err) } static void -do_printmod(PyObject *module, const char *name, long value) +do_printmod(PyObject *module, const char *name, PY_LONG_LONG value) { - printf("%s = %ld\n", name, value); + printf("%s = %lld\n", name, value); } PyObject * From 1b7427c0c81bb74ec409a4ba1c35550235ba4a61 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 6 Jul 2015 14:27:22 -0700 Subject: [PATCH 009/829] Reactor conversion encoding functions Rather than passing discreet objects with funky semantics, these new functions return their result in a newly defined pycbc_pybuffer type. Change-Id: I0a52249aa7aa13a41853ff40941032bdd4fbfaba Reviewed-on: http://review.couchbase.org/55572 Reviewed-by: Dave Rigby <daver@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- src/convert.c | 151 +++++++++++++++++++--------------------------- src/counter.c | 15 ++--- src/ctranscoder.c | 14 ++--- src/get.c | 15 ++--- src/miscops.c | 22 +++---- src/observe.c | 16 ++--- src/pycbc.h | 48 ++++++--------- src/store.c | 28 +++------ 8 files changed, 115 insertions(+), 194 deletions(-) diff --git a/src/convert.c b/src/convert.c index 9e50148a7..11a66a0b8 100644 --- a/src/convert.c +++ b/src/convert.c @@ -73,7 +73,7 @@ convert_to_string(const char *buf, size_t nbuf, int mode) } static int -encode_common(PyObject **o, void **buf, size_t *nbuf, lcb_uint32_t flags) +encode_common(PyObject *src, pycbc_pybuffer *dst, lcb_U32 flags) { PyObject *bytesobj; Py_ssize_t plen; @@ -81,29 +81,27 @@ encode_common(PyObject **o, void **buf, size_t *nbuf, lcb_uint32_t flags) if (flags == PYCBC_FMT_UTF8) { #if PY_MAJOR_VERSION == 2 - if (PyString_Check(*o)) { + if (PyString_Check(src)) { #else if (0) { #endif - bytesobj = *o; - Py_INCREF(*o); + bytesobj = src; + Py_INCREF(bytesobj); } else { - if (!PyUnicode_Check(*o)) { + if (!PyUnicode_Check(src)) { PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ENCODING, - 0, "Must be unicode or string", *o); + 0, "Must be unicode or string", src); return -1; } - bytesobj = PyUnicode_AsUTF8String(*o); + bytesobj = PyUnicode_AsUTF8String(src); } - } else if (flags == PYCBC_FMT_BYTES) { - if (PyBytes_Check(*o) || PyByteArray_Check(*o)) { - bytesobj = *o; - Py_INCREF(*o); - + if (PyBytes_Check(src) || PyByteArray_Check(src)) { + bytesobj = src; + Py_INCREF(bytesobj); } else { PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ENCODING, 0, - "Must be bytes or bytearray", *o); + "Must be bytes or bytearray", src); return -1; } @@ -122,13 +120,13 @@ encode_common(PyObject **o, void **buf, size_t *nbuf, lcb_uint32_t flags) return -1; } - args = PyTuple_Pack(1, *o); + args = PyTuple_Pack(1, src); bytesobj = PyObject_CallObject(helper, args); Py_DECREF(args); if (!bytesobj) { PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ENCODING, - 0, "Couldn't encode value", *o); + 0, "Couldn't encode value", src); return -1; } @@ -143,11 +141,11 @@ encode_common(PyObject **o, void **buf, size_t *nbuf, lcb_uint32_t flags) } if (PyByteArray_Check(bytesobj)) { - *buf = PyByteArray_AS_STRING(bytesobj); + dst->buffer = PyByteArray_AS_STRING(bytesobj); plen = PyByteArray_GET_SIZE(bytesobj); rv = 0; } else { - rv = PyBytes_AsStringAndSize(bytesobj, (char**)buf, &plen); + rv = PyBytes_AsStringAndSize(bytesobj, (char**)&dst->buffer, &plen); } if (rv < 0) { @@ -156,8 +154,8 @@ encode_common(PyObject **o, void **buf, size_t *nbuf, lcb_uint32_t flags) return -1; } - *nbuf = plen; - *o = bytesobj; + dst->pyobj = bytesobj; + dst->length = plen; return 0; } @@ -233,19 +231,13 @@ decode_common(PyObject **vp, const char *buf, size_t nbuf, lcb_uint32_t flags) } int -pycbc_tc_simple_encode(PyObject **p, - void *buf, - size_t *nbuf, - lcb_uint32_t flags) +pycbc_tc_simple_encode(PyObject *src, pycbc_pybuffer *dst, lcb_U32 flags) { - return encode_common(p, buf, nbuf, flags); + return encode_common(src, dst, flags); } int -pycbc_tc_simple_decode(PyObject **vp, - const char *buf, - size_t nbuf, - lcb_uint32_t flags) +pycbc_tc_simple_decode(PyObject **vp, const char *buf, size_t nbuf, lcb_U32 flags) { return decode_common(vp, buf, nbuf, flags); } @@ -257,11 +249,8 @@ enum { DECODE_VALUE }; static int -do_call_tc(pycbc_Bucket *conn, - PyObject *obj, - PyObject *flags, - PyObject **result, - int mode) +do_call_tc(pycbc_Bucket *conn, PyObject *obj, PyObject *flags, + PyObject **result, int mode) { PyObject *meth = NULL; PyObject *args = NULL; @@ -307,6 +296,7 @@ do_call_tc(pycbc_Bucket *conn, PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ENCODING, 0, "User-Defined transcoder failed", obj); + ret = -1; } GT_DONE: @@ -317,41 +307,39 @@ do_call_tc(pycbc_Bucket *conn, int -pycbc_tc_encode_key(pycbc_Bucket *conn, - PyObject **key, - void **buf, - size_t *nbuf) +pycbc_tc_encode_key(pycbc_Bucket *conn, PyObject *src, pycbc_pybuffer *dst) { int rv; Py_ssize_t plen; - PyObject *orig_key; - PyObject *new_key = NULL; - if (!conn->tc) { - return encode_common(key, buf, nbuf, PYCBC_FMT_UTF8); + rv = encode_common(src, dst, PYCBC_FMT_UTF8); + if (rv == 0 && dst->length == 0) { + PYCBC_EXCTHROW_EMPTYKEY(); + rv = -1; + } + return rv; } - orig_key = *key; - pycbc_assert(orig_key); + /* Swap out key and new key. Assign back later on */ - rv = do_call_tc(conn, orig_key, NULL, &new_key, ENCODE_KEY); + rv = do_call_tc(conn, src, NULL, &dst->pyobj, ENCODE_KEY); - if (new_key == NULL || rv < 0) { + if (dst->pyobj == NULL || rv < 0) { + dst->pyobj = NULL; return -1; } - rv = PyBytes_AsStringAndSize(new_key, (char**)buf, &plen); + rv = PyBytes_AsStringAndSize(dst->pyobj, (char**)&dst->buffer, &plen); if (rv == -1) { PYCBC_EXC_WRAP_KEY(PYCBC_EXC_ENCODING, 0, "Couldn't convert encoded key to bytes. It is " "possible that the Transcoder.encode_key method " - "returned an unexpected value", - new_key); + "returned an unexpected value", dst->pyobj); - Py_XDECREF(new_key); + PYCBC_PYBUF_RELEASE(dst); return -1; } @@ -359,21 +347,17 @@ pycbc_tc_encode_key(pycbc_Bucket *conn, PYCBC_EXC_WRAP_KEY(PYCBC_EXC_ENCODING, 0, "Transcoder.encode_key returned an empty string", - new_key); - Py_XDECREF(new_key); + dst->pyobj); + PYCBC_PYBUF_RELEASE(dst); return -1; } - - *nbuf = plen; - *key = new_key; + dst->length = plen; return 0; } int -pycbc_tc_decode_key(pycbc_Bucket *conn, - const void *key, - size_t nkey, - PyObject **pobj) +pycbc_tc_decode_key(pycbc_Bucket *conn, const void *key, size_t nkey, + PyObject **pobj) { PyObject *bobj; int rv = 0; @@ -437,50 +421,40 @@ pycbc_tc_determine_format(PyObject *value) } int -pycbc_tc_encode_value(pycbc_Bucket *conn, - PyObject **value, - PyObject *flag_v, - void **buf, - size_t *nbuf, - lcb_uint32_t *flags) +pycbc_tc_encode_value(pycbc_Bucket *conn, PyObject *srcbuf, PyObject *srcflags, + pycbc_pybuffer *dstbuf, lcb_U32 *dstflags) { PyObject *flags_obj; - PyObject *orig_value; PyObject *new_value = NULL; PyObject *result_tuple = NULL; - lcb_uint32_t flags_stackval; + lcb_U32 flags_stackval; int rv; Py_ssize_t plen; - orig_value = *value; - - if (!flag_v) { - flag_v = conn->dfl_fmt; + if (!srcflags) { + srcflags = conn->dfl_fmt; } if (!conn->tc) { - - if (flag_v == pycbc_helpers.fmt_auto) { - flag_v = pycbc_tc_determine_format(*value); + if (srcflags == pycbc_helpers.fmt_auto) { + srcflags = pycbc_tc_determine_format(srcbuf); } - rv = pycbc_get_u32(flag_v, &flags_stackval); + rv = pycbc_get_u32(srcflags, &flags_stackval); if (rv < 0) { PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ARGUMENTS, 0, - "Bad value for flags", - flag_v); + "Bad value for flags", srcflags); return -1; } - *flags = flags_stackval; - return encode_common(value, buf, nbuf, flags_stackval); + *dstflags = flags_stackval; + return encode_common(srcbuf, dstbuf, flags_stackval); } /** * Calling into Transcoder */ - - rv = do_call_tc(conn, orig_value, flag_v, &result_tuple, ENCODE_VALUE); + rv = do_call_tc(conn, srcbuf, srcflags, &result_tuple, ENCODE_VALUE); if (rv < 0) { return -1; } @@ -488,8 +462,7 @@ pycbc_tc_encode_value(pycbc_Bucket *conn, if (!PyTuple_Check(result_tuple) || PyTuple_GET_SIZE(result_tuple) != 2) { PYCBC_EXC_WRAP_EX(PYCBC_EXC_ENCODING, 0, "Expected return of (bytes, flags)", - orig_value, - result_tuple); + srcbuf, result_tuple); Py_XDECREF(result_tuple); return -1; @@ -512,24 +485,22 @@ pycbc_tc_encode_value(pycbc_Bucket *conn, Py_XDECREF(result_tuple); PYCBC_EXC_WRAP_VALUE(PYCBC_EXC_ENCODING, 0, "Transcoder.encode_value() returned a bad " - "value for flags", orig_value); + "value for flags", srcbuf); return -1; } - *flags = flags_stackval; - rv = PyBytes_AsStringAndSize(new_value, (char**)buf, &plen); + *dstflags = flags_stackval; + rv = PyBytes_AsStringAndSize(new_value, (char**)&dstbuf->buffer, &plen); if (rv == -1) { Py_XDECREF(result_tuple); - PYCBC_EXC_WRAP_VALUE(PYCBC_EXC_ENCODING, 0, "Value returned by Transcoder.encode_value() " - "could not be converted to bytes", - orig_value); + "could not be converted to bytes", srcbuf); return -1; } - *value = new_value; - *nbuf = plen; + dstbuf->pyobj = new_value; + dstbuf->length = plen; Py_INCREF(new_value); Py_XDECREF(result_tuple); diff --git a/src/counter.c b/src/counter.c index 2a112695e..6800a02bf 100644 --- a/src/counter.c +++ b/src/counter.c @@ -28,29 +28,22 @@ handle_single_arith(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *item, void *arg) { - void *key; - size_t nkey; int rv = 0; lcb_error_t err; lcb_CMDCOUNTER cmd; struct arithmetic_common_vars my_params; static char *kwlist[] = { "delta", "initial", "ttl", NULL }; + pycbc_pybuffer keybuf = { 0 }; my_params = *(struct arithmetic_common_vars *)arg; (void)item; memset(&cmd, 0, sizeof cmd); - rv = pycbc_tc_encode_key(self, &curkey, &key, &nkey); + rv = pycbc_tc_encode_key(self, curkey, &keybuf); if (rv < 0) { return -1; } - if (!nkey) { - PYCBC_EXCTHROW_EMPTYKEY(); - rv = -1; - goto GT_DONE; - } - if (options) { curvalue = options; } @@ -83,7 +76,7 @@ handle_single_arith(pycbc_Bucket *self, struct pycbc_common_vars *cv, } } - LCB_CMD_SET_KEY(&cmd, key, nkey); + LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); cmd.delta = my_params.delta; cmd.create = my_params.create; cmd.initial = my_params.initial; @@ -97,7 +90,7 @@ handle_single_arith(pycbc_Bucket *self, struct pycbc_common_vars *cv, } GT_DONE: - Py_XDECREF(curkey); + PYCBC_PYBUF_RELEASE(&keybuf); return rv; } diff --git a/src/ctranscoder.c b/src/ctranscoder.c index 23e6de924..58a4823ef 100644 --- a/src/ctranscoder.c +++ b/src/ctranscoder.c @@ -25,8 +25,7 @@ static PyObject * encode_key(PyObject *self, PyObject *args) { int rv; - char *buf; - size_t nbuf; + pycbc_pybuffer keybuf; PyObject *kobj; rv = PyArg_ParseTuple(args, "O", &kobj); @@ -34,13 +33,13 @@ encode_key(PyObject *self, PyObject *args) return NULL; } - rv = pycbc_tc_simple_encode(&kobj, &buf, &nbuf, PYCBC_FMT_UTF8); + rv = pycbc_tc_simple_encode(kobj, &keybuf,PYCBC_FMT_UTF8); if (rv < 0) { return NULL; } (void)self; - return kobj; + return keybuf.pyobj; } static PyObject * @@ -77,9 +76,8 @@ encode_value(PyObject *self, PyObject *args) int rv; PyObject *vobj; PyObject *flagsobj; - char *buf; + pycbc_pybuffer valbuf = { NULL }; PyObject *ret; - size_t nbuf; rv = PyArg_ParseTuple(args, "OO", &vobj, &flagsobj); if (!rv) { @@ -91,13 +89,13 @@ encode_value(PyObject *self, PyObject *args) return NULL; } - rv = pycbc_tc_simple_encode(&vobj, &buf, &nbuf, flags); + rv = pycbc_tc_simple_encode(vobj, &valbuf, flags); if (rv < 0) { return NULL; } ret = PyTuple_New(2); - PyTuple_SET_ITEM(ret, 0, vobj); + PyTuple_SET_ITEM(ret, 0, valbuf.pyobj); PyTuple_SET_ITEM(ret, 1, flagsobj); /** INCREF flags because we got it as an argument */ diff --git a/src/get.c b/src/get.c index 07126da86..9c3bbe815 100644 --- a/src/get.c +++ b/src/get.c @@ -37,12 +37,11 @@ handle_single_key(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, void *arg) { int rv; - char *key; - size_t nkey; unsigned int lock = 0; struct getcmd_vars_st *gv = (struct getcmd_vars_st *)arg; unsigned long ttl = gv->u.ttl; lcb_error_t err; + pycbc_pybuffer keybuf = { NULL }; union { lcb_CMDBASE base; @@ -54,18 +53,12 @@ handle_single_key(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, memset(&u_cmd, 0, sizeof u_cmd); (void)itm; - rv = pycbc_tc_encode_key(self, &curkey, (void**)&key, &nkey); + rv = pycbc_tc_encode_key(self, curkey, &keybuf); if (rv == -1) { return -1; } - if (!nkey) { - PYCBC_EXCTHROW_EMPTYKEY(); - rv = -1; - goto GT_DONE; - } - - LCB_CMD_SET_KEY(&u_cmd.base, key, nkey); + LCB_CMD_SET_KEY(&u_cmd.base, keybuf.buffer, keybuf.length); if (curval && gv->allow_dval && options == NULL) { options = curval; @@ -150,7 +143,7 @@ handle_single_key(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, } GT_DONE: - Py_XDECREF(curkey); + PYCBC_PYBUF_RELEASE(&keybuf); return rv; } diff --git a/src/miscops.c b/src/miscops.c index 49f7f375b..6b215c144 100644 --- a/src/miscops.c +++ b/src/miscops.c @@ -34,8 +34,7 @@ handle_single_keyop(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype void *arg) { int rv; - char *key; - size_t nkey; + pycbc_pybuffer keybuf = { NULL }; lcb_U64 cas = 0; lcb_error_t err; @@ -55,18 +54,11 @@ handle_single_keyop(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype curval = curkey; curkey = ((pycbc_OperationResult*)curkey)->key; } - - rv = pycbc_tc_encode_key(self, &curkey, (void**)&key, &nkey); + rv = pycbc_tc_encode_key(self, curkey, &keybuf); if (rv == -1) { return -1; } - if (!nkey) { - PYCBC_EXCTHROW_EMPTYKEY(); - rv = -1; - goto GT_DONE; - } - if (item) { cas = item->cas; @@ -90,17 +82,19 @@ handle_single_keyop(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype if (cas == (lcb_uint64_t)-1 && PyErr_Occurred()) { PyErr_Clear(); PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Invalid CAS specified"); - return -1; + rv = -1; + goto GT_DONE; } } - LCB_CMD_SET_KEY(&ucmd.base, key, nkey); + LCB_CMD_SET_KEY(&ucmd.base, keybuf.buffer, keybuf.length); ucmd.base.cas = cas; if (optype == PYCBC_CMD_UNLOCK) { if (!cas) { PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "CAS must be specified for unlock"); - return -1; + rv = -1; + goto GT_DONE; } err = lcb_unlock3(self->instance, cv->mres, &ucmd.unl); @@ -118,7 +112,7 @@ handle_single_keyop(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype } GT_DONE: - Py_XDECREF(curkey); + PYCBC_PYBUF_RELEASE(&keybuf); return rv; } diff --git a/src/observe.c b/src/observe.c index 9a59aad98..d58dfa0cf 100644 --- a/src/observe.c +++ b/src/observe.c @@ -92,22 +92,15 @@ handle_single_observe(pycbc_Bucket *self, PyObject *curkey, int master_only, struct pycbc_common_vars *cv) { int rv; - char *key; - size_t nkey; + pycbc_pybuffer keybuf = { NULL }; lcb_CMDOBSERVE cmd = { 0 }; lcb_error_t err; - rv = pycbc_tc_encode_key(self, &curkey, (void**)&key, &nkey); + rv = pycbc_tc_encode_key(self, curkey, &keybuf); if (rv < 0) { return -1; } - LCB_CMD_SET_KEY(&cmd, key, nkey); - - if (!nkey) { - PYCBC_EXCTHROW_EMPTYKEY(); - rv = -1; - goto GT_DONE; - } + LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); if (master_only) { cmd.cmdflags |= LCB_CMDOBSERVE_F_MASTER_ONLY; @@ -121,8 +114,7 @@ handle_single_observe(pycbc_Bucket *self, PyObject *curkey, int master_only, rv = -1; } - GT_DONE: - Py_XDECREF(curkey); + PYCBC_PYBUF_RELEASE(&keybuf); return rv; } diff --git a/src/pycbc.h b/src/pycbc.h index e4528abd5..d27c32108 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -961,6 +961,17 @@ PyObject *pycbc_exc_mktuple(void); #define PYCBC_EXCTHROW_EMPTYKEY() PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, \ "Empty key (i.e. '', empty string) passed") +typedef struct { + PyObject *pyobj; + const void *buffer; + size_t length; +} pycbc_pybuffer; + +#define PYCBC_PYBUF_RELEASE(buf) do { \ + Py_XDECREF((buf)->pyobj); \ + (buf)->pyobj = NULL; \ +} while (0) + /** * Encodes a key into a buffer. * @param conn the connection object @@ -975,10 +986,7 @@ PyObject *pycbc_exc_mktuple(void); * @return * 0 on success, nonzero on error */ -int pycbc_tc_encode_key(pycbc_Bucket *conn, - PyObject **key, - void **buf, - size_t *nbuf); +int pycbc_tc_encode_key(pycbc_Bucket *conn, PyObject *src, pycbc_pybuffer *dst); /** * Decodes a key buffer into a python object. @@ -991,9 +999,7 @@ int pycbc_tc_encode_key(pycbc_Bucket *conn, * @return * 0 on success, nonzero on error */ -int pycbc_tc_decode_key(pycbc_Bucket *conn, - const void *key, - size_t nkey, +int pycbc_tc_decode_key(pycbc_Bucket *conn, const void *key, size_t nkey, PyObject **pobj); /** @@ -1007,12 +1013,8 @@ int pycbc_tc_decode_key(pycbc_Bucket *conn, * @param flags pointer to a flags variable, will be set with the appropriate * flags */ -int pycbc_tc_encode_value(pycbc_Bucket *conn, - PyObject **value, - PyObject *flag_v, - void **buf, - size_t *nbuf, - lcb_uint32_t *flags); +int pycbc_tc_encode_value(pycbc_Bucket *conn, PyObject *srcbuf, PyObject *flag_v, + pycbc_pybuffer *dstbuf, lcb_U32 *dstflags); /** * Decode a value with flags @@ -1022,29 +1024,19 @@ int pycbc_tc_encode_value(pycbc_Bucket *conn, * @param flags flags as received from the server * @param pobj the pythonized value */ -int pycbc_tc_decode_value(pycbc_Bucket *conn, - const void *value, - size_t nvalue, - lcb_uint32_t flags, - PyObject **pobj); - - +int pycbc_tc_decode_value(pycbc_Bucket *conn, const void *value, size_t nvalue, + lcb_U32 flags, PyObject **pobj); /** * Like encode_value, but only uses built-in encoders */ -int pycbc_tc_simple_encode(PyObject **p, - void *buf, - size_t *nbuf, - lcb_uint32_t flags); +int pycbc_tc_simple_encode(PyObject *src, pycbc_pybuffer *dst, lcb_U32 flags); /** * Like decode_value, but only uses built-in decoders */ -int pycbc_tc_simple_decode(PyObject **vp, - const char *buf, - size_t nbuf, - lcb_uint32_t flags); +int pycbc_tc_simple_decode(PyObject **vp, const char *buf, size_t nbuf, + lcb_U32 flags); /** * Automatically determine the format for the object. diff --git a/src/store.c b/src/store.c index 643e109aa..6b4f09541 100644 --- a/src/store.c +++ b/src/store.c @@ -110,10 +110,7 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, int rv; const struct storecmd_vars *scv = (struct storecmd_vars *)arg; struct single_key_context skc = { NULL }; - const void *key, *value; - /* backing key/values for to-bytes conversion. Freed at the end */ - PyObject *encvalue = NULL, *enckey = NULL; - size_t nkey, nvalue; + pycbc_pybuffer keybuf = { NULL }, valbuf = { NULL }; lcb_error_t err; lcb_CMDSTORE cmd = { 0 }; @@ -122,17 +119,10 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, skc.value = curvalue; skc.cas = scv->single_cas; - rv = pycbc_tc_encode_key(self, &curkey, (void**)&key, &nkey); + rv = pycbc_tc_encode_key(self, curkey, &keybuf); if (rv < 0) { return -1; } - enckey = curkey; - - if (!nkey) { - PYCBC_EXCTHROW_EMPTYKEY(); - rv = -1; - goto GT_DONE; - } if (itm) { rv = handle_item_kv(itm, options, scv, &skc); @@ -142,14 +132,12 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, } } - rv = pycbc_tc_encode_value(self, &skc.value, skc.flagsobj, - (void**)&value, &nvalue, &cmd.flags); + rv = pycbc_tc_encode_value(self, skc.value, skc.flagsobj, + &valbuf, &cmd.flags); if (rv < 0) { rv = -1; goto GT_DONE; } - /* Set the encoded value */ - encvalue = skc.value; if (scv->operation == LCB_APPEND || scv->operation == LCB_PREPEND) { /* The server ignores these flags and libcouchbase will throw an error @@ -158,8 +146,8 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, cmd.flags = 0; } - LCB_CMD_SET_KEY(&cmd, key, nkey); - LCB_CMD_SET_VALUE(&cmd, value, nvalue); + LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); + LCB_CMD_SET_VALUE(&cmd, valbuf.buffer, valbuf.length); cmd.cas = skc.cas; cmd.operation = scv->operation; cmd.exptime = skc.ttl; @@ -174,8 +162,8 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, GT_DONE: /* Clean up our encoded keys and values */ - Py_XDECREF(enckey); - Py_XDECREF(encvalue); + PYCBC_PYBUF_RELEASE(&keybuf); + PYCBC_PYBUF_RELEASE(&valbuf); return rv; } From 7b4702d3e22232a751ecac004d381590b6115f9b Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 23 Nov 2015 15:45:46 -0800 Subject: [PATCH 010/829] PYCBC-309: Document placeholders as being native values And that they should not be already-serialized JSON values or weird string constructions. Change-Id: I608fe53c31436ef8836089dbc3b81646c9d3cbf9 Reviewed-on: http://review.couchbase.org/57201 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- couchbase/n1ql.py | 29 +++++++++++++++++++++++++---- 1 file changed, 25 insertions(+), 4 deletions(-) diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 058bb8ac2..35a452e2d 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -48,10 +48,13 @@ def __init__(self, query, *args, **kwargs): `params` argument to :class:`N1QLRequest`. :param query: The query string to execute - :param args: Positional placeholder arguments. - :param kwargs: Named placeholder arguments. - (The client will automatically prepend a leading ``$`` - to each named parameter). + :param args: Positional placeholder arguments. These satisfy + the placeholder values for positional placeholders in the + query string, such as ``$1``, ``$2`` and so on. + :param kwargs: Named placeholder arguments. These satisfy + named placeholders in the query string, such as + ``$name``, ``$email`` and so on. For the placeholder + values, omit the leading sigil (``$``). Use positional parameters:: @@ -70,6 +73,24 @@ def __init__(self, query, *args, **kwargs): for row in cb.n1ql_query(q): print 'Got', row + + When using placeholders, ensure that the placeholder value is + the *unserialized* (i.e. native) Python value, not the JSON + serialized value. For example the query + ``SELECT * FROM products WHERE tags IN ["sale", "clearance"]`` + can be rewritten using placeholders: + + Correct:: + + N1QLQuery('SELECT * FROM products WHERE tags IN $1', + ['sale', 'clearance']) + + Incorrect:: + + N1QLQuery('SELECT * FROM products WHERE tags IN $1', + "[\\"sale\\",\\"clearance\\"]") + + Since the placeholders are serialized to JSON internally anyway. """ self._adhoc = True From cb59f68e2fe82b82bc21e30ab80c282c4ba49c09 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 10 Nov 2015 10:33:49 -0800 Subject: [PATCH 011/829] removed unused imports from base test Change-Id: Ib0f0e0854680cf73f403ab463c76c9e96f0fb6f8 Reviewed-on: http://review.couchbase.org/57073 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- couchbase/tests/base.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index 2100b4e85..d94a30746 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -15,7 +15,6 @@ from __future__ import absolute_import -import os import sys import types import platform @@ -33,13 +32,11 @@ from testresources import ResourcedTestCase, TestResourceManager -from couchbase.exceptions import CouchbaseError from couchbase.admin import Admin from couchbase.mockserver import CouchbaseMock, BucketSpec, MockControlClient from couchbase.result import ( MultiResult, ValueResult, OperationResult, ObserveInfo, Result) from couchbase._pyport import basestring -from couchbase.connstr import ConnectionString CONFIG_FILE = 'tests.ini' # in cwd From 0fd8d05b87f9b574788020c7d5e20fab17a93aac Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 10 Nov 2015 10:56:07 -0800 Subject: [PATCH 012/829] Allow fallback mode to run tests with simple filenames Because we allow multiple implementations using the same test cases, our test setup is a bit funny looking. This commit allows the tests to run as a standalone test - using the default of the sync couchbase module Change-Id: I4c18e0f2b08729e9c97936a7c3d340b1918037a7 Reviewed-on: http://review.couchbase.org/57074 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- couchbase/tests/base.py | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index d94a30746..1ac7ecbf5 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -18,6 +18,7 @@ import sys import types import platform +import warnings try: from unittest.case import SkipTest @@ -246,7 +247,6 @@ def mock_info(self): raise SkipTest("Mock server required") return self._mock_info - def setUp(self): super(CouchbaseTestCase, self).setUp() @@ -261,12 +261,27 @@ def tmp(self, a): self._key_counter = 0 + if not hasattr(self, 'factory'): + from couchbase.bucket import Bucket + from couchbase.views.iterator import View + from couchbase.result import ( + MultiResult, Result, OperationResult, ValueResult, + ObserveInfo) + + self.factory = Bucket + self.viewfactory = View + self.cls_Result = Result + self.cls_MultiResult = MultiResult + self.cls_OperationResult = OperationResult + self.cls_ObserveInfo = ObserveInfo + self.should_check_refcount = True + warnings.warn('Using fallback (couchbase module) defaults') + def skipLcbMin(self, vstr): """ Test requires a libcouchbase version of at least vstr. This may be a hex number (e.g. 0x020007) or a string (e.g. "2.0.7") """ - if isinstance(vstr, basestring): components = vstr.split('.') hexstr = "0x" From c1efb994de28aa85fd83fefeabb6120a3f689733 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 10 Nov 2015 10:57:09 -0800 Subject: [PATCH 013/829] PYCBC-308: Add tests to check for behavior when now rows are returned This adds some newer smaller n1ql test cases. Depends on a newer mock Change-Id: If66b760d56f0487782afe806e26568a7f995610a Reviewed-on: http://review.couchbase.org/57075 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- couchbase/tests/cases/n1ql_t.py | 31 +++++++++++++++++++++++++++++++ txcouchbase/bucket.py | 2 +- txcouchbase/tests/test_n1ql.py | 10 ++++++++++ 3 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 couchbase/tests/cases/n1ql_t.py diff --git a/couchbase/tests/cases/n1ql_t.py b/couchbase/tests/cases/n1ql_t.py new file mode 100644 index 000000000..fb83cb391 --- /dev/null +++ b/couchbase/tests/cases/n1ql_t.py @@ -0,0 +1,31 @@ +# +# Copyright 2015, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from __future__ import print_function + +from couchbase.tests.base import MockTestCase +from couchbase.n1ql import N1QLQuery + + +class N1QLTest(MockTestCase): + def test_onerow(self): + row = self.cb.n1ql_query('SELECT mockrow').get_single_result() + self.assertEqual('value', row['row']) + + def test_emptyrow(self): + rv = self.cb.n1ql_query('SELECT emptyrow').get_single_result() + self.assertEqual(None, rv) \ No newline at end of file diff --git a/txcouchbase/bucket.py b/txcouchbase/bucket.py index 1575e9d36..515960142 100644 --- a/txcouchbase/bucket.py +++ b/txcouchbase/bucket.py @@ -44,7 +44,7 @@ def __init__(self, *args, **kwargs): You will normally not need to construct this object manually. """ self._d = Deferred() - self.__rows = None # likely a superlcass might have this? + self.__rows = [] # likely a superlcass might have this? def _getDeferred(self): return self._d diff --git a/txcouchbase/tests/test_n1ql.py b/txcouchbase/tests/test_n1ql.py index 56ac620da..cba559ea6 100644 --- a/txcouchbase/tests/test_n1ql.py +++ b/txcouchbase/tests/test_n1ql.py @@ -68,4 +68,14 @@ def verify(o): rows = [r for r in o] self.assertEqual(1, len(rows)) + return d.addCallback(verify) + + def testEmpty(self): + cb = self.make_connection() + d = cb.n1qlQueryAll('SELECT emptyrow') + + def verify(o): + self.assertIsInstance(o, BatchedN1QLRequest) + rows = [r for r in o] + self.assertEqual(0, len(rows)) return d.addCallback(verify) \ No newline at end of file From f7c05fb36d8e6cb456fe2e904b0b745fe3807cef Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 23 Nov 2015 15:20:38 -0800 Subject: [PATCH 014/829] PYCBC-310: remove 'experimental' from n1ql and geo These features are stable from Couchbase 4.0 Change-Id: I72b3ac7a6a85fbf601b602422e1e77f57060369f Reviewed-on: http://review.couchbase.org/57200 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- docs/source/api/n1ql.rst | 7 +------ docs/source/api/views.rst | 7 ------- 2 files changed, 1 insertion(+), 13 deletions(-) diff --git a/docs/source/api/n1ql.rst b/docs/source/api/n1ql.rst index e63141d4a..c9f04c36c 100644 --- a/docs/source/api/n1ql.rst +++ b/docs/source/api/n1ql.rst @@ -2,11 +2,6 @@ N1QL Queries ############ -.. warning:: - At the time of writing, N1QL is still an experimental feature. As such, - both the server-side API as well as the client API are subject to change. - - .. currentmodule:: couchbase.n1ql .. class:: N1QLQuery @@ -27,4 +22,4 @@ N1QL Queries .. automethod:: __init__ .. automethod:: __iter__ .. automethod:: execute - .. automethod:: get_single_result \ No newline at end of file + .. automethod:: get_single_result diff --git a/docs/source/api/views.rst b/docs/source/api/views.rst index 35cde642c..8afbdf3c6 100644 --- a/docs/source/api/views.rst +++ b/docs/source/api/views.rst @@ -688,13 +688,6 @@ functions. Geospatial Views ================ -.. warning:: - - Geospatial views are considered an experimental feature in current - versions of Couchbase Server (the latest version at the time of - writing being 3.0.2). As such, the feature exposed in the SDK itself - is too inherently experimental - Geospatial views are views which can index and filter items based on one or more independent axes or coordinates. This allows greater application at query-time to filter based on more than a single attribute. From e6a40b1ce6afdb98b06267ceab31ce3950ec0310 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 25 Nov 2015 11:11:11 -0800 Subject: [PATCH 015/829] PYCBC-311: Raise exception on view meta access before iteration Change-Id: Ieb4dd93e4f057b5d8fb69a439c1a37073c816675 Reviewed-on: http://review.couchbase.org/57523 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/tests/cases/view_iterator_t.py | 12 +++++++++ couchbase/views/iterator.py | 34 ++++++++++++++++++++---- 2 files changed, 41 insertions(+), 5 deletions(-) diff --git a/couchbase/tests/cases/view_iterator_t.py b/couchbase/tests/cases/view_iterator_t.py index 844da6976..93a13128d 100644 --- a/couchbase/tests/cases/view_iterator_t.py +++ b/couchbase/tests/cases/view_iterator_t.py @@ -379,3 +379,15 @@ def test_pycbc_206(self): view = self.cb.query("beer", "with_value", streaming=True) rows = list(view) self.assertTrue(len(rows)) + + def test_props_before_rows(self): + it = self.cb.query('beer', 'brewery_beers', limit=1) + + def get_rowcount(): + return it.indexed_rows + + def get_errors(): + return it.errors + + self.assertRaises(RuntimeError, get_rowcount) + self.assertRaises(RuntimeError, get_errors) diff --git a/couchbase/views/iterator.py b/couchbase/views/iterator.py index 53ca56bf6..761aff7fb 100644 --- a/couchbase/views/iterator.py +++ b/couchbase/views/iterator.py @@ -236,9 +236,13 @@ def __init__(self, parent, design, view, row_processor=None, self._parent = parent self.design = design self.view = view - self.errors = [] + self._errors = [] self.rows_returned = 0 - self.indexed_rows = 0 + self._indexed_rows = 0 + + # Sentinel used to ensure confusing metadata properties don't get + # returned unless metadata is actually parsed (or query is complete) + self.__meta_received = False if query and params: raise ArgumentError.pyexc( @@ -315,23 +319,43 @@ def query(self): def raw(self): return self.__raw + def __meta_or_raise(self): + """ + Check if meta has been handled and raise an exception otherwise. + Thrown from 'meta-only' properties + """ + if not self.__meta_received: + raise RuntimeError( + 'This property only valid once all rows are received') + + @property + def indexed_rows(self): + self.__meta_or_raise() + return self._indexed_rows + + @property + def errors(self): + self.__meta_or_raise() + return self._errors + def _handle_errors(self, errors): if not errors: return - self.errors += [errors] + self._errors += [errors] if self._query.on_error != 'continue': raise ViewEngineError.pyexc("Error while executing view.", - self.errors) + self._errors) else: warn("Error encountered when executing view. Inspect 'errors' " "for more information") def _handle_meta(self, value): + self.__meta_received = True if not isinstance(value, dict): return - self.indexed_rows = value.get('total_rows', 0) + self._indexed_rows = value.get('total_rows', 0) self._handle_errors(value.get('errors')) def _process_payload(self, rows): From c02aad47cbc028fb51d308a38054a80a14e3e196 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 7 Dec 2015 16:55:49 -0800 Subject: [PATCH 016/829] Add N1QLRequest.meta property This allows easy access to query metadata without exposing the guts of the 'raw' object. Change-Id: I8dc0c5795ddd2105f6ff3311a7b9367ac5739443 Reviewed-on: http://review.couchbase.org/57524 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/n1ql.py | 26 ++++++++++++++++++++++++++ couchbase/tests/cases/n1ql_t.py | 8 +++++++- docs/source/api/n1ql.rst | 1 + 3 files changed, 34 insertions(+), 1 deletion(-) diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 35a452e2d..9415be46c 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -325,6 +325,7 @@ def __init__(self, params, parent, row_factory=lambda x: x): self._mres = None self._do_iter = True self.__raw = False + self.__meta_received = False def _start(self): if self._mres: @@ -338,11 +339,36 @@ def _start(self): def raw(self): return self.__raw + @property + def meta(self): + """ + Get metadata from the query itself. This is guaranteed to only + return a Python dictionary. + + Note that if the query failed, the metadata might not be in JSON + format, in which case there may be additional, non-JSON data + which can be retrieved using the following + + .. code-block:: + + raw_meta = req.raw.value + + :return: A dictionary containing the query metadata + """ + if not self.__meta_received: + raise RuntimeError( + 'This property only valid once all rows are received!') + + if isinstance(self.raw.value, dict): + return self.raw.value + return {} + def _clear(self): del self._parent del self._mres def _handle_meta(self, value): + self.__meta_received = True if not isinstance(value, dict): return if 'errors' in value: diff --git a/couchbase/tests/cases/n1ql_t.py b/couchbase/tests/cases/n1ql_t.py index fb83cb391..bc5e8cb4d 100644 --- a/couchbase/tests/cases/n1ql_t.py +++ b/couchbase/tests/cases/n1ql_t.py @@ -28,4 +28,10 @@ def test_onerow(self): def test_emptyrow(self): rv = self.cb.n1ql_query('SELECT emptyrow').get_single_result() - self.assertEqual(None, rv) \ No newline at end of file + self.assertEqual(None, rv) + + def test_meta(self): + q = self.cb.n1ql_query('SELECT mockrow') + self.assertRaises(RuntimeError, getattr, q, 'meta') + q.execute() + self.assertIsInstance(q.meta, dict) \ No newline at end of file diff --git a/docs/source/api/n1ql.rst b/docs/source/api/n1ql.rst index c9f04c36c..e401997ab 100644 --- a/docs/source/api/n1ql.rst +++ b/docs/source/api/n1ql.rst @@ -22,4 +22,5 @@ N1QL Queries .. automethod:: __init__ .. automethod:: __iter__ .. automethod:: execute + .. automethod:: meta .. automethod:: get_single_result From 9bd46f4f4c27f9d48355d8aee4d9f16902ca5440 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 7 Dec 2015 16:56:44 -0800 Subject: [PATCH 017/829] Allow 'N1QLRequest.execute()' to return itself This allows for more idiomatic access such as execute().meta[...] Change-Id: I1728af5f64227dbb0288185bfa5abad7af1cdb42 Reviewed-on: http://review.couchbase.org/57525 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/n1ql.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 9415be46c..c552a2bc6 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -399,6 +399,8 @@ def execute(self): for _ in self: pass + return self + def get_single_result(self): """ Execute the statement and return its single result. From 185653502397f41f0ff81cc0d7319e32ac2567c5 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 8 Dec 2015 10:26:38 -0800 Subject: [PATCH 018/829] couchbase_version: Warn and generate dummy version for bad tag For some reason, this happens with Travis builds Change-Id: Ie8dd82a3a5aa6c0557cb974ca0b0a644ab017958 Reviewed-on: http://review.couchbase.org/57601 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase_version.py | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/couchbase_version.py b/couchbase_version.py index b82749446..f0411617d 100755 --- a/couchbase_version.py +++ b/couchbase_version.py @@ -16,6 +16,10 @@ class VersionNotFound(Exception): pass +class MalformedGitTag(Exception): + pass + + RE_XYZ = re.compile(r'(\d+)\.(\d+)\.(\d)(?:-(.*))?') VERSION_FILE = os.path.join( @@ -26,7 +30,11 @@ class VersionNotFound(Exception): class VersionInfo(object): def __init__(self, rawtext): self.rawtext = rawtext - vinfo, self.ncommits, self.sha = rawtext.rsplit('-', 2) + t = self.rawtext.rsplit('-', 2) + if len(t) != 3: + raise MalformedGitTag(self.rawtext) + + vinfo, self.ncommits, self.sha = t self.ncommits = int(self.ncommits) # Split up the X.Y.Z @@ -129,7 +137,7 @@ def gen_version(do_write=True, txt=None): try: info = VersionInfo(txt) vstr = info.package_version - except IndexError: + except MalformedGitTag: warnings.warn("Malformed input '{0}'".format(txt)) vstr = '0.0.0'+txt From f614cfc8f8f58537338332b5c2e99c5dcea13737 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 8 Dec 2015 10:07:20 -0800 Subject: [PATCH 019/829] Update travis to 'Container' infrastructure Change-Id: Icd6e957141593f536b4c00db375b52ff598d310c Reviewed-on: http://review.couchbase.org/57602 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- .ci_script.sh | 13 ------------- .travis.yml | 33 +++++++++++++++++++-------------- 2 files changed, 19 insertions(+), 27 deletions(-) delete mode 100755 .ci_script.sh diff --git a/.ci_script.sh b/.ci_script.sh deleted file mode 100755 index c915f2d47..000000000 --- a/.ci_script.sh +++ /dev/null @@ -1,13 +0,0 @@ -#!/bin/sh -set -e -set -x - -# Ensure it loads OK -python -m couchbase.connection - -# Set up our test directory.. -cp .tests.ini.travis tests.ini - -nosetests -v couchbase.tests.test_sync -nosetests -v gcouchbase.tests.test_api || echo "Gevent tests failed" -nosetests -v txcouchbase || echo "Twisted tests failed" diff --git a/.travis.yml b/.travis.yml index e827df0dd..e45301e93 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,5 @@ language: python +sudo: false python: - "2.7" @@ -6,25 +7,29 @@ python: - "3.2" - "3.3" -before_install: - - sudo rm -rf /etc/apt/sources.list.d/* - - wget -O- http://packages.couchbase.com/ubuntu/couchbase.key | sudo apt-key add - - - echo deb http://packages.couchbase.com/ubuntu precise precise/main | sudo tee /etc/apt/sources.list.d/couchbase.list - - sudo apt-get update - - sudo apt-cache search libcouchbase +addons: + apt: + sources: + - couchbase-precise + packages: + - libcouchbase-dev + - libcouchbase2-libevent install: - - sudo apt-get -y install libcouchbase-dev libcouchbase2-core libcouchbase2-libevent libevent-dev - - pip -q install gevent || echo "Couldn't find gevent" - - pip -q install twisted - - pip -q install testresources - - export CFLAGS='-Wextra -Wno-long-long -Wno-missing-field-initializers' - - CFLAGS="$CFLAGS -fno-strict-aliasing -Wno-strict-aliasing" + - pip -q install -r dev_requirements.txt + - pip -q install -r requirements.txt + - pip -q install gevent || echo "Couldn't install gevent" + - pip -q install twisted || echo "Couldn't install Twisted" + - git describe --tags --long --always - python setup.py build_ext --inplace - - sudo $(which python) setup.py install + - python setup.py install script: - - ./.ci_script.sh + - python -m couchbase.bucket + - cp .tests.ini.travis tests.ini + - nosetests -v couchbase.tests.test_sync + - nosetests -v gcouchbase.tests.test_api || echo "Gevent tests failed" + - nosetests -v txcouchbase || echo "Twisted tests failed" notifications: email: From 21f53c7a329e43585f2efe762b33557acfd28c81 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 8 Dec 2015 11:31:59 -0800 Subject: [PATCH 020/829] travis: Temporary hack to work around bad testresources version 1.0.0 is broken for now Change-Id: I3ee3f302f4860df47ddc12ecf3b55785c8c06dab Reviewed-on: http://review.couchbase.org/57603 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- .travis.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.travis.yml b/.travis.yml index e45301e93..8d4443103 100644 --- a/.travis.yml +++ b/.travis.yml @@ -16,6 +16,7 @@ addons: - libcouchbase2-libevent install: + - pip -q install testresources==0.2.7 - pip -q install -r dev_requirements.txt - pip -q install -r requirements.txt - pip -q install gevent || echo "Couldn't install gevent" From 23715787015ca798b2c11ebb824725705d2cefc2 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 21 Dec 2015 13:05:56 -0800 Subject: [PATCH 021/829] PYCBC-317: Undocument ViewIterator.rows_returned This rather confusing attribute isn't very useful Change-Id: Id5c556a59f964a20e53d4541949a933a706867df Reviewed-on: http://review.couchbase.org/58015 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- docs/source/api/views.rst | 9 --------- 1 file changed, 9 deletions(-) diff --git a/docs/source/api/views.rst b/docs/source/api/views.rst index 8afbdf3c6..21ac533c0 100644 --- a/docs/source/api/views.rst +++ b/docs/source/api/views.rst @@ -61,15 +61,6 @@ Attributes Whether documents are fetched along with each row - .. attribute:: rows_returned - - How many actual rows were returned from the server. - - This is incremented each time a new request is made. Note this may - be different from the amount of rows yielded by iterator from - :meth:`RowProcessor.handle_rows` if a custom :attr:`row_processor` - is being used - ^^^^^^^^^^^^^^ Row Processing From fc1ddc85bbfe79f9235a1b0e64a396c41173dc18 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 21 Dec 2015 13:06:39 -0800 Subject: [PATCH 022/829] PYCBC-318: Add `acouchbase` to list of packages to install Change-Id: I8c96c3d320f16e84c4f2b8a0940d621a27fea7cb Reviewed-on: http://review.couchbase.org/58016 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- setup.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4ef3c4422..95ac633b7 100644 --- a/setup.py +++ b/setup.py @@ -132,7 +132,8 @@ 'couchbase.tests', 'couchbase.tests.cases', 'gcouchbase', - 'txcouchbase' + 'txcouchbase', + 'acouchbase' ], package_data = pkgdata, tests_require = [ 'nose', 'testresources>=0.2.7' ], From 4bd53cadc2528bbecb8c010eee94eb58dac6d610 Mon Sep 17 00:00:00 2001 From: Louis-Philippe Huberdeau <lp@huberdeau.info> Date: Wed, 30 Dec 2015 11:45:03 -0500 Subject: [PATCH 023/829] Improve query support for acouchbase * Added tests for existing functionality * Added support for query iteration on view queries and n1ql queries * Added support for async for iterations, allowing to process results before the query is done processing * Enabled travis.ci tests on Python 3.4 and 3.5 Change-Id: I1e832c505ae7dad430c5c4e3d5bbe41d12ae0721 Reviewed-on: http://review.couchbase.org/58161 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- .travis.yml | 3 ++ acouchbase/bucket.py | 11 +++++ acouchbase/iterator.py | 75 +++++++++++++++++++++++++++++++ acouchbase/tests/asyncio_tests.py | 16 +++++++ acouchbase/tests/fixtures.py | 21 +++++++++ acouchbase/tests/py34only.py | 72 +++++++++++++++++++++++++++++ acouchbase/tests/py35only.py | 21 +++++++++ dev_requirements.txt | 1 + setup.py | 1 + tests.ini.sample | 2 +- 10 files changed, 222 insertions(+), 1 deletion(-) create mode 100644 acouchbase/iterator.py create mode 100644 acouchbase/tests/asyncio_tests.py create mode 100644 acouchbase/tests/fixtures.py create mode 100644 acouchbase/tests/py34only.py create mode 100644 acouchbase/tests/py35only.py diff --git a/.travis.yml b/.travis.yml index 8d4443103..12f86212e 100644 --- a/.travis.yml +++ b/.travis.yml @@ -6,6 +6,8 @@ python: - "2.6" - "3.2" - "3.3" + - "3.4" + - "3.5" addons: apt: @@ -29,6 +31,7 @@ script: - python -m couchbase.bucket - cp .tests.ini.travis tests.ini - nosetests -v couchbase.tests.test_sync + - nosetests -v acouchbase - nosetests -v gcouchbase.tests.test_api || echo "Gevent tests failed" - nosetests -v txcouchbase || echo "Twisted tests failed" diff --git a/acouchbase/bucket.py b/acouchbase/bucket.py index 533ad6d6b..4391b42b1 100644 --- a/acouchbase/bucket.py +++ b/acouchbase/bucket.py @@ -4,6 +4,7 @@ import trollius as asyncio from acouchbase.asyncio_iops import IOPS +from acouchbase.iterator import AView, AN1QLRequest from couchbase.async.bucket import AsyncBucket from couchbase.experimental import enabled_or_raise; enabled_or_raise() @@ -42,6 +43,16 @@ def on_err(res, excls, excval, exctb): return ret + def query(self, *args, **kwargs): + if "itercls" not in kwargs: + kwargs["itercls"] = AView + return super().query(*args, **kwargs) + + def n1ql_query(self, *args, **kwargs): + if "itercls" not in kwargs: + kwargs["itercls"] = AN1QLRequest + return super().n1ql_query(*args, **kwargs) + locals().update(AsyncBucket._gen_memd_wrappers(_meth_factory)) def connect(self): diff --git a/acouchbase/iterator.py b/acouchbase/iterator.py new file mode 100644 index 000000000..d5f5e45d4 --- /dev/null +++ b/acouchbase/iterator.py @@ -0,0 +1,75 @@ +import asyncio +from couchbase.async.view import AsyncViewBase +from couchbase.async.n1ql import AsyncN1QLRequest + + +class AioBase: + def __init__(self): + self.__local_done = False + self.__accum = asyncio.Queue() + self._future = asyncio.Future() + self.start() + self.raw.rows_per_call = 1000 + + @property + @asyncio.coroutine + def future(self): + yield from self._future + self._future = None + + def __iter__(self): + if self._future is not None: + raise ValueError("yield from result.future before calling non-async for.") + + yield from iter(self.__accum.get_nowait, None) + + @asyncio.coroutine + def __aiter__(self): + if self._future is None: + raise ValueError("do not yield from result.future before calling async for.") + + return self + + @asyncio.coroutine + def __anext__(self): + try: + out = None + if self._future.done() and not self.__accum.empty(): + out = self.__accum.get_nowait() + elif not self._future.done(): + out = yield from self.__accum.get() + + if out is None: + raise StopAsyncIteration + + return out + except asyncio.queues.QueueEmpty: + raise StopAsyncIteration + + def on_rows(self, rowiter): + for row in rowiter: + self.__accum.put_nowait(row) + + def on_done(self): + self._future.set_result(None) + self.__accum.put_nowait(None) + + def on_error(self, ex): + self._future.set_exception(ex) + self.__accum.put_nowait(None) + + +class AView(AioBase, AsyncViewBase): + + def __init__(self, *args, **kwargs): + AsyncViewBase.__init__(self, *args, **kwargs) + AioBase.__init__(self) + + +class AN1QLRequest(AioBase, AsyncN1QLRequest): + + def __init__(self, *args, **kwargs): + AsyncN1QLRequest.__init__(self, *args, **kwargs) + AioBase.__init__(self) + + diff --git a/acouchbase/tests/asyncio_tests.py b/acouchbase/tests/asyncio_tests.py new file mode 100644 index 000000000..a62e2235d --- /dev/null +++ b/acouchbase/tests/asyncio_tests.py @@ -0,0 +1,16 @@ +# SyntaxError will trigger if yield or async is not supported +# ImportError will fail for python 3.3 because asyncio does not exist + +try: + from py34only import CouchbaseBeerTest, CouchbaseDefaultTest +except ImportError: + pass +except SyntaxError: + pass + +try: + from py35only import CouchbasePy35Test +except ImportError: + pass +except SyntaxError: + pass diff --git a/acouchbase/tests/fixtures.py b/acouchbase/tests/fixtures.py new file mode 100644 index 000000000..a3d4ae162 --- /dev/null +++ b/acouchbase/tests/fixtures.py @@ -0,0 +1,21 @@ +import asyncio +import unittest + +from couchbase.tests.base import ConnectionConfiguration, MockResourceManager, MockTestCase +from acouchbase.bucket import Bucket + +from functools import wraps + + +def asynct(f): + @wraps(f) + def wrapper(*args, **kwargs): + future = f(*args, **kwargs) + loop = asyncio.get_event_loop() + loop.run_until_complete(future) + + return wrapper + +class AioTestCase(MockTestCase): + factory = Bucket + should_check_refcount = False diff --git a/acouchbase/tests/py34only.py b/acouchbase/tests/py34only.py new file mode 100644 index 000000000..fc688672c --- /dev/null +++ b/acouchbase/tests/py34only.py @@ -0,0 +1,72 @@ +import asyncio +import unittest + +from couchbase.experimental import enable; enable() +from fixtures import asynct, AioTestCase +from couchbase.n1ql import N1QLQuery + + + +class CouchbaseBeerTest(AioTestCase): + + def make_connection(self): + try: + return super().make_connection(bucket='beer-sample') + except CouchbaseError: + raise SkipTest("Need 'beer-sample' bucket for this") + + @asynct + @asyncio.coroutine + def test_get_data(self): + beer_bucket = self.cb + yield from (beer_bucket.connect() or asyncio.sleep(0.01)) + + data = yield from beer_bucket.get('21st_amendment_brewery_cafe') + self.assertEqual("21st Amendment Brewery Cafe", data.value["name"]) + + @asynct + @asyncio.coroutine + def test_query(self): + beer_bucket = self.cb + + yield from (beer_bucket.connect() or asyncio.sleep(0.01)) + viewiter = beer_bucket.query("beer", "brewery_beers", limit=10) + yield from viewiter.future + + count = len(list(viewiter)) + + self.assertEqual(count, 10) + + +class CouchbaseDefaultTest(AioTestCase): + + @asynct + @asyncio.coroutine + def test_upsert(self): + import uuid + + expected = str(uuid.uuid4()) + + default_bucket = self.cb + yield from (default_bucket.connect() or asyncio.sleep(0.01)) + + yield from default_bucket.upsert('hello', {"key": expected}) + + obtained = yield from default_bucket.get('hello') + self.assertEqual({"key": expected}, obtained.value) + + + @asynct + @asyncio.coroutine + def test_n1ql(self): + + default_bucket = self.cb + yield from (default_bucket.connect() or asyncio.sleep(0.01)) + + q = N1QLQuery("SELECT mockrow") + it = default_bucket.n1ql_query(q) + yield from it.future + + data = list(it) + self.assertEqual('value', data[0]['row']) + diff --git a/acouchbase/tests/py35only.py b/acouchbase/tests/py35only.py new file mode 100644 index 000000000..c61835770 --- /dev/null +++ b/acouchbase/tests/py35only.py @@ -0,0 +1,21 @@ +from fixtures import asynct, AioTestCase + +class CouchbasePy35Test(AioTestCase): + + def make_connection(self): + try: + return super().make_connection(bucket='beer-sample') + except CouchbaseError: + raise SkipTest("Need 'beer-sample' bucket for this") + + @asynct + async def test_query_with_async_iterator(self): + beer_bucket = self.cb + await (beer_bucket.connect() or asyncio.sleep(0.01)) + viewiter = beer_bucket.query("beer", "brewery_beers", limit=10) + + count = 0 + async for row in viewiter: + count += 1 + + self.assertEqual(count, 10) diff --git a/dev_requirements.txt b/dev_requirements.txt index 6f181a027..c4c625966 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,4 +1,5 @@ nose==1.3.0 +pbr==1.8.1 numpydoc==0.4 sphinx==1.2b1 testresources>=0.2.7 diff --git a/setup.py b/setup.py index 95ac633b7..1e38fdfa1 100644 --- a/setup.py +++ b/setup.py @@ -125,6 +125,7 @@ "Topic :: Software Development :: Libraries :: Python Modules"], packages = [ + 'acouchbase', 'couchbase', 'couchbase.views', 'couchbase.iops', diff --git a/tests.ini.sample b/tests.ini.sample index c168508d7..03cffb3a2 100644 --- a/tests.ini.sample +++ b/tests.ini.sample @@ -22,4 +22,4 @@ enabled = True ; Local path for the mock path = /tmp/CouchbaseMock-1.0.0-g50cf222.jar ; Where to download it, if not available -url = http://packages.coucbase.com/clients/c/mock/CouchbaseMock-LATEST.jar +url = http://packages.couchbase.com/clients/c/mock/CouchbaseMock-LATEST.jar From c0e60acb2a3f61d0e10bdf6f228ab3f8739c0f62 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 9 Dec 2015 13:47:46 -0800 Subject: [PATCH 024/829] Replace deprecated assertEquals with assertEqual This gets annoying as we get warnings in the test output Change-Id: Ib123660e448b493f14c48bb3cd8f23c6f94ed4ef Reviewed-on: http://review.couchbase.org/57646 Well-Formed: buildbot <build@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/tests/cases/append_t.py | 2 +- couchbase/tests/cases/arithmetic_t.py | 16 ++++++++-------- couchbase/tests/cases/connection_t.py | 2 +- couchbase/tests/cases/encodings_t.py | 4 ++-- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/couchbase/tests/cases/append_t.py b/couchbase/tests/cases/append_t.py index eff934a40..b06a1a82e 100644 --- a/couchbase/tests/cases/append_t.py +++ b/couchbase/tests/cases/append_t.py @@ -32,7 +32,7 @@ def test_append_prepend(self): self.cb.upsert(key, vbase, format=FMT_UTF8) self.cb.prepend(key, "begin ") self.cb.append(key, " end") - self.assertEquals(self.cb.get(key).value, + self.assertEqual(self.cb.get(key).value, "begin middle end") diff --git a/couchbase/tests/cases/arithmetic_t.py b/couchbase/tests/cases/arithmetic_t.py index dd2488ad3..6298f588f 100644 --- a/couchbase/tests/cases/arithmetic_t.py +++ b/couchbase/tests/cases/arithmetic_t.py @@ -31,15 +31,15 @@ def test_trivial_incrdecr(self): self.assertEqual(int(rv_get.value), 1) rv = self.cb.counter(key) - self.assertEquals(rv.value, 2) + self.assertEqual(rv.value, 2) rv = self.cb.counter(key, delta=-1) - self.assertEquals(rv.value, 1) - self.assertEquals(int(self.cb.get(key).value), 1) + self.assertEqual(rv.value, 1) + self.assertEqual(int(self.cb.get(key).value), 1) rv = self.cb.counter(key, delta=-1) - self.assertEquals(rv.value, 0) - self.assertEquals(int(self.cb.get(key).value), 0) + self.assertEqual(rv.value, 0) + self.assertEqual(int(self.cb.get(key).value), 0) def test_incr_notfound(self): key = self.gen_key("incr_notfound") @@ -83,7 +83,7 @@ def test_incr_extended(self): key = self.gen_key("incr_extended") self.cb.remove(key, quiet=True) rv = self.cb.counter(key, initial=10) - self.assertEquals(rv.value, 10) + self.assertEqual(rv.value, 10) srv = self.cb.upsert(key, "42", cas=rv.cas) self.assertTrue(srv.success) @@ -91,8 +91,8 @@ def test_incr_extended(self): klist = self.gen_key_list(amount=5, prefix="incr_extended_list") self.cb.remove_multi(klist, quiet=True) rvs = self.cb.counter_multi(klist, initial=40) - [ self.assertEquals(x.value, 40) for x in rvs.values() ] - self.assertEquals(sorted(list(rvs.keys())), sorted(klist)) + [ self.assertEqual(x.value, 40) for x in rvs.values() ] + self.assertEqual(sorted(list(rvs.keys())), sorted(klist)) if __name__ == '__main__': diff --git a/couchbase/tests/cases/connection_t.py b/couchbase/tests/cases/connection_t.py index 000b1d342..141fc27ea 100644 --- a/couchbase/tests/cases/connection_t.py +++ b/couchbase/tests/cases/connection_t.py @@ -77,7 +77,7 @@ def test_configcache(self): cb2 = self.factory(**self.make_connargs(config_cache=cachefile.name)) self.assertTrue(cb2.upsert("foo", "bar").success) - self.assertEquals("bar", cb.get("foo").value) + self.assertEqual("bar", cb.get("foo").value) sb = os.stat(cachefile.name) diff --git a/couchbase/tests/cases/encodings_t.py b/couchbase/tests/cases/encodings_t.py index f550c8486..4e16fca24 100644 --- a/couchbase/tests/cases/encodings_t.py +++ b/couchbase/tests/cases/encodings_t.py @@ -34,7 +34,7 @@ def test_unicode(self): for f in (FMT_BYTES, FMT_PICKLE): cas = self.cb.upsert(txt, txt.encode('utf-16'), format=f).cas server_val = self.cb.get(txt).value - self.assertEquals(server_val, BLOB_ORIG) + self.assertEqual(server_val, BLOB_ORIG) def test_json_unicode(self): self.assertEqual(self.cb.default_format, FMT_JSON) @@ -66,7 +66,7 @@ def test_blob(self): cas = self.cb.upsert("key", blob, format=f).cas self.assertTrue(cas) rv = self.cb.get("key").value - self.assertEquals(rv, blob) + self.assertEqual(rv, blob) def test_bytearray(self): ba = bytearray(b"Hello World") From 302e3b8d64c8bc4432b0229ce8e877e747a58996 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 5 Jan 2016 14:19:17 -0800 Subject: [PATCH 025/829] Add acouchbase example to README Change-Id: Ib2cb2bdd0d377655dd618f671689f0e879843eeb Reviewed-on: http://review.couchbase.org/58287 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- README.rst | 29 +++++++++++++++++++++++++++++ 1 file changed, 29 insertions(+) diff --git a/README.rst b/README.rst index e888ff382..f2e5ce659 100644 --- a/README.rst +++ b/README.rst @@ -172,6 +172,35 @@ GEvent API The API functions exactly like the normal Bucket API, except that the implementation is significantly different. +------------------------ +Asynchronous (Tulip) API +------------------------ + +This module also supports Python 3.4/3.5 asynchronous I/O. To use this +functionality, import the `couchbase.experimental` module (since this +functionality is considered experimental) and then import the `acouchbase` +module. The `acouchbase` module offers an API similar to the synchronous +client: + +.. code-block:: python + + import asyncio + + import couchbase.experimental + couchbase.experimental.enable() + from acouchbase.bucket import Bucket + + + async def write_and_read(key, value): + cb = Bucket('couchbase://10.0.0.31/default') + await cb.connect() + await cb.upsert(key, value) + return await cb.get(key) + + loop = asyncio.get_event_loop() + rv = loop.run_until_complete(write_and_read('foo', 'bar')) + print(rv.value) + ~~~~ PyPy From a10df216239424e129e30ce4401fc011d6097a2d Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 5 Jan 2016 14:19:30 -0800 Subject: [PATCH 026/829] iops: allow events to be created when there is an exception This is needed for acouchbase support, as the signalling of the event loop to stop is done using an exception, and the asynchronous cancelling of events itself (via lcb_destroy_async) is done using timers Change-Id: I012f81ade6dec4a934e0099801c55aefbdbc10ff Reviewed-on: http://review.couchbase.org/58288 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- src/iops.c | 49 +++++++++++++++++++++++++++++++++++++++++++++++-- 1 file changed, 47 insertions(+), 2 deletions(-) diff --git a/src/iops.c b/src/iops.c index 30b8916f4..1b260569a 100644 --- a/src/iops.c +++ b/src/iops.c @@ -335,6 +335,51 @@ pycbc_IOPSWrapperType_init(PyObject **ptr) return PyType_Ready(p); } +static PyObject * +do_safecall(PyObject *callable, PyObject *args) +{ + int has_error = 0; + PyObject *exctype = NULL, *excval = NULL, *exctb = NULL; + PyObject *result; + + if (PyErr_Occurred()) { + /* Calling from within handler */ + has_error = 1; + PyErr_Fetch(&exctype, &excval, &exctb); + PyErr_Clear(); + } + + result = PyObject_CallObject(callable, args); + if (!has_error) { + /* No special handling here... */ + return result; + } + + if (!result) { + + #if PY_MAJOR_VERSION == 3 + PyObject *exctype2, *excval2, *exctb2; + PyErr_NormalizeException(&exctype, &excval, &exctb); + PyErr_Fetch(&exctype2, &excval2, &exctb2); + PyErr_NormalizeException(&exctype2, &excval, &exctb2); + /* Py3K has exception contexts we can use! */ + PyException_SetContext(excval2, excval); + excval = NULL; /* Since SetContext steals a reference */ + PyErr_Restore(exctype2, excval2, exctb2); + #else + PyErr_PrintEx(0); + #endif + + /* Clean up remaining variables */ + Py_XDECREF(exctype); + Py_XDECREF(excval); + Py_XDECREF(exctb); + } else { + PyErr_Restore(exctype, excval, exctb); + } + return result; +} + static int modify_event_python(pycbc_IOPSWrapper *pio, pycbc_Event *ev, pycbc_evaction_t action, lcb_socket_t newsock, void *arg) @@ -366,7 +411,7 @@ modify_event_python(pycbc_IOPSWrapper *pio, pycbc_Event *ev, } PyTuple_SET_ITEM(argtuple, 2, o_arg); - result = PyObject_CallObject(meth, argtuple); + result = do_safecall(meth, argtuple); Py_DECREF(argtuple); Py_XDECREF(result); @@ -411,7 +456,7 @@ create_event_python(lcb_io_opt_t io, pycbc_evtype_t evtype) } if (meth) { - ret = PyObject_CallObject(meth, NULL); + ret = do_safecall(meth, NULL); if (!ret) { PyErr_PrintEx(0); abort(); From 12467124a620c23317f64dc8c11c8495aeec0da3 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 1 Mar 2016 15:28:09 -0800 Subject: [PATCH 027/829] PYCBC-319: Allow multi-bucket at_plus consistency Change-Id: I433c05a42561e6d26a85685bcf3d0b9e2fcb61cd Reviewed-on: http://review.couchbase.org/60808 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> --- couchbase/__init__.py | 22 +++ couchbase/n1ql.py | 233 +++++++++++++++++-------- couchbase/tests/cases/n1qlstrings_t.py | 27 ++- docs/source/api/n1ql.rst | 4 +- src/callbacks.c | 5 +- 5 files changed, 204 insertions(+), 87 deletions(-) diff --git a/couchbase/__init__.py b/couchbase/__init__.py index 91d17c954..cc02a54bb 100644 --- a/couchbase/__init__.py +++ b/couchbase/__init__.py @@ -90,6 +90,28 @@ def set_pickle_converters(encode, decode): return (ret['pickle_encode'], ret['pickle_decode']) +def _to_json(*args): + """ + Utility function to encode an object to json using the user-defined + JSON encoder (see :meth:`set_json_converters`). + + :param args: Arguments passed to the encoder + :return: Serialized JSON string + """ + return _LCB._get_helper('json_encode').dumps(*args) + + +def _from_json(*args): + """ + Utility function to decode a JSON string to a Python object using + the user-defined JSON decoder (see :meth:`set_json_converters`). + + :param args: Arguments passed to the decoder + :return: Python object converted from JSON + """ + return _LCB._get_helper('json_decode').loads(*args) + + def enable_logging(): """ Enables integration with Python's `logging` module. diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index c552a2bc6..6f2895646 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -16,6 +16,7 @@ # import json +import couchbase from couchbase._pyport import basestring from couchbase.views.iterator import AlreadyQueriedError from couchbase.exceptions import CouchbaseError, NotSupportedError @@ -41,6 +42,150 @@ def n1ql_errcode(self): """ +class MissingTokenError(CouchbaseError): + pass + + +class MutationState(object): + """ + .. warning:: + + The API and implementation of this class are subject to change. + + This class acts as a container for one or more mutations. It may + then be used with the :meth:`~.N1QLQuery.consistent_with` method to + indicate that a given query should be bounded by the contained + mutations. + + Using `consistent_with` is similar to setting + :attr:`~.N1QLQuery.consistency` to :data:`CONSISTENCY_REQUEST`, + but is more optimal as the query will use cached data, *except* + when the given mutation(s) are concerned. This option is useful + for use patterns when an application has just performed a mutation, + and wishes to perform a query in which the newly-performed mutation + should reflect on the query results. + + .. note:: + + This feature requires Couchbase Server 4.5 or greater, + and also requires that `fetch_mutation_tokens=true` + be specified in the connection string when creating + a :class:`~couchbase.bucket.Bucket` + + .. code-block:: python + + cb = Bucket('couchbase://localhost/default?fetch_mutation_tokens=true') + + rvs = cb.upsert_multi({ + 'foo': {'type': 'user', 'value': 'a foo value'}, + 'bar': {'type': 'user', 'value': 'a bar value'} + }) + + nq = N1QLQuery('SELECT type, value FROM default WHERE type="user"') + ms = MutationToken() + ms.add_result(rv + nq.consistent_with_ops(*rvs.values()) + for row in cb.n1ql_query(nq): + # ... + """ + def __init__(self): + self._sv = {} + + def _add_scanvec(self, mutinfo): + """ + Internal method used to specify a scan vector. + :param mutinfo: A tuple in the form of + `(vbucket id, vbucket uuid, mutation sequence)` + """ + vb, uuid, seq, bktname = mutinfo + self._sv.setdefault(bktname, {})[vb] = (seq, str(uuid)) + + def encode(self): + """ + Encodes this state object to a string. This string may be passed + to the :meth:`decode` at a later time. The returned object is safe + for sending over the network. + :return: A serialized string representing the state + """ + return couchbase._to_json(self._sv) + + @classmethod + def decode(cls, s): + """ + Create a :class:`MutationState` from the encoded string + :param s: The encoded string + :return: A new MutationState restored from the string + """ + d = couchbase._from_json(s) + o = MutationState() + o._sv = d + # TODO: Validate + + def add_results(self, *rvs, **kwargs): + """ + Changes the state to reflect the mutation which yielded the given + result. + + In order to use the result, the `fetch_mutation_tokens` option must + have been specified in the connection string, _and_ the result + must have been successful. + + :param rvs: One or more :class:`~.OperationResult` which have been + returned from mutations + :param quiet: Suppress errors if one of the results does not + contain a convertible state. + :return: `True` if the result was valid and added, `False` if not + added (and `quiet` was specified + :raise: :exc:`~.MissingTokenError` if `result` does not contain + a valid token + """ + if not rvs: + raise MissingTokenError.pyexc(message='No results passed') + for rv in rvs: + mi = rv._mutinfo + if not mi: + if kwargs.get('quiet'): + return False + raise MissingTokenError.pyexc( + message='Result does not contain token') + self._add_scanvec(mi) + return True + + def add_all(self, bucket, quiet=False): + """ + Ensures the query result is consistent with all prior + mutations performed by a given bucket. + + Using this function is equivalent to keeping track of all + mutations performed by the given bucket, and passing them to + :meth:`~add_result` + + :param bucket: A :class:`~couchbase.bucket.Bucket` object + used for the mutations + :param quiet: If the bucket contains no valid mutations, this + option suppresses throwing exceptions. + :return: `True` if at least one mutation was added, `False` if none + were added (and `quiet` was specified) + :raise: :exc:`~.MissingTokenError` if no mutations were added and + `quiet` was not specified + """ + added = False + for mt in bucket._mutinfo(): + added = True + self._add_scanvec(mt) + if not added and not quiet: + raise MissingTokenError('Bucket object contains no tokens!') + return added + + def __repr__(self): + return repr(self._sv) + + def __nonzero__(self): + return bool(self._sv) + + __bool__ = __nonzero__ + + class N1QLQuery(object): def __init__(self, query, *args, **kwargs): """ @@ -152,84 +297,22 @@ def consistency(self): def consistency(self, value): self._body['scan_consistency'] = value - def _add_scanvec(self, mutinfo): - """ - Internal method used to specify a scan vector. - :param mutinfo: A tuple in the form of - `(vbucket id, vbucket uuid, mutation sequence)` + def consistent_with(self, state): """ - vb, uuid, seq = mutinfo - self._body.setdefault('scan_vector', {})[vb] = { - 'value': seq, - 'guard': str(uuid) - } - self.consistency = 'at_plus' + Indicate that the query should be consistent with one or more + mutations. - def consistent_with_ops(self, *ops, **kwargs): + :param state: The state of the mutations it should be consistent + with. """ - Ensure results reflect consistency of one or more mutations. - This is similar to setting :attr:`consistency` to - :data:`CONSISTENCY_REQUEST`, but is more optimal as the query - will use cached data, *except* when the given mutation(s) are - concerned. This option is useful for use patterns when an - application has just performed a mutation, and wishes to - perform a query in which the newly-performed mutation - should reflect on the query results. - - :param ops: One or more :class:`~.OperationResult` objects - :param bool quiet: Whether to suppress throwing an exception - if one or more operations are missing mutation information. - :raise: :exc:`~.NotSupportedError` if one of the mutations - - .. note:: - - This feature requires Couchbase Server 4.0 or greater, - and also requires that `fetch_mutation_tokens=true` - be specified in the connection string when creating - a :class:`~couchbase.bucket.Bucket` - - - .. code-block:: python - - cb = Bucket('couchbase://localhost/default?fetch_mutation_tokens=true') + if self.consistency not in (CONSISTENCY_NONE, 'at_plus'): + raise TypeError( + 'consistent_with not valid with other consistency options') - rvs = cb.upsert_multi({ - 'foo': {'type': 'user', 'value': 'a foo value'}, - 'bar': {'type': 'user', 'value': 'a bar value'} - }) - - nq = N1QLQuery('SELECT type, value FROM default WHERE type="user"') - nq.consistent_with_ops(*rvs.values()) - for row in cb.n1ql_query(nq): - # ... - - - .. seealso:: :meth:`~consistent_with_all` - """ - for op in ops: - if not op._mutinfo: - if kwargs.get('quiet'): - continue - raise NotSupportedError.pyexc( - "OperationResult object missing mutation information. " - "Ensure that `fetch_mutation_tokens=true` was specified " - "in the connection string") - self._add_scanvec(op._mutinfo) - - def consistent_with_all(self, bucket): - """ - Ensures the query result is consistent with all prior - mutations performed by a given bucket. - - Using this function is equivalent to keeping track of all - mutations performed by the given bucket, and passing them to - :meth:`~consistent_with_ops` - - :param bucket: A :class:`~couchbase.bucket.Bucket` object - used for the mutations - """ - for mt in bucket._mutinfo(): - self._add_scanvec(mt) + if not state: + raise TypeError('Passed empty or invalid state', state) + self.consistency = 'at_plus' + self._body['scan_vectors'] = state._sv # TODO: I really wish Sphinx were able to automatically # document instance vars @@ -349,7 +432,7 @@ def meta(self): format, in which case there may be additional, non-JSON data which can be retrieved using the following - .. code-block:: + :: raw_meta = req.raw.value diff --git a/couchbase/tests/cases/n1qlstrings_t.py b/couchbase/tests/cases/n1qlstrings_t.py index 4ecceb52e..458ddb3eb 100644 --- a/couchbase/tests/cases/n1qlstrings_t.py +++ b/couchbase/tests/cases/n1qlstrings_t.py @@ -20,6 +20,7 @@ from couchbase.tests.base import CouchbaseTestCase from couchbase.n1ql import N1QLQuery, CONSISTENCY_REQUEST, CONSISTENCY_NONE +from couchbase.n1ql import MutationState class N1QLStringTest(CouchbaseTestCase): @@ -69,26 +70,34 @@ def test_encode_scanvec(self): # and guard is a vbucket's UUID. q = N1QLQuery('SELECT * FROM default') + ms = MutationState() + ms._add_scanvec((42, 3004, 3, 'default')) + q.consistent_with(ms) - q._add_scanvec((42, 3004, 3)) dval = json.loads(q.encoded) sv_exp = { - '42': {'value': 3, 'guard': '3004'} + 'default': {'42': [3, '3004']} } self.assertEqual('at_plus', dval['scan_consistency']) - self.assertEqual(sv_exp, dval['scan_vector']) + self.assertEqual(sv_exp, dval['scan_vectors']) # Ensure the vb field gets updated. No duplicates! - q._add_scanvec((42, 3004, 4)) - sv_exp['42']['value'] = 4 + ms._add_scanvec((42, 3004, 4, 'default')) + sv_exp['default']['42'] = [4, '3004'] dval = json.loads(q.encoded) - self.assertEqual(sv_exp, dval['scan_vector']) + self.assertEqual(sv_exp, dval['scan_vectors']) - q._add_scanvec((91, 7779, 23)) + ms._add_scanvec((91, 7779, 23, 'default')) dval = json.loads(q.encoded) - sv_exp['91'] = {'guard': '7779', 'value': 23} - self.assertEqual(sv_exp, dval['scan_vector']) + sv_exp['default']['91'] = [23, '7779'] + self.assertEqual(sv_exp, dval['scan_vectors']) + + # Try with a second bucket + sv_exp['other'] = {'666': [99, '5551212']} + ms._add_scanvec((666, 5551212, 99, 'other')) + dval = json.loads(q.encoded) + self.assertEqual(sv_exp, dval['scan_vectors']) def test_timeout(self): q = N1QLQuery('SELECT foo') diff --git a/docs/source/api/n1ql.rst b/docs/source/api/n1ql.rst index e401997ab..304d67de8 100644 --- a/docs/source/api/n1ql.rst +++ b/docs/source/api/n1ql.rst @@ -8,6 +8,7 @@ N1QL Queries .. automethod:: __init__ .. automethod:: set_option + .. automethod:: consistent_with .. autoattribute:: consistency .. autoattribute:: encoded .. autoattribute:: adhoc @@ -16,11 +17,12 @@ N1QL Queries .. autodata:: CONSISTENCY_NONE .. autodata:: CONSISTENCY_REQUEST +.. autoclass:: MutationState .. class:: N1QLRequest .. automethod:: __init__ .. automethod:: __iter__ .. automethod:: execute - .. automethod:: meta + .. autoattribute:: meta .. automethod:: get_single_result diff --git a/src/callbacks.c b/src/callbacks.c index 8797ade61..6d2aa110e 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -240,10 +240,11 @@ durability_chain_common(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) mutinfo = lcb_resp_get_mutation_token(cbtype, resp); if (mutinfo && LCB_MUTATION_TOKEN_ISVALID(mutinfo)) { /* Create the mutation token tuple: (vb,uuid,seqno) */ - res->mutinfo = Py_BuildValue("HKK", + res->mutinfo = Py_BuildValue("HKKO", LCB_MUTATION_TOKEN_VB(mutinfo), LCB_MUTATION_TOKEN_ID(mutinfo), - LCB_MUTATION_TOKEN_SEQ(mutinfo)); + LCB_MUTATION_TOKEN_SEQ(mutinfo), + conn->bucket); } else { Py_INCREF(Py_None); res->mutinfo = Py_None; From b95dfe1435d01f283da4b7e9c528ee81a156de64 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 16 Mar 2016 10:11:56 -0700 Subject: [PATCH 028/829] PYCBC-335: Undocument 'flags' field from ValueResult This is not part of the 2.0 API Change-Id: Ie2e6388a2ade11f31b4ebf62e7f908c59c6c8240 Reviewed-on: http://review.couchbase.org/61731 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- src/opresult.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/opresult.c b/src/opresult.c index 03796f63f..865e18092 100644 --- a/src/opresult.c +++ b/src/opresult.c @@ -80,7 +80,7 @@ static struct PyMemberDef OperationResult_TABLE_members[] = { static struct PyMemberDef ValueResult_TABLE_members[] = { { "flags", T_ULONG, offsetof(pycbc_ValueResult, flags), - READONLY, PyDoc_STR("Flags for the value") + READONLY }, { NULL } }; From d9c7408979db50dd59fef8744da9917906d2a5bc Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 7 Jul 2015 10:01:44 -0700 Subject: [PATCH 029/829] PYCBC-320: Sub-Document API Change-Id: Iae3d5e9a722ff2acc2ae53eceec4fbab33f32e49 Reviewed-on: http://review.couchbase.org/55573 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/_bootstrap.py | 9 +- couchbase/bucket.py | 77 +++++++++- couchbase/exceptions.py | 94 +++++++++++- couchbase/result.py | 138 ++++++++++++++++- couchbase/subdocument.py | 240 ++++++++++++++++++++++++++++++ couchbase/tests/cases/subdoc_t.py | 234 +++++++++++++++++++++++++++++ docs/source/api/couchbase.rst | 21 +++ docs/source/api/exceptions.rst | 4 + docs/source/api/subdoc.rst | 79 ++++++++++ docs/source/conf.py | 4 +- docs/source/index.rst | 1 + docs/source/pycbc_xref.py | 39 +++++ src/bucket.c | 3 + src/callbacks.c | 185 +++++++++++++++++++---- src/constants.c | 26 +++- src/ext.c | 3 +- src/get.c | 74 +++++++++ src/opresult.c | 60 ++++++++ src/oputil.c | 174 ++++++++++++++++++++++ src/oputil.h | 23 +++ src/pycbc.h | 30 +++- src/store.c | 41 ++++- 22 files changed, 1515 insertions(+), 44 deletions(-) create mode 100644 couchbase/subdocument.py create mode 100644 couchbase/tests/cases/subdoc_t.py create mode 100644 docs/source/api/subdoc.rst create mode 100644 docs/source/pycbc_xref.py diff --git a/couchbase/_bootstrap.py b/couchbase/_bootstrap.py index 8bb5e823f..88edcfe0d 100644 --- a/couchbase/_bootstrap.py +++ b/couchbase/_bootstrap.py @@ -34,6 +34,8 @@ import couchbase.exceptions as E import couchbase._libcouchbase as C from couchbase.items import ItemCollection, ItemOptionDict, ItemSequence +from couchbase.result import SubdocResult +from couchbase.subdocument import MultiValue def _result__repr__(self): @@ -68,6 +70,9 @@ def _result__repr__(self): if flags & C.PYCBC_RESFLD_URL and hasattr(self, "url"): details.append("url={0}".format(self.url)) + if hasattr(self, '_pycbc_repr_extra'): + details += self._pycbc_repr_extra() + ret = "{0}<{1}>".format(self.__class__.__name__, ', '.join(details)) return ret @@ -141,7 +146,9 @@ def run_init(m): itmopts_dict_type=ItemOptionDict, itmopts_seq_type=ItemSequence, fmt_auto=_FMT_AUTO, - view_path_helper=_view_path_helper) + view_path_helper=_view_path_helper, + sd_result_type=SubdocResult, + sd_multival_type=MultiValue) run_init(C) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 448c87778..29baa272a 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -719,6 +719,79 @@ def counter(self, key, delta=1, initial=None, ttl=0): """ return _Base.counter(self, key, delta=delta, initial=initial, ttl=ttl) + def mutate_in(self, key, *specs, **kwargs): + """Perform multiple atomic modifications within a document. + + :param key: The key of the document to modify + :param specs: A list of specs (See :mod:`couchbase.subdocument`) + :param kwargs: CAS, etc. + :return: A :class:`~.couchbase.result.SubdocResult` object. + + Here's an example of adding a new tag to a "user" document + and incrementing a modification counter:: + + import couchbase.subdocument as SD + # .... + cb.mutate_in('user', + SD.add_unique('tags', 'dog'), + SD.counter('updates', 1)) + + .. seealso:: :mod:`couchbase.subdocument` + """ + return super(Bucket, self).mutate_in(key, specs, **kwargs) + + def lookup_in(self, key, *specs, **kwargs): + """Atomically retrieve one or more paths from a document. + + :param key: The key of the document to lookup + :param spec: A list of specs (see :mod:`couchbase.subdocument`) + :return: A :class:`.couchbase.result.SubdocResult` object. + This object contains the results and any errors of the + operation. + + Example:: + + import couchbase.subdocument as SD + rv = cb.lookup_in('user', + SD.get('email'), + SD.get('name'), + SD.exists('friends.therock')) + + email = rv[0] + name = rv[1] + friend_exists = rv.exists(2) + + .. seealso:: meth:`retrieve_in` which acts as a convenience wrapper + """ + return super(Bucket, self).lookup_in({key: specs}, **kwargs) + + def retrieve_in(self, key, *paths, **kwargs): + """Atomically fetch one or more paths from a document. + + Convenience method for retrieval operations. This functions + identically to :meth:`lookup_in`. As such, the following two + forms are equivalent: + + .. code-block:: python + + import couchbase.subdocument as SD + rv = cb.lookup_in(key, + SD.get('email'), + SD.get('name'), + SD.get('friends.therock') + + email, name, friend = rv + + .. code-block:: python + + rv = cb.retrieve_in(key, 'email', 'name', 'friends.therock') + email, name, friend = rv + + .. seealso:: :meth:`lookup_in` + """ + import couchbase.subdocument as SD + return self.lookup_in(key, *tuple(SD.get(x) for x in paths), **kwargs) + def incr(self, key, amount=1, **kwargs): _depr('incr', 'counter') return self.counter(key, delta=amount, **kwargs) @@ -1398,9 +1471,9 @@ def _tmpmeth(self, *args, **kwargs): 'replace', 'remove', 'counter', 'touch', 'lock', 'unlock', 'endure', 'observe', 'rget', 'stats', - 'set', 'add', 'delete') + 'set', 'add', 'delete', 'lookup_in', 'mutate_in') - _MEMCACHED_NOMULTI = ('stats') + _MEMCACHED_NOMULTI = ('stats', 'lookup_in', 'mutate_in') @classmethod def _gen_memd_wrappers(cls, factory): diff --git a/couchbase/exceptions.py b/couchbase/exceptions.py index 04813841f..611115e51 100644 --- a/couchbase/exceptions.py +++ b/couchbase/exceptions.py @@ -71,9 +71,19 @@ class CouchbaseError(Exception): True if errors were received during TCP transport. See :exc:`CouchbaseNetworkError` + .. py:attribute:: CODE + + This is a _class_ level attribute which contains the equivalent + libcouchbase error code which is mapped to this exception class. + + This is usually the :attr:`rc` value for an exception instance. Unlike + :attr:`rc` however, it may be used without an instantiated object, + possibly helping performance. """ + CODE = 0 + @classmethod def rc_to_exctype(cls, rc): """ @@ -106,7 +116,7 @@ def __init__(self, params=None): self.__dict__.update(params.__dict__) return - self.rc = params.get('rc', 0) + self.rc = params.get('rc', self.CODE) self.all_results = params.get('all_results', {}) self.result = params.get('result', None) self.inner_cause = params.get('inner_cause', None) @@ -453,6 +463,51 @@ class ObjectDestroyedError(CouchbaseError): class PipelineError(CouchbaseError): """Illegal operation within pipeline state""" + +class SubdocPathNotFoundError(CouchbaseError): + """Subdocument path does not exist""" + + +class SubdocPathExistsError(CouchbaseError): + """Subdocument path already exists (and shouldn't)""" + + +class SubdocPathInvalidError(CouchbaseError): + """Subdocument path is invalid""" + + +class DocumentNotJsonError(CouchbaseError): + """Document is not JSON and cannot be used for subdoc operations""" + +class SubdocPathMismatchError(CouchbaseError): + """Subdocument path conflicts with actual document structure""" + + +class DocumentTooDeepError(CouchbaseError): + """Document is too deep to be used for subdocument operations""" + + +class SubdocNumberTooBigError(CouchbaseError): + """Existing number is too big to be used for subdocument operations""" + + +class SubdocValueTooDeepError(CouchbaseError): + """Value is too deep to insert into document, or would cause the document + to be too deep""" + + +class SubdocCantInsertValueError(CouchbaseError): + """Cannot insert value for given operation""" + + +class SubdocBadDeltaError(CouchbaseError): + """Bad delta supplied for counter command""" + + +class SubdocMultipleErrors(CouchbaseError): + """One or more subcommands failed. Inspect the individual operation""" + CODE = C.LCB_SUBDOC_MULTI_FAILURE + _LCB_ERRCAT_MAP = { C.LCB_ERRTYPE_NETWORK: CouchbaseNetworkError, C.LCB_ERRTYPE_INPUT: CouchbaseInputError, @@ -489,9 +544,25 @@ class PipelineError(CouchbaseError): C.LCB_DURABILITY_ETOOMANY: ArgumentError, C.LCB_DUPLICATE_COMMANDS: ArgumentError, C.LCB_CLIENT_ETMPFAIL: ClientTemporaryFailError, - C.LCB_HTTP_ERROR: HTTPError + C.LCB_HTTP_ERROR: HTTPError, + C.LCB_SUBDOC_PATH_ENOENT: SubdocPathNotFoundError, + C.LCB_SUBDOC_PATH_EEXISTS: SubdocPathExistsError, + C.LCB_SUBDOC_PATH_EINVAL: SubdocPathInvalidError, + C.LCB_SUBDOC_DOC_E2DEEP: DocumentTooDeepError, + C.LCB_SUBDOC_DOC_NOTJSON: DocumentNotJsonError, + C.LCB_SUBDOC_VALUE_E2DEEP: SubdocValueTooDeepError, + C.LCB_SUBDOC_PATH_MISMATCH: SubdocPathMismatchError, + C.LCB_SUBDOC_VALUE_CANTINSERT: SubdocCantInsertValueError, + C.LCB_SUBDOC_BAD_DELTA: SubdocBadDeltaError, + C.LCB_SUBDOC_NUM_ERANGE: SubdocNumberTooBigError, + C.LCB_EMPTY_PATH: InvalidError } +for k, v in _LCB_ERRNO_MAP.iteritems(): + v.CODE = k + del k + del v + def _mk_lcberr(rc, name=None, default=CouchbaseError, docstr="", extrabase=[]): """ Create a new error class derived from the appropriate exceptions. @@ -534,6 +605,9 @@ def _mk_lcberr(rc, name=None, default=CouchbaseError, docstr="", extrabase=[]): _LCB_ERRNO_MAP[rc] = newcls + del newcls + del oldcls + _EXCTYPE_MAP = { C.PYCBC_EXC_ARGUMENTS: ArgumentError, C.PYCBC_EXC_ENCODING: ValueFormatError, @@ -543,3 +617,19 @@ def _mk_lcberr(rc, name=None, default=CouchbaseError, docstr="", extrabase=[]): C.PYCBC_EXC_DESTROYED: ObjectDestroyedError, C.PYCBC_EXC_PIPELINE: PipelineError } + + +def exc_from_rc(rc, msg=None, obj=None): + """ + .. warning:: INTERNAL + + For those rare cases when an exception needs to be thrown from + Python using a libcouchbase error code. + + :param rc: The error code + :param msg: Message (description) + :param obj: Context + :return: a raisable exception + """ + newcls = CouchbaseError.rc_to_exctype(rc) + return newcls(params={'rc': rc, 'objextra': obj, 'message': msg}) diff --git a/couchbase/result.py b/couchbase/result.py index 8f337ce59..95753a552 100644 --- a/couchbase/result.py +++ b/couchbase/result.py @@ -14,9 +14,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from platform import python_implementation +from itertools import izip -import couchbase._bootstrap from couchbase._libcouchbase import ( Result, ValueResult, @@ -25,3 +24,138 @@ MultiResult, ObserveInfo, AsyncResult) +import couchbase._libcouchbase as C +import couchbase.exceptions as E + + +class SubdocResult(C._SDResult): + """ + Class for objects returned via a subdoc operation. This may contain + one or more values depending on the number of input commands. + + The actual values from the result can be retrieved by iteration: + + Iteration:: + + for value in rv: + print(value) + + Index:: + + value = rv['some.path'] + value = rv[2] + + Or by using the :meth:`get()` method:: + + error, value = rv.get('some.path') + error, value = rv.get(2) + + + Iterator and index access will raise exceptions when encountering + a non-successful item. The :meth:`get()` usage will not throw an + exception, however + """ + + def _pycbc_repr_extra(self): + ret = ["specs={0}".format(repr(self._specs))] + if hasattr(self, '_results'): + ret.append('results={0}'.format(repr(self._results))) + return ret + + def __path2index(self, path): + if not hasattr(self, '__path_cache'): + self.__path_cache = {} + for x in xrange(len(self._specs)): + spec = self._specs[x] + self.__path_cache[spec[1]] = x + + return self.__path_cache[path] + + def _resolve(self, item): + if isinstance(item, (int, long)): + # Get by value + return self._results[item] + else: + return self._results[self.__path2index(item)] + + def __getitem__(self, item): + rv = self._resolve(item) + if rv[0]: + raise E.exc_from_rc(rv[0], obj=item) + else: + return rv[1] + + def __iter__(self): + for resinfo, specinfo in izip(self._results, self._specs): + err, value, path = resinfo[0], resinfo[1], specinfo[1] + if err: + raise E.exc_from_rc(err, obj=path) + yield value + + @property + def command_count(self): + """ + Total number of input commands received. + + For mutations (i.e. :cb_bmeth:`mutate_in`) this might be more + than :py:attr:`~.result_count`. + """ + return len(self._specs) + + @property + def result_count(self): + """ + Total number of results available. For mutations, this might be less + than the :py:attr:`~.command_count` + """ + try: + return len(self._results) + except AttributeError: + return 0 + + def get(self, path_or_index, default=None): + """ + Get details about a given result + + :param path_or_index: The path (or index) of the result to fetch. + :param default: If the given result does not exist, return this value + instead + :return: A tuple of `(error, value)`. If the entry does not exist + then `(0, default)` is returned. + """ + try: + return self._resolve(path_or_index) + except (KeyError, IndexError): + return 0, default + + def exists(self, path_or_index): + """ + Checks if a path exists in the document. This is meant to be used + for a corresponding :meth:`~couchbase.subdocument.exists` request. + + :param path_or_index: The path (or index) to check + :return: `True` if the path exists, `False` if the path does not exist + :raise: An exception if the server-side check failed for a reason other + than the path not existing. + """ + result = self._resolve(path_or_index) + if not result[0]: + return True + elif E.SubdocPathNotFoundError._can_derive(result[0]): + return False + else: + raise E.exc_from_rc(result[0]) + + @property + def access_ok(self): + """ + Dynamic property indicating if the document could be accessed (and thus + results can be retrieved) + + :return: True if the document is accessible, False otherwise + """ + return self.rc == 0 or self.rc == C.LCB_SUBDOC_MULTI_FAILRUE + + @property + def value(self): + raise AttributeError(".value not applicable in multiple result operation") \ No newline at end of file diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py new file mode 100644 index 000000000..6a3167117 --- /dev/null +++ b/couchbase/subdocument.py @@ -0,0 +1,240 @@ +from couchbase._libcouchbase import ( + LCB_SDCMD_REPLACE, LCB_SDCMD_DICT_ADD, LCB_SDCMD_DICT_UPSERT, + LCB_SDCMD_ARRAY_ADD_FIRST, LCB_SDCMD_ARRAY_ADD_LAST, + LCB_SDCMD_ARRAY_ADD_UNIQUE, LCB_SDCMD_EXISTS, LCB_SDCMD_GET, + LCB_SDCMD_COUNTER, LCB_SDCMD_REMOVE, LCB_SDCMD_ARRAY_INSERT +) + +_SPECMAP = {} +for k, v in globals().items(): + if not k.startswith('LCB_SDCMD_'): + continue + k = k.replace('LCB_SDCMD_', '') + _SPECMAP[v] = k + + +class Spec(tuple): + def __new__(cls, *args, **kwargs): + return super(Spec, cls).__new__(cls, tuple(args)) + + def __repr__(self): + details = [] + details.append(_SPECMAP.get(self[0])) + details.extend([repr(x) for x in self[1:]]) + return '{0}<{1}>'.format(self.__class__.__name__, + ', '.join(details)) + + +def _gen_2spec(op, path): + return Spec(op, path) + + +def _gen_4spec(op, path, value, create=False): + return Spec(op, path, value, int(create)) + + +class MultiValue(tuple): + def __new__(cls, *args, **kwargs): + return super(MultiValue, cls).__new__(cls, tuple(args)) + + def __repr__(self): + return 'MultiValue({0})'.format(tuple.__repr__(self)) + + +# The following functions return either 2-tuples or 4-tuples for operations +# which are converted into mutation or lookup specifications + +def get(path): + """ + Retrieve the value from the given path. The value is returned in the result. + Valid only in :cb_bmeth:`lookup_in` + + :param path: The path to retrieve + + .. seealso:: :meth:`exists` + """ + return _gen_2spec(LCB_SDCMD_GET, path) + + +def exists(path): + """ + Check if a given path exists. This is the same as :meth:`get()`, + but the result will not contain the value. + Valid only in :cb_bmeth:`lookup_in` + + :param path: The path to check + """ + return _gen_2spec(LCB_SDCMD_EXISTS, path) + + +def upsert(path, value, create_parents=False): + """ + Create or replace a dictionary path. + + :param path: The path to modify + :param value: The new value for the path. This should be a native Python + object which can be encoded into JSON (the SDK will do the encoding + for you). + :param create_parents: Whether intermediate parents should be created. + This means creating any additional levels of hierarchy not already + in the document, for example: + + .. code-block:: python + + {'foo': {}} + + Without `create_parents`, an operation such as + + .. code-block:: python + + cb.mutate_in("docid", SD.upsert("foo.bar.baz", "newValue") + + would fail with :cb_exc:`SubdocPathNotFoundError` because `foo.bar` + does not exist. However when using the `create_parents` option, the + server creates the new `foo.bar` dictionary and then inserts the + `baz` value. + + """ + return _gen_4spec(LCB_SDCMD_DICT_UPSERT, path, value, create_parents) + + +def replace(path, value): + """ + Replace an existing path. This works on any valid path if the path already + exists. Valid only in :cb_bmeth:`mutate_in` + + :param path: The path to replace + :param value: The new value + """ + return _gen_4spec(LCB_SDCMD_REPLACE, path, value, False) + + +def insert(path, value, create_parents=False): + """ + Create a new path in the document. The final path element points to a + dictionary key that should be created. Valid only in :cb_bmeth:`mutate_in` + + :param path: The path to create + :param value: Value for the path + :param create_parents: Whether intermediate parents should be created + """ + return _gen_4spec(LCB_SDCMD_DICT_ADD, path, value, create_parents) + + +def array_append(path, *values, **kwargs): + """ + Add new values to the end of an array. + + :param path: Path to the array. The path should contain the *array itself* + and not an element *within* the array + :param values: one or more values to append + :param create_parents: Create the array if it does not exist + + .. note:: + + Specifying multiple values in `values` is more than just syntactical + sugar. It allows the server to insert the values as one single unit. + If you have multiple values to append to the same array, ensure they + are specified as multiple arguments to `array_append` rather than + multiple `array_append` commands to :cb_bmeth:`mutate_in` + + This operation is only valid in :cb_bmeth:`mutate_in`. + + .. seealso:: :func:`array_prepend`, :func:`upsert` + """ + return _gen_4spec(LCB_SDCMD_ARRAY_ADD_LAST, path, + MultiValue(*values), kwargs.get('create_parents', False)) + + +def array_prepend(path, *values, **kwargs): + """ + Add new values to the beginning of an array. + + :param path: Path to the array. The path should contain the *array itself* + and not an element *within* the array + :param values: one or more values to append + :param create_parents: Create the array if it does not exist + + This operation is only valid in :cb_bmeth:`mutate_in`. + + .. seealso:: :func:`array_append`, :func:`upsert` + """ + return _gen_4spec(LCB_SDCMD_ARRAY_ADD_FIRST, path, + MultiValue(*values), kwargs.get('create_parents', False)) + + +def array_insert(path, *values): + """ + Insert items at a given position within an array. + + :param path: The path indicating where the item should be placed. The path + _should_ contain the desired position + :param values: Values to insert + + This operation is only valid in :cb_bmeth:`mutate_in`. + + .. seealso:: :func:`array_prepend`, :func:`upsert` + """ + return _gen_4spec(LCB_SDCMD_ARRAY_INSERT, path, + MultiValue(*values), False) + + +def array_addunique(path, value, create_parents=False): + """ + Add a new value to an array if the value does not exist. + + :param path: The path to the array + :param value: Value to add to the array if it does not exist. + Currently the value is restricted to primitives: strings, numbers, + booleans, and `None` values. + :param create_parents: Create the array if it does not exist + + .. note:: + + The actual position of the new item is unspecified. This means + it may be at the beginning, end, or middle of the existing + array) + + This operation is only valid in :cb_bmeth:`mutate_in`. + + .. seealso:: :func:`array_append`, :func:`upsert` + """ + return _gen_4spec(LCB_SDCMD_ARRAY_ADD_UNIQUE, path, value, create_parents) + + +def counter(path, delta, create_parents=False): + """ + Increment or decrement a counter in a document. + + :param path: Path to the counter + :param delta: Amount by which to modify the value. The delta + can be negative but not 0. It must be an integer (not a float) + as well. + :param create_parents: Create the counter (and apply the modification) if it + does not exist + + .. note:: + + Unlike :meth:`couchbase.bucket.Bucket.counter`, + there is no `initial` argument. If the counter does not exist + within the document (but its parent does, or `create_parents` + is true), it will be initialized with the value of the `delta`. + + This operation is only valid in :cb_bmeth:`mutate_in`. + + .. seealso:: :func:`upsert`, :cb_bmeth:`counter` (in `Bucket`) + """ + if not delta: + raise ValueError("Delta must be positive or negative!") + return _gen_4spec(LCB_SDCMD_COUNTER, path, delta, create_parents) + + +def remove(path): + """ + Remove an existing path in the document. + + This operation is only valid in :cb_bmeth:`mutate_in`. + + :param path: The path to remove + """ + return _gen_2spec(LCB_SDCMD_REMOVE, path) diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py new file mode 100644 index 000000000..c70a4bfc5 --- /dev/null +++ b/couchbase/tests/cases/subdoc_t.py @@ -0,0 +1,234 @@ +from couchbase.tests.base import ConnectionTestCase +from couchbase import FMT_UTF8 +import couchbase.subdocument as SD +import couchbase.exceptions as E + + +class SubdocTest(ConnectionTestCase): + def setUp(self): + super(SubdocTest, self).setUp() + cb = self.cb + k = self.gen_key('sd_precheck') + try: + cb.retrieve_in(k, 'pth') + except E.NotSupportedError, E.UnknownCommandError: + self.skipTest('Subdoc not supported on this server version') + except E.CouchbaseError: + pass + + def test_lookup_in(self): + cb = self.cb + # Create the document + key = self.gen_key('sdget') + cb.upsert(key, { + 'path1': 'value1' + }) + + result = cb.retrieve_in(key, 'path1') + self.assertEqual((0, 'value1'), result.get(0)) + self.assertEqual((0, 'value1'), result.get('path1')) + self.assertEqual('value1', result[0]) + self.assertEqual('value1', result['path1']) + self.assertTrue(result.cas) + + # Try when path is not found + rv = cb.retrieve_in(key, 'path2') + self.assertEqual(E.SubdocMultipleErrors.CODE, rv.rc) + self.assertEqual(E.SubdocPathNotFoundError.CODE, rv.get(0)[0]) + self.assertRaises(E.SubdocPathNotFoundError, rv.__getitem__, 0) + self.assertRaises(E.SubdocPathNotFoundError, rv.__getitem__, 'path2') + + # Try when there is a mismatch + self.assertRaises(E.SubdocPathMismatchError, + cb.retrieve_in, key, 'path1[0]') + + # Try existence + result = cb.lookup_in(key, SD.exists('path1')) + self.assertTrue(result.exists('path1')) + self.assertTrue(result.exists(0)) + + # Not found + result = cb.lookup_in(key, SD.exists('p')) + self.assertEqual(E.SubdocMultipleErrors.CODE, result.rc) + self.assertEqual(E.SubdocPathNotFoundError.CODE, result.get(0)[0]) + + # Test with quiet + result = cb.lookup_in(key, SD.exists('p'), quiet=True) + self.assertFalse(result.exists('p')) + self.assertFalse(result.exists(0)) + + # Insert a non-JSON document + bkey = self.gen_key('sdget_nonjson') + cb.upsert(bkey, 'value', format=FMT_UTF8) + self.assertRaises(E.DocumentNotJsonError, + cb.lookup_in, bkey, SD.exists('path')) + + # Empty paths fail for get_in + self.assertRaises(E.InvalidError, + cb.retrieve_in, key, '') + + # Try on non-existing document. Should fail + self.assertRaises(E.NotFoundError, + cb.retrieve_in, 'non-exist', 'path') + + def test_mutate_in(self): + cb = self.cb + key = self.gen_key('sdstore_upsert') + cb.upsert(key, {}) + + cb.mutate_in(key, SD.upsert('newDict', ['hello'])) + result = cb.retrieve_in(key, 'newDict') + self.assertEqual(['hello'], result[0]) + + # Create deep path without create_parents + self.assertRaises(E.SubdocPathNotFoundError, + cb.mutate_in, key, + SD.upsert('path.with.missing.parents', 'value')) + + # Create deep path using create_parents + cb.mutate_in(key, + SD.upsert('new.parent.path', 'value', create_parents=True)) + result = cb.retrieve_in(key, 'new.parent') + self.assertEqual('value', result[0]['path']) + + # Test CAS operations + self.assertTrue(result.cas) + self.assertRaises(E.KeyExistsError, cb.mutate_in, + key, SD.upsert('newDict', None), cas=result.cas+1) + + # Try it again, using the CAS + result2 = cb.mutate_in(key, SD.upsert('newDict', {}), cas=result.cas) + self.assertNotEqual(result.cas, result2.cas) + + # Test insert, should fail + self.assertRaises(E.SubdocPathExistsError, cb.mutate_in, + key, SD.insert('newDict', {})) + + # Test insert on new path, should succeed + cb.mutate_in(key, SD.insert('anotherDict', {})) + self.assertEqual({}, cb.retrieve_in(key, 'anotherDict')[0]) + + # Test replace, should not fail + cb.mutate_in(key, SD.replace('newDict', {'Hello': 'World'})) + self.assertEqual('World', cb.retrieve_in(key, 'newDict')[0]['Hello']) + + # Test replace with missing value, should fail + self.assertRaises(E.SubdocPathNotFoundError, + cb.mutate_in, key, SD.replace('nonexist', {})) + + # Test with empty string (should be OK) + cb.mutate_in(key, SD.upsert('empty', '')) + self.assertEqual('', cb.retrieve_in(key, 'empty')[0]) + + # Test with null (None). Should be OK + cb.mutate_in(key, SD.upsert('null', None)) + self.assertEqual(None, cb.retrieve_in(key, 'null')[0]) + + # Test with empty path. Should throw some kind of error? + self.assertRaises((E.SubdocCantInsertValueError, E.InvalidError), + cb.mutate_in, key, SD.upsert('', {})) + + cb.mutate_in(key, SD.upsert('array', [1, 2, 3])) + self.assertRaises(E.SubdocPathMismatchError, cb.mutate_in, key, + SD.upsert('array.newKey', 'newVal')) + self.assertRaises(E.SubdocPathInvalidError, cb.mutate_in, key, + SD.upsert('array[0]', 'newVal')) + self.assertRaises(E.SubdocPathNotFoundError, cb.mutate_in, key, + SD.upsert('array[3].bleh', 'newVal')) + + def test_counter_in(self): + cb = self.cb + key = self.gen_key('sdcounter') + cb.upsert(key, {}) + + rv = cb.mutate_in(key, SD.counter('counter', 100)) + self.assertTrue(rv.success) + self.assertFalse(rv.cas == 0) + self.assertEqual(100, rv[0]) + + self.assertRaises(E.SubdocBadDeltaError, cb.mutate_in, key, + SD.counter('not_a_counter', 'blah')) + + # Do an upsert + cb.mutate_in(key, SD.upsert('not_a_counter', 'blah')) + + self.assertRaises(E.SubdocPathMismatchError, cb.mutate_in, key, + SD.counter('not_a_counter', 25)) + + self.assertRaises(E.SubdocPathNotFoundError, cb.mutate_in, key, + SD.counter('path.to.newcounter', 99)) + rv = cb.mutate_in(key, + SD.counter('path.to.newcounter', 99, create_parents=True)) + self.assertEqual(99, rv[0]) + + # Increment first counter again + rv = cb.mutate_in(key, SD.counter('counter', -25)) + self.assertEqual(75, rv[0]) + + self.assertRaises(ValueError, SD.counter, 'counter', 0) + + def test_multi_lookup(self): + cb = self.cb + key = self.gen_key('sdmlookup') + cb.upsert(key, { + 'field1': 'value1', + 'field2': 'value2', + 'array': [1, 2, 3], + 'boolean': False + }) + + rvs = cb.lookup_in( + key, SD.get('field1'), SD.exists('field2'), SD.exists('field3'), + quiet=True + ) + + self.assertFalse(rvs.success) + self.assertEqual(3, rvs.result_count) + + self.assertEqual((0, 'value1'), rvs.get(0)) + self.assertEqual((0, 'value1'), rvs.get('field1')) + self.assertEqual('value1', rvs[0]) + self.assertEqual('value1', rvs['field1']) + + self.assertEqual((0, None), rvs.get(1)) + self.assertEqual((0, None), rvs.get('field2')) + self.assertEqual(None, rvs[1]) + self.assertEqual(None, rvs['field2']) + + self.assertTrue(rvs.exists('field2')) + self.assertTrue(rvs.exists(1)) + + self.assertEqual((E.SubdocPathNotFoundError.CODE, None), + rvs.get('field3')) + self.assertEqual((E.SubdocPathNotFoundError.CODE, None), + rvs.get(2)) + self.assertFalse(rvs.exists('field3')) + self.assertFalse(rvs.exists(2)) + + def _getix(rv_, ix): + return rv_[ix] + + self.assertRaises(E.SubdocPathNotFoundError, _getix, rvs, 2) + self.assertRaises(E.SubdocPathNotFoundError, _getix, rvs, 'field3') + self.assertFalse(rvs.exists('field3')) + + # See what happens when we mix operations + self.assertRaises(E.CouchbaseError, cb.lookup_in, key, + SD.get('field1'), SD.insert('a', 'b')) + + # Empty path (invalid) + self.assertRaises(E.CouchbaseError, cb.lookup_in, SD.get('')) + + def test_multi_value(self): + cb = self.cb + key = self.gen_key('sdArray') + + cb.upsert(key, {'array': []}) + cb.mutate_in(key, SD.array_append('array', True)) + self.assertEqual([True], cb.retrieve_in(key, 'array')[0]) + + cb.mutate_in(key, SD.array_append('array', 1, 2, 3)) + self.assertEqual([True, 1, 2, 3], cb.retrieve_in(key, 'array')[0]) + + cb.mutate_in(key, SD.array_prepend('array', [42])) + self.assertEqual([[42], True, 1, 2, 3], cb.retrieve_in(key, 'array')[0]) diff --git a/docs/source/api/couchbase.rst b/docs/source/api/couchbase.rst index d3db66f2b..33df89d97 100644 --- a/docs/source/api/couchbase.rst +++ b/docs/source/api/couchbase.rst @@ -202,6 +202,27 @@ or duration. .. automethod:: touch +Sub-Document Operations +----------------------- + +These methods provide entry points to modify *parts* of a document in +Couchbase. + +.. note:: + + Sub-Document API methods are available in Couchbase Server 4.5 + (currently in Developer Preview). + + The server and SDK implementations and APIs are subject to change + + +.. currentmodule:: couchbase.bucket +.. class:: Bucket + + .. automethod:: lookup_in + .. automethod:: mutate_in + .. automethod:: retrieve_in + Counter Operations ------------------ diff --git a/docs/source/api/exceptions.rst b/docs/source/api/exceptions.rst index 466a4b906..30100e0de 100644 --- a/docs/source/api/exceptions.rst +++ b/docs/source/api/exceptions.rst @@ -152,3 +152,7 @@ be inherited from multiple exception categories. :show-inheritance: .. autoexception:: HTTPError :show-inheritance: +.. autoexception:: SubdocPathNotFoundError + :show-inheritance: +.. autoexception:: SubdocPathExistsError + :show-inheritance: \ No newline at end of file diff --git a/docs/source/api/subdoc.rst b/docs/source/api/subdoc.rst new file mode 100644 index 000000000..79f399178 --- /dev/null +++ b/docs/source/api/subdoc.rst @@ -0,0 +1,79 @@ +================ +Sub-Document API +================ + +.. currentmodule:: couchbase.subdocument + +The functions in this module can be used to specify operations to the +:cb_bmeth:`lookup_in` and :cb_bmeth:`upsert_in` methods. Both the +`mutate_in` and `lookup_in` methods can take multiple operations. + +Any given operation is either valid in :cb_bmeth:`lookup_in` or +:cb_bmeth:`mutate_in`; never both. + +----------------- +Lookup Operations +----------------- + +.. autofunction:: get +.. autofunction:: exists + +------------------- +Mutation Operations +------------------- + +.. autofunction:: upsert +.. autofunction:: replace +.. autofunction:: insert +.. autofunction:: array_append +.. autofunction:: array_prepend +.. autofunction:: array_insert +.. autofunction:: array_addunique +.. autofunction:: remove +.. autofunction:: counter + + +------------- +Result Object +------------- + +.. autoclass:: couchbase.result.SubdocResult + :members: + +----------- +Path Syntax +----------- + +The path syntax is hierarchical and follows that of N1QL. Use a dot (`.`) +to separate between components. A backtick may be used to escape dots or +other special characters. Considering the dictionary: + +.. code-block:: python + + { + 'dict': { + 'nestedDict': { + 'value': 123 + }, + 'nestedArray': [1,2,3], + 'literal.dot': 'Hello', + 'literal[]brackets': 'World' + }, + 'array': [1,2,3], + 'primitive': True + } + +Accessing paths can be done as: + +- ``dict`` +- ``dict.nestedDict`` +- ``dict.nestedDict.value`` +- ``dict.nestedArray`` +- ``dict.nestedArray[0]`` +- ``dict.nestedArray[-1]`` (gets last element) +- ``dict.`literal.dot``` +- ``dict.`literal[]brackets``` +- ``array`` +- ``primitive`` + + diff --git a/docs/source/conf.py b/docs/source/conf.py index f8cd562af..ef88c6866 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -19,6 +19,7 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) sys.path.insert(0, os.path.abspath('../..')) +sys.path.insert(0, os.path.dirname(__file__)) #sys.path.insert(0, os.path.abspath('../../couchbase')) #sys.path.insert(0, os.path.abspath(os.path.join(os.pardir, os.pardir, 'couchbase'))) import couchbase_version @@ -30,7 +31,8 @@ # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'numpydoc', 'sphinx.ext.autosummary'] +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'numpydoc', 'sphinx.ext.autosummary', + 'pycbc_xref'] #extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'numpydoc'] # Add any paths that contain templates here, relative to this directory. diff --git a/docs/source/index.rst b/docs/source/index.rst index dcbe01498..8e18599e2 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -12,6 +12,7 @@ Contents: :maxdepth: 2 api/couchbase + api/subdoc api/views api/n1ql api/results diff --git a/docs/source/pycbc_xref.py b/docs/source/pycbc_xref.py new file mode 100644 index 000000000..656bf75c5 --- /dev/null +++ b/docs/source/pycbc_xref.py @@ -0,0 +1,39 @@ + +def gen_role(real_role, real_name, new_prefix): + def myrole(rtype, rawtext, text, lineno, inliner, options={}, content={}): + + # new_prefix=couchbase.bucket + # text=upsert + # rtype=cb_bmeth + + # e.g couchbase.bucket.upsert + new_text = '{0}.{1}'.format(new_prefix, text) + + # e.g. :meth:`couchbase.bucket.upsert + new_rawtext = ':{0}:`{1}`'.format(real_name, new_text) + + # e.g. py:meth + new_type = 'py:' + real_name + + return real_role(new_type, + new_rawtext, + new_text, + lineno, inliner, options, content) + return myrole + + +def on_inited(app): + from sphinx.domains.python import PythonDomain as p + fns = [ + ('cb_bmeth', '~couchbase.bucket.Bucket', 'meth'), + ('cb_sdmeth', '~couchbase.subdocument', 'func'), + ('cb_exc', 'couchbase.exceptions', 'exc') + ] + + for newname, target, pyrole_name in fns: + pyrole = p.roles[pyrole_name] + app.add_role(newname, gen_role(pyrole, pyrole_name, target)) + + +def setup(app): + app.connect('builder-inited', on_inited) diff --git a/src/bucket.c b/src/bucket.c index 4a2cb56db..484552ba2 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -482,6 +482,9 @@ static PyMethodDef Bucket_TABLE_methods[] = { OPFUNC(_rget, NULL), OPFUNC(_rgetix, NULL), + OPFUNC(mutate_in, "Perform mutations in document paths"), + OPFUNC(lookup_in, "Perform lookups in document paths"), + OPFUNC(remove, "Delete a key in Couchbase"), OPFUNC(unlock, "Unlock a previously-locked key in Couchbase"), OPFUNC(remove_multi, "Multi-key variant of delete"), diff --git a/src/callbacks.c b/src/callbacks.c index 6d2aa110e..3590eec3e 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -20,6 +20,13 @@ #ifdef CB_THREADS +static PyObject * mk_sd_tuple(const lcb_SDENTRY *ent); + +/* + * Add the sub-document error to the result list + */ +static void mk_sd_error(pycbc__SDResult *res, pycbc_MultiResult *mres, lcb_error_t rc, size_t ix); + static void cb_thr_end(pycbc_Bucket *self) { @@ -60,22 +67,24 @@ enum { RESTYPE_VARCOUNT = 1 << 4 }; -static void +/* Returns true if an error has been added... */ +static int maybe_push_operr(pycbc_MultiResult *mres, pycbc_Result *res, lcb_error_t err, int check_enoent) { if (err == LCB_SUCCESS || mres->errop) { - return; + return 0; } if (check_enoent && (mres->mropts & PYCBC_MRES_F_QUIET) && - err == LCB_KEY_ENOENT) { - return; + (err == LCB_KEY_ENOENT || err == LCB_SUBDOC_PATH_ENOENT)) { + return 0; } mres->errop = (PyObject*)res; Py_INCREF(mres->errop); + return 1; } @@ -102,6 +111,19 @@ operation_completed(pycbc_Bucket *self, pycbc_MultiResult *mres) } } +/** + * Call this function for each callback. Note that even if this function + * returns nonzero, CB_THR_BEGIN() must still be called, and the `conn` + * and `mres` out parameters are considered valid + * @param resp base response object + * @param[out] conn the bucket object + * @param[out] res the result object for the individual operation + * @param restype What type should `res` be if it needs to be created + * @param[out] mres the context for the current operation + * @return 0 if operation processing may proceed, nonzero if operation + * processing has completed. In both cases the `conn` and `mres` paramters + * are valid, however. + */ static int get_common_objects(const lcb_RESPBASE *resp, pycbc_Bucket **conn, pycbc_Result **res, int restype, pycbc_MultiResult **mres) @@ -154,13 +176,10 @@ get_common_objects(const lcb_RESPBASE *resp, pycbc_Bucket **conn, } else { Py_XDECREF(hkey); } - } if (*res == NULL) { - /** - * Now, get/set the result object - */ + /* Now, get/set the result object */ if ( (*mres)->mropts & PYCBC_MRES_F_ITEMS) { *res = (pycbc_Result*)pycbc_item_new(*conn); @@ -206,38 +225,22 @@ invoke_endure_test_notification(pycbc_Bucket *self, pycbc_Result *resp) Py_XDECREF(argtuple); } -/** - * Common handler for durability - */ static void -durability_chain_common(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) +dur_chain2(pycbc_Bucket *conn, + pycbc_MultiResult *mres, + pycbc_OperationResult *res, int cbtype, const lcb_RESPBASE *resp) { - pycbc_Bucket *conn; - pycbc_OperationResult *res = NULL; - pycbc_MultiResult *mres; + lcb_error_t err; lcb_durability_opts_t dopts = { 0 }; lcb_CMDENDURE cmd = { 0 }; lcb_MULTICMD_CTX *mctx = NULL; - int rv; - lcb_error_t err; int is_delete = cbtype == LCB_CALLBACK_REMOVE; - rv = get_common_objects(resp, &conn, (pycbc_Result**)&res, - RESTYPE_OPERATION|RESTYPE_VARCOUNT, &mres); - - if (rv == -1) { - operation_completed(conn, mres); - CB_THR_BEGIN(conn); - return; - } - res->rc = resp->rc; if (resp->rc == LCB_SUCCESS) { - const lcb_MUTATION_TOKEN *mutinfo = NULL; - + const lcb_MUTATION_TOKEN *mutinfo = lcb_resp_get_mutation_token(cbtype, resp); Py_XDECREF(res->mutinfo); - mutinfo = lcb_resp_get_mutation_token(cbtype, resp); if (mutinfo && LCB_MUTATION_TOKEN_ISVALID(mutinfo)) { /* Create the mutation token tuple: (vb,uuid,seqno) */ res->mutinfo = Py_BuildValue("HKKO", @@ -307,6 +310,32 @@ durability_chain_common(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) CB_THR_BEGIN(conn); } +/** + * Common handler for durability + */ +static void +durability_chain_common(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) +{ + pycbc_Bucket *conn; + pycbc_OperationResult *res = NULL; + pycbc_MultiResult *mres; + int restype = RESTYPE_VARCOUNT; + + if (cbtype == LCB_CALLBACK_COUNTER) { + restype |= RESTYPE_VALUE; + } else { + restype |= RESTYPE_OPERATION; + } + + if (get_common_objects(resp, &conn, (pycbc_Result**)&res, restype, &mres) != 0) { + operation_completed(conn, mres); + CB_THR_BEGIN(conn); + return; + } + + dur_chain2(conn, mres, res, cbtype, resp); +} + static void value_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) { @@ -357,6 +386,99 @@ value_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) (void)instance; } +static void +mk_sd_error(pycbc__SDResult *res, + pycbc_MultiResult *mres, lcb_error_t rc, size_t ix) +{ + PyObject *spec = PyTuple_GET_ITEM(res->specs, ix); + PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_LCBERR, rc, "Subcommand failure", spec); + pycbc_multiresult_adderr(mres); +} + +static PyObject * +mk_sd_tuple(const lcb_SDENTRY *ent) +{ + PyObject *val = NULL; + PyObject *ret; + if (ent->status == LCB_SUCCESS && ent->nvalue != 0) { + int rv = pycbc_tc_simple_decode(&val, ent->value, ent->nvalue, PYCBC_FMT_JSON); + if (rv != 0) { + return NULL; + } + } + + if (val == NULL) { + val = Py_None; + Py_INCREF(Py_None); + } + + ret = Py_BuildValue("(iO)", ent->status, val); + Py_DECREF(val); + return ret; + +} + +static void +subdoc_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *rb) +{ + int rv; + pycbc_Bucket *conn; + pycbc__SDResult *res; + pycbc_MultiResult *mres; + lcb_SDENTRY cur; + size_t vii = 0, oix = 0; + const lcb_RESPSUBDOC *resp = (const lcb_RESPSUBDOC *)rb; + + rv = get_common_objects(rb, &conn, + (pycbc_Result**)&res, RESTYPE_EXISTS_OK, &mres); + if (rv < 0) { + goto GT_ERROR; + } + + if (rb->rc == LCB_SUCCESS || rb->rc == LCB_SUBDOC_MULTI_FAILURE) { + res->cas = rb->cas; + } else { + maybe_push_operr(mres, (pycbc_Result*)res, rb->rc, 0); + goto GT_ERROR; + } + + while ((lcb_sdresult_next(resp, &cur, &vii))) { + size_t cur_index; + PyObject *cur_tuple = mk_sd_tuple(&cur); + + if (cbtype == LCB_CALLBACK_SDMUTATE) { + cur_index = cur.index; + } else { + cur_index = oix++; + } + + if (cur_tuple == NULL) { + pycbc_multiresult_adderr(mres); + goto GT_ERROR; + } + + if (cur.status != LCB_SUCCESS) { + if (cbtype == LCB_CALLBACK_SDMUTATE) { + mk_sd_error(res, mres, cur.status, cur_index); + } else if (cur.status != LCB_SUBDOC_PATH_ENOENT) { + mk_sd_error(res, mres, cur.status, cur_index); + } + } + + pycbc_sdresult_addresult(res, cur_index, cur_tuple); + Py_DECREF(cur_tuple); + } + if (rb->rc == LCB_SUCCESS) { + dur_chain2(conn, mres, (pycbc_OperationResult*)res, cbtype, (const lcb_RESPBASE*)resp); + return; + } + + GT_ERROR: + operation_completed(conn, mres); + CB_THR_BEGIN(conn); + (void)instance; +} + static void keyop_simple_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) { @@ -545,6 +667,11 @@ pycbc_callbacks_init(lcb_t instance) lcb_install_callback3(instance, LCB_CALLBACK_COUNTER, value_callback); lcb_install_callback3(instance, LCB_CALLBACK_OBSERVE, observe_callback); lcb_install_callback3(instance, LCB_CALLBACK_STATS, stats_callback); + + /* Subdoc */ + lcb_install_callback3(instance, LCB_CALLBACK_SDLOOKUP, subdoc_callback); + lcb_install_callback3(instance, LCB_CALLBACK_SDMUTATE, subdoc_callback); + lcb_set_bootstrap_callback(instance, bootstrap_callback); pycbc_http_callbacks_init(instance); diff --git a/src/constants.c b/src/constants.c index 69ef8ffeb..7eba69291 100644 --- a/src/constants.c +++ b/src/constants.c @@ -54,7 +54,19 @@ X(INVALID_CHAR) \ X(DURABILITY_ETOOMANY) \ X(DUPLICATE_COMMANDS) \ - X(HTTP_ERROR) + X(HTTP_ERROR) \ + X(SUBDOC_PATH_ENOENT) \ + X(SUBDOC_PATH_MISMATCH) \ + X(SUBDOC_PATH_EINVAL) \ + X(SUBDOC_DOC_E2DEEP) \ + X(SUBDOC_VALUE_E2DEEP) \ + X(SUBDOC_VALUE_CANTINSERT) \ + X(SUBDOC_DOC_NOTJSON) \ + X(SUBDOC_NUM_ERANGE) \ + X(SUBDOC_BAD_DELTA) \ + X(SUBDOC_PATH_EEXISTS) \ + X(SUBDOC_MULTI_FAILURE) \ + X(EMPTY_PATH) #define XHTTP(X) \ X(HTTP_METHOD_GET) \ @@ -167,6 +179,18 @@ do_all_constants(PyObject *module, /* View options */ ADD_MACRO(LCB_CMDVIEWQUERY_F_INCLUDE_DOCS); ADD_MACRO(LCB_CMDVIEWQUERY_F_SPATIAL); + + ADD_MACRO(LCB_SDCMD_REPLACE); + ADD_MACRO(LCB_SDCMD_DICT_ADD); + ADD_MACRO(LCB_SDCMD_DICT_UPSERT); + ADD_MACRO(LCB_SDCMD_ARRAY_ADD_FIRST); + ADD_MACRO(LCB_SDCMD_ARRAY_ADD_LAST); + ADD_MACRO(LCB_SDCMD_ARRAY_ADD_UNIQUE); + ADD_MACRO(LCB_SDCMD_EXISTS); + ADD_MACRO(LCB_SDCMD_GET); + ADD_MACRO(LCB_SDCMD_COUNTER); + ADD_MACRO(LCB_SDCMD_REMOVE); + ADD_MACRO(LCB_SDCMD_ARRAY_INSERT); } static void diff --git a/src/ext.c b/src/ext.c index e969ef140..de28c1afb 100644 --- a/src/ext.c +++ b/src/ext.c @@ -330,7 +330,8 @@ init_libcouchbase(void) X(IOEvent, pycbc_IOEventType_init) \ X(TimerEvent, pycbc_TimerEventType_init) \ X(AsyncResult, pycbc_AsyncResultType_init) \ - X(_IOPSWrapper, pycbc_IOPSWrapperType_init) + X(_IOPSWrapper, pycbc_IOPSWrapperType_init) \ + X(_SDResult, pycbc_SDResultType_init) #define X(name, inf) PyObject *cls_##name; X_PYTYPES(X) diff --git a/src/get.c b/src/get.c index 9c3bbe815..b47b71d8f 100644 --- a/src/get.c +++ b/src/get.c @@ -298,6 +298,80 @@ get_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, return cv.ret; } +static int +handle_single_lookup(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, + PyObject *curkey, PyObject *curval, PyObject *options, pycbc_Item *itm, + void *arg) +{ + pycbc_pybuffer keybuf = { NULL }; + lcb_CMDSUBDOC cmd = { 0 }; + int rv = 0; + + if (itm) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Items not supported for subdoc!"); + return -1; + } + if (pycbc_tc_encode_key(self, curkey, &keybuf) != 0) { + return -1; + } + LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); + rv = pycbc_sd_handle_speclist(self, cv->mres, curkey, curval, &cmd); + PYCBC_PYBUF_RELEASE(&keybuf); + return rv; +} + +static PyObject * +sdlookup_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts) +{ + Py_ssize_t ncmds; + PyObject *kobj = NULL; + PyObject *quiet_key = NULL; + pycbc_seqtype_t seqtype; + struct pycbc_common_vars cv = PYCBC_COMMON_VARS_STATIC_INIT; + static char *kwlist[] = { "ks", "quiet", NULL }; + + if (!PyArg_ParseTupleAndKeywords( + args, kwargs, "O|O", kwlist, &kobj, &quiet_key)) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + + if (pycbc_oputil_check_sequence(kobj, 0, &ncmds, &seqtype) != 0) { + return NULL; + } + + if (pycbc_common_vars_init(&cv, self, argopts, ncmds, 1) != 0) { + return NULL; + } + + if (pycbc_oputil_iter_multi( + self, seqtype, kobj, &cv, 0, handle_single_lookup, NULL) != 0) { + goto GT_DONE; + } + + if (pycbc_maybe_set_quiet(cv.mres, quiet_key) != 0) { + goto GT_DONE; + } + + pycbc_common_vars_wait(&cv, self); + + GT_DONE: + pycbc_common_vars_finalize(&cv, self); + return cv.ret; +} + +PyObject * +pycbc_Bucket_lookup_in(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +{ + return sdlookup_common(self, args, kwargs, PYCBC_ARGOPT_SINGLE); +} + +PyObject * +pycbc_Bucket_lookup_in_multi(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +{ + return sdlookup_common(self, args, kwargs, PYCBC_ARGOPT_MULTI); +} + #define DECLFUNC(name, operation, mode) \ PyObject *pycbc_Bucket_##name(pycbc_Bucket *self, \ PyObject *args, PyObject *kwargs) { \ diff --git a/src/opresult.c b/src/opresult.c index 865e18092..57960ba8c 100644 --- a/src/opresult.c +++ b/src/opresult.c @@ -45,6 +45,14 @@ ValueResult_dealloc(pycbc_ValueResult *self) OperationResult_dealloc((pycbc_OperationResult*)self); } +static void +SDResult_dealloc(pycbc__SDResult *self) +{ + Py_CLEAR(self->results); + Py_CLEAR(self->specs); + OperationResult_dealloc((pycbc_OperationResult*)self); +} + static void Item_dealloc(pycbc_Item *self) { @@ -94,6 +102,12 @@ static PyGetSetDef ValueResult_TABLE_getset[] = { { NULL } }; +static struct PyMemberDef SDResult_TABLE_members[] = { + { "_results", T_OBJECT_EX, offsetof(pycbc__SDResult, results), READONLY }, + { "_specs", T_OBJECT_EX, offsetof(pycbc__SDResult, specs), READONLY }, + { NULL } +}; + /** * We need to re-define all these fields again and indicate their permissions * as being writable @@ -154,6 +168,11 @@ PyTypeObject pycbc_ItemType = { 0 }; +PyTypeObject pycbc__SDResultType = { + PYCBC_POBJ_HEAD_INIT(NULL) + 0 +}; + int pycbc_ValueResultType_init(PyObject **ptr) { @@ -199,6 +218,23 @@ pycbc_OperationResultType_init(PyObject **ptr) return pycbc_ResultType_ready(p, PYCBC_OPRESULT_BASEFLDS); } +int pycbc_SDResultType_init(PyObject **ptr) +{ + PyTypeObject *p = &pycbc__SDResultType; + *ptr = (PyObject*)p; + if (p->tp_name) { + return 0; + } + + p->tp_name = "_SDResult"; + p->tp_basicsize = sizeof(pycbc__SDResult); + p->tp_base = &pycbc_OperationResultType; + p->tp_members = SDResult_TABLE_members; + p->tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE; + p->tp_dealloc = (destructor)SDResult_dealloc; + return pycbc_ResultType_ready(p, PYCBC_OPRESULT_BASEFLDS); +} + int pycbc_ItemType_init(PyObject **ptr) { PyTypeObject *p = &pycbc_ItemType; @@ -247,3 +283,27 @@ pycbc_item_new(pycbc_Bucket *parent) return (pycbc_Item *) PyObject_CallFunction((PyObject*)&pycbc_ItemType, NULL, NULL); } + +pycbc__SDResult * +pycbc_sdresult_new(pycbc_Bucket *parent, PyObject *specs) +{ + pycbc__SDResult *res = + (pycbc__SDResult*)PyObject_CallFunction( + pycbc_helpers.sd_result_type, NULL, NULL); + if (res != NULL) { + res->specs = specs; + Py_INCREF(specs); + } + return res; +} + +void +pycbc_sdresult_addresult(pycbc__SDResult *obj, size_t ii, PyObject *item) +{ + if (obj->results == NULL) { + obj->results = PyList_New(PyTuple_GET_SIZE(obj->specs)); + } + pycbc_assert(ii < PyTuple_GET_SIZE(obj->specs)); + PyList_SetItem(obj->results, ii, item); + Py_INCREF(item); /* To normalize refcount semantics */ +} diff --git a/src/oputil.c b/src/oputil.c index 8bb27d9e8..2c823f6f3 100644 --- a/src/oputil.c +++ b/src/oputil.c @@ -494,3 +494,177 @@ pycbc_handle_durability_args(pycbc_Bucket *self, return 0; } + +int +pycbc_encode_sd_keypath(pycbc_Bucket *conn, PyObject *src, + pycbc_pybuffer *keybuf, pycbc_pybuffer *pathbuf) +{ + PyObject *kobj, *pthobj; + int rv; + + if (!PyTuple_Check(src) || PyTuple_GET_SIZE(src) != 2) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, + "Sub-document key must be a 2-tuple"); + return -1; + } + + kobj = PyTuple_GET_ITEM(src, 0); + pthobj = PyTuple_GET_ITEM(src, 1); + + rv = pycbc_tc_encode_key(conn, kobj, keybuf); + if (rv != 0) { + return rv; + } + rv = pycbc_tc_simple_encode(pthobj, pathbuf, PYCBC_FMT_UTF8); + if (rv != 0) { + PYCBC_PYBUF_RELEASE(keybuf); + } + return rv; +} + +static int +sd_convert_spec(PyObject *pyspec, lcb_SDSPEC *sdspec, + pycbc_pybuffer *pathbuf, pycbc_pybuffer *valbuf) +{ + PyObject *path = NULL; + PyObject *val = NULL; + int op = 0, create = 0; + + if (!PyTuple_Check(pyspec)) { + PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ARGUMENTS, 0, "Expected tuple for spec", pyspec); + return -1; + } + + if (!PyArg_ParseTuple(pyspec, "iO|Oi", &op, &path, &val, &create)) { + PYCBC_EXCTHROW_ARGS(); + return -1; + } + if (pycbc_tc_simple_encode(path, pathbuf, PYCBC_FMT_UTF8) != 0) { + goto GT_ERROR; + } + + sdspec->sdcmd = op; + sdspec->options = create ? LCB_SDSPEC_F_MKINTERMEDIATES : 0; + LCB_SDSPEC_SET_PATH(sdspec, pathbuf->buffer, pathbuf->length); + if (val != NULL) { + int is_multival = 0; + + if (PyObject_IsInstance(val, pycbc_helpers.sd_multival_type)) { + /* Verify the operation allows it */ + switch (op) { + case LCB_SDCMD_ARRAY_ADD_FIRST: + case LCB_SDCMD_ARRAY_ADD_LAST: + case LCB_SDCMD_ARRAY_INSERT: + is_multival = 1; + break; + default: + PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ARGUMENTS, 0, + "MultiValue not supported for operation", pyspec); + goto GT_ERROR; + } + } + + if (pycbc_tc_simple_encode(val, valbuf, PYCBC_FMT_JSON) != 0) { + goto GT_ERROR; + } + + if (is_multival) { + /* Strip first and last [ */ + const char *buf = (const char *)valbuf->buffer; + size_t len = valbuf->length; + + for (; isspace(*buf) && len; len--, buf++) { + } + for (; len && isspace(buf[len-1]); len--) { + } + if (len < 3 || buf[0] != '[' || buf[len-1] != ']') { + PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ENCODING, 0, + "Serialized MultiValue shows invalid JSON (maybe empty?)", + pyspec); + goto GT_ERROR; + } + + buf++; + len -= 2; + valbuf->buffer = buf; + valbuf->length = len; + } + + LCB_SDSPEC_SET_VALUE(sdspec, valbuf->buffer, valbuf->length); + } + return 0; + + GT_ERROR: + PYCBC_PYBUF_RELEASE(valbuf); + PYCBC_PYBUF_RELEASE(pathbuf); + return -1; +} + +int +pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, + PyObject *key, PyObject *spectuple, lcb_CMDSUBDOC *cmd) +{ + int rv; + lcb_error_t err = LCB_SUCCESS; + Py_ssize_t nspecs = 0; + pycbc__SDResult *newitm = NULL; + lcb_SDSPEC *specs = NULL, spec_s = { 0 }; + + if (!PyTuple_Check(spectuple)) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Value must be a tuple!"); + return -1; + } + + nspecs = PyTuple_GET_SIZE(spectuple); + if (nspecs == 0) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Need one or more commands!"); + return -1; + } + + newitm = pycbc_sdresult_new(self, spectuple); + newitm->key = key; + Py_INCREF(newitm->key); + + if (nspecs == 1) { + pycbc_pybuffer pathbuf = { 0 }, valbuf = { 0 }; + PyObject *single_spec = PyTuple_GET_ITEM(spectuple, 0); + cmd->specs = &spec_s; + cmd->nspecs = 1; + rv = sd_convert_spec(single_spec, &spec_s, &pathbuf, &valbuf); + } else { + Py_ssize_t ii; + specs = calloc(nspecs, sizeof *specs); + cmd->specs = specs; + cmd->nspecs = nspecs; + + for (ii = 0; ii < nspecs; ++ii) { + PyObject *cur = PyTuple_GET_ITEM(spectuple, ii); + pycbc_pybuffer pathbuf = { NULL }, valbuf = { NULL }; + rv = sd_convert_spec(cur, specs + ii, &pathbuf, &valbuf); + PYCBC_PYBUF_RELEASE(&pathbuf); + PYCBC_PYBUF_RELEASE(&valbuf); + if (rv != 0) { + break; + } + } + } + + if (rv == 0) { + err = lcb_subdoc3(self->instance, mres, cmd); + if (err == LCB_SUCCESS) { + PyDict_SetItem((PyObject*)mres, key, (PyObject*)newitm); + pycbc_assert(Py_REFCNT(newitm) == 2); + } + } + + free(specs); + Py_DECREF(newitm); + + if (err != LCB_SUCCESS) { + PYCBC_EXCTHROW_SCHED(err); + return -1; + } else if (rv != 0) { + return -1; + } + return 0; +} diff --git a/src/oputil.h b/src/oputil.h index 0cf2e0c78..71cabf148 100644 --- a/src/oputil.h +++ b/src/oputil.h @@ -261,6 +261,22 @@ int pycbc_handle_durability_args(pycbc_Bucket *self, char persist_to, char replicate_to); +/** + * Handle the 'key' argument for sub-document paths + * @param conn The bucket + * @param src The input Python object (should be a tuple; this function checks) + * @param keybuf The output key + * @param pathbuf The output path + * @return 0 on success, nonzero on error + */ +int +pycbc_encode_sd_keypath(pycbc_Bucket *conn, PyObject *src, + pycbc_pybuffer *keybuf, pycbc_pybuffer *pathbuf); + +int +pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, + PyObject *key, PyObject *spectuple, lcb_CMDSUBDOC *cmd); + /** * Macro to declare prototypes for entry points. * If the entry point is foo, then it is expected that there exist a C @@ -284,6 +300,13 @@ PYCBC_DECL_OP(replace); PYCBC_DECL_OP(append); PYCBC_DECL_OP(prepend); +/* subdoc (store.c) */ +PYCBC_DECL_OP(mutate_in); + +/* subdoc (get.c) */ +PYCBC_DECL_OP(lookup_in); +PYCBC_DECL_OP(lookup_in_multi); + /* arithmetic.c */ PYCBC_DECL_OP(counter); PYCBC_DECL_OP(counter_multi); diff --git a/src/pycbc.h b/src/pycbc.h index d27c32108..e550f22d8 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -27,8 +27,8 @@ #include <libcouchbase/views.h> #include <libcouchbase/n1ql.h> -#if LCB_VERSION < 0x020503 -#error "Couchbase Python SDK requires libcouchbase 2.5.3 or greater" +#if LCB_VERSION < 0x020506 +#error "Couchbase Python SDK requires libcouchbase 2.5.6 or greater" #endif #include <pythread.h> @@ -208,7 +208,11 @@ enum { PYCBC_ARGOPT_SINGLE = 0x1, /** Entry point is a multi key variant */ - PYCBC_ARGOPT_MULTI = 0x2 + PYCBC_ARGOPT_MULTI = 0x2, + + PYCBC_ARGOPT_SUBDOC = 0x04, + + PYCBC_ARGOPT_SDMULTI = 0x08 }; /** @@ -381,6 +385,16 @@ typedef struct { PyObject* vdict; } pycbc_Item; +typedef struct { + pycbc_OpResult_HEAD + /* List of results. (value,errcode) */ + PyObject *results; + + /* Original list of specs passed. We can cache this later on if access + * by element is required. */ + PyObject *specs; +} pycbc__SDResult; + #define PYCBC_HTTP_HVIEW 1 #define PYCBC_HTTP_HRAW 2 @@ -597,6 +611,7 @@ extern PyTypeObject pycbc_OperationResultType; extern PyTypeObject pycbc_ValueResultType; extern PyTypeObject pycbc_HttpResultType; extern PyTypeObject pycbc_ItemType; +extern PyTypeObject pycbc__SDResultType; /* views.c */ extern PyTypeObject pycbc_ViewResultType; @@ -641,7 +656,9 @@ extern PyObject *pycbc_ExceptionType; X(itmopts_dict_type) \ X(itmopts_seq_type) \ X(fmt_auto) \ - X(view_path_helper) + X(view_path_helper) \ + X(sd_result_type) \ + X(sd_multival_type) #define PYCBC_XHELPERS_STRS(X) \ X(tcname_encode_key, PYCBC_TCNAME_ENCODE_KEY) \ @@ -724,6 +741,7 @@ int pycbc_BucketType_init(PyObject **ptr); int pycbc_MultiResultType_init(PyObject **ptr); int pycbc_ValueResultType_init(PyObject **ptr); int pycbc_OperationResultType_init(PyObject **ptr); +int pycbc_SDResultType_init(PyObject **ptr); int pycbc_HttpResultType_init(PyObject **ptr); int pycbc_TranscoderType_init(PyObject **ptr); int pycbc_ObserveInfoType_init(PyObject **ptr); @@ -749,6 +767,10 @@ PyObject *pycbc_multiresult_new(pycbc_Bucket *parent); pycbc_ValueResult *pycbc_valresult_new(pycbc_Bucket *parent); pycbc_OperationResult *pycbc_opresult_new(pycbc_Bucket *parent); pycbc_Item *pycbc_item_new(pycbc_Bucket *parent); +pycbc__SDResult *pycbc_sdresult_new(pycbc_Bucket *parent, PyObject *specs); + +/* Add a result to a list of multi results. Specify the index */ +void pycbc_sdresult_addresult(pycbc__SDResult *obj, size_t ii, PyObject *item); /* Not an allocator per-se, but rather an initializer */ void pycbc_httpresult_init(pycbc_HttpResult *self, pycbc_MultiResult *parent); diff --git a/src/store.c b/src/store.c index 6b4f09541..77feddfb6 100644 --- a/src/store.c +++ b/src/store.c @@ -18,6 +18,7 @@ struct storecmd_vars { int operation; + int argopts; unsigned long ttl; PyObject *flagsobj; lcb_U64 single_cas; @@ -102,6 +103,11 @@ handle_item_kv(pycbc_Item *itm, PyObject *options, const struct storecmd_vars *s return 0; } +static int +handle_multi_mutate(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, + PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *itm, + void *arg); + static int handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *itm, @@ -114,6 +120,10 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, lcb_error_t err; lcb_CMDSTORE cmd = { 0 }; + if (scv->argopts & PYCBC_ARGOPT_SDMULTI) { + return handle_multi_mutate(self, cv, optype, curkey, curvalue, options, itm, arg); + } + skc.ttl = scv->ttl; skc.flagsobj = scv->flagsobj; skc.value = curvalue; @@ -167,6 +177,32 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, return rv; } +static int +handle_multi_mutate(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, + PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *itm, + void *arg) +{ + int rv; + const struct storecmd_vars *scv = arg; + pycbc_pybuffer keybuf = { NULL }; + lcb_CMDSUBDOC cmd = { 0 }; + + if (itm) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Item not supported in subdoc mode"); + return -1; + } + + if (pycbc_tc_encode_key(self, curkey, &keybuf) != 0) { + return -1; + } + + cmd.cas = scv->single_cas; + cmd.exptime = scv->ttl; + LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); + rv = pycbc_sd_handle_speclist(self, cv->mres, curkey, curvalue, &cmd); + PYCBC_PYBUF_RELEASE(&keybuf); + return rv; +} static int handle_append_flags(pycbc_Bucket *self, PyObject **flagsobj) @@ -201,7 +237,7 @@ handle_append_flags(pycbc_Bucket *self, PyObject **flagsobj) static PyObject * set_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, - const lcb_storage_t operation, int argopts) + int operation, int argopts) { int rv; Py_ssize_t ncmds = 0; @@ -228,6 +264,7 @@ set_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, }; scv.operation = operation; + scv.argopts = argopts; if (argopts & PYCBC_ARGOPT_MULTI) { rv = PyArg_ParseTupleAndKeywords(args, kwargs, "O|OOBB", kwlist_multi, @@ -326,3 +363,5 @@ DECLFUNC(replace, LCB_REPLACE, PYCBC_ARGOPT_SINGLE) DECLFUNC(append, LCB_APPEND, PYCBC_ARGOPT_SINGLE) DECLFUNC(prepend, LCB_PREPEND, PYCBC_ARGOPT_SINGLE) + +DECLFUNC(mutate_in, 0, PYCBC_ARGOPT_SINGLE | PYCBC_ARGOPT_SDMULTI) From b3e762751de5727c8686d2d4e6098b4e58f066a6 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 18 Mar 2016 15:46:53 -0700 Subject: [PATCH 030/829] Document counter_multi and remove_multi Change-Id: I7e93811c9e7bd06cb40082c60136722d2fd9184e Reviewed-on: http://review.couchbase.org/61778 Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 58 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 58 insertions(+) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 29baa272a..23ded189b 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1156,6 +1156,64 @@ def endure_multi(self, keys, persist_to=-1, replicate_to=-1, timeout=timeout, interval=interval, check_removed=check_removed) + def remove_multi(self, kvs, quiet=None): + """Remove multiple items from the cluster + + :param kvs: Iterable of keys to delete from the cluster. If you wish + to specify a CAS for each item, then you may pass a dictionary + of keys mapping to cas, like ``remove_multi({k1:cas1, k2:cas2}) + :param quiet: Whether an exception should be raised if one or more + items were not found + :return: A :class:`~.MultiResult` containing :class:`~.OperationResult` + values. + """ + return _Base.remove_multi(self, kvs, quiet=quiet) + + def counter_multi(self, kvs, initial=None, delta=1, ttl=0): + """Perform counter operations on multiple items + + :param kvs: Keys to operate on. See below for more options + :param initial: Initial value to use for all keys. + :param delta: Delta value for all keys. + :param ttl: Expiration value to use for all keys + + :return: A :class:`~.MultiResult` containing :class:`~.ValueResult` + values + + + The `kvs` can be a: + + - Iterable of keys + .. code-block:: python + + cb.counter_multi((k1, k2)) + + - A dictionary mapping a key to its delta + .. code-block:: python + + cb.counter_multi({ + k1: 42, + k2: 99 + }) + + - A dictionary mapping a key to its additional options + .. code-block:: python + + cb.counter_multi({ + k1: {'delta': 42, 'initial': 9, 'ttl': 300}, + k2: {'delta': 99, 'initial': 4, 'ttl': 700} + }) + + + When using a dictionary, you can override settings for each key on + a per-key basis (for example, the initial value). Global settings + (global here means something passed as a parameter to the method) + will take effect for those values which do not have a given option + specified. + """ + return _Base.counter_multi(self, kvs, initial=initial, delta=delta, + ttl=ttl) + def rget(self, key, replica_index=None, quiet=None): """Get an item from a replica node From ffd403bfcd35d30abbab55a75ffaf23aed2f91b6 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 31 Mar 2016 13:06:02 -0700 Subject: [PATCH 031/829] Fix clobbering of PATHBUF when encoding multiple subdoc values This also fixes a premature release of memory otherwise Change-Id: I4505974f49f5528ac894a687198b38f4f2f60401 Reviewed-on: http://review.couchbase.org/62256 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- src/oputil.c | 29 +++++++++++++++++++++++------ 1 file changed, 23 insertions(+), 6 deletions(-) diff --git a/src/oputil.c b/src/oputil.c index 2c823f6f3..27a01e587 100644 --- a/src/oputil.c +++ b/src/oputil.c @@ -609,6 +609,8 @@ pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, Py_ssize_t nspecs = 0; pycbc__SDResult *newitm = NULL; lcb_SDSPEC *specs = NULL, spec_s = { 0 }; + pycbc_pybuffer pathbuf_s = { NULL }, valbuf_s = { NULL }; + pycbc_pybuffer *pathbufs = NULL, *valbufs = NULL; if (!PyTuple_Check(spectuple)) { PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Value must be a tuple!"); @@ -626,23 +628,25 @@ pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, Py_INCREF(newitm->key); if (nspecs == 1) { - pycbc_pybuffer pathbuf = { 0 }, valbuf = { 0 }; PyObject *single_spec = PyTuple_GET_ITEM(spectuple, 0); + pathbufs = &pathbuf_s; + valbufs = &valbuf_s; + cmd->specs = &spec_s; cmd->nspecs = 1; - rv = sd_convert_spec(single_spec, &spec_s, &pathbuf, &valbuf); + rv = sd_convert_spec(single_spec, &spec_s, pathbufs, valbufs); } else { Py_ssize_t ii; specs = calloc(nspecs, sizeof *specs); + pathbufs = calloc(nspecs, sizeof *pathbufs); + valbufs = calloc(nspecs, sizeof *valbufs); + cmd->specs = specs; cmd->nspecs = nspecs; for (ii = 0; ii < nspecs; ++ii) { PyObject *cur = PyTuple_GET_ITEM(spectuple, ii); - pycbc_pybuffer pathbuf = { NULL }, valbuf = { NULL }; - rv = sd_convert_spec(cur, specs + ii, &pathbuf, &valbuf); - PYCBC_PYBUF_RELEASE(&pathbuf); - PYCBC_PYBUF_RELEASE(&valbuf); + rv = sd_convert_spec(cur, specs + ii, pathbufs + ii, valbufs + ii); if (rv != 0) { break; } @@ -658,6 +662,19 @@ pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, } free(specs); + { + size_t ii; + for (ii = 0; ii < nspecs; ++ii) { + PYCBC_PYBUF_RELEASE(pathbufs + ii); + PYCBC_PYBUF_RELEASE(valbufs + ii); + } + } + + if (nspecs > 1) { + free(pathbufs); + free(valbufs); + } + Py_DECREF(newitm); if (err != LCB_SUCCESS) { From d4ed74e1fa78e3a37912bb402111a293aa1cd755 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 31 Mar 2016 11:40:26 -0700 Subject: [PATCH 032/829] Fix py3 failure * `iteritems` isn't in Py3 * Use of `items` in Py3 doesn't copy the items, making modification of dict illegal * Use our own `_pyport` module for xrange and long * Fix except syntax Change-Id: I56603ef3f1d6a2983b63b1a09335b14d46259d0f Reviewed-on: http://review.couchbase.org/62255 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/exceptions.py | 11 +++++++---- couchbase/result.py | 2 +- couchbase/subdocument.py | 2 +- couchbase/tests/cases/subdoc_t.py | 2 +- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/couchbase/exceptions.py b/couchbase/exceptions.py index 611115e51..3bde96f8e 100644 --- a/couchbase/exceptions.py +++ b/couchbase/exceptions.py @@ -558,10 +558,13 @@ class SubdocMultipleErrors(CouchbaseError): C.LCB_EMPTY_PATH: InvalidError } -for k, v in _LCB_ERRNO_MAP.iteritems(): - v.CODE = k - del k - del v + +def _set_default_codes(): + for k, v in _LCB_ERRNO_MAP.items(): + v.CODE = k + +_set_default_codes() + def _mk_lcberr(rc, name=None, default=CouchbaseError, docstr="", extrabase=[]): """ diff --git a/couchbase/result.py b/couchbase/result.py index 95753a552..616a89eca 100644 --- a/couchbase/result.py +++ b/couchbase/result.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from itertools import izip from couchbase._libcouchbase import ( Result, @@ -24,6 +23,7 @@ MultiResult, ObserveInfo, AsyncResult) +from couchbase._pyport import long, xrange import couchbase._libcouchbase as C import couchbase.exceptions as E diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py index 6a3167117..8dca47e9c 100644 --- a/couchbase/subdocument.py +++ b/couchbase/subdocument.py @@ -6,7 +6,7 @@ ) _SPECMAP = {} -for k, v in globals().items(): +for k, v in tuple(globals().items()): if not k.startswith('LCB_SDCMD_'): continue k = k.replace('LCB_SDCMD_', '') diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index c70a4bfc5..b1094d106 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -11,7 +11,7 @@ def setUp(self): k = self.gen_key('sd_precheck') try: cb.retrieve_in(k, 'pth') - except E.NotSupportedError, E.UnknownCommandError: + except (E.NotSupportedError, E.UnknownCommandError): self.skipTest('Subdoc not supported on this server version') except E.CouchbaseError: pass From 434cee5829205bf7d2333e0040e9652b119d57e9 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 7 Apr 2016 13:48:26 -0700 Subject: [PATCH 033/829] PYCBC-341: Check for error at last stats callback If the stats command failed because of a network error or similar, the SDK would not catch it or propagate it. Change-Id: I8dc4e1e1689bfa8e4b179850ce2e9933d65cc4cd Reviewed-on: http://review.couchbase.org/62576 Reviewed-by: Volker Mische <volker.mische@gmail.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- src/callbacks.c | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/src/callbacks.c b/src/callbacks.c index 3590eec3e..482456316 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -515,28 +515,30 @@ stats_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) PyObject *value; PyObject *skey, *knodes; PyObject *mrdict; + pycbc_Bucket *parent; const lcb_RESPSTATS *resp = (const lcb_RESPSTATS *)resp_base; - + int do_return = 0; mres = (pycbc_MultiResult*)resp->cookie; - CB_THR_END(mres->parent); - - if (!resp->server) { - pycbc_Bucket *parent = mres->parent; - operation_completed(mres->parent, mres); - CB_THR_BEGIN(parent); - return; - } + parent = mres->parent; + CB_THR_END(parent); if (resp->rc != LCB_SUCCESS) { + do_return = 1; if (mres->errop == NULL) { - pycbc_Result *res = - (pycbc_Result*)pycbc_result_new(mres->parent); + pycbc_Result *res = (pycbc_Result*)pycbc_result_new(parent); res->rc = resp->rc; res->key = Py_None; Py_INCREF(res->key); maybe_push_operr(mres, res, resp->rc, 0); } - CB_THR_BEGIN(mres->parent); + } + if (resp->rflags & LCB_RESP_F_FINAL) { + /* Note this can happen in both success and error cases! */ + do_return = 1; + operation_completed(parent, mres); + } + if (do_return) { + CB_THR_BEGIN(parent); return; } @@ -565,7 +567,7 @@ stats_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) Py_DECREF(skey); Py_DECREF(value); - CB_THR_BEGIN(mres->parent); + CB_THR_BEGIN(parent); (void)instance; } From b4347149153837d5aa333b8261f682e7d0bc77b5 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 11 Apr 2016 10:52:08 -0700 Subject: [PATCH 034/829] Fix broken couchbase.{_to,_from}_json These two methods were broken because they would just return the function and not the actual encoded/decoded values These methods were only used for serializing MutationStates, and therefore were not actually tested anywhere Change-Id: Icd74c88406525fdb8c2108e5fcdff62e67a2cff2 Reviewed-on: http://review.couchbase.org/62956 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/couchbase/__init__.py b/couchbase/__init__.py index cc02a54bb..38e29faa4 100644 --- a/couchbase/__init__.py +++ b/couchbase/__init__.py @@ -98,7 +98,7 @@ def _to_json(*args): :param args: Arguments passed to the encoder :return: Serialized JSON string """ - return _LCB._get_helper('json_encode').dumps(*args) + return _LCB._get_helper('json_encode')(*args) def _from_json(*args): @@ -109,7 +109,7 @@ def _from_json(*args): :param args: Arguments passed to the decoder :return: Python object converted from JSON """ - return _LCB._get_helper('json_decode').loads(*args) + return _LCB._get_helper('json_decode')(*args) def enable_logging(): From 55ac8ab561666d194a85dd68c73ecf8154805ee3 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 7 Apr 2016 13:48:08 -0700 Subject: [PATCH 035/829] small doc fixes This fixes some linking issues within the docs Change-Id: I28b6caf580048529a2c04fb0d201fca33d273ae4 Reviewed-on: http://review.couchbase.org/62575 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/bucket.py | 10 +++++----- docs/source/api/subdoc.rst | 35 ++++++++++++++++++++++++++++++++++- 2 files changed, 39 insertions(+), 6 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 23ded189b..69f139187 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -723,7 +723,7 @@ def mutate_in(self, key, *specs, **kwargs): """Perform multiple atomic modifications within a document. :param key: The key of the document to modify - :param specs: A list of specs (See :mod:`couchbase.subdocument`) + :param specs: A list of specs (See :mod:`.couchbase.subdocument`) :param kwargs: CAS, etc. :return: A :class:`~.couchbase.result.SubdocResult` object. @@ -733,10 +733,10 @@ def mutate_in(self, key, *specs, **kwargs): import couchbase.subdocument as SD # .... cb.mutate_in('user', - SD.add_unique('tags', 'dog'), + SD.array_addunique('tags', 'dog'), SD.counter('updates', 1)) - .. seealso:: :mod:`couchbase.subdocument` + .. seealso:: :mod:`.couchbase.subdocument` """ return super(Bucket, self).mutate_in(key, specs, **kwargs) @@ -744,7 +744,7 @@ def lookup_in(self, key, *specs, **kwargs): """Atomically retrieve one or more paths from a document. :param key: The key of the document to lookup - :param spec: A list of specs (see :mod:`couchbase.subdocument`) + :param spec: A list of specs (see :mod:`.couchbase.subdocument`) :return: A :class:`.couchbase.result.SubdocResult` object. This object contains the results and any errors of the operation. @@ -761,7 +761,7 @@ def lookup_in(self, key, *specs, **kwargs): name = rv[1] friend_exists = rv.exists(2) - .. seealso:: meth:`retrieve_in` which acts as a convenience wrapper + .. seealso:: :meth:`retrieve_in` which acts as a convenience wrapper """ return super(Bucket, self).lookup_in({key: specs}, **kwargs) diff --git a/docs/source/api/subdoc.rst b/docs/source/api/subdoc.rst index 79f399178..fa6495adc 100644 --- a/docs/source/api/subdoc.rst +++ b/docs/source/api/subdoc.rst @@ -1,3 +1,5 @@ +.. module:: couchbase.subdocument + ================ Sub-Document API ================ @@ -5,12 +7,43 @@ Sub-Document API .. currentmodule:: couchbase.subdocument The functions in this module can be used to specify operations to the -:cb_bmeth:`lookup_in` and :cb_bmeth:`upsert_in` methods. Both the +:cb_bmeth:`lookup_in` and :cb_bmeth:`mutate_in` methods. Both the `mutate_in` and `lookup_in` methods can take multiple operations. Any given operation is either valid in :cb_bmeth:`lookup_in` or :cb_bmeth:`mutate_in`; never both. + +Internally every function in this module returns an object +specifying the path, options, and value of the command, so for example: + +.. code-block:: python + + cb.mutate_in(key, + SD.upsert('path1', 'value1'), + SD.insert('path2', 'value2', create_parents=True)) + +really becomes + +.. code-block:: python + + cb.mutate_in(key, + (CMD_SUBDOC_UPSERT, 'path1', 'value1', 0), + (CMD_SUBDOC_INSERT, 'path2', 'value2', 1)) + + +Thus, the actual operations are performed when the `mutate_in` or `lookup_in` +methods are executed, the functions in this module just acting as an interface +to specify what sorts of operations are to be executed. + +Throughout the SDK documentation, this module is referred to as ``SD`` which +is significantly easier to type than ``couchbase.subdocument``. This is done +via + +.. code-block:: python + + import couchbase.subdocument as SD + ----------------- Lookup Operations ----------------- From acaeb133b2d11c0307e8879c0e87959629e575ec Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 20 Apr 2016 08:06:33 -0700 Subject: [PATCH 036/829] txview: Remove use of deprecated 'Connection' class Change-Id: I1aa27a9111f6523d13d1b49eb0aae1d2c944d60a Reviewed-on: http://review.couchbase.org/63091 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- examples/txview.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/txview.py b/examples/txview.py index e944689d9..56fef7741 100644 --- a/examples/txview.py +++ b/examples/txview.py @@ -1,12 +1,12 @@ from twisted.internet import reactor -from txcouchbase.connection import Connection +from txcouchbase.bucket import Bucket def on_view_rows(res): for row in res: print "Got row", row.key -cb = Connection(bucket='beer-sample') +cb = Bucket('couchbase://localhost/beer-sample') d = cb.queryAll("beer", "brewery_beers", limit=20) d.addCallback(on_view_rows) reactor.run() From 640be79f81c130765343bfe51a16939ae49ba8a5 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 18 Apr 2016 08:28:06 -0700 Subject: [PATCH 037/829] README: fix pypi url typo Change-Id: I0d13b191f81f60141e3ab5af6dc8b460666b4d03 Reviewed-on: http://review.couchbase.org/62959 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index f2e5ce659..3b8f38cdf 100644 --- a/README.rst +++ b/README.rst @@ -280,4 +280,4 @@ The Couchbase Python SDK is licensed under the Apache License 2.0. .. _libcouchbase: http://couchbase.com/develop/c/current .. _JIRA: http://couchbase.com/issues/browse/pycbc .. _freenode: http://freenode.net/irc_servers.shtml -.. _pypi: http://pypy.python.org/pypi/couchbase +.. _pypi: http://pypi.python.org/pypi/couchbase From f29976734e4e1fa2a279ea38ace3055729b0237c Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 18 Apr 2016 08:33:00 -0700 Subject: [PATCH 038/829] GH-25: Default lockmode not clear in rendered docs Change-Id: If7f6a09354ec521234f4fbf228fe0e08ddbbf28a Reviewed-on: http://review.couchbase.org/62961 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- docs/source/api/threads.rst | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/docs/source/api/threads.rst b/docs/source/api/threads.rst index d03095996..d821b280e 100644 --- a/docs/source/api/threads.rst +++ b/docs/source/api/threads.rst @@ -64,7 +64,7 @@ threads .. data:: LOCKMODE_EXC -This is the default lockmode. If it is detected that the object is being used +*This is the default lockmode*. If it is detected that the object is being used from multiple threads, an exception will be raised indicating such. Internally this uses the C equivalent of the ``threading.Lock`` object (i.e. @@ -72,8 +72,6 @@ Internally this uses the C equivalent of the ``threading.Lock`` object (i.e. to acquire the lock (without waiting). If the acquisition fails, the exception is raised. -*This is the default lockmode* - .. data:: LOCKMODE_WAIT In this mode, a lock is also used, only in this case an exception is not From 6864b933c6f1daffd434c168e8eb44f9ac79497c Mon Sep 17 00:00:00 2001 From: Will Gardner <will.gardner@couchbase.com> Date: Sat, 30 Apr 2016 01:45:29 +0100 Subject: [PATCH 039/829] Fix syntax error in subdocument.upsert example Change-Id: I448bd8f77183d41ac2506316227697887e5f7c22 Reviewed-on: http://review.couchbase.org/63557 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/subdocument.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py index 8dca47e9c..352814048 100644 --- a/couchbase/subdocument.py +++ b/couchbase/subdocument.py @@ -87,7 +87,7 @@ def upsert(path, value, create_parents=False): .. code-block:: python - cb.mutate_in("docid", SD.upsert("foo.bar.baz", "newValue") + cb.mutate_in("docid", SD.upsert("foo.bar.baz", "newValue")) would fail with :cb_exc:`SubdocPathNotFoundError` because `foo.bar` does not exist. However when using the `create_parents` option, the From 8a439aa96e227c86db87b54e15716778d9490357 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 2 May 2016 07:58:27 -0700 Subject: [PATCH 040/829] PYCBC-343: SubdocResult: Add missing import This allows iteration, whereas before it resulted in an exception due to a missing `izip` import. Code has been added to _pyport since the `izip` is a builtin (`zip`) in Py3 Change-Id: I1c3d74b2b4c0a6e2a60ca728c36c11717012c4b6 Reviewed-on: http://review.couchbase.org/63577 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/_pyport.py | 2 ++ couchbase/result.py | 3 +-- couchbase/tests/cases/subdoc_t.py | 16 ++++++++++++++++ 3 files changed, 19 insertions(+), 2 deletions(-) diff --git a/couchbase/_pyport.py b/couchbase/_pyport.py index ac1621f17..966f3b40f 100644 --- a/couchbase/_pyport.py +++ b/couchbase/_pyport.py @@ -25,10 +25,12 @@ import urllib.parse as ulp from urllib.request import urlopen from urllib.parse import parse_qs + izip = zip else: import urllib as ulp from urllib2 import urlopen from urlparse import parse_qs + from itertools import izip long = long if V == 2 else int xrange = xrange if V == 2 else range diff --git a/couchbase/result.py b/couchbase/result.py index 616a89eca..dfa2b6ed5 100644 --- a/couchbase/result.py +++ b/couchbase/result.py @@ -14,7 +14,6 @@ # See the License for the specific language governing permissions and # limitations under the License. # - from couchbase._libcouchbase import ( Result, ValueResult, @@ -23,7 +22,7 @@ MultiResult, ObserveInfo, AsyncResult) -from couchbase._pyport import long, xrange +from couchbase._pyport import long, xrange, izip import couchbase._libcouchbase as C import couchbase.exceptions as E diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index b1094d106..2bb79d670 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -232,3 +232,19 @@ def test_multi_value(self): cb.mutate_in(key, SD.array_prepend('array', [42])) self.assertEqual([[42], True, 1, 2, 3], cb.retrieve_in(key, 'array')[0]) + + def test_result_iter(self): + cb = self.cb + key = self.gen_key('sditer') + cb.upsert(key, [1, 2, 3]) + vals = cb.retrieve_in(key, '[0]', '[1]', '[2]') + v1, v2, v3 = vals + self.assertEqual(1, v1) + self.assertEqual(2, v2) + self.assertEqual(3, v3) + + vals = cb.retrieve_in(key, '[0]', '[34]', '[3]') + self.assertFalse(vals.success) + it = iter(vals) + self.assertEqual(1, next(it)) + self.assertRaises(E.SubdocPathNotFoundError, next, it) \ No newline at end of file From a1203cb725321c3c6a022ee243da833802756b03 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 11 Apr 2016 10:56:55 -0700 Subject: [PATCH 041/829] PYCBC-321: CBFT support Change-Id: If975ebe429b08449b589bc977a7d16074ab3d10e Reviewed-on: http://review.couchbase.org/62957 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/bucket.py | 32 + couchbase/fulltext.py | 1012 ++++++++++++++++++++++++ couchbase/tests/cases/cbftstrings_t.py | 257 ++++++ docs/source/api/couchbase.rst | 8 + docs/source/api/fulltext.rst | 142 ++++ docs/source/index.rst | 1 + setup.py | 1 + src/bucket.c | 1 + src/fts.c | 93 +++ src/htresult.c | 2 + src/oputil.h | 3 + src/pycbc.h | 16 +- 12 files changed, 1562 insertions(+), 6 deletions(-) create mode 100644 couchbase/fulltext.py create mode 100644 couchbase/tests/cases/cbftstrings_t.py create mode 100644 docs/source/api/fulltext.rst create mode 100644 src/fts.c diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 69f139187..762ce5da4 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -29,6 +29,7 @@ from couchbase.views.params import make_dvpath, make_options_string from couchbase.views.iterator import View from couchbase.n1ql import N1QLQuery, N1QLRequest +import couchbase.fulltext as _FTS from couchbase._pyport import basestring @@ -1375,6 +1376,37 @@ def n1ql_query(self, query, *args, **kwargs): itercls = kwargs.pop('itercls', N1QLRequest) return itercls(query, self, *args, **kwargs) + def search(self, index, query, **kwargs): + """ + Perform full-text searches + + .. versionadded:: 2.0.9 + + .. warning:: + + The full-text search API is experimental and subject to change + + :param str index: Name of the index to query + :param couchbase.fulltext.SearchQuery query: Query to issue + :param couchbase.fulltext.Params params: Additional query options + :return: An iterator over query hits + + .. note:: You can avoid instantiating an explicit `Params` object + and instead pass the parameters directly to the `search` method. + + .. code-block:: python + + it = cb.search('name', ft.MatchQuery('nosql'), limit=10) + for hit in it: + print(hit) + + """ + itercls = kwargs.pop('itercls', _FTS.SearchRequest) + iterargs = itercls.mk_kwargs(kwargs) + params = kwargs.pop('params', _FTS.Params(**kwargs)) + body = _FTS.make_search_body(index, query, params) + return itercls(body, self, **iterargs) + def __repr__(self): return ('<{modname}.{cls} bucket={bucket}, nodes={nodes} at 0x{oid:x}>' ).format(modname=__name__, cls=self.__class__.__name__, diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py new file mode 100644 index 000000000..2deab1b65 --- /dev/null +++ b/couchbase/fulltext.py @@ -0,0 +1,1012 @@ +import abc + + +from couchbase.exceptions import CouchbaseError +from couchbase.views.iterator import AlreadyQueriedError +from couchbase import _to_json +from couchbase._pyport import unicode + + +def _genprop(converter, *apipaths, **kwargs): + """ + This internal helper method returns a property (similar to the + @property decorator). In additional to a simple Python property, + this also adds a type validator (`converter`) and most importantly, + specifies the path within a dictionary where the value should be + stored. + + Any object using this property *must* have a dictionary called ``_json`` + as a property + + :param converter: The converter to be used, e.g. `int` or `lambda x: x` + for passthrough + :param apipaths: + Components of a path to store, e.g. `foo, bar, baz` for + `foo['bar']['baz']` + :return: A property object + """ + if not apipaths: + raise TypeError('Must have at least one API path') + + def fget(self): + d = self._json_ + try: + for x in apipaths: + d = d[x] + return d + except KeyError: + return None + + def fset(self, value): + value = converter(value) + d = self._json_ + for x in apipaths[:-1]: + d = d.setdefault(x, {}) + d[apipaths[-1]] = value + + def fdel(self): + d = self._json_ + try: + for x in apipaths[:-1]: + d = d[x] + del d[apipaths[-1]] + except KeyError: + pass + + doc = kwargs.pop( + 'doc', 'Corresponds to the ``{}`` field'.format('.'.join(apipaths))) + return property(fget, fset, fdel, doc) + + +def _genprop_str(*apipaths, **kwargs): + """ + Convenience function to return a string property in which the value + is converted to a string + """ + return _genprop(unicode, *apipaths, **kwargs) + + +def _highlight(value): + """ + highlight 'type validator'. + """ + if value not in ('html', 'ansi'): + raise ValueError( + 'Highlight must be "html" or "ansi", got {0}'.format(value)) + return value + + +def _assign_kwargs(self, kwargs): + """ + Assigns all keyword arguments to a given instance, raising an exception + if one of the keywords is not already the name of a property. + """ + for k in kwargs: + if not hasattr(self, k): + raise AttributeError(k, 'Not valid for', self.__class__.__name__) + setattr(self, k, kwargs[k]) + + +class Facet(object): + """ + Base facet class. Each facet must have a field which it aggregates + """ + def __init__(self, field): + self._json_ = {'field': field} + + @property + def encodable(self): + """ + Returns a reprentation of the object suitable for serialization + """ + return self._json_ + + """ + Field upon which the facet will aggregate + """ + field = _genprop_str('field') + + def __repr__(self): + return '{0.__class__.__name__}<{0._json_!r}>'.format(self) + + +class TermFacet(Facet): + """ + Facet aggregating the most frequent terms used. + """ + def __init__(self, field, limit=0): + super(TermFacet, self).__init__(field) + if limit: + self.limit = limit + + limit = _genprop(int, 'size', + doc="Maximum number of terms/count pairs to return") + + +def _mk_range_bucket(name, n1, n2, r1, r2): + """ + Create a named range specification for encoding. + + :param name: The name of the range as it should appear in the result + :param n1: The name of the lower bound of the range specifier + :param n2: The name of the upper bound of the range specified + :param r1: The value of the lower bound (user value) + :param r2: The value of the upper bound (user value) + :return: A dictionary containing the range bounds. The upper and lower + bounds are keyed under ``n1`` and ``n2``. + + More than just a simple wrapper, this will not include any range bound + which has a user value of `None`. Likewise it will raise an exception if + both range values are ``None``. + """ + d = {} + if r1 is not None: + d[n1] = r1 + if r2 is not None: + d[n2] = r2 + if not d: + raise TypeError('Must specify at least one range boundary!') + d['name'] = name + return d + + +class DateFacet(Facet): + """ + Facet to aggregate results based on a date range. + This facet must have at least one invocation of :meth:`add_range` before + it is added to :attr:`~Params.facets`. + """ + def __init__(self, field): + super(DateFacet, self).__init__(field) + self._ranges = [] + + def add_range(self, name, start=None, end=None): + """ + Adds a date range to the given facet. + + :param str name: + The name by which the results within the range can be accessed + :param str start: Lower date range. Should be in RFC 3339 format + :param str end: Upper date range. + :return: The `DateFacet` object, so calls to this method may be + chained + """ + self._ranges.append(_mk_range_bucket(name, 'start', 'end', start, end)) + return self + + _ranges = _genprop(list, 'date_ranges') + + +class NumericFacet(Facet): + """ + Facet to aggregate results based on a numeric range. + This facet must have at least one invocation of :meth:`add_range` + before it is added to :attr:`Params.facets` + """ + def __init__(self, field): + super(NumericFacet, self).__init__(field) + self._ranges = [] + + def add_range(self, name, min=None, max=None): + """ + Add a numeric range. + + :param str name: + the name by which the range is accessed in the results + :param int | float min: Lower range bound + :param int | float max: Upper range bound + :return: This object; suitable for method chaining + """ + self._ranges.append(_mk_range_bucket(name, 'min', 'max', min, max)) + return self + + _ranges = _genprop(list, 'numeric_ranges') + + +class _FacetDict(dict): + """ + Internal dict subclass which ensures that facets added to this dictionary + have properly defined ranges. + """ + + # noinspection PyMissingConstructor + def __init__(self, *args, **kwargs): + self.update(*args, **kwargs) + + def __setitem__(self, key, value): + if not isinstance(value, Facet): + raise ValueError('Can only add facet') + if hasattr(value, '_ranges') and not getattr(value, '_ranges'): + raise ValueError('{} object must have at least one range. Use ' + 'add_range'.format(value.__class__.__name__)) + super(_FacetDict, self).__setitem__(key, value) + + def update(self, *args, **kwargs): + if args: + if len(args) > 1: + raise TypeError('only one merge at a time!') + other = dict(args[0]) + for key in other: + self[key] = other[key] + for key in kwargs: + self[key] = kwargs[key] + + def setdefault(self, key, value=None): + if key not in self: + self[key] = value + return self[key] + + +class Params(object): + """ + Generic parameters and query modifiers. Keyword arguments may be used + to initialize instance attributes. See individual attributes for + more information. + + .. attribute:: facets + + A dictionary of :class:`Facet` objects. The dictionary uses the + facet name for keys. You can retrieve the facet results by their name + as well. + + You can add facets like so:: + + params.facets['term_analysis'] = TermFacet('author', limit=10) + params.facets['view_count'] = NumericFacet().add_range('low', max=50) + """ + def __init__(self, **kwargs): + self._json_ = {} + self.facets = _FacetDict(**kwargs.pop('facets', {})) + _assign_kwargs(self, kwargs) + + @property + def encodable(self): + if self.facets: + self._json_['facets'] = { + n: x.encodable for n, x in self.facets.items() + } + + return self._json_ + + limit = _genprop(int, 'size', doc='Maximum number of results to return') + + skip = _genprop(int, 'from', doc='Seek by this number of results') + + explain = _genprop( + bool, 'explain', doc='Whether to return the explanation of the search') + + fields = _genprop( + list, 'fields', doc='Return these fields for each document') + + timeout = _genprop(lambda x: int(x * 1000), 'ctl', 'timeout', + doc='Timeout for the query, in seconds') + + highlight_style = _genprop(_highlight, 'highlight', 'style', doc=""" + Highlight the results using a given style. + Can be either `ansi` or `html` + """) + + highlight_fields = _genprop( + list, 'highlight', 'fields', doc=""" + Highlight the results from these fields (list) + """) + + +class SearchQuery(object): + """ + Base query object. You probably want to use one of the subclasses. + + .. seealso:: :class:`MatchQuery`, :class:`BooleanQuery`, + :class:`RegexQuery`, :class:`PrefixQuery`, :class:`NumericRangeQuery`, + :class:`DateRangeQuery`, :class:`ConjunctionQuery`, + :class:`DisjunctionQuery`, and others in this module. + """ + def __init__(self): + self._json_ = {} + + boost = _genprop( + float, 'boost', doc=""" + When specifying multiple queries, you can give this query a + higher or lower weight in order to prioritize it among sibling + queries. See :class:`ConjunctionQuery` + """) + + @property + def encodable(self): + """ + Returns an object suitable for serializing to JSON + + .. code-block:: python + + json.dumps(query.encodable) + """ + self.validate() + return self._json_ + + def validate(self): + """ + Optional validation function. Invoked before encoding + """ + pass + + +class RawQuery(SearchQuery): + """ + This class is used to wrap a raw query payload. It should be used + for custom query parameters, or in cases where any of the other + query classes are insufficient. + """ + def __init__(self, obj): + super(RawQuery, self).__init__() + self._json_ = obj + + +class _SingleQuery(SearchQuery): + __metaclass__ = abc.ABCMeta + + @abc.abstractproperty + def _TERMPROP(self): + """Name of the JSON property that contains the mandatory match spec""" + + def __init__(self, term, **kwargs): + super(_SingleQuery, self).__init__() + if self._TERMPROP not in kwargs: + kwargs[self._TERMPROP] = term + _assign_kwargs(self, kwargs) + + +_COMMON_FIELDS = { + 'prefix_length': _genprop( + int, 'prefix_length', + doc=""" + When using :attr:`fuzziness`, this controls how much of the term + or phrase is *excluded* from fuzziness. This may help improve + performance at the expense of omitting potential fuzzy matches + at the beginning of the string. + """), + 'fuzziness': _genprop( + int, 'fuzziness', + doc=""" + Allow a given degree of fuzz in the match. Matches which are closer to + the original term will be scored higher. + + You can apply the fuzziness to only a portion of the string by + specifying :attr:`prefix_length` - indicating that only the part + of the field after the prefix length should be checked with fuzziness. + + This value is specified as a float + """), + 'field': _genprop( + str, 'field', + doc=""" + Restrict searching to a given document field + """), + 'analyzer': _genprop( + str, 'analyzer', + doc=""" + Use a defined server-side analyzer to process the input term prior to + executing the search + """ + ) +} + + +def _with_fields(*fields): + """ + Class decorator to include common query fields + :param fields: List of fields to include. These should be keys of the + `_COMMON_FIELDS` dict + """ + dd = {x: _COMMON_FIELDS[x] for x in fields} + + def wrap(cls): + dd.update(cls.__dict__) + return type(cls.__name__, cls.__bases__, dd) + + return wrap + + +class StringQuery(_SingleQuery): + """ + Query which allows users to describe a query in a query language. + The server will then execute the appropriate query based on the contents + of the query string: + + .. seealso:: + + `Query Language <http://www.blevesearch.com/docs/Query-String-Query/>`_ + + Example:: + + StringQuery('description:water and stuff') + """ + + _TERMPROP = 'query' + query = _genprop_str('query') + + """ + Actual query string + """ + + +@_with_fields('field') +class WildcardQuery(_SingleQuery): + """ + Query in which the characters `*` and `?` have special meaning, where + `?` matches 1 occurrence and `*` will match 0 or more occurrences of the + previous character + """ + _TERMPROP = 'wildcard' + wildcard = _genprop_str(_TERMPROP, doc='Wildcard pattern to use') + + +def _list_nonempty_conv(l): + if not l: + raise ValueError('Must have at least one value') + + +class DocIdQuery(_SingleQuery): + """ + Matches document IDs. This is must useful in a compound query + (for example, :class:`BooleanQuery`). When used as a criteria, only + documents with the specified IDs will be searched. + """ + _TERMPROP = 'ids' + ids = _genprop(list, 'ids', doc=""" + List of document IDs to use + """) + + def validate(self): + super(DocIdQuery, self).validate() + if not self.ids: + raise NoChildrenError('`ids` must contain at least one ID') + + +@_with_fields('prefix_length', 'fuzziness', 'field', 'analyzer') +class MatchQuery(_SingleQuery): + """ + Query which checks one or more fields for a match + """ + _TERMPROP = 'match' + match = _genprop_str( + 'match', doc=""" + String to search for + """) + + +@_with_fields('fuzziness', 'prefix_length', 'field') +class TermQuery(_SingleQuery): + """ + Searches for a given term in documents. Unlike :class:`MatchQuery`, + the term is not analyzed. + + Example:: + + TermQuery('lcb_cntl_string') + """ + _TERMPROP = 'term' + term = _genprop_str('term', doc='Exact term to search for') + + +@_with_fields('field', 'analyzer') +class MatchPhraseQuery(_SingleQuery): + """ + Search documents which match a given phrase. The phrase is composed + of one or more terms. + + Example:: + + MatchPhraseQuery("Hello world!") + """ + _TERMPROP = 'match_phrase' + match_phrase = _genprop_str(_TERMPROP, doc="Phrase to search for") + + +@_with_fields('field') +class PhraseQuery(_SingleQuery): + _TERMPROP = 'terms' + terms = _genprop(list, 'terms', doc='List of terms to search for') + + def __init__(self, *phrases, **kwargs): + super(PhraseQuery, self).__init__(phrases, **kwargs) + + def validate(self): + super(PhraseQuery, self).validate() + if not self.terms: + raise NoChildrenError('Missing terms') + + +@_with_fields('field') +class PrefixQuery(_SingleQuery): + """ + Search documents for fields beginning with a certain prefix. This is + most useful for type-ahead or lookup queries. + """ + _TERMPROP = 'prefix' + prefix = _genprop(str, 'prefix', doc='The prefix to match') + + +@_with_fields('field') +class RegexQuery(_SingleQuery): + """ + Search documents for fields matching a given regular expression + """ + _TERMPROP = 'regex' + regex = _genprop_str('regexp', doc="Regular expression to use") + + +RegexpQuery = RegexQuery + + +class _RangeQuery(SearchQuery): + __metaclass__ = abc.ABCMeta + + @abc.abstractproperty + def _MINMAX(self): + return 'min_name', 'max_name' + + def __init__(self, r1, r2, **kwargs): + super(_RangeQuery, self).__init__() + _assign_kwargs(self, kwargs) + if r1 is None and r2 is None: + raise TypeError('At least one of {0} or {1} should be specified', + *self._MINMAX) + if r1 is not None: + setattr(self, self._MINMAX[0], r1) + if r2 is not None: + setattr(self, self._MINMAX[1], r2) + + +@_with_fields('field') +class NumericRangeQuery(_RangeQuery): + """ + Search documents for fields containing a value within a given numerical + range. + + At least one of `min` or `max` must be specified. + """ + def __init__(self, min=None, max=None, **kwargs): + """ + :param float min: See :attr:`min` + :param float max: See :attr:`max` + """ + super(NumericRangeQuery, self).__init__(min, max, **kwargs) + + min = _genprop( + float, 'min', doc='Lower bound of range. See :attr:`min_inclusive`') + + min_inclusive = _genprop( + bool, 'min_inclusive', + doc='Whether matches are inclusive of lower bound') + + max = _genprop( + float, 'max', + doc='Upper bound of range. See :attr:`max_inclusive`') + + max_inclusive = _genprop( + bool, 'max_inclusive', + doc='Whether matches are inclusive of upper bound') + + _MINMAX = 'min', 'max' + + +@_with_fields('field') +class DateRangeQuery(_RangeQuery): + """ + Search documents for fields containing a value within a given date + range. + + The date ranges are parsed according to a given :attr:`datetime_parser`. + If no parser is specified, the RFC 3339 parser is used. See + `Generating an RFC 3339 Timestamp <http://goo.gl/LIkV7G>_`. + + The :attr:`start` and :attr:`end` parameters should be specified in the + constructor. Note that either `start` or `end` (but not both!) may be + omitted. + + .. code-block:: python + + DateRangeQuery(start='2014-12-25', end='2016-01-01') + """ + def __init__(self, start=None, end=None, **kwargs): + """ + :param str start: Start of date range + :param str end: End of date range + :param kwargs: Additional options: :attr:`field`, :attr:`boost` + """ + super(DateRangeQuery, self).__init__(start, end, **kwargs) + + start = _genprop_str('start', doc='Lower bound datetime') + end = _genprop_str('end', doc='Upper bound datetime') + + start_inclusive = _genprop( + bool, 'start_inclusive', doc='If :attr:`start` is inclusive') + + end_inclusive = _genprop( + bool, 'end_inclusive', doc='If :attr:`end` is inclusive') + + datetime_parser = _genprop_str( + 'datetime_parser', + doc=""" + Parser to use when analyzing the :attr:`start` and :attr:`end` fields + on the server. + + If not specified, the RFC 3339 parser is used. + Ensure to specify :attr:`start` and :attr:`end` in a format suitable + for the given parser. + """) + + _MINMAX = 'start', 'end' + + +class _CompoundQuery(SearchQuery): + __metaclass__ = abc.ABCMeta + + @abc.abstractproperty + def _COMPOUND_FIELDS(self): + """ + Field to contain the compound queries. should return an iterable of + `(srcname, tgtname)` of the attribute name and the target JSON + field that contains the actual list of queries + """ + return ('Dummy', 'Dummy'), + + def __init__(self, **kwargs): + super(_CompoundQuery, self).__init__() + _assign_kwargs(self, kwargs) + + @property + def encodable(self): + self.validate() + + # Requires special handling since the compound queries in themselves + # cannot be JSON unless they are properly encoded. + # Note that the 'encodable' property also triggers validation + js = self._json_.copy() + for src, tgt in self._COMPOUND_FIELDS: + objs = getattr(self, src) + if not objs: + continue + js[tgt] = [q.encodable for q in objs] + return js + + +class ConjunctionQuery(_CompoundQuery): + """ + Compound query in which all sub-queries passed must be satisfied + """ + _COMPOUND_FIELDS = ('conjuncts', 'conjuncts'), + + def __init__(self, *queries): + super(ConjunctionQuery, self).__init__() + self.conjuncts = list(queries) + + def validate(self): + super(ConjunctionQuery, self).validate() + if not self.conjuncts: + raise NoChildrenError('No sub-queries') + + +def _convert_gt0(value): + # Ensure value is greater than 0 + value = int(value) + if not value: + raise ValueError('Must be > 0') + return value + + +class DisjunctionQuery(_CompoundQuery): + """ + Compound query in which at least :attr:`min` or more queries must be + satisfied + """ + _COMPOUND_FIELDS = ('disjuncts', 'disjuncts'), + + def __init__(self, *queries, **kwargs): + super(DisjunctionQuery, self).__init__() + _assign_kwargs(self, kwargs) + self.disjuncts = list(queries) + if 'min' not in self._json_: + self.min = 1 + + min = _genprop( + _convert_gt0, 'min', doc='Number of queries which must be satisfied') + + def validate(self): + super(DisjunctionQuery, self).validate() + if not self.disjuncts or len(self.disjuncts) < self.min: + raise NoChildrenError('No children specified, or min is too big') + + +def _bprop_wrap(name, reqtype, doc): + """ + Helper function to generate properties + :param name: The name of the subfield in the JSON dictionary + :param reqtype: The compound query type the query + list should be coerced into + :param doc: Documentation for the field + :return: the property. + """ + def fget(self): + return self._subqueries.get(name) + + def fset(self, value): + if value is None: + if name in self._subqueries: + del self._subqueries[name] + elif isinstance(value, reqtype): + self._subqueries[name] = value + elif isinstance(value, SearchQuery): + self._subqueries[name] = reqtype(value) + else: + try: + it = iter(value) + except ValueError: + raise TypeError('Value must be instance of Query') + + l = [] + for q in it: + if not isinstance(q, SearchQuery): + raise TypeError('Item is not a query!', q) + l.append(q) + self._subqueries[name] = reqtype(*l) + + def fdel(self): + setattr(self, name, None) + + return property(fget, fset, fdel, doc) + + +class BooleanQuery(SearchQuery): + def __init__(self, must=None, should=None, must_not=None): + super(BooleanQuery, self).__init__() + self._subqueries = {} + self.must = must + self.should = should + self.must_not = must_not + + must = _bprop_wrap( + 'must', ConjunctionQuery, + """ + Queries which must be satisfied. When setting this attribute, the + SDK will convert value to a :class:`ConjunctionQuery` if the value + is a list of queries. + """) + + must_not = _bprop_wrap( + 'must_not', DisjunctionQuery, + """ + Queries which must not be satisfied. Documents found which satisfy + the queries in this clause are not returned in the match. + + When setting this attribute in the SDK, it will be converted to a + :class:`DisjunctionQuery` if the value is a list of queries. + """) + + should = _bprop_wrap( + 'should', DisjunctionQuery, + """ + Specify additional queries which should be satisfied. As opposed to + :attr:`must`, you can specify the number of queries in this field + which must be satisfied. + + The type of this attribute is :class:`DisjunctionQuery`, and you can + set the minimum number of queries to satisfy using:: + + boolquery.should.min = 1 + """) + + @property + def encodable(self): + # Overrides the default `encodable` implementation in order to + # serialize any sub-queries + for src, tgt in ((self.must, 'must'), + (self.must_not, 'must_not'), + (self.should, 'should')): + if src: + self._json_[tgt] = src.encodable + return super(BooleanQuery, self).encodable + + def validate(self): + super(BooleanQuery, self).validate() + if not self.must and not self.must_not and not self.should: + raise ValueError('No sub-queries specified', self) + + +class MatchAllQuery(_SingleQuery): + """ + Special query which matches all documents + """ + def __init__(self, **kwargs): + super(MatchAllQuery, self).__init__(None, **kwargs) + + _TERMFIELD = 'match_all' + + +class MatchNoneQuery(_SingleQuery): + """ + Special query which matches no documents + """ + def __init__(self, **kwargs): + super(MatchNoneQuery, self).__init__(None, **kwargs) + + _TERMFIELD = 'match_none' + + +@_with_fields('field') +class BooleanFieldQuery(_SingleQuery): + _TERMPROP = 'bool' + bool = _genprop(bool, 'bool', doc='Boolean value to search for') + + +class SearchError(CouchbaseError): + """ + Error during server execution + """ + + +class NoChildrenError(CouchbaseError): + """ + Compound query is missing children" + """ + def __init__(self, msg='No child queries'): + super(NoChildrenError, self).__init__({'message': msg}) + + +def make_search_body(index, query, params=None): + """ + Generates a dictionary suitable for encoding as the search body + :param index: The index name to query + :param query: The query itself + :param params: Modifiers for the query + :return: A dictionary suitable for serialization + """ + dd = {} + + if not isinstance(query, SearchQuery): + query = StringQuery(query) + + dd['query'] = query.encodable + if params: + dd.update(params.encodable) + dd['indexName'] = index + return dd + + +class SearchRequest(object): + """ + Object representing an active query on the cluster. + + .. warning:: + + You should call :cb_bmeth:`search` which will return an instance of + this class. Do *not* invoke the constructor directly. + + You can iterate over this object (i.e. ``__iter__``) to receive the + actual search results. + """ + def __init__(self, body, parent, row_factory=lambda x: x): + """ + :param str body: serialized JSON string + :param Bucket parent: + """ + self._body = _to_json(body) + self._parent = parent + self.row_factory = row_factory + self.errors = [] + self._mres = None + self._do_iter = True + self.__raw = False + self.__meta_received = False + + @classmethod + def mk_kwargs(cls, kwargs): + """ + Pop recognized arguments from a keyword list. + """ + ret = {} + kws = ['row_factory', 'body', 'parent'] + for k in kws: + if k in kwargs: + ret[k] = kwargs.pop(k) + + return ret + + def _start(self): + if self._mres: + return + + self._mres = self._parent._fts_query(self._body) + self.__raw = self._mres[None] + + @property + def raw(self): + return self.__raw + + @property + def meta(self): + """ + Get metadata from the query itself. This is guaranteed to only + return a Python dictionary. + + Note that if the query failed, the metadata might not be in JSON + format, in which case there may be additional, non-JSON data + which can be retrieved using the following + + :: + + raw_meta = req.raw.value + + :return: A dictionary containing the query metadata + """ + if not self.__meta_received: + raise RuntimeError( + 'This property only valid once all rows are received!') + + if isinstance(self.raw.value, dict): + return self.raw.value + return {} + + @property + def total_hits(self): + """ + The total number of hits that match the query. This may be greater + than the number of hits actually returned, as it is subject to the + :attr:`Params.limit` parameter + """ + return self.meta['total_hits'] + + @property + def took(self): + """ + The length of time the query took to execute + """ + return self.meta['took'] + + @property + def max_score(self): + return self.meta['max_score'] + + @property + def facets(self): + return self.meta['facets'] + + def _clear(self): + del self._parent + del self._mres + + def _handle_meta(self, value): + self.__meta_received = True + if not isinstance(value, dict): + return + if 'errors' in value: + for err in value['errors']: + raise SearchError.pyexc('N1QL Execution failed', err) + + def _process_payload(self, rows): + if rows: + return [self.row_factory(row) for row in rows] + + elif self.raw.done: + self._handle_meta(self.raw.value) + self._do_iter = False + return [] + else: + # We can only get here if another concurrent query broke out the + # event loop before we did. + return [] + + def __iter__(self): + if not self._do_iter: + raise AlreadyQueriedError() + + self._start() + while self._do_iter: + raw_rows = self.raw.fetch(self._mres) + for row in self._process_payload(raw_rows): + yield row + + def __repr__(self): + return ( + '<{0.__class__.__name__} body={0._body!r} response={1}>'.format( + self, self.raw.value if self.raw else '<PENDING>')) diff --git a/couchbase/tests/cases/cbftstrings_t.py b/couchbase/tests/cases/cbftstrings_t.py new file mode 100644 index 000000000..2944ce566 --- /dev/null +++ b/couchbase/tests/cases/cbftstrings_t.py @@ -0,0 +1,257 @@ +from __future__ import print_function + +from couchbase.tests.base import CouchbaseTestCase +import couchbase.fulltext as cbft + + +class FTStringsTest(CouchbaseTestCase): + def test_fuzzy(self): + q = cbft.TermQuery('someterm', field='field', boost=1.5, + prefix_length=23, fuzziness=12) + p = cbft.Params(explain=True) + + exp_json = { + 'query': { + 'term': 'someterm', + 'boost': 1.5, + 'fuzziness': 12, + 'prefix_length': 23, + 'field': 'field' + }, + 'indexName': 'someIndex', + 'explain': True + } + + self.assertEqual(exp_json, cbft.make_search_body('someIndex', q, p)) + + def test_match_phrase(self): + exp_json = { + 'query': { + 'match_phrase': 'salty beers', + 'analyzer': 'analyzer', + 'boost': 1.5, + 'field': 'field' + }, + 'size': 10, + 'indexName': 'ix' + } + + p = cbft.Params(limit=10) + q = cbft.MatchPhraseQuery('salty beers', boost=1.5, analyzer='analyzer', + field='field') + self.assertEqual(exp_json, cbft.make_search_body('ix', q, p)) + + def test_match_query(self): + exp_json = { + 'query': { + 'match': 'salty beers', + 'analyzer': 'analyzer', + 'boost': 1.5, + 'field': 'field', + 'fuzziness': 1234, + 'prefix_length': 4 + }, + 'size': 10, + 'indexName': 'ix' + } + + q = cbft.MatchQuery('salty beers', boost=1.5, analyzer='analyzer', + field='field', fuzziness=1234, prefix_length=4) + p = cbft.Params(limit=10) + self.assertEqual(exp_json, cbft.make_search_body('ix', q, p)) + + def test_string_query(self): + exp_json = { + 'query': { + 'query': 'q*ry', + 'boost': 2.0, + }, + 'explain': True, + 'size': 10, + 'indexName': 'ix' + } + q = cbft.StringQuery('q*ry', boost=2.0) + p = cbft.Params(limit=10, explain=True) + self.assertEqual(exp_json, cbft.make_search_body('ix', q, p)) + + def test_params(self): + self.assertEqual({}, cbft.Params().encodable) + self.assertEqual({'size': 10}, cbft.Params(limit=10).encodable) + self.assertEqual({'from': 100}, cbft.Params(skip=100).encodable) + + self.assertEqual({'explain': True}, + cbft.Params(explain=True).encodable) + + self.assertEqual({'highlight': {'style': 'html'}}, + cbft.Params(highlight_style='html').encodable) + + self.assertEqual({'highlight': {'style': 'ansi', + 'fields': ['foo', 'bar', 'baz']}}, + cbft.Params(highlight_style='ansi', + highlight_fields=['foo', 'bar', 'baz']) + .encodable) + + self.assertEqual({'fields': ['foo', 'bar', 'baz']}, + cbft.Params(fields=['foo', 'bar', 'baz']).encodable) + + p = cbft.Params(facets={ + 'term': cbft.TermFacet('somefield', limit=10), + 'dr': cbft.DateFacet('datefield').add_range('name', 'start', 'end'), + 'nr': cbft.NumericFacet('numfield').add_range('name2', 0.0, 99.99) + }) + exp = { + 'facets': { + 'term': { + 'field': 'somefield', + 'size': 10 + }, + 'dr': { + 'field': 'datefield', + 'date_ranges': [{ + 'name': 'name', + 'start': 'start', + 'end': 'end' + }] + }, + 'nr': { + 'field': 'numfield', + 'numeric_ranges': [{ + 'name': 'name2', + 'min': 0.0, + 'max': 99.99 + }] + }, + } + } + self.assertEqual(exp, p.encodable) + + def test_facets(self): + p = cbft.Params() + f = cbft.NumericFacet('numfield') + self.assertRaises(ValueError, p.facets.__setitem__, 'facetName', f) + self.assertRaises(TypeError, f.add_range, 'range1') + p.facets['facetName'] = f.add_range('range1', min=123, max=321) + self.assertTrue('facetName' in p.facets) + + f = cbft.DateFacet('datefield') + f.add_range('r1', start='2012', end='2013') + f.add_range('r2', start='2014') + f.add_range('r3', end='2015') + exp = { + 'field': 'datefield', + 'date_ranges': [ + {'name': 'r1', 'start': '2012', 'end': '2013'}, + {'name': 'r2', 'start': '2014'}, + {'name': 'r3', 'end': '2015'} + ] + } + self.assertEqual(exp, f.encodable) + + f = cbft.TermFacet('termfield') + self.assertEqual({'field': 'termfield'}, f.encodable) + f.limit = 10 + self.assertEqual({'field': 'termfield', 'size': 10}, f.encodable) + + def test_raw_query(self): + qq = cbft.RawQuery({'foo': 'bar'}) + self.assertEqual({'foo': 'bar'}, qq.encodable) + + def test_wildcard_query(self): + qq = cbft.WildcardQuery('f*o', field='wc') + self.assertEqual({'wildcard': 'f*o', 'field': 'wc'}, qq.encodable) + + def test_docid_query(self): + qq = cbft.DocIdQuery([]) + self.assertRaises(cbft.NoChildrenError, getattr, qq, 'encodable') + qq.ids = ['foo', 'bar', 'baz'] + self.assertEqual({'ids': ['foo', 'bar', 'baz']}, qq.encodable) + + def test_boolean_query(self): + prefix_q = cbft.PrefixQuery('someterm', boost=2) + bool_q = cbft.BooleanQuery( + must=prefix_q, must_not=prefix_q, should=prefix_q) + exp = {'prefix': 'someterm', 'boost': 2.0} + self.assertEqual({'conjuncts': [exp]}, + bool_q.must.encodable) + self.assertEqual({'min': 1, 'disjuncts': [exp]}, + bool_q.should.encodable) + self.assertEqual({'min': 1, 'disjuncts': [exp]}, + bool_q.must_not.encodable) + + # Test multiple criteria in must and must_not + pq_1 = cbft.PrefixQuery('someterm', boost=2) + pq_2 = cbft.PrefixQuery('otherterm') + bool_q = cbft.BooleanQuery(must=[pq_1, pq_2]) + exp = { + 'conjuncts': [ + {'prefix': 'someterm', 'boost': 2.0}, + {'prefix': 'otherterm'} + ] + } + self.assertEqual({'must': exp}, bool_q.encodable) + + def test_daterange_query(self): + self.assertRaises(TypeError, cbft.DateRangeQuery) + dr = cbft.DateRangeQuery(end='theEnd') + self.assertEqual({'end': 'theEnd'}, dr.encodable) + dr = cbft.DateRangeQuery(start='theStart') + self.assertEqual({'start': 'theStart'}, dr.encodable) + dr = cbft.DateRangeQuery(start='theStart', end='theEnd') + self.assertEqual({'start': 'theStart', 'end': 'theEnd'}, dr.encodable) + dr = cbft.DateRangeQuery('', '') # Empty strings should be ok + self.assertEqual({'start': '', 'end': ''}, dr.encodable) + + def test_numrange_query(self): + self.assertRaises(TypeError, cbft.NumericRangeQuery) + nr = cbft.NumericRangeQuery(0, 0) # Should be OK + self.assertEqual({'min': 0, 'max': 0}, nr.encodable) + nr = cbft.NumericRangeQuery(0.1, 0.9) + self.assertEqual({'min': 0.1, 'max': 0.9}, nr.encodable) + nr = cbft.NumericRangeQuery(max=0.9) + self.assertEqual({'max': 0.9}, nr.encodable) + nr = cbft.NumericRangeQuery(min=0.1) + self.assertEqual({'min': 0.1}, nr.encodable) + + def test_disjunction_query(self): + dq = cbft.DisjunctionQuery() + self.assertEqual(1, dq.min) + self.assertRaises(cbft.NoChildrenError, getattr, dq, 'encodable') + + dq.disjuncts.append(cbft.PrefixQuery('somePrefix')) + self.assertEqual({'min': 1, 'disjuncts': [{'prefix': 'somePrefix'}]}, + dq.encodable) + self.assertRaises(ValueError, setattr, dq, 'min', 0) + dq.min = 2 + self.assertRaises(cbft.NoChildrenError, getattr, dq, 'encodable') + + def test_conjunction_query(self): + cq = cbft.ConjunctionQuery() + self.assertRaises(cbft.NoChildrenError, getattr, cq, 'encodable') + cq.conjuncts.append(cbft.PrefixQuery('somePrefix')) + self.assertEqual({'conjuncts': [{'prefix': 'somePrefix'}]}, + cq.encodable) + + def match_all_none_queries(self): + self.assertEqual({'match_all': None}, cbft.MatchAllQuery().encodable) + self.assertEqual({'match_none': None}, cbft.MatchNoneQuery.encodable) + + def test_phrase_query(self): + pq = cbft.PhraseQuery('salty', 'beers') + self.assertEqual({'terms': ['salty', 'beers']}, pq.encodable) + + pq = cbft.PhraseQuery() + self.assertRaises(cbft.NoChildrenError, getattr, pq, 'encodable') + pq.terms.append('salty') + self.assertEqual({'terms': ['salty']}, pq.encodable) + + def test_prefix_query(self): + pq = cbft.PrefixQuery('someterm', boost=1.5) + self.assertEqual({'prefix': 'someterm', 'boost': 1.5}, pq.encodable) + + def test_regexp_query(self): + pq = cbft.RegexQuery('some?regex') + self.assertEqual({'regexp': 'some?regex'}, pq.encodable) + + def test_booleanfield_query(self): + bq = cbft.BooleanFieldQuery(True) + self.assertEqual({'bool': True}, bq.encodable) \ No newline at end of file diff --git a/docs/source/api/couchbase.rst b/docs/source/api/couchbase.rst index 33df89d97..b6a656731 100644 --- a/docs/source/api/couchbase.rst +++ b/docs/source/api/couchbase.rst @@ -304,6 +304,14 @@ N1QL Query Methods .. automethod:: n1ql_query +Full-Text Search Methods +======================== + +.. currentmodule:: couchbase.bucket +.. class:: Bucket + + .. automethod:: search + Design Document Management ========================== diff --git a/docs/source/api/fulltext.rst b/docs/source/api/fulltext.rst new file mode 100644 index 000000000..1e9e79e93 --- /dev/null +++ b/docs/source/api/fulltext.rst @@ -0,0 +1,142 @@ +################# +Full Text Queries +################# + +.. module:: couchbase.fulltext + +Full-text queries search the cluster for documents matching certain terms or +strings of texts. Unlike N1QL queries which are designed to be structured, i.e. +ensuring that a given document field matches a given value, full-text queries +are more free-form, allowing the ability to search multiple fields and to +add variance in queries. + +Couchbase offers full-text queries in version 4.5. To issue a query from the +Python SDK, use the :cb_bmeth:`search` bucket method. + +To perform an actual search, define a search index (via Couchbase web UI, or +using the REST interface), create a :class:`SearchQuery` object and then pass the +index name and the query object to the `search` method: + +.. code-block:: python + + query = ConjunctionQuery(TermQuery("couchbase"), MatchQuery("nosql")) + for hit in bucket.search('indexName', query): + print(hit) + + +The above query searches for any document which has both `nosql` and `couchbase` +in *any* of its fields. Note that :class:`PhraseQuery` may be better suited for +this kind of query + +.. code-block:: python + + query = PhraseQuery('couchbase', 'nosql') + for hit in bucket.search('indexName', query): + print(hit) + +----------- +Query Types +----------- + +You may issue simple match queries (:class:`MatchQuery`) to inspect a user +term; :class:`TermQuery` to match a field exactly, :class:`PrefixQuery` for +type-ahead queries, or a compound query type such as :class:`ConjunctionQuery` +for more complex queries. + +.. autoclass:: SearchQuery + :members: + +============= +Match Queries +============= + +.. autoclass:: MatchQuery + :members: + +.. autoclass:: MatchPhraseQuery + :members: + +.. autoclass:: PrefixQuery + :members: + +.. autoclass:: RegexQuery + :members: + +.. autoclass:: WildcardQuery + :members: + +.. autoclass:: BooleanFieldQuery + :members: + +.. autoclass:: StringQuery + :members: + +.. autoclass:: DocIdQuery + :members: + +============= +Range Queries +============= + +.. autoclass:: NumericRangeQuery + :members: + +.. autoclass:: DateRangeQuery + :members: + +================ +Compound Queries +================ + +.. autoclass:: ConjunctionQuery + :members: + +.. autoclass:: DisjunctionQuery + :members: + +.. autoclass:: BooleanQuery + :members: + +================= +Debugging Queries +================= + +.. autoclass:: MatchAllQuery + :members: + +.. autoclass:: MatchNoneQuery + :members: + +.. autoclass:: TermQuery + :members: + +.. autoclass:: PhraseQuery + :members: + + +---------- +Parameters +---------- + +Query parameters may be passed as the ``params`` keyword argument to +:cb_bmeth:`search`. + +.. autoclass:: Params + :members: + +------ +Facets +------ + +Facets allow additional aggregate information to be returned in the +results. You can count how many documents match specific criteria +based on ranges and matches. + +.. autoclass:: TermFacet + :members: + +.. autoclass:: DateFacet + :members: + +.. autoclass:: NumericFacet + :members: diff --git a/docs/source/index.rst b/docs/source/index.rst index 8e18599e2..77086169e 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -15,6 +15,7 @@ Contents: api/subdoc api/views api/n1ql + api/fulltext api/results api/exceptions api/transcoder diff --git a/setup.py b/setup.py index 1e38fdfa1..1b40ae03e 100644 --- a/setup.py +++ b/setup.py @@ -88,6 +88,7 @@ 'pipeline', 'views', 'n1ql', + 'fts' ] if platform.python_implementation() != 'PyPy': diff --git a/src/bucket.c b/src/bucket.c index 484552ba2..aa57a48fe 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -497,6 +497,7 @@ static PyMethodDef Bucket_TABLE_methods[] = { OPFUNC(_http_request, "Internal routine for HTTP requests"), OPFUNC(_view_request, "Internal routine for view requests"), OPFUNC(_n1ql_query, "Internal routine for N1QL queries"), + OPFUNC(_fts_query, "Internal routine for Fulltext queries"), OPFUNC(observe, "Get replication/persistence status for keys"), OPFUNC(observe_multi, "multi-key variant of observe"), diff --git a/src/fts.c b/src/fts.c new file mode 100644 index 000000000..1661fb3b9 --- /dev/null +++ b/src/fts.c @@ -0,0 +1,93 @@ +#include "pycbc.h" +#include "oputil.h" +#include "structmember.h" +#include <libcouchbase/cbft.h> + +static void +fts_row_callback(lcb_t instance, int ign, const lcb_RESPFTS *resp) +{ + pycbc_MultiResult *mres = (pycbc_MultiResult *)resp->cookie; + pycbc_Bucket *bucket = mres->parent; + pycbc_ViewResult *vres; + const char * const * hdrs = NULL; + short htcode = 0; + + PYCBC_CONN_THR_END(bucket); + vres = (pycbc_ViewResult *)PyDict_GetItem((PyObject*)mres, Py_None); + + if (resp->htresp) { + hdrs = resp->htresp->headers; + htcode = resp->htresp->htstatus; + } + + if (resp->rflags & LCB_RESP_F_FINAL) { + pycbc_httpresult_add_data(mres, &vres->base, resp->row, resp->nrow); + } else { + /* Like views, try to decode the row and invoke the callback; if we can */ + /* Assume success! */ + pycbc_viewresult_addrow(vres, mres, resp->row, resp->nrow); + } + + pycbc_viewresult_step(vres, mres, bucket, resp->rflags & LCB_RESP_F_FINAL); + + if (resp->rflags & LCB_RESP_F_FINAL) { + pycbc_httpresult_complete(&vres->base, mres, resp->rc, htcode, hdrs); + } else { + PYCBC_CONN_THR_BEGIN(bucket); + } +} + +PyObject * +pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +{ + int rv; + PyObject *ret = NULL; + pycbc_MultiResult *mres; + pycbc_ViewResult *vres; + lcb_error_t rc; + lcb_CMDFTS cmd = { 0 }; + const char *params; + pycbc_strlen_t nparams; + + static char *kwlist[] = { "params", NULL }; + rv = PyArg_ParseTupleAndKeywords(args, kwargs, + "s#", kwlist, ¶ms, &nparams); + + if (!rv) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + if (-1 == pycbc_oputil_conn_lock(self)) { + return NULL; + } + if (self->pipeline_queue) { + PYCBC_EXC_WRAP(PYCBC_EXC_PIPELINE, 0, + "FTS queries cannot be executed in pipeline context"); + } + + mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); + vres = (pycbc_ViewResult *)PYCBC_TYPE_CTOR(&pycbc_ViewResultType); + pycbc_httpresult_init(&vres->base, mres); + vres->rows = PyList_New(0); + vres->base.format = PYCBC_FMT_JSON; + vres->base.htype = PYCBC_HTTP_HN1QL; + + cmd.callback = fts_row_callback; + cmd.query = params; + cmd.nquery = nparams; + cmd.handle = &vres->base.u.fts; + rc = lcb_fts_query(self->instance, mres, &cmd); + + if (rc != LCB_SUCCESS) { + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, rc, "Couldn't schedule fts query"); + goto GT_DONE; + } + + ret = (PyObject *)mres; + mres = NULL; + + GT_DONE: + Py_XDECREF(mres); + pycbc_oputil_conn_unlock(self); + return ret; +} diff --git a/src/htresult.c b/src/htresult.c index 02139c861..087d8d372 100644 --- a/src/htresult.c +++ b/src/htresult.c @@ -63,6 +63,8 @@ HttpResult_dealloc(pycbc_HttpResult *self) lcb_view_cancel(self->parent->instance, self->u.vh); } else if (self->htype == PYCBC_HTTP_HN1QL) { lcb_n1ql_cancel(self->parent->instance, self->u.nq); + } else if (self->htype == PYCBC_HTTP_HFTS) { + lcb_fts_cancel(self->parent->instance, self->u.fts); } else { lcb_cancel_http_request(self->parent->instance, self->u.htreq); } diff --git a/src/oputil.h b/src/oputil.h index 71cabf148..7679104b7 100644 --- a/src/oputil.h +++ b/src/oputil.h @@ -352,4 +352,7 @@ PYCBC_DECL_OP(observe_multi); /* n1ql.c */ PYCBC_DECL_OP(_n1ql_query); +/* fts.c */ +PYCBC_DECL_OP(_fts_query); + #endif /* PYCBC_OPUTIL_H */ diff --git a/src/pycbc.h b/src/pycbc.h index e550f22d8..4e4793290 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -26,9 +26,10 @@ #include <libcouchbase/api3.h> #include <libcouchbase/views.h> #include <libcouchbase/n1ql.h> +#include <libcouchbase/cbft.h> -#if LCB_VERSION < 0x020506 -#error "Couchbase Python SDK requires libcouchbase 2.5.6 or greater" +#if LCB_VERSION < 0x020508 +#error "Couchbase Python SDK requires libcouchbase 2.5.8 or greater" #endif #include <pythread.h> @@ -395,10 +396,12 @@ typedef struct { PyObject *specs; } pycbc__SDResult; - -#define PYCBC_HTTP_HVIEW 1 -#define PYCBC_HTTP_HRAW 2 -#define PYCBC_HTTP_HN1QL 3 +enum { + PYCBC_HTTP_HVIEW = 1, + PYCBC_HTTP_HRAW, + PYCBC_HTTP_HN1QL, + PYCBC_HTTP_HFTS +}; typedef struct { pycbc_Result_HEAD @@ -409,6 +412,7 @@ typedef struct { lcb_http_request_t htreq; lcb_VIEWHANDLE vh; lcb_N1QLHANDLE nq; + lcb_FTSHANDLE fts; } u; unsigned int format; unsigned short htcode; From 7eb16d4cb8b2fd8ed7f9ee5326212024fafb6b35 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 12 Apr 2016 15:37:54 -0700 Subject: [PATCH 042/829] PYCBC-388: Enable self-identification in server logs Change-Id: I3326c160e91beddd73384af34d94a56964a027d5 Reviewed-on: http://review.couchbase.org/62958 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 762ce5da4..034604c2b 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -201,6 +201,13 @@ def __init__(self, *args, **kwargs): # Enable detailed error codes for network errors: self._cntlstr("detailed_errcodes", "1") + # Enable self-identification in logs + try: + from couchbase._version import __version__ as cb_version + self._cntlstr('client_string', 'PYCBC/' + cb_version) + except E.NotSupportedError: + pass + for ctl, val in strcntls.items(): self._cntlstr(ctl, val) From 57ddc5f1519101de03df141a1a2e10aefcb90830 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 3 May 2016 11:13:04 -0700 Subject: [PATCH 043/829] Ignore OS X dSYM directories Change-Id: I7408910a60ef052c458ac7d17a7fcbdbced1ec1d Reviewed-on: http://review.couchbase.org/63645 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 9b154568f..6f25fb607 100644 --- a/.gitignore +++ b/.gitignore @@ -29,3 +29,4 @@ ci/ buildall.py *egg-info* _trial* +*.dSYM From a984037e342db55a9ee8e174563a02b73ffacdc0 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 20 Apr 2016 08:07:29 -0700 Subject: [PATCH 044/829] PYCBC-325: Add authenticator methods Change-Id: I8ced97276326a634ccae634b6b906b2d77677214 Reviewed-on: http://review.couchbase.org/63092 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 5 +++++ couchbase/tests/cases/misc_t.py | 9 ++++++++- src/bucket.c | 21 +++++++++++++++++++++ 3 files changed, 34 insertions(+), 1 deletion(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 034604c2b..670626a02 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1712,3 +1712,8 @@ def flush(self): path = path.format(self.bucket) return self._http_request(type=_LCB.LCB_HTTP_TYPE_MANAGEMENT, path=path, method=_LCB.LCB_HTTP_METHOD_POST) + + def add_bucket_creds(self, bucket, password): + if not bucket or not password: + raise ValueError('Bucket and password must be nonempty') + return _Base._add_creds(self, bucket, password) diff --git a/couchbase/tests/cases/misc_t.py b/couchbase/tests/cases/misc_t.py index 8baf5123d..c8f15f658 100644 --- a/couchbase/tests/cases/misc_t.py +++ b/couchbase/tests/cases/misc_t.py @@ -158,4 +158,11 @@ def test_logging(self): def test_compat_timeout(self): cb = self.make_connection(timeout=7.5) - self.assertEqual(7.5, cb.timeout) \ No newline at end of file + self.assertEqual(7.5, cb.timeout) + + def test_multi_auth(self): + cb = self.make_connection() + new_bucket = cb.bucket + '2' + cb.add_bucket_creds(new_bucket, 'newpass') + self.assertRaises(ValueError, cb.add_bucket_creds, '', 'pass') + self.assertRaises(ValueError, cb.add_bucket_creds, 'bkt', '') \ No newline at end of file diff --git a/src/bucket.c b/src/bucket.c index aa57a48fe..ef0606553 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -173,6 +173,22 @@ Bucket__instance_pointer(pycbc_Bucket *self, void *unused) return ret; } +static PyObject * +Bucket__add_creds(pycbc_Bucket *self, PyObject *args) +{ + char *arr[2] = { NULL }; + lcb_error_t rc; + if (!PyArg_ParseTuple(args, "ss", &arr[0], &arr[1])) { + return NULL; + } + rc = lcb_cntl(self->instance, LCB_CNTL_SET, LCB_CNTL_BUCKET_CRED, arr); + if (rc != LCB_SUCCESS) { + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, rc, "Couldn't add credentials"); + return NULL; + } + Py_RETURN_NONE; +} + static PyObject * Bucket__thr_lockop(pycbc_Bucket *self, PyObject *arg) { @@ -587,6 +603,11 @@ static PyMethodDef Bucket_TABLE_methods[] = { METH_NOARGS, PyDoc_STR("Gets known mutation information") }, + { "_add_creds", + (PyCFunction)Bucket__add_creds, + METH_VARARGS, + PyDoc_STR("Add additional user/pasword information") + }, { NULL, NULL, 0, NULL } }; From e25f6de0fe12293068b5c834d69eccd3147ac6de Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 4 May 2016 09:59:27 -0700 Subject: [PATCH 045/829] Restore Py26 compat Compatibility was broken with the latest cbft additions. Fixes are: * Explicit format placeholder index ('{0}' vs '{}') * No dict comprehension ({x: stuff for x in iterable}) Change-Id: I38992839fee712a2b6ac2ae4795576f16b1eda43 Reviewed-on: http://review.couchbase.org/63717 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/fulltext.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py index 2deab1b65..63c0930a2 100644 --- a/couchbase/fulltext.py +++ b/couchbase/fulltext.py @@ -54,7 +54,7 @@ def fdel(self): pass doc = kwargs.pop( - 'doc', 'Corresponds to the ``{}`` field'.format('.'.join(apipaths))) + 'doc', 'Corresponds to the ``{0}`` field'.format('.'.join(apipaths))) return property(fget, fset, fdel, doc) @@ -262,9 +262,10 @@ def __init__(self, **kwargs): @property def encodable(self): if self.facets: - self._json_['facets'] = { - n: x.encodable for n, x in self.facets.items() - } + encoded_facets = {} + for name, facet in self.facets.items(): + encoded_facets[name] = facet.encodable + self._json_['facets'] = encoded_facets return self._json_ @@ -397,7 +398,9 @@ def _with_fields(*fields): :param fields: List of fields to include. These should be keys of the `_COMMON_FIELDS` dict """ - dd = {x: _COMMON_FIELDS[x] for x in fields} + dd = {} + for x in fields: + dd[x] = _COMMON_FIELDS[x] def wrap(cls): dd.update(cls.__dict__) From 2d7e71f91954ef813092a6a928659a6cc692cc0f Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 6 May 2016 08:08:40 -0700 Subject: [PATCH 046/829] connection_t: a missing bucket results in either BucketNotFound... or AuthError Change-Id: I9ba398e8e5da06db0c17d28b931073eb9ac50a6a Reviewed-on: http://review.couchbase.org/63794 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/tests/cases/connection_t.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/couchbase/tests/cases/connection_t.py b/couchbase/tests/cases/connection_t.py index 141fc27ea..b302f3146 100644 --- a/couchbase/tests/cases/connection_t.py +++ b/couchbase/tests/cases/connection_t.py @@ -48,7 +48,8 @@ def test_bucket(self): def test_bucket_not_found(self): connargs = self.make_connargs(bucket='this_bucket_does_not_exist') - self.assertRaises(BucketNotFoundError, self.factory, **connargs) + self.assertRaises( + (BucketNotFoundError, AuthError), self.factory, **connargs) def test_quiet(self): connparams = self.make_connargs() From 0d371ea50981c462c3e084585069e12e20217d86 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 18 Apr 2016 08:30:46 -0700 Subject: [PATCH 047/829] GH-26: Fix typo in lock example Change-Id: I09b30c7dd4c9466cf8e6a1394b86ef52e6417959 Reviewed-on: http://review.couchbase.org/62960 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 1 + 1 file changed, 1 insertion(+) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 670626a02..f52e0f8fe 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -574,6 +574,7 @@ def lock(self, key, ttl=0): while time.time() - begin_time < 15: try: rv = cb.lock("key", ttl=10) + break except TemporaryFailError: print("Key is currently locked.. waiting") time.sleep(1) From 12b88a4d15b44534bfdb85fa6789261b38d886ef Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 6 May 2016 08:17:46 -0700 Subject: [PATCH 048/829] FTS handle should use PYCBC_HTTP_HFTS, not HN1QL I guess I deserve that for copy/pasting code :( Change-Id: Iedec8cbda76f2d5535cdba1b227a90eb2177217a Reviewed-on: http://review.couchbase.org/63795 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Reviewed-by: Volker Mische <volker.mische@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- src/fts.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/fts.c b/src/fts.c index 1661fb3b9..2fa6f12e4 100644 --- a/src/fts.c +++ b/src/fts.c @@ -70,7 +70,7 @@ pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) pycbc_httpresult_init(&vres->base, mres); vres->rows = PyList_New(0); vres->base.format = PYCBC_FMT_JSON; - vres->base.htype = PYCBC_HTTP_HN1QL; + vres->base.htype = PYCBC_HTTP_HFTS; cmd.callback = fts_row_callback; cmd.query = params; From fa9c49fffe9a3eda6928498185e9cf5c2ec630e4 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 22 Apr 2016 12:38:20 -0700 Subject: [PATCH 049/829] PYCBC-326: Index management operations Change-Id: I9b34138637fb655acd90069cf2952762f4fa032a Reviewed-on: http://review.couchbase.org/63245 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/_ixmgmt.py | 164 ++++++++++++++++++++++++ couchbase/bucketmanager.py | 192 ++++++++++++++++++++++++++++ couchbase/tests/cases/ixmgmt_t.py | 144 +++++++++++++++++++++ docs/source/api/couchbase.rst | 27 ++++ setup.py | 3 +- src/bucket.c | 3 + src/constants.c | 1 + src/ixmgmt.c | 205 ++++++++++++++++++++++++++++++ src/oputil.h | 4 + src/pycbc.h | 5 +- 10 files changed, 745 insertions(+), 3 deletions(-) create mode 100644 couchbase/_ixmgmt.py create mode 100644 couchbase/tests/cases/ixmgmt_t.py create mode 100644 src/ixmgmt.c diff --git a/couchbase/_ixmgmt.py b/couchbase/_ixmgmt.py new file mode 100644 index 000000000..4beb41ef5 --- /dev/null +++ b/couchbase/_ixmgmt.py @@ -0,0 +1,164 @@ +# Copyright 2016, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# This module is used internally by the BucketManager class. Do not use +# this module directly. +import couchbase.exceptions as E +import couchbase._libcouchbase as C +from couchbase import _to_json + +N1QL_PRIMARY_INDEX = '#primary' + + +def _genprop(dictkey): + def fget(self): + return self.raw.get(dictkey, None) + + def fset(self, val): + self.raw[dictkey] = val + + def fdel(self): + try: + del self.raw[dictkey] + except KeyError: + pass + + return property(fget, fset, fdel) + + +class N1qlIndex(object): + def __init__(self, raw=None): + if not raw: + self.raw = {} + return + + if isinstance(raw, N1qlIndex): + raw = raw.raw + self.raw = raw.copy() + + name = _genprop('name') # type: str + primary = _genprop('is_primary') # type: bool + keyspace = _genprop('keyspace_id') # type: str + state = _genprop('state') # type: str + _index_key = _genprop('index_key') # type: str + + @property + def fields(self): + if self._index_key: + return self._index_key.split(',') + + def __repr__(self): + return ('Index<name={0.name}, primary={0.primary}, raw={0.raw!r}>' + .format(self)) + + def __str__(self): + return self.name + + @classmethod + def from_any(cls, obj, bucket): + """ + Ensure the current object is an index. Always returns a new object + :param obj: string or IndexInfo object + :param bucket: The bucket name + :return: A new IndexInfo object + """ + if isinstance(obj, cls): + return cls(obj.raw) + + return cls({ + 'namespace_id': 'default', + 'keyspace_id': bucket, + 'name': obj if obj else N1QL_PRIMARY_INDEX, + 'using': 'gsi' + }) + + +def index_to_rawjson(ix): + """ + :param ix: dict or IndexInfo object + :return: serialized JSON + """ + if isinstance(ix, N1qlIndex): + ix = ix.raw + return _to_json(ix) + + +class IxmgmtRequest(object): + """ + This class has similar functionality as N1QLRequest and View. It + implements iteration over index management results. + """ + def __init__(self, parent, cmd, index, **kwargs): + """ + :param Bucket parent: parent Bucket object + :param str cmd: Type of command to execute. Can be `watch`, `drop`, + `create`, `list`, or `build` + :param index: serialized JSON which should correspond to an index + definition. :meth:`index_to_rawjson` will do this. + If `cmd` is `watch` then this should be a *list* of serialized + JSON strings + :param kwargs: Additional options for the command. + """ + self._index = index + self._cmd = cmd + self._parent = parent + self._mres = None + self.__raw = False + self._options = kwargs + self._ignore_exists = kwargs.pop('ignore_exists', False) + self._ignore_missing = kwargs.pop('ignore_missing', False) + + if cmd == 'create' and self._options.pop('defer', False): + self._options['flags'] = ( + self._options.setdefault('flags', 0) | C.LCB_IXSPEC_F_DEFER) + + def _start(self): + if self._mres: + return + + if self._cmd == 'watch': + self._mres = self._parent._ixwatch( + [index_to_rawjson(x) for x in self._index], **self._options) + else: + self._mres = self._parent._ixmanage( + self._cmd, index_to_rawjson(self._index), **self._options) + + self.__raw = self._mres[None] + + def _process_payload(self, rows): + return rows if rows else [] + + @property + def raw(self): + return self.__raw + + def __iter__(self): + self._start() + while not self.raw.done: + try: + raw_rows = self.__raw.fetch(self._mres) + except E.CouchbaseError as ex: + if ex.CODE == E.KeyExistsError.CODE and self._ignore_exists: + break + elif ex.CODE == E.NotFoundError.CODE and self._ignore_missing: + break + else: + raise + + for row in self._process_payload(raw_rows): + yield N1qlIndex(row) + + def execute(self): + return [x for x in self] diff --git a/couchbase/bucketmanager.py b/couchbase/bucketmanager.py index 03005d17b..973f0972a 100644 --- a/couchbase/bucketmanager.py +++ b/couchbase/bucketmanager.py @@ -23,6 +23,8 @@ from couchbase.exceptions import CouchbaseError, ArgumentError from couchbase.views.params import Query, SpatialQuery, STALE_OK from couchbase._pyport import single_dict_key +from couchbase._ixmgmt import IxmgmtRequest, N1qlIndex, N1QL_PRIMARY_INDEX + class BucketManager(object): """ @@ -280,3 +282,193 @@ def design_delete(self, name, use_devmode=True, syncwait=0): self._design_poll(name, 'del', existing, syncwait) return ret + + def _mk_index_def(self, ix, primary=False): + if isinstance(ix, N1qlIndex): + return N1qlIndex(ix) + + info = N1qlIndex() + info.keyspace = self._cb.bucket + info.primary = primary + + if ix: + info.name = ix + elif not primary: + raise ValueError('Missing name for non-primary index') + + return info + + def n1ql_index_create(self, ix, **kwargs): + """ + Create an index for use with N1QL. + + :param str ix: The name of the index to create + :param bool defer: Whether the building of indexes should be + deferred. If creating multiple indexes on an existing + dataset, using the `defer` option in conjunction with + :meth:`build_deferred_indexes` and :meth:`watch_indexes` may + result in substantially reduced build times. + :param bool ignore_exists: Do not throw an exception if the index + already exists. + :param list fields: A list of fields that should be supplied + as keys for the index. For non-primary indexes, this must + be specified and must contain at least one field name. + :param bool primary: Whether this is a primary index. If creating + a primary index, the name may be an empty string and `fields` + must be empty. + :raise: :exc:`~.KeyExistsError` if the index already exists + + .. seealso:: :meth:`n1ql_index_create_primary` + """ + defer = kwargs.pop('defer', False) + ignore_exists = kwargs.pop('ignore_exists', False) + primary = kwargs.pop('primary', False) + fields = kwargs.pop('fields', []) + + if kwargs: + raise TypeError('Unknown keyword arguments', kwargs) + + info = self._mk_index_def(ix, primary) + + if primary and fields: + raise TypeError('Cannot create primary index with explicit fields') + elif not primary and not fields: + raise ValueError('Fields required for non-primary index') + + if fields: + info._index_key = ','.join(fields) + + if primary and info.name is N1QL_PRIMARY_INDEX: + del info.name + + options = { + 'ignore_exists': ignore_exists, + 'defer': defer + } + + # Now actually create the indexes + return IxmgmtRequest(self._cb, 'create', info, **options).execute() + + def n1ql_index_create_primary(self, defer=False, ignore_exists=False): + """ + Create the primary index on the bucket. + + Equivalent to:: + + n1ql_index_create('', primary=True, **kwargs) + + :param bool defer: + :param bool ignore_exists: + + .. seealso:: :meth:`create_index` + """ + return self.n1ql_index_create( + '', defer=defer, primary=True, ignore_exists=ignore_exists) + + def n1ql_index_drop(self, ix, primary=False, **kwargs): + """ + Delete an index from the cluster. + + :param str ix: the name of the index + :param bool primary: if this index is a primary index + :param bool ignore_missing: Do not raise an exception if the index + does not exist + :raise: :exc:`~.NotFoundError` if the index does not exist and + `ignore_missing` was not specified + """ + info = self._mk_index_def(ix, primary) + return IxmgmtRequest(self._cb, 'drop', info, **kwargs).execute() + + def n1ql_index_drop_primary(self, **kwargs): + """ + Remove the primary index + + Equivalent to ``n1ql_index_drop('', primary=True, **kwargs)`` + """ + return self.n1ql_index_drop('', primary=True, **kwargs) + + def n1ql_index_list(self, other_buckets=False): + """ + List indexes in the cluster. + + :param bool other_buckets: Whether to also include indexes belonging + to other buckets (i.e. buckets other than the current `Bucket` + object) + :return: list[couchbase._ixmgmt.Index] objects + """ + info = N1qlIndex() + if not other_buckets: + info.keyspace = self._cb.bucket + return IxmgmtRequest(self._cb, 'list', info).execute() + + def n1ql_index_build_deferred(self, other_buckets=False): + """ + Instruct the server to begin building any previously deferred index + definitions. + + This method will gather a list of all pending indexes in the cluster + (including those created using the `defer` option with + :meth:`create_index`) and start building them in an efficient manner. + + :param bool other_buckets: Whether to also build indexes found in + other buckets, if possible + :return: list[couchbase._ixmgmt.Index] objects. This + list contains the indexes which are being built and + may be passed to :meth:`n1ql_index_watch` to poll + their build statuses. + + You can use the :meth:`n1ql_index_watch` + method to wait until all indexes have been built:: + + mgr.n1ql_index_create('ix_fld1', fields=['field1'], defer=True) + mgr.n1ql_index_create('ix_fld2', fields['field2'], defer=True) + mgr.n1ql_index_create('ix_fld3', fields=['field3'], defer=True) + + indexes = mgr.n1ql_index_build_deferred() + # [IndexInfo('field1'), IndexInfo('field2'), IndexInfo('field3')] + mgr.n1ql_index_watch(indexes, timeout=30, interval=1) + + """ + info = N1qlIndex() + if not other_buckets: + info.keyspace = self._cb.bucket + return IxmgmtRequest(self._cb, 'build', info).execute() + + def n1ql_index_watch(self, indexes, + timeout=30, interval=1, watch_primary=False): + """ + Await completion of index building + + This method will wait up to `timeout` seconds for every index in + `indexes` to have been built. It will poll the cluster every + `interval` seconds. + + :param list indexes: A list of indexes to check. This is returned by + :meth:`build_deferred_indexes` + :param float timeout: How long to wait for the indexes to become ready. + :param float interval: How often to poll the cluster. + :param bool watch_primary: Whether to also watch the primary index. + This parameter should only be used when manually constructing a + list of string indexes + :raise: :exc:`~.TimeoutError` if the timeout was reached before all + indexes were built + :raise: :exc:`~.NotFoundError` if one of the indexes passed no longer + exists. + """ + kwargs = { + 'timeout_us': int(timeout * 1000000), + 'interval_us': int(interval * 1000000) + } + ixlist = [N1qlIndex.from_any(x, self._cb.bucket) for x in indexes] + if watch_primary: + ixlist.append( + N1qlIndex.from_any(N1QL_PRIMARY_INDEX, self._cb.bucket)) + return IxmgmtRequest(self._cb, 'watch', ixlist, **kwargs).execute() + + create_n1ql_index = n1ql_index_create + create_n1ql_primary_index = n1ql_index_create_primary + list_n1ql_indexes = n1ql_index_list + drop_n1ql_index = n1ql_index_drop + drop_n1ql_primary_index = n1ql_index_drop_primary + build_n1ql_deferred_indexes = n1ql_index_build_deferred + watch_n1ql_indexes = n1ql_index_watch diff --git a/couchbase/tests/cases/ixmgmt_t.py b/couchbase/tests/cases/ixmgmt_t.py new file mode 100644 index 000000000..a20a552af --- /dev/null +++ b/couchbase/tests/cases/ixmgmt_t.py @@ -0,0 +1,144 @@ +# Copyright 2016, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from couchbase.tests.base import RealServerTestCase, SkipTest +from couchbase.bucketmanager import BucketManager +from couchbase.bucket import Bucket +import couchbase.exceptions as E + + +class IndexManagementTestCase(RealServerTestCase): + def _clear_indexes(self): + # Drop all indexes! + cb = self.cb + mgr = cb.bucket_manager() # type: BucketManager + for index in mgr.n1ql_index_list(): + mgr.n1ql_index_drop(index) + + def setUp(self): + super(IndexManagementTestCase, self).setUp() + if self.cb.__class__ is not Bucket: + raise SkipTest('Only supported for synchronous bucket') + self._clear_indexes() + + def tearDown(self): + self._clear_indexes() + super(IndexManagementTestCase, self).tearDown() + + def test_create_primary(self): + cb = self.cb + mgr = cb.bucket_manager() # type: BucketManager + mgr.n1ql_index_create_primary() + # Ensure we can issue a query + qstr = 'select * from {0} limit 1'.format(cb.bucket) + cb.n1ql_query(qstr).execute() + # Drop the primary index + mgr.n1ql_index_drop_primary() + # Ensure we get an error when executing the query + self.assertRaises( + E.CouchbaseError, lambda x: x.execute(), cb.n1ql_query(qstr)) + + ixname = 'namedPrimary' + # Try to create a _named_ primary index + mgr.n1ql_index_create(ixname, primary=True) + cb.n1ql_query(qstr).execute() + # All OK + mgr.n1ql_index_drop(ixname) + self.assertRaises( + E.CouchbaseError, lambda x: x.execute(), cb.n1ql_query(qstr)) + + # Create the primary index the first time + mgr.n1ql_index_create_primary() + mgr.n1ql_index_create_primary(ignore_exists=True) + self.assertRaises(E.KeyExistsError, mgr.n1ql_index_create_primary) + + # Drop the indexes + mgr.n1ql_index_drop_primary() + mgr.n1ql_index_drop_primary(ignore_missing=True) + self.assertRaises(E.NotFoundError, mgr.n1ql_index_drop_primary) + + # Test with _named_ primaries + mgr.n1ql_index_create(ixname, primary=True) + mgr.n1ql_index_create(ixname, primary=True, ignore_exists=True) + self.assertRaises(E.KeyExistsError, mgr.n1ql_index_create, + ixname, primary=True) + + mgr.n1ql_index_drop(ixname) + mgr.n1ql_index_drop(ixname, ignore_missing=True) + self.assertRaises(E.NotFoundError, mgr.n1ql_index_drop, ixname) + + def test_alt_indexes(self): + cb = self.cb # type: Bucket + mgr = cb.bucket_manager() + ixname = 'ix2' + fields = ('fld1', 'fld2') + mgr.n1ql_index_create(ixname, fields=fields) + qq = 'select {1}, {2} from `{0}` where {1}=1 and {2}=2 limit 1'\ + .format(cb.bucket, *fields) + cb.n1ql_query(qq).execute() + + # Drop the index + mgr.n1ql_index_drop(ixname) + # Issue the query again + self.assertRaises(E.CouchbaseError, + lambda x: x.execute(), cb.n1ql_query(qq)) + + self.assertRaises((ValueError, TypeError), + mgr.n1ql_index_create, 'withoutFields') + mgr.n1ql_index_create(ixname, fields=['hello']) + mgr.n1ql_index_create(ixname, fields=['hello'], ignore_exists=True) + self.assertRaises(E.KeyExistsError, mgr.n1ql_index_create, + ixname, fields=['hello']) + + # Drop it + mgr.n1ql_index_drop(ixname) + mgr.n1ql_index_drop(ixname, ignore_missing=True) + self.assertRaises(E.NotFoundError, mgr.n1ql_index_drop, ixname) + + def test_list_indexes(self): + cb = self.cb # type: Bucket + mgr = cb.bucket_manager() + ixs = mgr.n1ql_index_list() + self.assertEqual(0, len(ixs)) + + # Create the primary index + mgr.n1ql_index_create_primary() + ixs = mgr.n1ql_index_list() + self.assertEqual(1, len(ixs)) + self.assertTrue(ixs[0].primary) + self.assertEqual('#primary', ixs[0].name) + self.assertEqual(cb.bucket, ixs[0].keyspace) + + def test_deferred(self): + cb = self.cb # type: Bucket + mgr = cb.bucket_manager() + # Create primary index + mgr.n1ql_index_create_primary(defer=True) + ix = mgr.n1ql_index_list()[0] + self.assertEqual('deferred', ix.state) + + # Create a bunch of other indexes + for n in range(5): + mgr.n1ql_index_create( + 'ix{0}'.format(n), fields=['fld{0}'.format(n)], defer=True) + + ix = mgr.n1ql_index_list() + self.assertEqual(6, len(ix)) + + pending = mgr.n1ql_index_build_deferred() + self.assertEqual(6, len(pending)) + mgr.n1ql_index_watch(pending) # Should be OK + mgr.n1ql_index_watch(pending) # Should be OK again + self.assertRaises(E.NotFoundError, mgr.n1ql_index_watch, ['nonexist']) \ No newline at end of file diff --git a/docs/source/api/couchbase.rst b/docs/source/api/couchbase.rst index b6a656731..7525dbaa6 100644 --- a/docs/source/api/couchbase.rst +++ b/docs/source/api/couchbase.rst @@ -356,6 +356,33 @@ the :meth:`~couchbase.bucket.Bucket.bucket_manager` method on the .. automethod:: design_publish .. automethod:: design_delete +N1QL Index Management +===================== + +.. currentmodule:: couchbase.bucketmanager + +Before issuing any N1QL query using :cb_bmeth:`n1ql_query`, the bucket being +queried must have an index defined for the query. The simplest index is the +primary index. + +To create a primary index, use:: + + mgr.n1ql_index_create_primary(ignore_exists=True) + +You can create additional indexes using:: + + mgr.n1ql_create_index('idx_foo', fields=['foo']) + +.. class:: BucketManager + + .. automethod:: n1ql_index_create + .. automethod:: n1ql_index_create_primary + .. automethod:: n1ql_index_drop + .. automethod:: n1ql_index_drop_primary + .. automethod:: n1ql_index_build_deferred + .. automethod:: n1ql_index_watch + .. automethod:: n1ql_index_list + .. currentmodule:: couchbase.bucket .. class:: Bucket diff --git a/setup.py b/setup.py index 1b40ae03e..4a34e710e 100644 --- a/setup.py +++ b/setup.py @@ -88,7 +88,8 @@ 'pipeline', 'views', 'n1ql', - 'fts' + 'fts', + 'ixmgmt' ] if platform.python_implementation() != 'PyPy': diff --git a/src/bucket.c b/src/bucket.c index ef0606553..1221e3283 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -515,6 +515,9 @@ static PyMethodDef Bucket_TABLE_methods[] = { OPFUNC(_n1ql_query, "Internal routine for N1QL queries"), OPFUNC(_fts_query, "Internal routine for Fulltext queries"), + OPFUNC(_ixmanage, "Internal routine for managing indexes"), + OPFUNC(_ixwatch, "Internal routine for monitoring indexes"), + OPFUNC(observe, "Get replication/persistence status for keys"), OPFUNC(observe_multi, "multi-key variant of observe"), diff --git a/src/constants.c b/src/constants.c index 7eba69291..be2a67a44 100644 --- a/src/constants.c +++ b/src/constants.c @@ -191,6 +191,7 @@ do_all_constants(PyObject *module, ADD_MACRO(LCB_SDCMD_COUNTER); ADD_MACRO(LCB_SDCMD_REMOVE); ADD_MACRO(LCB_SDCMD_ARRAY_INSERT); + ADD_MACRO(LCB_IXSPEC_F_DEFER); } static void diff --git a/src/ixmgmt.c b/src/ixmgmt.c new file mode 100644 index 000000000..a8a962ce4 --- /dev/null +++ b/src/ixmgmt.c @@ -0,0 +1,205 @@ +#include "pycbc.h" +#include "oputil.h" +#include "structmember.h" +#include <libcouchbase/ixmgmt.h> + +/* lcb callback for index management operations */ +static void +mgmt_callback(lcb_t instance, int ign, const lcb_RESPIXMGMT *resp) +{ + pycbc_MultiResult *mres = (pycbc_MultiResult *)resp->cookie; + pycbc_Bucket *bucket = mres->parent; + pycbc_ViewResult *vres; + size_t ii; + const char * const * hdrs = NULL; + short htcode = 0; + + PYCBC_CONN_THR_END(bucket); + vres = (pycbc_ViewResult *)PyDict_GetItem((PyObject*)mres, Py_None); + for (ii = 0; ii < resp->nspecs; ++ii) { + const lcb_INDEXSPEC *spec = resp->specs[ii]; + pycbc_viewresult_addrow(vres, mres, spec->rawjson, spec->nrawjson); + } + + pycbc_viewresult_step(vres, mres, bucket, 1); + if (resp->inner) { + pycbc_httpresult_add_data(mres, &vres->base, resp->inner->row, resp->inner->nrow); + if (resp->inner->htresp) { + hdrs = resp->inner->htresp->headers; + htcode = resp->inner->htresp->htstatus; + } + } + pycbc_httpresult_complete(&vres->base, mres, resp->rc, htcode, NULL); +} + +/* Handles simple single-index commands. + * We simply need to pass in the raw JSON payload (the second argument) as the + * rawjson/nrawjson fields for LCB + */ +PyObject * +pycbc_Bucket__ixmanage(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +{ + int rv; + PyObject *ret = NULL; + pycbc_MultiResult *mres; + pycbc_ViewResult *vres; + lcb_error_t rc; + unsigned cmdflags = 0; + lcb_CMDIXMGMT cmd = { { 0 } }; + const char *params; + const char *action; + pycbc_strlen_t nparams; + lcb_error_t (*action_fn)(lcb_t, const void *, const lcb_CMDIXMGMT*); + + static char *kwlist[] = { "action", "index", "flags", NULL }; + rv = PyArg_ParseTupleAndKeywords(args, kwargs, + "ss#|I", kwlist, &action, ¶ms, &nparams, &cmdflags); + + if (!rv) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + if (-1 == pycbc_oputil_conn_lock(self)) { + return NULL; + } + if (self->pipeline_queue) { + PYCBC_EXC_WRAP(PYCBC_EXC_PIPELINE, 0, + "index management operations executed in pipeline context"); + } + + mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); + vres = (pycbc_ViewResult *)PYCBC_TYPE_CTOR(&pycbc_ViewResultType); + pycbc_httpresult_init(&vres->base, mres); + vres->rows = PyList_New(0); + vres->base.format = PYCBC_FMT_JSON; + vres->base.htype = PYCBC_HTTP_HNONE; + + cmd.callback = mgmt_callback; + cmd.spec.flags = cmdflags; + cmd.spec.rawjson = params; + cmd.spec.nrawjson = nparams; + if (!strcmp(action, "create")) { + action_fn = lcb_ixmgmt_mkindex; + } else if (!strcmp(action, "drop")) { + action_fn = lcb_ixmgmt_rmindex; + } else if (!strcmp(action, "list")) { + action_fn = lcb_ixmgmt_list; + } else if (!strcmp(action, "build")) { + action_fn = lcb_ixmgmt_build_begin; + } else { + PYCBC_EXC_WRAP(PYCBC_EXC_INTERNAL, 0, "Bad action name!"); + goto GT_DONE; + } + + rc = action_fn(self->instance, mres, &cmd); + if (rc != LCB_SUCCESS) { + PYCBC_EXC_WRAP( + PYCBC_EXC_LCBERR, rc, "Couldn't schedule ixmgmt operation"); + goto GT_DONE; + } + + ret = (PyObject *)mres; + mres = NULL; + + GT_DONE: + Py_XDECREF(mres); + pycbc_oputil_conn_unlock(self); + return ret; +} + +/* + * Handles 'watch'. This accepts multiple index definitons and is thus its + * own function. + */ +PyObject * +pycbc_Bucket__ixwatch(pycbc_Bucket *self, PyObject *args, PyObject *kw) +{ + unsigned timeout = 0, interval = 0; + PyObject *indexes; + PyObject *ret = NULL; + pycbc_pybuffer *bufs = NULL; + pycbc_MultiResult *mres = NULL; + pycbc_ViewResult *vres = NULL; + lcb_CMDIXWATCH cmd = { 0 }; + lcb_INDEXSPEC **specs = NULL; + int rv; + size_t ii; + Py_ssize_t nspecs; + lcb_error_t rc; + + static char *kwlist[] = { "indexes", "timeout_us", "interval_us", NULL}; + + if (!PyArg_ParseTupleAndKeywords(args, kw, "OII", kwlist, + &indexes, &timeout, &interval)) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + + if (self->pipeline_queue) { + PYCBC_EXC_WRAP(PYCBC_EXC_PIPELINE, 0, + "Index management operations cannot be performed in a pipeline"); + } + mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); + vres = (pycbc_ViewResult *)PYCBC_TYPE_CTOR(&pycbc_ViewResultType); + pycbc_httpresult_init(&vres->base, mres); + vres->rows = PyList_New(0); + vres->base.format = PYCBC_FMT_JSON; + vres->base.htype = PYCBC_HTTP_HNONE; + cmd.callback = mgmt_callback; + cmd.interval = interval; + cmd.timeout = timeout; + + nspecs = PySequence_Length(indexes); + if (nspecs == -1) { + goto GT_DONE; + } else if (nspecs == 0) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "No indexes provided!"); + goto GT_DONE; + } + + cmd.nspec = nspecs; + specs = calloc(nspecs, sizeof *cmd.specs); + cmd.specs = (const lcb_INDEXSPEC * const *)specs; + bufs = calloc(nspecs, sizeof *bufs); + + for (ii = 0; ii < nspecs; ++ii) { + PyObject *index = PySequence_GetItem(indexes, ii); + PyObject *strobj = NULL; + if (index == NULL) { + goto GT_DONE; + } + strobj = PyObject_Str(index); + if (!strobj) { + goto GT_DONE; + } + + rv = pycbc_tc_simple_encode(strobj, bufs + ii, PYCBC_FMT_UTF8); + Py_DECREF(strobj); + + if (rv != 0) { + goto GT_DONE; + } + specs[ii] = calloc(1, sizeof *cmd.specs[ii]); + specs[ii]->rawjson = bufs[ii].buffer; + specs[ii]->nrawjson = bufs[ii].length; + } + + rc = lcb_ixmgmt_build_watch(self->instance, mres, &cmd); + if (rc != LCB_SUCCESS) { + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, rc, "Couldn't schedule index watch"); + goto GT_DONE; + } + + ret = (PyObject *)mres; + mres = NULL; + + GT_DONE: + Py_XDECREF(mres); + pycbc_oputil_conn_unlock(self); + for (ii = 0; ii < cmd.nspec; ++ii) { + free(specs[ii]); + PYCBC_PYBUF_RELEASE(bufs + ii); + } + free(specs); + return ret; +} diff --git a/src/oputil.h b/src/oputil.h index 7679104b7..1d1bed3e4 100644 --- a/src/oputil.h +++ b/src/oputil.h @@ -355,4 +355,8 @@ PYCBC_DECL_OP(_n1ql_query); /* fts.c */ PYCBC_DECL_OP(_fts_query); +/* ixmgmt.c */ +PYCBC_DECL_OP(_ixmanage); +PYCBC_DECL_OP(_ixwatch); + #endif /* PYCBC_OPUTIL_H */ diff --git a/src/pycbc.h b/src/pycbc.h index 4e4793290..629e69624 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -27,7 +27,7 @@ #include <libcouchbase/views.h> #include <libcouchbase/n1ql.h> #include <libcouchbase/cbft.h> - +#include <libcouchbase/ixmgmt.h> #if LCB_VERSION < 0x020508 #error "Couchbase Python SDK requires libcouchbase 2.5.8 or greater" #endif @@ -400,7 +400,8 @@ enum { PYCBC_HTTP_HVIEW = 1, PYCBC_HTTP_HRAW, PYCBC_HTTP_HN1QL, - PYCBC_HTTP_HFTS + PYCBC_HTTP_HFTS, + PYCBC_HTTP_HNONE }; typedef struct { From 59aa9de30fb8c3a7cf5ee6a0d1129d79684a8c5d Mon Sep 17 00:00:00 2001 From: Heungsub Lee <sub@subl.ee> Date: Wed, 4 May 2016 16:30:54 +0900 Subject: [PATCH 050/829] Fix PYCBC-344 ---- Fix PYCBC-344 ---- Test killing greenlet case (fail for now) [PYCBC-344] Change-Id: Idcc0feb9dc51ec266c178b62b60e6e634a674fc4 Reviewed-on: http://review.couchbase.org/63917 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- gcouchbase/bucket.py | 19 +++++++++++---- gcouchbase/tests/test_gevent.py | 42 +++++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+), 4 deletions(-) create mode 100644 gcouchbase/tests/test_gevent.py diff --git a/gcouchbase/bucket.py b/gcouchbase/bucket.py index 5b482fc6e..63796a95f 100644 --- a/gcouchbase/bucket.py +++ b/gcouchbase/bucket.py @@ -1,3 +1,4 @@ +from gevent import GreenletExit from gevent.event import AsyncResult, Event from gevent.hub import get_hub, getcurrent, Waiter @@ -72,6 +73,11 @@ def __init__(self, *args, **kwargs): AsyncN1QLRequest.__init__(self, *args, **kwargs) GRowsHandler.__init__(self) + +def dummy_callback(*args): + pass + + class Bucket(AsyncBucket): def __init__(self, *args, **kwargs): """ @@ -99,10 +105,15 @@ def _on_connected(self, err): def _waitwrap(self, cbasync): cur_thread = getcurrent() - cbasync.callback = cur_thread.switch - cbasync.errback = lambda r, x, y, z: cur_thread.throw(x, y, z) - - return get_hub().switch() + errback = lambda r, x, y, z: cur_thread.throw(x, y, z) + cbasync.set_callbacks(cur_thread.switch, errback) + try: + return get_hub().switch() + except GreenletExit: + # Deregister callbacks to prevent another request on the same + # greenlet to get the result from this context. + cbasync.set_callbacks(dummy_callback, dummy_callback) + raise def _meth_factory(meth, name): def ret(self, *args, **kwargs): diff --git a/gcouchbase/tests/test_gevent.py b/gcouchbase/tests/test_gevent.py new file mode 100644 index 000000000..d45858742 --- /dev/null +++ b/gcouchbase/tests/test_gevent.py @@ -0,0 +1,42 @@ +from couchbase.tests.base import SkipTest +try: + import gevent +except ImportError as e: + raise SkipTest(e) + +from gcouchbase.bucket import Bucket, GView +from couchbase.tests.base import ConnectionTestCase + + +class GeventSpecificTest(ConnectionTestCase): + factory = Bucket + viewfactory = GView + should_check_refcount = False + + def test_killing_greenlet(self): + foo = self.gen_key('foo') + bar = self.gen_key('bar') + self.cb.upsert(foo, 'foo') + self.cb.upsert(bar, 'bar') + def load_foo_and_bar_forever(): + while True: + try: + r = self.cb.get(foo) + except gevent.GreenletExit: + continue + else: + self.assertEqual(r.value, 'foo') + try: + r = self.cb.get(bar) + except gevent.GreenletExit: + continue + else: + self.assertEqual(r.value, 'bar') + def kill_greenlet_forever(g): + while True: + gevent.sleep(0.1) + g.kill() + g = gevent.spawn(load_foo_and_bar_forever) + greenlets = [g, gevent.spawn(kill_greenlet_forever, g)] + with gevent.Timeout(1, False): + gevent.joinall(greenlets, raise_error=True) From 393f490ce0a59ca41da10016b1c4ef5422097a62 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 9 May 2016 08:25:51 -0700 Subject: [PATCH 051/829] MutationState enhancements - Result objects can now be passed in constructor - More tests added to ensure functionality of MutationState and Result._mutinfo functionality Change-Id: Ie6030aeaf57af8c3c735dba38b722d9fa6418cbd Reviewed-on: http://review.couchbase.org/63936 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/n1ql.py | 4 +- couchbase/tests/cases/mutationtokens_t.py | 80 +++++++++++++++++++++++ 2 files changed, 83 insertions(+), 1 deletion(-) create mode 100644 couchbase/tests/cases/mutationtokens_t.py diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 6f2895646..d52bedb38 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -88,8 +88,10 @@ class MutationState(object): for row in cb.n1ql_query(nq): # ... """ - def __init__(self): + def __init__(self, *docs): self._sv = {} + if docs: + self.add_results(*docs) def _add_scanvec(self, mutinfo): """ diff --git a/couchbase/tests/cases/mutationtokens_t.py b/couchbase/tests/cases/mutationtokens_t.py new file mode 100644 index 000000000..55dc4150a --- /dev/null +++ b/couchbase/tests/cases/mutationtokens_t.py @@ -0,0 +1,80 @@ +# +# Copyright 2016, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json + + +from couchbase.tests.base import ConnectionTestCase +from couchbase.connstr import ConnectionString +from couchbase._pyport import long +from couchbase.n1ql import MutationState + + +class MutationTokensTest(ConnectionTestCase): + def make_connection(self, **overrides): + no_mutinfo = overrides.pop('no_mutinfo', False) + kwargs = self.make_connargs(**overrides) + connstr = kwargs.get('connection_string', '') + connstr = ConnectionString.parse(connstr) + if not no_mutinfo: + connstr.options['fetch_mutation_tokens'] = '1' + kwargs['connection_string'] = connstr.encode() + return super(MutationTokensTest, self).make_connection(**kwargs) + + def test_mutinfo_enabled(self): + cb = self.cb + key = self.gen_key('mutinfo') + rv = cb.upsert(key, 'value') + mutinfo = rv._mutinfo + self.assertTrue(mutinfo) + vb, uuid, seq, bktname = mutinfo + self.assertIsInstance(vb, (int, long)) + self.assertIsInstance(uuid, (int, long)) + self.assertIsInstance(seq, (int, long)) + self.assertEqual(cb.bucket, bktname) + + # Get all the mutation tokens + all_info = cb._mutinfo() + self.assertTrue(all_info) + self.assertEqual(1, len(all_info)) + vb, uuid, seq = all_info[0] + self.assertIsInstance(vb, (int, long)) + self.assertIsInstance(uuid, (int, long)) + self.assertIsInstance(seq, (int, long)) + + def test_mutinfo_disabled(self): + cb = self.make_connection(no_mutinfo=True) + key = self.gen_key('mutinfo') + rv = cb.upsert(key, 'value') + self.assertFalse(rv._mutinfo) + self.assertEqual(0, len(cb._mutinfo())) + + def test_mutation_state(self): + cb = self.cb + key = self.gen_key('mutationState') + rv = cb.upsert(key, 'value') + + d1 = json.loads(MutationState(rv).encode()) + ms = MutationState() + ms.add_results(rv) + d2 = json.loads(ms.encode()) + self.assertEqual(d1, d2) # Ensure it's the same + self.assertTrue(d1[cb.bucket]) # Ensure it's not empty + + vb, uuid, seq, _ = rv._mutinfo + mt_got = d1[cb.bucket][str(vb)] + self.assertEqual(seq, mt_got[0]) + self.assertEqual(str(uuid), mt_got[1]) \ No newline at end of file From 4d9b5fa4c988508b045df9a8da1b4e8ad3b62de1 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 12 May 2016 09:00:10 -0700 Subject: [PATCH 052/829] Update index management to correspond with newer lcb ixmgmt API The newer API adds a new field and changes naming. In general we shouldn't make this work with the older API which doesn't work anyway. Change-Id: I768ca306f4db558ddc8c4f557ae8c752e8453630 Reviewed-on: http://review.couchbase.org/63986 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/_ixmgmt.py | 2 +- couchbase/tests/cases/ixmgmt_t.py | 5 ++++ src/constants.c | 4 ++- src/ixmgmt.c | 41 ++++++++++++++++++++++--------- 4 files changed, 38 insertions(+), 14 deletions(-) diff --git a/couchbase/_ixmgmt.py b/couchbase/_ixmgmt.py index 4beb41ef5..5cbcd07b2 100644 --- a/couchbase/_ixmgmt.py +++ b/couchbase/_ixmgmt.py @@ -122,7 +122,7 @@ def __init__(self, parent, cmd, index, **kwargs): if cmd == 'create' and self._options.pop('defer', False): self._options['flags'] = ( - self._options.setdefault('flags', 0) | C.LCB_IXSPEC_F_DEFER) + self._options.setdefault('flags', 0) | C.LCB_N1XSPEC_F_DEFER) def _start(self): if self._mres: diff --git a/couchbase/tests/cases/ixmgmt_t.py b/couchbase/tests/cases/ixmgmt_t.py index a20a552af..520927330 100644 --- a/couchbase/tests/cases/ixmgmt_t.py +++ b/couchbase/tests/cases/ixmgmt_t.py @@ -31,6 +31,11 @@ def setUp(self): super(IndexManagementTestCase, self).setUp() if self.cb.__class__ is not Bucket: raise SkipTest('Only supported for synchronous bucket') + + import couchbase._libcouchbase as C + if not hasattr(C, 'LCB_N1XSPEC_F_DEFER'): + raise SkipTest('LCB headers are too old!') + self._clear_indexes() def tearDown(self): diff --git a/src/constants.c b/src/constants.c index be2a67a44..65e434a92 100644 --- a/src/constants.c +++ b/src/constants.c @@ -191,7 +191,9 @@ do_all_constants(PyObject *module, ADD_MACRO(LCB_SDCMD_COUNTER); ADD_MACRO(LCB_SDCMD_REMOVE); ADD_MACRO(LCB_SDCMD_ARRAY_INSERT); - ADD_MACRO(LCB_IXSPEC_F_DEFER); +#ifdef LCB_N1XSPEC_F_DEFER + ADD_MACRO(LCB_N1XSPEC_F_DEFER); +#endif } static void diff --git a/src/ixmgmt.c b/src/ixmgmt.c index a8a962ce4..e53d6c3b6 100644 --- a/src/ixmgmt.c +++ b/src/ixmgmt.c @@ -3,9 +3,10 @@ #include "structmember.h" #include <libcouchbase/ixmgmt.h> +#ifdef LCB_N1XSPEC_F_PRIMARY /* lcb callback for index management operations */ static void -mgmt_callback(lcb_t instance, int ign, const lcb_RESPIXMGMT *resp) +mgmt_callback(lcb_t instance, int ign, const lcb_RESPN1XMGMT *resp) { pycbc_MultiResult *mres = (pycbc_MultiResult *)resp->cookie; pycbc_Bucket *bucket = mres->parent; @@ -17,7 +18,7 @@ mgmt_callback(lcb_t instance, int ign, const lcb_RESPIXMGMT *resp) PYCBC_CONN_THR_END(bucket); vres = (pycbc_ViewResult *)PyDict_GetItem((PyObject*)mres, Py_None); for (ii = 0; ii < resp->nspecs; ++ii) { - const lcb_INDEXSPEC *spec = resp->specs[ii]; + const lcb_N1XSPEC *spec = resp->specs[ii]; pycbc_viewresult_addrow(vres, mres, spec->rawjson, spec->nrawjson); } @@ -45,11 +46,11 @@ pycbc_Bucket__ixmanage(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) pycbc_ViewResult *vres; lcb_error_t rc; unsigned cmdflags = 0; - lcb_CMDIXMGMT cmd = { { 0 } }; + lcb_CMDN1XMGMT cmd = { { 0 } }; const char *params; const char *action; pycbc_strlen_t nparams; - lcb_error_t (*action_fn)(lcb_t, const void *, const lcb_CMDIXMGMT*); + lcb_error_t (*action_fn)(lcb_t, const void *, const lcb_CMDN1XMGMT*); static char *kwlist[] = { "action", "index", "flags", NULL }; rv = PyArg_ParseTupleAndKeywords(args, kwargs, @@ -79,13 +80,13 @@ pycbc_Bucket__ixmanage(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) cmd.spec.rawjson = params; cmd.spec.nrawjson = nparams; if (!strcmp(action, "create")) { - action_fn = lcb_ixmgmt_mkindex; + action_fn = lcb_n1x_create; } else if (!strcmp(action, "drop")) { - action_fn = lcb_ixmgmt_rmindex; + action_fn = lcb_n1x_drop; } else if (!strcmp(action, "list")) { - action_fn = lcb_ixmgmt_list; + action_fn = lcb_n1x_list; } else if (!strcmp(action, "build")) { - action_fn = lcb_ixmgmt_build_begin; + action_fn = lcb_n1x_startbuild; } else { PYCBC_EXC_WRAP(PYCBC_EXC_INTERNAL, 0, "Bad action name!"); goto GT_DONE; @@ -120,8 +121,8 @@ pycbc_Bucket__ixwatch(pycbc_Bucket *self, PyObject *args, PyObject *kw) pycbc_pybuffer *bufs = NULL; pycbc_MultiResult *mres = NULL; pycbc_ViewResult *vres = NULL; - lcb_CMDIXWATCH cmd = { 0 }; - lcb_INDEXSPEC **specs = NULL; + lcb_CMDN1XWATCH cmd = { 0 }; + lcb_N1XSPEC **specs = NULL; int rv; size_t ii; Py_ssize_t nspecs; @@ -159,7 +160,7 @@ pycbc_Bucket__ixwatch(pycbc_Bucket *self, PyObject *args, PyObject *kw) cmd.nspec = nspecs; specs = calloc(nspecs, sizeof *cmd.specs); - cmd.specs = (const lcb_INDEXSPEC * const *)specs; + cmd.specs = (const lcb_N1XSPEC * const *)specs; bufs = calloc(nspecs, sizeof *bufs); for (ii = 0; ii < nspecs; ++ii) { @@ -184,7 +185,7 @@ pycbc_Bucket__ixwatch(pycbc_Bucket *self, PyObject *args, PyObject *kw) specs[ii]->nrawjson = bufs[ii].length; } - rc = lcb_ixmgmt_build_watch(self->instance, mres, &cmd); + rc = lcb_n1x_watchbuild(self->instance, mres, &cmd); if (rc != LCB_SUCCESS) { PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, rc, "Couldn't schedule index watch"); goto GT_DONE; @@ -203,3 +204,19 @@ pycbc_Bucket__ixwatch(pycbc_Bucket *self, PyObject *args, PyObject *kw) free(specs); return ret; } +#else +#warning "Index management operations not supported in this version of libcouchbase" +PyObject * +pycbc_Bucket__ixmanage(pycbc_Bucket *s, PyObject *a, PyObject *k) +{ + PYCBC_EXC_WRAP(PYCBC_EXC_INTERNAL, 0, + "Index management requires at least version 2.6.0 of libcouchbase. " + "Please compile with a newer libcouchbase version"); + return NULL; +} +PyObject * +pycbc_Bucket__ixwatch(pycbc_Bucket *s, PyObject *a, PyObject *k) +{ + return pycbc_Bucket__ixmanage(s, a, k); +} +#endif From 69a0628d75ee74d7804e7dc2cc07cf64b8690210 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 12 May 2016 09:18:00 -0700 Subject: [PATCH 053/829] Add 'condition' to index management Change-Id: Idbb1e0044197a613658b8e9ac8e541ab26124c47 Reviewed-on: http://review.couchbase.org/63987 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/_ixmgmt.py | 1 + couchbase/bucketmanager.py | 8 ++++++++ couchbase/tests/cases/ixmgmt_t.py | 8 ++++++++ 3 files changed, 17 insertions(+) diff --git a/couchbase/_ixmgmt.py b/couchbase/_ixmgmt.py index 5cbcd07b2..d28af292d 100644 --- a/couchbase/_ixmgmt.py +++ b/couchbase/_ixmgmt.py @@ -52,6 +52,7 @@ def __init__(self, raw=None): primary = _genprop('is_primary') # type: bool keyspace = _genprop('keyspace_id') # type: str state = _genprop('state') # type: str + condition = _genprop('condition') # type: str _index_key = _genprop('index_key') # type: str @property diff --git a/couchbase/bucketmanager.py b/couchbase/bucketmanager.py index 973f0972a..f8fc50616 100644 --- a/couchbase/bucketmanager.py +++ b/couchbase/bucketmanager.py @@ -316,6 +316,8 @@ def n1ql_index_create(self, ix, **kwargs): :param bool primary: Whether this is a primary index. If creating a primary index, the name may be an empty string and `fields` must be empty. + :param str condition: Specify a condition for indexing. Using + a condition reduces an index size :raise: :exc:`~.KeyExistsError` if the index already exists .. seealso:: :meth:`n1ql_index_create_primary` @@ -324,6 +326,7 @@ def n1ql_index_create(self, ix, **kwargs): ignore_exists = kwargs.pop('ignore_exists', False) primary = kwargs.pop('primary', False) fields = kwargs.pop('fields', []) + cond = kwargs.pop('condition', None) if kwargs: raise TypeError('Unknown keyword arguments', kwargs) @@ -341,6 +344,11 @@ def n1ql_index_create(self, ix, **kwargs): if primary and info.name is N1QL_PRIMARY_INDEX: del info.name + if cond: + if primary: + raise ValueError('cannot specify condition for primary index') + info.condition = cond + options = { 'ignore_exists': ignore_exists, 'defer': defer diff --git a/couchbase/tests/cases/ixmgmt_t.py b/couchbase/tests/cases/ixmgmt_t.py index 520927330..4c0d18626 100644 --- a/couchbase/tests/cases/ixmgmt_t.py +++ b/couchbase/tests/cases/ixmgmt_t.py @@ -112,6 +112,14 @@ def test_alt_indexes(self): mgr.n1ql_index_drop(ixname, ignore_missing=True) self.assertRaises(E.NotFoundError, mgr.n1ql_index_drop, ixname) + # Create an index with a condition + ixname = 'ix_with_condition' + cond = '((`foo` = "foo") and (`bar` = "bar"))' + mgr.n1ql_index_create(ixname, fields=['foo'], condition=cond) + ll = filter(lambda x: x.name == ixname, mgr.n1ql_index_list()) + self.assertTrue(ll) + self.assertEqual(cond, ll[0].condition) + def test_list_indexes(self): cb = self.cb # type: Bucket mgr = cb.bucket_manager() From e35373ee56f47f7bbbdd7f21a286d6784b98360b Mon Sep 17 00:00:00 2001 From: Heungsub Lee <sub@subl.ee> Date: Fri, 13 May 2016 11:26:02 +0900 Subject: [PATCH 054/829] Fix PYCBC-344 when timed out ---- Clear async result callbacks always ---- Make test case of result confusion when timed out Change-Id: I4f924d0105bef63894577b083b6fe95516ef49d1 Reviewed-on: http://review.couchbase.org/64014 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- gcouchbase/bucket.py | 3 +- gcouchbase/tests/test_gevent.py | 49 +++++++++++++++++++++++---------- 2 files changed, 35 insertions(+), 17 deletions(-) diff --git a/gcouchbase/bucket.py b/gcouchbase/bucket.py index 63796a95f..89e4daaf0 100644 --- a/gcouchbase/bucket.py +++ b/gcouchbase/bucket.py @@ -109,11 +109,10 @@ def _waitwrap(self, cbasync): cbasync.set_callbacks(cur_thread.switch, errback) try: return get_hub().switch() - except GreenletExit: + finally: # Deregister callbacks to prevent another request on the same # greenlet to get the result from this context. cbasync.set_callbacks(dummy_callback, dummy_callback) - raise def _meth_factory(meth, name): def ret(self, *args, **kwargs): diff --git a/gcouchbase/tests/test_gevent.py b/gcouchbase/tests/test_gevent.py index d45858742..61f7d307a 100644 --- a/gcouchbase/tests/test_gevent.py +++ b/gcouchbase/tests/test_gevent.py @@ -1,3 +1,5 @@ +from itertools import cycle + from couchbase.tests.base import SkipTest try: import gevent @@ -13,30 +15,47 @@ class GeventSpecificTest(ConnectionTestCase): viewfactory = GView should_check_refcount = False + def setUp(self): + super(GeventSpecificTest, self).setUp() + self.foo = self.gen_key('foo') + self.bar = self.gen_key('bar') + self.cb.upsert(self.foo, 'foo') + self.cb.upsert(self.bar, 'bar') + def test_killing_greenlet(self): - foo = self.gen_key('foo') - bar = self.gen_key('bar') - self.cb.upsert(foo, 'foo') - self.cb.upsert(bar, 'bar') def load_foo_and_bar_forever(): - while True: - try: - r = self.cb.get(foo) - except gevent.GreenletExit: - continue - else: - self.assertEqual(r.value, 'foo') + for key, value in cycle([(self.foo, 'foo'), (self.bar, 'bar')]): try: - r = self.cb.get(bar) + r = self.cb.get(key) except gevent.GreenletExit: continue else: - self.assertEqual(r.value, 'bar') + self.assertEqual(r.value, value) def kill_greenlet_forever(g): while True: gevent.sleep(0.1) g.kill() g = gevent.spawn(load_foo_and_bar_forever) greenlets = [g, gevent.spawn(kill_greenlet_forever, g)] - with gevent.Timeout(1, False): - gevent.joinall(greenlets, raise_error=True) + try: + with gevent.Timeout(1, False): + gevent.joinall(greenlets, raise_error=True) + finally: + gevent.killall(greenlets, StopIteration) + + def test_timeout(self): + def load_foo_and_bar_forever(): + for key, value in cycle([(self.foo, 'foo'), (self.bar, 'bar')]): + try: + with gevent.Timeout(0.0000001): + r = self.cb.get(key) + except gevent.Timeout: + continue + else: + self.assertEqual(r.value, value) + g = gevent.spawn(load_foo_and_bar_forever) + try: + with gevent.Timeout(1, False): + g.get() + finally: + g.kill(StopIteration) From 284f39bc281be110f50058bd3141010ad2e2b611 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 13 May 2016 12:01:25 -0700 Subject: [PATCH 055/829] Minor exception fixes Create new SubdocPathEmptyError, rather than InvalidError Change-Id: I197af7617c7bcf65f26e4d63ea4d86ee01731b57 Reviewed-on: http://review.couchbase.org/64165 Reviewed-by: Will Gardner <will.gardner@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/exceptions.py | 8 +++++++- couchbase/tests/cases/subdoc_t.py | 7 ++++--- 2 files changed, 11 insertions(+), 4 deletions(-) diff --git a/couchbase/exceptions.py b/couchbase/exceptions.py index 3bde96f8e..dc2bb2e79 100644 --- a/couchbase/exceptions.py +++ b/couchbase/exceptions.py @@ -508,6 +508,10 @@ class SubdocMultipleErrors(CouchbaseError): """One or more subcommands failed. Inspect the individual operation""" CODE = C.LCB_SUBDOC_MULTI_FAILURE + +class SubdocEmptyPathError(CouchbaseError): + """Empty path passed as subdoc spec""" + _LCB_ERRCAT_MAP = { C.LCB_ERRTYPE_NETWORK: CouchbaseNetworkError, C.LCB_ERRTYPE_INPUT: CouchbaseInputError, @@ -555,7 +559,7 @@ class SubdocMultipleErrors(CouchbaseError): C.LCB_SUBDOC_VALUE_CANTINSERT: SubdocCantInsertValueError, C.LCB_SUBDOC_BAD_DELTA: SubdocBadDeltaError, C.LCB_SUBDOC_NUM_ERANGE: SubdocNumberTooBigError, - C.LCB_EMPTY_PATH: InvalidError + C.LCB_EMPTY_PATH: SubdocEmptyPathError } @@ -563,6 +567,8 @@ def _set_default_codes(): for k, v in _LCB_ERRNO_MAP.items(): v.CODE = k + ArgumentError.CODE = 0 + _set_default_codes() diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index 2bb79d670..bb85fa9eb 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -64,7 +64,7 @@ def test_lookup_in(self): cb.lookup_in, bkey, SD.exists('path')) # Empty paths fail for get_in - self.assertRaises(E.InvalidError, + self.assertRaises(E.SubdocEmptyPathError, cb.retrieve_in, key, '') # Try on non-existing document. Should fail @@ -125,8 +125,9 @@ def test_mutate_in(self): self.assertEqual(None, cb.retrieve_in(key, 'null')[0]) # Test with empty path. Should throw some kind of error? - self.assertRaises((E.SubdocCantInsertValueError, E.InvalidError), - cb.mutate_in, key, SD.upsert('', {})) + self.assertRaises( + (E.SubdocCantInsertValueError, E.SubdocEmptyPathError), + cb.mutate_in, key, SD.upsert('', {})) cb.mutate_in(key, SD.upsert('array', [1, 2, 3])) self.assertRaises(E.SubdocPathMismatchError, cb.mutate_in, key, From 03a37ee007e67194b9a8a8e6efd03987c6301b48 Mon Sep 17 00:00:00 2001 From: mattcarabine <matt.carabine@couchbase.com> Date: Sun, 22 May 2016 12:49:11 +0100 Subject: [PATCH 056/829] PYCBC-346: Fix flush_enabled param in 'bucket_create' Change-Id: I9721f1b020560588807208710c25b8325a4ef372 Reviewed-on: http://review.couchbase.org/64274 Tested-by: Matt Carabine <matt.carabine@couchbase.com> Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchbase/admin.py b/couchbase/admin.py index c5b1b8d63..bf6e45e4a 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -187,7 +187,7 @@ def bucket_create(self, name, bucket_type='couchbase', 'type': bucket_type, 'authType': 'sasl', 'saslPassword': bucket_password if bucket_password else '', - 'flush_enabled': int(flush_enabled), + 'flushEnabled': int(flush_enabled), 'ramQuotaMB': ram_quota } if bucket_type in ('couchbase', 'membase'): From 020c3100d39ba2daeaf470f4b112b8bf8d736652 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 20 Jun 2016 15:28:05 -0700 Subject: [PATCH 057/829] PYCBC-348: Fix typo in SubdocResult.access_ok Change-Id: I0773132bf42927cecab59400083eb5132e4e2c6e Reviewed-on: http://review.couchbase.org/65073 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/result.py | 2 +- couchbase/tests/cases/subdoc_t.py | 15 ++++++++++++++- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/couchbase/result.py b/couchbase/result.py index dfa2b6ed5..fd5498e73 100644 --- a/couchbase/result.py +++ b/couchbase/result.py @@ -153,7 +153,7 @@ def access_ok(self): :return: True if the document is accessible, False otherwise """ - return self.rc == 0 or self.rc == C.LCB_SUBDOC_MULTI_FAILRUE + return self.rc == 0 or self.rc == C.LCB_SUBDOC_MULTI_FAILURE @property def value(self): diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index bb85fa9eb..732545cd6 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -248,4 +248,17 @@ def test_result_iter(self): self.assertFalse(vals.success) it = iter(vals) self.assertEqual(1, next(it)) - self.assertRaises(E.SubdocPathNotFoundError, next, it) \ No newline at end of file + self.assertRaises(E.SubdocPathNotFoundError, next, it) + + def test_access_ok(self): + cb = self.cb + key = self.gen_key('non-exist') + try: + cb.lookup_in(key, SD.get('pth1'), quiet=True) + except E.NotFoundError as e: + rv = e.all_results[key] + self.assertFalse(rv.access_ok) + + cb.upsert(key, {'hello': 'world'}) + rv = cb.lookup_in(key, SD.get('nonexist')) + self.assertTrue(rv.access_ok) \ No newline at end of file From a4e5108bac5d45f71d80ea018582a49f87256d8c Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 25 May 2016 15:53:55 -0700 Subject: [PATCH 058/829] Add SearchRequest.execute() This allows exhausting the iterator and can be used to do facet-only queries Change-Id: I497aa852a9864ff0dad14a34a867e5a036bc4847 Reviewed-on: http://review.couchbase.org/65072 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/fulltext.py | 17 +++++++++++++++++ 1 file changed, 17 insertions(+) diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py index 63c0930a2..af36b7ffd 100644 --- a/couchbase/fulltext.py +++ b/couchbase/fulltext.py @@ -926,6 +926,23 @@ def _start(self): def raw(self): return self.__raw + def execute(self): + """ + Use this convenience method if you are not actually reading the + search hits, for example if you are only using :attr:`facets`. + + Equivalent to:: + + def execute(self): + [x for x in self] + return self + + :return: :class:`SearchRequest` (self) + """ + for _ in self: + pass + return self + @property def meta(self): """ From 2dc82f7987776bdd062ce2ce8ab0023ea66eea4a Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 20 Jun 2016 15:28:39 -0700 Subject: [PATCH 059/829] PYCBC-347: `fields` is not comma-delimited, but a list Change-Id: I8d8a453837eea70e85a0491ced0327a00d7a6ff6 Reviewed-on: http://review.couchbase.org/65074 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <will.gardner@couchbase.com> --- couchbase/_ixmgmt.py | 7 +------ couchbase/bucketmanager.py | 2 +- 2 files changed, 2 insertions(+), 7 deletions(-) diff --git a/couchbase/_ixmgmt.py b/couchbase/_ixmgmt.py index d28af292d..a9c19ccc3 100644 --- a/couchbase/_ixmgmt.py +++ b/couchbase/_ixmgmt.py @@ -53,12 +53,7 @@ def __init__(self, raw=None): keyspace = _genprop('keyspace_id') # type: str state = _genprop('state') # type: str condition = _genprop('condition') # type: str - _index_key = _genprop('index_key') # type: str - - @property - def fields(self): - if self._index_key: - return self._index_key.split(',') + fields = _genprop('index_key') # type: list[str] def __repr__(self): return ('Index<name={0.name}, primary={0.primary}, raw={0.raw!r}>' diff --git a/couchbase/bucketmanager.py b/couchbase/bucketmanager.py index f8fc50616..21c598750 100644 --- a/couchbase/bucketmanager.py +++ b/couchbase/bucketmanager.py @@ -339,7 +339,7 @@ def n1ql_index_create(self, ix, **kwargs): raise ValueError('Fields required for non-primary index') if fields: - info._index_key = ','.join(fields) + info.fields = fields if primary and info.name is N1QL_PRIMARY_INDEX: del info.name From e6ce458ae2e7dad9ade224b2797ee13ceb0febf8 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 1 Aug 2016 08:25:47 -0700 Subject: [PATCH 060/829] PYCBC-355: Add up-to-date documentation links to README Change-Id: I13aacb0727bee27c4b7d6c79d85725b5735b49ba Reviewed-on: http://review.couchbase.org/66362 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- README.rst | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/README.rst b/README.rst index 3b8f38cdf..8ad6ed5d2 100644 --- a/README.rst +++ b/README.rst @@ -262,13 +262,17 @@ To run the tests:: nosetests -------- -Support -------- +------------------------------ +Support & Additional Resources +------------------------------ + +If you found an issue, please file it in our JIRA_. +You can ask questions in our forums_ or in the `#libcouchbase` channel on +freenode_. -If you found an issue, please file it in our JIRA_. You may also ask in the -`#libcouchbase` IRC channel at freenode_. (which is where the author(s) -of this module may be found). +The `official documentation`_ can be consulted as well for +general Couchbase concepts and offers a more didactic approach to using the +SDK. ------- License @@ -277,7 +281,9 @@ License The Couchbase Python SDK is licensed under the Apache License 2.0. .. _Couchbase: http://couchbase.com -.. _libcouchbase: http://couchbase.com/develop/c/current +.. _libcouchbase: http://developer.couchbase.com/documentation/server/4.5/sdk/c/start-using-sdk.html +.. _official documentation: http://developer.couchbase.com/documentation/server/4.5/sdk/python/start-using-sdk.html .. _JIRA: http://couchbase.com/issues/browse/pycbc .. _freenode: http://freenode.net/irc_servers.shtml .. _pypi: http://pypi.python.org/pypi/couchbase +.. _forums: https://forums.couchbase.com From 791018cd2f26a724f961934335299f118740bbf4 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 30 Sep 2016 14:28:38 -0700 Subject: [PATCH 061/829] PYCBC-323: Datastructure support Change-Id: Ib4062826f2b4d9b6fed6253b2f594fcff0962b7a Reviewed-on: http://review.couchbase.org/68234 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 372 ++++++++++++++++++++++ couchbase/exceptions.py | 6 + couchbase/tests/cases/datastructures_t.py | 110 +++++++ docs/source/api/datastructures.rst | 89 ++++++ docs/source/index.rst | 1 + 5 files changed, 578 insertions(+) create mode 100644 couchbase/tests/cases/datastructures_t.py create mode 100644 docs/source/api/datastructures.rst diff --git a/couchbase/bucket.py b/couchbase/bucket.py index f52e0f8fe..7e7c3d6dd 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -31,6 +31,7 @@ from couchbase.n1ql import N1QLQuery, N1QLRequest import couchbase.fulltext as _FTS from couchbase._pyport import basestring +import couchbase.subdocument as SD def _depr(fn, usage, stacklevel=3): @@ -92,6 +93,34 @@ def __exit__(self, *args): return False +def _dsop(create_type=None, wrap_missing_path=True): + from couchbase.experimental import enabled_or_raise + import functools + + def real_decorator(fn): + @functools.wraps(fn) + def newfn(self, key, *args, **kwargs): + enabled_or_raise() + try: + return fn(self, key, *args, **kwargs) + except E.NotFoundError: + if kwargs.get('create'): + try: + self.insert(key, create_type()) + except E.KeyExistsError: + pass + return fn(self, key, *args, **kwargs) + else: + raise + except E.SubdocPathNotFoundError: + if wrap_missing_path: + raise IndexError(args[0]) + + return newfn + + return real_decorator + + class Bucket(_Base): def __init__(self, *args, **kwargs): """Connect to a bucket. @@ -1718,3 +1747,346 @@ def add_bucket_creds(self, bucket, password): if not bucket or not password: raise ValueError('Bucket and password must be nonempty') return _Base._add_creds(self, bucket, password) + + def _wrap_dsop(self, sdres, has_value=False): + from couchbase.items import Item + it = Item(sdres.key) + it.cas = sdres.cas + if has_value: + it.value = sdres[0] + return it + + @_dsop(create_type=dict) + def map_add(self, key, mapkey, value, create=False, **kwargs): + """ + Set a value for a key in a map. + + .. warning:: + + The functionality of the various `map_*`, `list_*`, `queue_*` + and `set_*` functions are considered experimental and are included + in the library to demonstrate new functionality. + They may change in the future or be removed entirely! + + These functions are all wrappers around the :meth:`mutate_in` or + :meth:`lookup_in` methods. + + :param key: The document ID of the map + :param mapkey: The key in the map to set + :param value: The value to use (anything serializable to JSON) + :param create: Whether the map should be created if it does not exist + :param kwargs: Additional arguments passed to :meth:`mutate_in` + :return: A :class:`~.OperationResult` + :raise: :cb_exc:`NotFoundError` if the document does not exist. + and `create` was not specified + + .. Initialize a map and add a value + + cb.upsert('a_map', {}) + cb.map_add('a_map', 'some_key', 'some_value') + cb.map_get('a_map', 'some_key').value # => 'some_value' + cb.get('a_map').value # => {'some_key': 'some_value'} + + """ + op = SD.upsert(mapkey, value) + sdres = self.mutate_in(key, op, **kwargs) + return self._wrap_dsop(sdres) + + @_dsop() + def map_get(self, key, mapkey): + """ + Retrieve a value from a map. + + :param str key: The document ID + :param str mapkey: Key within the map to retrieve + :return: :class:`~.ValueResult` + :raise: :exc:`IndexError` if the mapkey does not exist + :raise: :cb_exc:`NotFoundError` if the document does not exist. + + .. seealso:: :meth:`map_add` for an example + """ + op = SD.get(mapkey) + sdres = self.lookup_in(key, op) + return self._wrap_dsop(sdres, True) + + @_dsop() + def map_remove(self, key, mapkey, **kwargs): + """ + Remove an item from a map. + + :param str key: The document ID + :param str mapkey: The key in the map + :param kwargs: See :meth:`mutate_in` for options + :raise: :exc:`IndexError` if the mapkey does not exist + :raise: :cb_exc:`NotFoundError` if the document does not exist. + + .. Remove a map key-value pair: + + cb.map_remove('a_map', 'some_key') + + .. seealso:: :meth:`map_add` + """ + op = SD.remove(mapkey) + sdres = self.mutate_in(key, op, **kwargs) + return self._wrap_dsop(sdres) + + @_dsop() + def map_length(self, key): + """ + Get the number of items in the map. + + :param str key: The document ID of the map + :return int: The number of items in the map + :raise: :cb_exc:`NotFoundError` if the document does not exist. + + .. seealso:: :meth:`map_add` + """ + # TODO: This should use get_count, but we need to check for compat + # with server version (i.e. >= 4.6) first; otherwise it just + # disconnects. + + rv = self.get(key) + return len(rv.value) + + @_dsop(create_type=list) + def list_append(self, key, value, create=False, **kwargs): + """ + Add an item to the end of a list. + + :param str key: The document ID of the list + :param value: The value to append + :param create: Whether the list should be created if it does not + exist. Note that this option only works on servers >= 4.6 + :param kwargs: Additional arguments to :meth:`mutate_in` + :return: :class:`~.OperationResult`. + :raise: :cb_exc:`NotFoundError` if the document does not exist. + and `create` was not specified. + + example:: + + cb.list_append('a_list', 'hello') + cb.list_append('a_list', 'world') + + .. seealso:: :meth:`map_add` + """ + op = SD.array_append('', value) + sdres = self.mutate_in(key, op, **kwargs) + return self._wrap_dsop(sdres) + + @_dsop(create_type=list) + def list_prepend(self, key, value, create=False, **kwargs): + """ + Add an item to the beginning of a list. + + :param str key: Document ID + :param value: Value to prepend + :param bool create: + Whether the list should be created if it does not exist + :param kwargs: Additional arguments to :meth:`mutate_in`. + :return: :class:`OperationResult`. + :raise: :cb_exc:`NotFoundError` if the document does not exist. + and `create` was not specified. + + This function is identical to :meth:`list_append`, except for prepending + rather than appending the item + + .. seealso:: :meth:`list_append`, :meth:`map_add` + """ + op = SD.array_prepend('', value) + sdres = self.mutate_in(key, op, **kwargs) + return self._wrap_dsop(sdres) + + @_dsop() + def list_insert(self, key, index, value, **kwargs): + """ + Insert an item within a list at a given position. + + :param key: The key of the document + :param index: The position at which the item should be inserted + :param value: The value to be inserted + :param kwargs: Additional arguments to :meth:`mutate_in` + :return: :class:`OperationResult` + :raise: :cb_exc:`NotFoundError` if the list does not exist + :raise: :exc:`IndexError` if the index is out of bounds + + example:: + + cb.upsert('a_list', ['hello', 'world']) + cb.list_insert('a_list', 1, 'good') + cb.get('a_list').value # => ['hello', 'good', 'world'] + + .. seealso:: :meth:`map_add`, :meth:`list_append` + """ + op = SD.array_insert('[{0}]'.format(index), value) + sdres = self.mutate_in(key, op, **kwargs) + return self._wrap_dsop(sdres) + + @_dsop(create_type=list) + def set_add(self, key, value, create=False, **kwargs): + """ + Add an item to a set if the item does not yet exist. + + :param key: The document ID + :param value: Value to add + :param create: Create the set if it does not exist + :param kwargs: Arguments to :meth:`mutate_in` + :return: A :class:`~.OperationResult` if the item was added, + :raise: :cb_exc:`NotFoundError` if the document does not exist + and `create` was not specified. + + .. seealso:: :meth:`map_add` + """ + op = SD.array_addunique('', value) + try: + sdres = self.mutate_in(key, op, **kwargs) + return self._wrap_dsop(sdres) + except E.SubdocPathExistsError: + pass + + @_dsop() + def set_remove(self, key, value, **kwargs): + """ + Remove an item from a set. + + :param key: The docuent ID + :param value: Value to remove + :param kwargs: Arguments to :meth:`mutate_in` + :return: A :class:`OperationResult` if the item was removed, false + otherwise + :raise: :cb_exc:`NotFoundError` if the set does not exist. + + .. seealso:: :meth:`set_add`, :meth:`map_add` + """ + while True: + rv = self.get(key) + try: + ix = rv.value.index(value) + kwargs['cas'] = rv.cas + return self.list_remove(key, ix, **kwargs) + except E.KeyExistsError: + pass + except ValueError: + return + + def set_length(self, key): + """ + Get the length of a set. + + :param key: The document ID of the set + :return: The length of the set + :raise: :cb_exc:`NotFoundError` if the set does not exist. + + """ + return self.list_length(key) + + def set_exists(self, key, value): + """ + Determine if an item exists in a set + :param key: The document ID of the set + :param value: The value to check for + :return: True if `value` exists in the set + :raise: :cb_exc:`NotFoundError` if the document does not exist + """ + rv = self.get(key) + return value in rv.value + + @_dsop() + def list_get(self, key, index): + """ + Get a specific element within a list. + + :param key: The document ID + :param index: The index to retrieve + :return: :class:`ValueResult` for the element + :raise: :exc:`IndexError` if the index does not exist + :raise: :cb_exc:`NotFoundError` if the list does not exist + """ + return self.map_get(key, '[{0}]'.format(index)) + + @_dsop() + def list_remove(self, key, index, **kwargs): + """ + Remove the element at a specific index from a list. + + :param key: The document ID of the list + :param index: The index to remove + :param kwargs: Arguments to :meth:`mutate_in` + :return: :class:`OperationResult` + :raise: :exc:`IndexError` if the index does not exist + :raise: :cb_exc:`NotFoundError` if the list does not exist + + """ + return self.map_remove(key, '[{0}]'.format(index), **kwargs) + + @_dsop() + def list_length(self, key): + """ + Retrieve the number of elements in the list. + + :param key: The document ID of the list + :return: The number of elements within the list + :raise: :cb_exc:`NotFoundError` if the list does not exist + + """ + return self.map_length(key) + + @_dsop(create_type=list) + def queue_push(self, key, value, create=False, **kwargs): + """ + Add an item to the end of a queue. + + :param key: The document ID of the queue + :param value: The item to add to the queue + :param create: Whether the queue should be created if it does not exist + :param kwargs: Arguments to pass to :meth:`mutate_in` + :return: :class:`OperationResult` + :raise: :cb_exc:`NotFoundError` if the queue does not exist and + `create` was not specified. + + example:: + + # Ensure it's removed first + + cb.remove('a_queue') + cb.queue_push('a_queue', 'job9999', create=True) + cb.queue_pop('a_queue').value # => job9999 + + """ + return self.list_prepend(key, value, **kwargs) + + @_dsop() + def queue_pop(self, key, **kwargs): + """ + Remove and return the first item queue. + + :param key: The document ID + :param kwargs: Arguments passed to :meth:`mutate_in` + :return: A :class:`ValueResult` + :raise: :cb_exc:`QueueEmpty` if there are no items in the queue. + :raise: :cb_exc:`NotFoundError` if the queue does not exist. + """ + while True: + try: + itm = self.list_get(key, -1) + except IndexError: + raise E.QueueEmpty + + kwargs['cas'] = itm.cas + try: + self.list_remove(key, -1, **kwargs) + return itm + except E.KeyExistsError: + pass + except IndexError: + raise E.QueueEmpty + + @_dsop() + def queue_length(self, key): + """ + Get the length of the queue. + + :param key: The document ID of the queue + :return: The length of the queue + :raise: :cb_exc:`NotFoundError` if the queue does not exist. + """ + return self.list_length(key) \ No newline at end of file diff --git a/couchbase/exceptions.py b/couchbase/exceptions.py index dc2bb2e79..f16ef3784 100644 --- a/couchbase/exceptions.py +++ b/couchbase/exceptions.py @@ -642,3 +642,9 @@ def exc_from_rc(rc, msg=None, obj=None): """ newcls = CouchbaseError.rc_to_exctype(rc) return newcls(params={'rc': rc, 'objextra': obj, 'message': msg}) + + +class QueueEmpty(Exception): + """ + Thrown if a datastructure queue is empty + """ \ No newline at end of file diff --git a/couchbase/tests/cases/datastructures_t.py b/couchbase/tests/cases/datastructures_t.py new file mode 100644 index 000000000..88250f388 --- /dev/null +++ b/couchbase/tests/cases/datastructures_t.py @@ -0,0 +1,110 @@ +# +# Copyright 2016, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from couchbase.tests.base import ConnectionTestCase, SkipTest +import couchbase.exceptions as E +import couchbase.experimental; couchbase.experimental.enable() + + +class DatastructureTest(ConnectionTestCase): + def test_map(self): + key = self.gen_key('dsmap') + self.cb.remove(key, quiet=True) + + cb = self.cb + self.assertRaises(E.NotFoundError, cb.map_add, key, 'key1', 'val1') + rv = cb.map_add(key, 'key1', 'val1', create=True) + self.assertTrue(rv.success) + self.assertNotEquals(0, rv.cas) + + rv = cb.map_get(key, 'key1') + self.assertEqual('val1', rv.value) + + self.assertEqual(1, cb.map_length(key)) + + self.assertRaises(IndexError, cb.map_remove, key, 'key2') + rv = cb.map_remove(key, 'key1') + self.assertTrue(rv.success) + self.assertEqual(0, cb.map_length(key)) + + def test_list(self): + key = self.gen_key('dslist') + self.cb.remove(key, quiet=True) + + cb = self.cb + + self.assertRaises(E.NotFoundError, cb.list_append, key, 'hello') + rv = self.cb.list_append(key, 'hello', create=True) + self.assertTrue(rv.success) + self.assertNotEquals(0, rv.cas) + + rv = cb.list_get(key, 0) + self.assertEqual('hello', rv.value) + + rv = cb.list_prepend(key, 'before') + self.assertTrue(rv.success) + self.assertEqual('before', cb.list_get(key, 0).value) + self.assertEqual('hello', cb.list_get(key, 1).value) + self.assertEqual(2, cb.list_length(key)) + + # Insert value + rv = cb.list_insert(key, 1, 'between') + self.assertTrue(rv.success) + self.assertEqual(['before', 'between', 'hello'], cb.get(key).value) + + rv = cb.list_remove(key, 1) + self.assertTrue(rv.success) + self.assertEqual(['before', 'hello'], cb.get(key).value) + + def test_sets(self): + key = self.gen_key('dsset') + self.cb.remove(key, quiet=True) + cb = self.cb + + self.assertRaises(E.NotFoundError, cb.set_add, key, 123) + rv = cb.set_add(key, 123, create=True) + self.assertTrue(rv.success) + rv = cb.set_add(key, 123) + self.assertFalse(rv) + self.assertEqual(1, cb.set_length(key)) + self.assertTrue(cb.set_exists(key, 123)) + + rv = cb.set_remove(key, 123) + self.assertTrue(rv.success) + self.assertEqual(0, cb.set_length(key)) + rv = cb.set_remove(key, 123) + self.assertFalse(rv) + self.assertFalse(cb.set_exists(key, 123)) + + def test_queue(self): + key = self.gen_key('a_queue') + self.cb.remove(key, quiet=True) + + cb = self.cb + self.assertRaises(E.NotFoundError, cb.queue_push, key, 1) + rv = cb.queue_push(key, 1, create=True) + self.assertTrue(rv.success) + cb.queue_push(key, 2) + cb.queue_push(key, 3) + + # Pop the items now + self.assertEqual(1, cb.queue_pop(key).value) + self.assertEqual(2, cb.queue_pop(key).value) + self.assertEqual(3, cb.queue_pop(key).value) + self.assertRaises(E.QueueEmpty, cb.queue_pop, key) + + diff --git a/docs/source/api/datastructures.rst b/docs/source/api/datastructures.rst new file mode 100644 index 000000000..a1ec0c40c --- /dev/null +++ b/docs/source/api/datastructures.rst @@ -0,0 +1,89 @@ +.. module:: couchbase.datastructures + + +================== +Datastructures API +================== + +.. currentmodule:: couchbase.bucket + +.. versionadded:: 2.1.1 + +The datastructure API is a new experimental API added which allows your +application to view Couchbase documents as common data structures such as +lists, maps, and queues. + +Datastructure operations are implemented largely using +:mod:`couchbase.subdocument` operations, and also carry with some more +efficiency. + +.. warning:: + + The data structure API is currently experimental. The API may change + or be removed entirely. + + +In order to use this API you must first enable experimental functionality: + +.. code-block:: python + + import couchbase.experimental + couchbase.experimental.enable() + + +Couchbase datatypes are still JSON underneath, and use the +:meth:`couchbase.bucket.Bucket.mutate_in` and +:meth:`couchbase.bucket.Bucket.lookup_in` methods to access the structures +themselves. + +The datastructures are: + +- Map: This is similar to a Python `dict`. It is represented as a JSON object + (i.e. ``{}``). Maps can be manipulated using the `map_*` methods below. + +- List: This is similar to a Python `list`. It is represented as a JSON array + (``[]``). Lists can be accessed using the `list_*` methods below. + +- Set: This is like a List, but also contains methods allowing to avoid inserting + duplicates. All List methods may be used on Sets, and vice versa. Sets can + be accessed using the `set_*` methods below, as well as various `list_*` + methods. + +- Queues: This is like a list, but also features a ``queue_pop`` method + which can make it suitable for a light weight FIFO queue. + You can access queues using the `queue_*` methods below. + + +.. note:: + + Datastructures are just JSON documents and can be accessed using any of the + full-document (e.g. :meth:`~.Bucket.upsert` and :meth:`~.Bucket.get`) or + sub-document (e.g. :meth:`~.Bucket.lookup_in` and :meth:`~.Bucket.mutate_in`) + methods. + + +.. currentmodule:: couchbase.bucket + +.. class:: Bucket + :noindex: + + .. automethod:: map_add + .. automethod:: map_get + .. automethod:: map_length + .. automethod:: map_remove + + .. automethod:: list_append + .. automethod:: list_prepend + .. automethod:: list_insert + .. automethod:: list_get + .. automethod:: list_remove + .. automethod:: list_length + + + .. automethod:: set_length + .. automethod:: set_add + .. automethod:: set_remove + .. automethod:: set_exists + + .. automethod:: queue_push + .. automethod:: queue_pop diff --git a/docs/source/index.rst b/docs/source/index.rst index 77086169e..f31e1a17c 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -13,6 +13,7 @@ Contents: api/couchbase api/subdoc + api/datastructures api/views api/n1ql api/fulltext From 42de6573f2c504726fc9d8ff6ea21a10f347d0d4 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 19 Oct 2016 13:33:48 -0700 Subject: [PATCH 062/829] PYCBC-361: Add `design_list`. This allows to list all design documents of the bucket by using `bucket.bucket_manager().design_list()`. Change-Id: I4225c58352aede61768da892d414ee8949e65d6a Reviewed-on: http://review.couchbase.org/68950 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/bucketmanager.py | 55 ++++++++++++++++++++++++++++++++++- docs/source/api/couchbase.rst | 1 + 2 files changed, 55 insertions(+), 1 deletion(-) diff --git a/couchbase/bucketmanager.py b/couchbase/bucketmanager.py index 21c598750..7e6fa5689 100644 --- a/couchbase/bucketmanager.py +++ b/couchbase/bucketmanager.py @@ -203,7 +203,7 @@ def design_get(self, name, use_devmode=True): :raise: :exc:`couchbase.exceptions.HTTPError` if the design does not exist. - .. seealso:: :meth:`design_create` + .. seealso:: :meth:`design_create`, :meth:`design_list` """ name = self._mk_devmode(name, use_devmode) @@ -283,6 +283,59 @@ def design_delete(self, name, use_devmode=True, syncwait=0): self._design_poll(name, 'del', existing, syncwait) return ret + def design_list(self): + """ + List all design documents for the current bucket. + + :return: A :class:`~couchbase.result.HttpResult` containing + a dict, with keys being the ID of the design document. + + .. note:: + + This information is derived using the + ``pools/default/buckets/<bucket>ddocs`` endpoint, but the return + value has been modified to match that of :meth:`design_get`. + + .. note:: + + This function returns both 'production' and 'development' mode + views. These two can be distinguished by the name of the + design document being prefixed with the ``dev_`` identifier. + + The keys of the dict in ``value`` will be of the form + ``_design/<VIEWNAME>`` where ``VIEWNAME`` may either be e.g. + ``foo`` or ``dev_foo`` depending on whether ``foo`` is a + production or development mode view. + + :: + + for name, ddoc in mgr.design_list().value.items(): + if name.startswith('_design/dev_'): + print "Development view!" + else: + print "Production view!" + + Example:: + + for name, ddoc in mgr.design_list().value.items(): + print 'Design name {0}. Contents {1}'.format(name, ddoc) + + .. seealso:: :meth:`design_get` + + """ + ret = self._http_request( + type=_LCB.LCB_HTTP_TYPE_MANAGEMENT, + path="/pools/default/buckets/{0}/ddocs".format(self._cb.bucket), + method=_LCB.LCB_HTTP_METHOD_GET) + + real_rows = {r['doc']['meta']['id']: r['doc']['json'] + for r in ret.value['rows']} + + # Can't use normal assignment because 'value' is read-only + ret.value.clear() + ret.value.update(real_rows) + return ret + def _mk_index_def(self, ix, primary=False): if isinstance(ix, N1qlIndex): return N1qlIndex(ix) diff --git a/docs/source/api/couchbase.rst b/docs/source/api/couchbase.rst index 7525dbaa6..7360dfaba 100644 --- a/docs/source/api/couchbase.rst +++ b/docs/source/api/couchbase.rst @@ -355,6 +355,7 @@ the :meth:`~couchbase.bucket.Bucket.bucket_manager` method on the .. automethod:: design_get .. automethod:: design_publish .. automethod:: design_delete + .. automethod:: design_list N1QL Index Management ===================== From 353d14e9ab6c00f6345ff1591cbe1ec11024da98 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 18 Oct 2016 15:01:35 -0700 Subject: [PATCH 063/829] PYCBC-362: Upgrade version requirement to 2.6.0 Change-Id: If61606f78f52d6579b011a011732efee17e24380 Reviewed-on: http://review.couchbase.org/68946 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <willg@rdner.io> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- README.rst | 2 +- src/pycbc.h | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index 8ad6ed5d2..2a5e057b1 100644 --- a/README.rst +++ b/README.rst @@ -38,7 +38,7 @@ Prerequisites ~~~~~~~~~~~~~ - Couchbase Server (http://couchbase.com/download) -- libcouchbase_. version 2.4.7 or greater (Bundled in Windows installer) +- libcouchbase_. version 2.6.0 or greater (Bundled in Windows installer) - libcouchbase development files. - Python development files - A C compiler. diff --git a/src/pycbc.h b/src/pycbc.h index 629e69624..d08ef3bc0 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -28,8 +28,8 @@ #include <libcouchbase/n1ql.h> #include <libcouchbase/cbft.h> #include <libcouchbase/ixmgmt.h> -#if LCB_VERSION < 0x020508 -#error "Couchbase Python SDK requires libcouchbase 2.5.8 or greater" +#if LCB_VERSION < 0x020600 +#error "Couchbase Python SDK requires libcouchbase 2.6.0 or greater" #endif #include <pythread.h> From e959f04f1b235cd82e72da6884285c7eaec4c5d7 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 19 Oct 2016 08:45:57 -0700 Subject: [PATCH 064/829] PYCBC-322: Allow cross-bucket ("Cluster") queries Change-Id: I0889ba2e87fbee13de45823f3ba84c84235c5e08 Reviewed-on: http://review.couchbase.org/68947 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <willg@rdner.io> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/n1ql.py | 21 +++++++++++++++++++-- docs/source/api/n1ql.rst | 1 + src/n1ql.c | 9 ++++++--- 3 files changed, 26 insertions(+), 5 deletions(-) diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index d52bedb38..f4354e584 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -241,6 +241,7 @@ def __init__(self, query, *args, **kwargs): """ self._adhoc = True + self._cross_bucket = False self._body = {'statement': query} if args: self._add_pos_args(*args) @@ -336,6 +337,20 @@ def adhoc(self): def adhoc(self, arg): self._adhoc = arg + @property + def cross_bucket(self): + """ + Set this to true to indicate that the query string involves multiple + buckets. This makes the query a "cluster-level" query. Cluster level + queries have access to documents in multiple buckets, using credentials + supplied via :meth:`.Bucket.add_bucket_creds` + """ + return self._cross_bucket + + @cross_bucket.setter + def cross_bucket(self, value): + self._cross_bucket = value + @property def timeout(self): """ @@ -416,8 +431,10 @@ def _start(self): if self._mres: return - self._mres = self._parent._n1ql_query(self._params.encoded, - not self._params.adhoc) + self._mres = self._parent._n1ql_query( + self._params.encoded, not self._params.adhoc, + cross_bucket=self._params.cross_bucket) + self.__raw = self._mres[None] @property diff --git a/docs/source/api/n1ql.rst b/docs/source/api/n1ql.rst index 304d67de8..0c34c3424 100644 --- a/docs/source/api/n1ql.rst +++ b/docs/source/api/n1ql.rst @@ -13,6 +13,7 @@ N1QL Queries .. autoattribute:: encoded .. autoattribute:: adhoc .. autoattribute:: timeout + .. autoattribute:: cross_bucket .. autodata:: CONSISTENCY_NONE .. autodata:: CONSISTENCY_REQUEST diff --git a/src/n1ql.c b/src/n1ql.c index 081743c03..0e1213539 100644 --- a/src/n1ql.c +++ b/src/n1ql.c @@ -48,11 +48,11 @@ pycbc_Bucket__n1ql_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) lcb_CMDN1QL cmd = { 0 }; const char *params; pycbc_strlen_t nparams; - int prepared = 0; + int prepared = 0, cross_bucket = 0; - static char *kwlist[] = { "params", "prepare", NULL }; + static char *kwlist[] = { "params", "prepare", "cross_bucket", NULL }; rv = PyArg_ParseTupleAndKeywords( - args, kwargs, "s#|i", kwlist, ¶ms, &nparams, &prepared); + args, kwargs, "s#|ii", kwlist, ¶ms, &nparams, &prepared, &cross_bucket); if (!rv) { PYCBC_EXCTHROW_ARGS(); @@ -82,6 +82,9 @@ pycbc_Bucket__n1ql_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) if (prepared) { cmd.cmdflags |= LCB_CMDN1QL_F_PREPCACHE; } + if (cross_bucket) { + cmd.cmdflags |= LCB_CMD_F_MULTIAUTH; + } rc = lcb_n1ql_query(self->instance, mres, &cmd); if (rc != LCB_SUCCESS) { From a42ae7ee525547ea17c89771dc33630215b670f7 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 24 Oct 2016 09:19:33 -0700 Subject: [PATCH 065/829] PYCBC-363: Use REQUEST_PLUS instead of CONSISTENCY_REQUEST Change-Id: I6cc0daeb3c379b7bb4caa1f0618cf18355b30cd4 Reviewed-on: http://review.couchbase.org/69146 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/n1ql.py | 18 +++++++++++------- docs/source/api/n1ql.rst | 4 ++-- 2 files changed, 13 insertions(+), 9 deletions(-) diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index f4354e584..269aea6a2 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -28,19 +28,23 @@ def n1ql_errcode(self): return self.objextra['code'] -CONSISTENCY_REQUEST = 'request_plus' +STATEMENT_PLUS = 'statement_plus' + +REQUEST_PLUS = 'request_plus' """ For use with :attr:`~.N1QLQuery.consistency`, will ensure that query results always reflect the latest data in the server """ - -CONSISTENCY_NONE = 'none' +UNBOUNDED = 'none' """ For use with :attr:`~.N1QLQuery.consistency`, will allow cached values to be returned. This will improve performance but may not reflect the latest data in the server. """ +CONSISTENCY_REQUEST = REQUEST_PLUS +CONSISTENCY_NONE = UNBOUNDED + class MissingTokenError(CouchbaseError): pass @@ -58,7 +62,7 @@ class MutationState(object): mutations. Using `consistent_with` is similar to setting - :attr:`~.N1QLQuery.consistency` to :data:`CONSISTENCY_REQUEST`, + :attr:`~.N1QLQuery.consistency` to :data:`REQUEST_PLUS`, but is more optimal as the query will use cached data, *except* when the given mutation(s) are concerned. This option is useful for use patterns when an application has just performed a mutation, @@ -292,9 +296,9 @@ def consistency(self): """ Sets the consistency level. - :see: :data:`CONSISTENCY_NONE`, :data:`CONSISTENCY_REQUEST` + :see: :data:`UNBOUNDED`, :data:`REQUEST_PLUS` """ - return self._body.get('scan_consistency', CONSISTENCY_NONE) + return self._body.get('scan_consistency', UNBOUNDED) @consistency.setter def consistency(self, value): @@ -308,7 +312,7 @@ def consistent_with(self, state): :param state: The state of the mutations it should be consistent with. """ - if self.consistency not in (CONSISTENCY_NONE, 'at_plus'): + if self.consistency not in (UNBOUNDED, 'at_plus'): raise TypeError( 'consistent_with not valid with other consistency options') diff --git a/docs/source/api/n1ql.rst b/docs/source/api/n1ql.rst index 0c34c3424..0879870fd 100644 --- a/docs/source/api/n1ql.rst +++ b/docs/source/api/n1ql.rst @@ -15,8 +15,8 @@ N1QL Queries .. autoattribute:: timeout .. autoattribute:: cross_bucket -.. autodata:: CONSISTENCY_NONE -.. autodata:: CONSISTENCY_REQUEST +.. autodata:: UNBOUNDED +.. autodata:: REQUEST_PLUS .. autoclass:: MutationState From a3412f5c3c3381f073296d75219452d6f390ff70 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 24 Oct 2016 09:13:56 -0700 Subject: [PATCH 066/829] don't use dict comprehension (unsupp. on 2.6) Change-Id: I139ba7e7aef5061745046d148430421d331810a8 Reviewed-on: http://review.couchbase.org/69145 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <willg@rdner.io> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/bucketmanager.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/couchbase/bucketmanager.py b/couchbase/bucketmanager.py index 7e6fa5689..5bbad65f5 100644 --- a/couchbase/bucketmanager.py +++ b/couchbase/bucketmanager.py @@ -328,8 +328,9 @@ def design_list(self): path="/pools/default/buckets/{0}/ddocs".format(self._cb.bucket), method=_LCB.LCB_HTTP_METHOD_GET) - real_rows = {r['doc']['meta']['id']: r['doc']['json'] - for r in ret.value['rows']} + real_rows = {} + for r in ret.value['rows']: + real_rows[r['doc']['meta']['id']] = r['doc']['json'] # Can't use normal assignment because 'value' is read-only ret.value.clear() From e90c1b3b19e7944ee47cb82ef6f903ba3c86c04f Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 25 Oct 2016 14:02:02 -0700 Subject: [PATCH 067/829] PYCBC-351: Clarify type constraints for value for upsert et. al Change-Id: I055a1d762bb977e2421881f0895943a914dae050 Reviewed-on: http://review.couchbase.org/69215 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/bucket.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 7e7c3d6dd..c87e08cd1 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -325,8 +325,14 @@ def upsert(self, key, value, cas=0, ttl=0, format=None, to the transcoder, which may serialize it how it wishes. :type key: string or bytes - :param value: The value to set for the key. The type for `value` - follows the same rules as for `key` + :param value: The value to set for the key. + This should be a native Python value which will be transparently + serialized to JSON by the library. Do not pass already-serialized + JSON as the value or it will be serialized again. + + If you are using a different `format` setting (see `format` + parameter), and/or a custom transcoder then value for this + argument may need to conform to different criteria. :param int cas: The _CAS_ value to use. If supplied, the value will only be stored if it already exists with the supplied From ff344ed94079a1c5cb6c5403a6ae71499c899586 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 8 Nov 2016 11:14:06 -0800 Subject: [PATCH 068/829] fts: Don't rethrow invalid arguments Python will raise an error if arguments fail anyway Change-Id: I8f480cd3dc045629fe36e11bae8a4303a5f38433 Reviewed-on: http://review.couchbase.org/69714 Reviewed-by: Will Gardner <willg@rdner.io> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- src/fts.c | 1 - 1 file changed, 1 deletion(-) diff --git a/src/fts.c b/src/fts.c index 2fa6f12e4..faafbd160 100644 --- a/src/fts.c +++ b/src/fts.c @@ -54,7 +54,6 @@ pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) "s#", kwlist, ¶ms, &nparams); if (!rv) { - PYCBC_EXCTHROW_ARGS(); return NULL; } if (-1 == pycbc_oputil_conn_lock(self)) { From df786d4e816e5344df65b47fdfecbe4169c65b7e Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 15 Nov 2016 17:15:56 -0800 Subject: [PATCH 069/829] PYCBC-366: Don't crash on get_multi w/item options Change-Id: I8d4163e65f89926360d18d224bdfdce9f5a94fe5 Reviewed-on: http://review.couchbase.org/69932 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/tests/cases/itmops_t.py | 4 ++++ src/get.c | 8 ++++++++ 2 files changed, 12 insertions(+) diff --git a/couchbase/tests/cases/itmops_t.py b/couchbase/tests/cases/itmops_t.py index 74759afad..2f9cbc1f3 100644 --- a/couchbase/tests/cases/itmops_t.py +++ b/couchbase/tests/cases/itmops_t.py @@ -193,3 +193,7 @@ def _find_item(key): self.assertEqual(1, len(bar_options)) self.assertEqual(-1, bar_options['replicate_to']) + def test_pycbc366(self): + itcoll = ItemOptionDict() + itcoll.create_and_add('foo', replica=True) + self.assertRaises(ArgumentError, self.cb.get_multi, itcoll) \ No newline at end of file diff --git a/src/get.c b/src/get.c index b47b71d8f..fb75428b0 100644 --- a/src/get.c +++ b/src/get.c @@ -72,6 +72,14 @@ handle_single_key(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, goto GT_DONE; } + /* Note, options only comes when ItemOptionsCollection and friends + * are used. When this is in effect, options is the options for the + * current item, and value is NULL. + */ + if (!curval) { + curval = options; + } + if (PyDict_Check(curval)) { rv = PyArg_ParseTupleAndKeywords(pycbc_DummyTuple, curval, "|O", kwlist, &ttl_O); From 220de5ee34522890a92418d624fbe9097225fff6 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 8 Nov 2016 11:23:07 -0800 Subject: [PATCH 070/829] fts: don't assume params are ascii encoded Change-Id: I00fcae62c3c0865cd6ae894917aacea963b1bbdb Reviewed-on: http://review.couchbase.org/69715 Reviewed-by: Will Gardner <willg@rdner.io> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- src/fts.c | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/src/fts.c b/src/fts.c index faafbd160..0ead5a8e5 100644 --- a/src/fts.c +++ b/src/fts.c @@ -46,16 +46,20 @@ pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) pycbc_ViewResult *vres; lcb_error_t rc; lcb_CMDFTS cmd = { 0 }; - const char *params; - pycbc_strlen_t nparams; + pycbc_pybuffer buf = { 0 }; + PyObject *params_o = NULL; static char *kwlist[] = { "params", NULL }; - rv = PyArg_ParseTupleAndKeywords(args, kwargs, - "s#", kwlist, ¶ms, &nparams); + rv = PyArg_ParseTupleAndKeywords(args, kwargs, "O", kwlist, ¶ms_o); if (!rv) { return NULL; } + + if (pycbc_tc_simple_encode(params_o, &buf, PYCBC_FMT_UTF8) != 0) { + return NULL; + } + if (-1 == pycbc_oputil_conn_lock(self)) { return NULL; } @@ -72,10 +76,11 @@ pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) vres->base.htype = PYCBC_HTTP_HFTS; cmd.callback = fts_row_callback; - cmd.query = params; - cmd.nquery = nparams; + cmd.query = buf.buffer; + cmd.nquery = buf.length; cmd.handle = &vres->base.u.fts; rc = lcb_fts_query(self->instance, mres, &cmd); + PYCBC_PYBUF_RELEASE(&buf); if (rc != LCB_SUCCESS) { PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, rc, "Couldn't schedule fts query"); From 01ae2f62d32f3fd4a6bf9f86f9390c59e64822a8 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Wed, 23 Nov 2016 08:59:52 -0800 Subject: [PATCH 071/829] Rename data structure APIs to conform to RFC Change-Id: If2d93d07a39e1e5231712fc223b7f0d2daf9e2ff Reviewed-on: http://review.couchbase.org/70301 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 25 +++++++++++-------------- docs/source/api/datastructures.rst | 11 ++++++----- 2 files changed, 17 insertions(+), 19 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index c87e08cd1..e5b663f60 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1837,7 +1837,7 @@ def map_remove(self, key, mapkey, **kwargs): return self._wrap_dsop(sdres) @_dsop() - def map_length(self, key): + def map_size(self, key): """ Get the number of items in the map. @@ -1903,12 +1903,12 @@ def list_prepend(self, key, value, create=False, **kwargs): return self._wrap_dsop(sdres) @_dsop() - def list_insert(self, key, index, value, **kwargs): + def list_set(self, key, index, value, **kwargs): """ - Insert an item within a list at a given position. + Sets an item within a list at a given position. :param key: The key of the document - :param index: The position at which the item should be inserted + :param index: The position to replace :param value: The value to be inserted :param kwargs: Additional arguments to :meth:`mutate_in` :return: :class:`OperationResult` @@ -1918,12 +1918,12 @@ def list_insert(self, key, index, value, **kwargs): example:: cb.upsert('a_list', ['hello', 'world']) - cb.list_insert('a_list', 1, 'good') - cb.get('a_list').value # => ['hello', 'good', 'world'] + cb.list_set('a_list', 1, 'good') + cb.get('a_list').value # => ['hello', 'good'] .. seealso:: :meth:`map_add`, :meth:`list_append` """ - op = SD.array_insert('[{0}]'.format(index), value) + op = SD.replace('[{0}]'.format(index), value) sdres = self.mutate_in(key, op, **kwargs) return self._wrap_dsop(sdres) @@ -1974,7 +1974,7 @@ def set_remove(self, key, value, **kwargs): except ValueError: return - def set_length(self, key): + def set_size(self, key): """ Get the length of a set. @@ -1985,7 +1985,7 @@ def set_length(self, key): """ return self.list_length(key) - def set_exists(self, key, value): + def set_contains(self, key, value): """ Determine if an item exists in a set :param key: The document ID of the set @@ -2020,19 +2020,17 @@ def list_remove(self, key, index, **kwargs): :return: :class:`OperationResult` :raise: :exc:`IndexError` if the index does not exist :raise: :cb_exc:`NotFoundError` if the list does not exist - """ return self.map_remove(key, '[{0}]'.format(index), **kwargs) @_dsop() - def list_length(self, key): + def list_size(self, key): """ Retrieve the number of elements in the list. :param key: The document ID of the list :return: The number of elements within the list :raise: :cb_exc:`NotFoundError` if the list does not exist - """ return self.map_length(key) @@ -2056,7 +2054,6 @@ def queue_push(self, key, value, create=False, **kwargs): cb.remove('a_queue') cb.queue_push('a_queue', 'job9999', create=True) cb.queue_pop('a_queue').value # => job9999 - """ return self.list_prepend(key, value, **kwargs) @@ -2087,7 +2084,7 @@ def queue_pop(self, key, **kwargs): raise E.QueueEmpty @_dsop() - def queue_length(self, key): + def queue_size(self, key): """ Get the length of the queue. diff --git a/docs/source/api/datastructures.rst b/docs/source/api/datastructures.rst index a1ec0c40c..af3cae1a2 100644 --- a/docs/source/api/datastructures.rst +++ b/docs/source/api/datastructures.rst @@ -69,21 +69,22 @@ The datastructures are: .. automethod:: map_add .. automethod:: map_get - .. automethod:: map_length + .. automethod:: map_size .. automethod:: map_remove .. automethod:: list_append .. automethod:: list_prepend - .. automethod:: list_insert + .. automethod:: list_set .. automethod:: list_get .. automethod:: list_remove - .. automethod:: list_length + .. automethod:: list_size - .. automethod:: set_length + .. automethod:: set_size .. automethod:: set_add .. automethod:: set_remove - .. automethod:: set_exists + .. automethod:: set_contains .. automethod:: queue_push .. automethod:: queue_pop + .. automethod:: queue_size From 1aa78f1559fe2407d664b7d5fd1f885359750147 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 1 Dec 2016 16:05:15 -0800 Subject: [PATCH 072/829] Data structure renaming fixes The previous commit didn't pass tests. This commit fixes these issues. Tests pass, and some bugs were fixed from the refactor Change-Id: I887a0983cb40f4eb369c45f39904dd547ebbe2bc Reviewed-on: http://review.couchbase.org/70541 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 13 ++++++++--- couchbase/tests/cases/datastructures_t.py | 27 +++++++++++++---------- 2 files changed, 25 insertions(+), 15 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index e5b663f60..add7f46fe 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1983,7 +1983,7 @@ def set_size(self, key): :raise: :cb_exc:`NotFoundError` if the set does not exist. """ - return self.list_length(key) + return self.list_size(key) def set_contains(self, key, value): """ @@ -2032,7 +2032,7 @@ def list_size(self, key): :return: The number of elements within the list :raise: :cb_exc:`NotFoundError` if the list does not exist """ - return self.map_length(key) + return self.map_size(key) @_dsop(create_type=list) def queue_push(self, key, value, create=False, **kwargs): @@ -2092,4 +2092,11 @@ def queue_size(self, key): :return: The length of the queue :raise: :cb_exc:`NotFoundError` if the queue does not exist. """ - return self.list_length(key) \ No newline at end of file + return self.list_length(key) + + + def get_attribute(self, key, attrname): + pass + + def set_attribute(self, key, attrname): + pass \ No newline at end of file diff --git a/couchbase/tests/cases/datastructures_t.py b/couchbase/tests/cases/datastructures_t.py index 88250f388..55dec885f 100644 --- a/couchbase/tests/cases/datastructures_t.py +++ b/couchbase/tests/cases/datastructures_t.py @@ -34,12 +34,12 @@ def test_map(self): rv = cb.map_get(key, 'key1') self.assertEqual('val1', rv.value) - self.assertEqual(1, cb.map_length(key)) + self.assertEqual(1, cb.map_size(key)) self.assertRaises(IndexError, cb.map_remove, key, 'key2') rv = cb.map_remove(key, 'key1') self.assertTrue(rv.success) - self.assertEqual(0, cb.map_length(key)) + self.assertEqual(0, cb.map_size(key)) def test_list(self): key = self.gen_key('dslist') @@ -59,16 +59,19 @@ def test_list(self): self.assertTrue(rv.success) self.assertEqual('before', cb.list_get(key, 0).value) self.assertEqual('hello', cb.list_get(key, 1).value) - self.assertEqual(2, cb.list_length(key)) + self.assertEqual(2, cb.list_size(key)) - # Insert value - rv = cb.list_insert(key, 1, 'between') + rv = cb.list_remove(key, 1) self.assertTrue(rv.success) - self.assertEqual(['before', 'between', 'hello'], cb.get(key).value) + self.assertEqual(['before'], cb.get(key).value) - rv = cb.list_remove(key, 1) + rv = cb.list_append(key, 'world') + self.assertTrue(rv.success) + self.assertEqual(['before', 'world'], cb.get(key).value) + + rv = cb.list_set(key, 1, 'after') self.assertTrue(rv.success) - self.assertEqual(['before', 'hello'], cb.get(key).value) + self.assertEqual(['before', 'after'], cb.get(key).value) def test_sets(self): key = self.gen_key('dsset') @@ -80,15 +83,15 @@ def test_sets(self): self.assertTrue(rv.success) rv = cb.set_add(key, 123) self.assertFalse(rv) - self.assertEqual(1, cb.set_length(key)) - self.assertTrue(cb.set_exists(key, 123)) + self.assertEqual(1, cb.set_size(key)) + self.assertTrue(cb.set_contains(key, 123)) rv = cb.set_remove(key, 123) self.assertTrue(rv.success) - self.assertEqual(0, cb.set_length(key)) + self.assertEqual(0, cb.set_size(key)) rv = cb.set_remove(key, 123) self.assertFalse(rv) - self.assertFalse(cb.set_exists(key, 123)) + self.assertFalse(cb.set_contains(key, 123)) def test_queue(self): key = self.gen_key('a_queue') From 28a0ee3e8804a095f3e9c2006bcf725db60b8580 Mon Sep 17 00:00:00 2001 From: Chanwoong Kim <me@chanwoong.kim> Date: Fri, 16 Dec 2016 11:01:01 +0900 Subject: [PATCH 073/829] Store reference count of 'excinstance' before assert ---- Use the correct type ---- Store reference count of 'excinstance' before assert Reference count of 'excinstance' is not always 1 Change-Id: I64d88b61e6cbc09fe23bc489e16137313978949b Reviewed-on: http://review.couchbase.org/71318 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- src/exceptions.c | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/exceptions.c b/src/exceptions.c index d197aa152..7aac6232c 100644 --- a/src/exceptions.c +++ b/src/exceptions.c @@ -45,6 +45,7 @@ pycbc_exc_wrap_REAL(int mode, struct pycbc_exception_params *p) PyObject *excparams; PyObject *excinstance; PyObject *ctor_args; + Py_ssize_t excinstance_refcnt; PyErr_Fetch(&type, &value, &traceback); PyErr_Clear(); @@ -96,9 +97,10 @@ pycbc_exc_wrap_REAL(int mode, struct pycbc_exception_params *p) Py_XDECREF(traceback); } else { + excinstance_refcnt = Py_REFCNT(excinstance); Py_INCREF(Py_TYPE(excinstance)); PyErr_Restore((PyObject*)Py_TYPE(excinstance), excinstance, traceback); - PYCBC_REFCNT_ASSERT(Py_REFCNT(excinstance) == 1); + PYCBC_REFCNT_ASSERT(Py_REFCNT(excinstance) == excinstance_refcnt); } } From a15c7005ff855aa8ae568a384f8022c4d8f309ca Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 30 Dec 2016 08:02:40 -0800 Subject: [PATCH 074/829] PYCBC-365: Make FTS API conformant with RFC * StringQuery -> QueryStringQuery * Change 'SearchQuery' to 'Query'. The former name is used in the RFC for a different purpose, and the Python SDK does not have a fluent API or 'god-query-object': Combining the index name, query parameters and query terms is done as arguments of the `bucket.search()` method. * Add `Params.consistency` and `Params.consistent_with` * Fix bug for MatchAllQuery/MatchNoneQuery * Add `Params.sort` field Change-Id: I76d45653d82bcc8168f41d400a7a4ed39f5bc668 Reviewed-on: http://review.couchbase.org/71436 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/fulltext.py | 97 ++++++++++++++++++++------ couchbase/n1ql.py | 9 +++ couchbase/tests/cases/cbftstrings_t.py | 62 ++++++++++++---- docs/source/api/fulltext.rst | 6 +- 4 files changed, 136 insertions(+), 38 deletions(-) diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py index af36b7ffd..7553064b2 100644 --- a/couchbase/fulltext.py +++ b/couchbase/fulltext.py @@ -75,6 +75,13 @@ def _highlight(value): 'Highlight must be "html" or "ansi", got {0}'.format(value)) return value +def _consistency(value): + """ + Validator for 'consistency' parameter + """ + if value and value.lower() not in ('', 'not_bounded'): + raise ValueError('Invalid value!') + return value def _assign_kwargs(self, kwargs): """ @@ -256,17 +263,30 @@ class Params(object): """ def __init__(self, **kwargs): self._json_ = {} + self._ms = None self.facets = _FacetDict(**kwargs.pop('facets', {})) _assign_kwargs(self, kwargs) - @property - def encodable(self): + def as_encodable(self, index_name): + """ + :param index_name: The name of the index for the query + :return: A dict suitable for passing to `json.dumps()` + """ if self.facets: encoded_facets = {} for name, facet in self.facets.items(): encoded_facets[name] = facet.encodable self._json_['facets'] = encoded_facets + if self._ms: + # Encode according to scan vectors.. + sv_val = { + 'level': 'at_plus', + 'vectors': { + index_name: self._ms._to_fts_encodable() + } + } + self._json_.setdefault('ctl', {})['consistency'] = sv_val return self._json_ limit = _genprop(int, 'size', doc='Maximum number of results to return') @@ -292,8 +312,38 @@ def encodable(self): Highlight the results from these fields (list) """) + sort = _genprop( + list, 'sort', doc=""" + Specify a list of fields by which to sort the results + """ + ) -class SearchQuery(object): + consistency = _genprop( + _consistency, 'ctl', 'consistency', doc=""" + Consistency for the query. Use this for 'fixed' consistency, or to + clear consistency. + + You might want to use :meth:`consistent_with` for consistency that is + bounded to specific mutations + """ + ) + + def consistent_with(self, ms): + """ + Ensure that this query is consistent with the given mutations. When + set, this ensures that only document versions as or more recent than + the provided mutations are used for the search. This is often helpful + when attempting searches on newly inserted documents. + :param ms: Mutation State + :type ms: :class:`couchbase.n1ql.MutationState` + """ + if self.consistency: + raise ValueError( + 'Clear "consistency" before specifying "consistent_with"') + self._ms = ms + + +class Query(object): """ Base query object. You probably want to use one of the subclasses. @@ -331,7 +381,7 @@ def validate(self): pass -class RawQuery(SearchQuery): +class RawQuery(Query): """ This class is used to wrap a raw query payload. It should be used for custom query parameters, or in cases where any of the other @@ -342,7 +392,7 @@ def __init__(self, obj): self._json_ = obj -class _SingleQuery(SearchQuery): +class _SingleQuery(Query): __metaclass__ = abc.ABCMeta @abc.abstractproperty @@ -409,7 +459,7 @@ def wrap(cls): return wrap -class StringQuery(_SingleQuery): +class QueryStringQuery(_SingleQuery): """ Query which allows users to describe a query in a query language. The server will then execute the appropriate query based on the contents @@ -421,7 +471,7 @@ class StringQuery(_SingleQuery): Example:: - StringQuery('description:water and stuff') + QueryStringQuery('description:water and stuff') """ _TERMPROP = 'query' @@ -541,7 +591,7 @@ class RegexQuery(_SingleQuery): RegexpQuery = RegexQuery -class _RangeQuery(SearchQuery): +class _RangeQuery(Query): __metaclass__ = abc.ABCMeta @abc.abstractproperty @@ -642,7 +692,7 @@ def __init__(self, start=None, end=None, **kwargs): _MINMAX = 'start', 'end' -class _CompoundQuery(SearchQuery): +class _CompoundQuery(Query): __metaclass__ = abc.ABCMeta @abc.abstractproperty @@ -739,7 +789,7 @@ def fset(self, value): del self._subqueries[name] elif isinstance(value, reqtype): self._subqueries[name] = value - elif isinstance(value, SearchQuery): + elif isinstance(value, Query): self._subqueries[name] = reqtype(value) else: try: @@ -749,7 +799,7 @@ def fset(self, value): l = [] for q in it: - if not isinstance(q, SearchQuery): + if not isinstance(q, Query): raise TypeError('Item is not a query!', q) l.append(q) self._subqueries[name] = reqtype(*l) @@ -760,7 +810,7 @@ def fdel(self): return property(fget, fset, fdel, doc) -class BooleanQuery(SearchQuery): +class BooleanQuery(Query): def __init__(self, must=None, should=None, must_not=None): super(BooleanQuery, self).__init__() self._subqueries = {} @@ -816,24 +866,24 @@ def validate(self): raise ValueError('No sub-queries specified', self) -class MatchAllQuery(_SingleQuery): +class MatchAllQuery(Query): """ Special query which matches all documents """ def __init__(self, **kwargs): - super(MatchAllQuery, self).__init__(None, **kwargs) - - _TERMFIELD = 'match_all' + super(MatchAllQuery, self).__init__() + self._json_['match_all'] = None + _assign_kwargs(self, kwargs) -class MatchNoneQuery(_SingleQuery): +class MatchNoneQuery(Query): """ Special query which matches no documents """ def __init__(self, **kwargs): - super(MatchNoneQuery, self).__init__(None, **kwargs) - - _TERMFIELD = 'match_none' + super(MatchNoneQuery, self).__init__() + self._json_['match_none'] = None + _assign_kwargs(self, kwargs) @_with_fields('field') @@ -862,16 +912,17 @@ def make_search_body(index, query, params=None): :param index: The index name to query :param query: The query itself :param params: Modifiers for the query + :type params: :class:`couchbase.fulltext.Params` :return: A dictionary suitable for serialization """ dd = {} - if not isinstance(query, SearchQuery): - query = StringQuery(query) + if not isinstance(query, Query): + query = QueryStringQuery(query) dd['query'] = query.encodable if params: - dd.update(params.encodable) + dd.update(params.as_encodable(index)) dd['indexName'] = index return dd diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 269aea6a2..ba41cc260 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -115,6 +115,15 @@ def encode(self): """ return couchbase._to_json(self._sv) + def _to_fts_encodable(self): + if len(self._sv) > 1: + raise TypeError('Cannot use more than a single bucket with FTS') + from couchbase._pyport import single_dict_key + out = {} + for vb, params in self._sv[single_dict_key(self._sv)].items(): + out['{0}/{1}'.format(vb, params[1])] = params[0] + return out + @classmethod def decode(cls, s): """ diff --git a/couchbase/tests/cases/cbftstrings_t.py b/couchbase/tests/cases/cbftstrings_t.py index 2944ce566..fe080f940 100644 --- a/couchbase/tests/cases/cbftstrings_t.py +++ b/couchbase/tests/cases/cbftstrings_t.py @@ -2,6 +2,7 @@ from couchbase.tests.base import CouchbaseTestCase import couchbase.fulltext as cbft +from couchbase.n1ql import MutationState class FTStringsTest(CouchbaseTestCase): @@ -70,29 +71,35 @@ def test_string_query(self): 'size': 10, 'indexName': 'ix' } - q = cbft.StringQuery('q*ry', boost=2.0) + q = cbft.QueryStringQuery('q*ry', boost=2.0) p = cbft.Params(limit=10, explain=True) self.assertEqual(exp_json, cbft.make_search_body('ix', q, p)) def test_params(self): - self.assertEqual({}, cbft.Params().encodable) - self.assertEqual({'size': 10}, cbft.Params(limit=10).encodable) - self.assertEqual({'from': 100}, cbft.Params(skip=100).encodable) + self.assertEqual({}, cbft.Params().as_encodable('ix')) + self.assertEqual({'size': 10}, cbft.Params(limit=10).as_encodable('ix')) + self.assertEqual({'from': 100}, + cbft.Params(skip=100).as_encodable('ix')) self.assertEqual({'explain': True}, - cbft.Params(explain=True).encodable) + cbft.Params(explain=True).as_encodable('ix')) self.assertEqual({'highlight': {'style': 'html'}}, - cbft.Params(highlight_style='html').encodable) + cbft.Params(highlight_style='html').as_encodable('ix')) self.assertEqual({'highlight': {'style': 'ansi', 'fields': ['foo', 'bar', 'baz']}}, cbft.Params(highlight_style='ansi', highlight_fields=['foo', 'bar', 'baz']) - .encodable) + .as_encodable('ix')) self.assertEqual({'fields': ['foo', 'bar', 'baz']}, - cbft.Params(fields=['foo', 'bar', 'baz']).encodable) + cbft.Params(fields=['foo', 'bar', 'baz'] + ).as_encodable('ix')) + + self.assertEqual({'sort': ['f1', 'f2', '-_score']}, + cbft.Params(sort=['f1', 'f2', '-_score'] + ).as_encodable('ix')) p = cbft.Params(facets={ 'term': cbft.TermFacet('somefield', limit=10), @@ -123,7 +130,7 @@ def test_params(self): }, } } - self.assertEqual(exp, p.encodable) + self.assertEqual(exp, p.as_encodable('ix')) def test_facets(self): p = cbft.Params() @@ -231,9 +238,9 @@ def test_conjunction_query(self): self.assertEqual({'conjuncts': [{'prefix': 'somePrefix'}]}, cq.encodable) - def match_all_none_queries(self): + def test_match_all_none_queries(self): self.assertEqual({'match_all': None}, cbft.MatchAllQuery().encodable) - self.assertEqual({'match_none': None}, cbft.MatchNoneQuery.encodable) + self.assertEqual({'match_none': None}, cbft.MatchNoneQuery().encodable) def test_phrase_query(self): pq = cbft.PhraseQuery('salty', 'beers') @@ -254,4 +261,35 @@ def test_regexp_query(self): def test_booleanfield_query(self): bq = cbft.BooleanFieldQuery(True) - self.assertEqual({'bool': True}, bq.encodable) \ No newline at end of file + self.assertEqual({'bool': True}, bq.encodable) + + def test_consistency(self): + uuid = str('10000') + vb = 42 + seq = 101 + ixname = 'ix' + + mutinfo = (vb, uuid, seq, 'dummy-bucket-name') + ms = MutationState() + ms._add_scanvec(mutinfo) + + params = cbft.Params() + params.consistent_with(ms) + got = cbft.make_search_body('ix', cbft.MatchNoneQuery(), params) + exp = { + 'indexName': ixname, + 'query': { + 'match_none': None + }, + 'ctl': { + 'consistency': { + 'level': 'at_plus', + 'vectors': { + ixname: { + '{0}/{1}'.format(vb, uuid): seq + } + } + } + } + } + self.assertEqual(exp, got) \ No newline at end of file diff --git a/docs/source/api/fulltext.rst b/docs/source/api/fulltext.rst index 1e9e79e93..47f96b2dd 100644 --- a/docs/source/api/fulltext.rst +++ b/docs/source/api/fulltext.rst @@ -14,7 +14,7 @@ Couchbase offers full-text queries in version 4.5. To issue a query from the Python SDK, use the :cb_bmeth:`search` bucket method. To perform an actual search, define a search index (via Couchbase web UI, or -using the REST interface), create a :class:`SearchQuery` object and then pass the +using the REST interface), create a :class:`Query` object and then pass the index name and the query object to the `search` method: .. code-block:: python @@ -43,7 +43,7 @@ term; :class:`TermQuery` to match a field exactly, :class:`PrefixQuery` for type-ahead queries, or a compound query type such as :class:`ConjunctionQuery` for more complex queries. -.. autoclass:: SearchQuery +.. autoclass:: Query :members: ============= @@ -68,7 +68,7 @@ Match Queries .. autoclass:: BooleanFieldQuery :members: -.. autoclass:: StringQuery +.. autoclass:: QueryStringQuery :members: .. autoclass:: DocIdQuery From 8a123d9f71683d77e4309e52622a59f569d3ad5a Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 30 Dec 2016 10:03:33 -0800 Subject: [PATCH 075/829] PYCBC-371: Remove 'experimental' designation from data structures Change-Id: Idc4cd3c988552d32452f8e7ee631afd2195bb8dc Reviewed-on: http://review.couchbase.org/71437 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 2 -- couchbase/tests/cases/datastructures_t.py | 1 - docs/source/api/datastructures.rst | 18 ++---------------- 3 files changed, 2 insertions(+), 19 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index add7f46fe..248ecade2 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -94,13 +94,11 @@ def __exit__(self, *args): def _dsop(create_type=None, wrap_missing_path=True): - from couchbase.experimental import enabled_or_raise import functools def real_decorator(fn): @functools.wraps(fn) def newfn(self, key, *args, **kwargs): - enabled_or_raise() try: return fn(self, key, *args, **kwargs) except E.NotFoundError: diff --git a/couchbase/tests/cases/datastructures_t.py b/couchbase/tests/cases/datastructures_t.py index 55dec885f..ae1fac21b 100644 --- a/couchbase/tests/cases/datastructures_t.py +++ b/couchbase/tests/cases/datastructures_t.py @@ -17,7 +17,6 @@ from couchbase.tests.base import ConnectionTestCase, SkipTest import couchbase.exceptions as E -import couchbase.experimental; couchbase.experimental.enable() class DatastructureTest(ConnectionTestCase): diff --git a/docs/source/api/datastructures.rst b/docs/source/api/datastructures.rst index af3cae1a2..e78ac275b 100644 --- a/docs/source/api/datastructures.rst +++ b/docs/source/api/datastructures.rst @@ -9,27 +9,13 @@ Datastructures API .. versionadded:: 2.1.1 -The datastructure API is a new experimental API added which allows your -application to view Couchbase documents as common data structures such as -lists, maps, and queues. +The datastructure API allows your application to view Couchbase documents as +common data structures such as lists, maps, and queues. Datastructure operations are implemented largely using :mod:`couchbase.subdocument` operations, and also carry with some more efficiency. -.. warning:: - - The data structure API is currently experimental. The API may change - or be removed entirely. - - -In order to use this API you must first enable experimental functionality: - -.. code-block:: python - - import couchbase.experimental - couchbase.experimental.enable() - Couchbase datatypes are still JSON underneath, and use the :meth:`couchbase.bucket.Bucket.mutate_in` and From d672e1bc1ac0dbe1dc0a1932d9b5d576d812e8f8 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 3 Jan 2017 10:46:37 -0800 Subject: [PATCH 076/829] Bump lcb version dependency 2.6.1 added a new constant which the Python SDK uses. Change-Id: Iaf9e55b6b175ca9bd0851ecb67271e91d9d5f352 Reviewed-on: http://review.couchbase.org/71513 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- src/pycbc.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/pycbc.h b/src/pycbc.h index d08ef3bc0..aca2c7397 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -28,8 +28,8 @@ #include <libcouchbase/n1ql.h> #include <libcouchbase/cbft.h> #include <libcouchbase/ixmgmt.h> -#if LCB_VERSION < 0x020600 -#error "Couchbase Python SDK requires libcouchbase 2.6.0 or greater" +#if LCB_VERSION < 0x020601 +#error "Couchbase Python SDK requires libcouchbase 2.6.1 or greater" #endif #include <pythread.h> From e872975b1ccf28fffe61ac4e306e4250a54d62a3 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 3 Jan 2017 10:37:26 -0800 Subject: [PATCH 077/829] Move MutationState to own file This is now shared between N1QL and FTS Change-Id: Icee1f872383e4f0df767ac80279981efb0c7cd69 Reviewed-on: http://review.couchbase.org/71514 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/fulltext.py | 2 +- couchbase/mutation_state.py | 173 ++++++++++++++++++++++++++++++++++++ couchbase/n1ql.py | 160 +-------------------------------- setup.py | 1 + 4 files changed, 179 insertions(+), 157 deletions(-) create mode 100644 couchbase/mutation_state.py diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py index 7553064b2..84834732f 100644 --- a/couchbase/fulltext.py +++ b/couchbase/fulltext.py @@ -335,7 +335,7 @@ def consistent_with(self, ms): the provided mutations are used for the search. This is often helpful when attempting searches on newly inserted documents. :param ms: Mutation State - :type ms: :class:`couchbase.n1ql.MutationState` + :type ms: :class:`couchbase.mutation_state.MutationState` """ if self.consistency: raise ValueError( diff --git a/couchbase/mutation_state.py b/couchbase/mutation_state.py new file mode 100644 index 000000000..4c281d986 --- /dev/null +++ b/couchbase/mutation_state.py @@ -0,0 +1,173 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import couchbase +from couchbase.exceptions import CouchbaseError + + +class MissingTokenError(CouchbaseError): + pass + + +class MutationState(object): + """ + .. warning:: + + The API and implementation of this class are subject to change. + + This class acts as a container for one or more mutations. It may + then be used with the :meth:`~.N1QLQuery.consistent_with` method to + indicate that a given query should be bounded by the contained + mutations. + + Using `consistent_with` is similar to setting + :attr:`~.N1QLQuery.consistency` to :data:`REQUEST_PLUS`, + but is more optimal as the query will use cached data, *except* + when the given mutation(s) are concerned. This option is useful + for use patterns when an application has just performed a mutation, + and wishes to perform a query in which the newly-performed mutation + should reflect on the query results. + + .. note:: + + This feature requires Couchbase Server 4.5 or greater, + and also requires that `fetch_mutation_tokens=true` + be specified in the connection string when creating + a :class:`~couchbase.bucket.Bucket` + + .. code-block:: python + + cb = Bucket('couchbase://localhost/default?fetch_mutation_tokens=true') + + rvs = cb.upsert_multi({ + 'foo': {'type': 'user', 'value': 'a foo value'}, + 'bar': {'type': 'user', 'value': 'a bar value'} + }) + + nq = N1QLQuery('SELECT type, value FROM default WHERE type="user"') + ms = MutationToken() + ms.add_result(rv + nq.consistent_with_ops(*rvs.values()) + for row in cb.n1ql_query(nq): + # ... + """ + def __init__(self, *docs): + self._sv = {} + if docs: + self.add_results(*docs) + + def _add_scanvec(self, mutinfo): + """ + Internal method used to specify a scan vector. + :param mutinfo: A tuple in the form of + `(vbucket id, vbucket uuid, mutation sequence)` + """ + vb, uuid, seq, bktname = mutinfo + self._sv.setdefault(bktname, {})[vb] = (seq, str(uuid)) + + def encode(self): + """ + Encodes this state object to a string. This string may be passed + to the :meth:`decode` at a later time. The returned object is safe + for sending over the network. + :return: A serialized string representing the state + """ + return couchbase._to_json(self._sv) + + def _to_fts_encodable(self): + if len(self._sv) > 1: + raise TypeError('Cannot use more than a single bucket with FTS') + from couchbase._pyport import single_dict_key + out = {} + for vb, params in self._sv[single_dict_key(self._sv)].items(): + out['{0}/{1}'.format(vb, params[1])] = params[0] + return out + + @classmethod + def decode(cls, s): + """ + Create a :class:`MutationState` from the encoded string + :param s: The encoded string + :return: A new MutationState restored from the string + """ + d = couchbase._from_json(s) + o = MutationState() + o._sv = d + # TODO: Validate + + def add_results(self, *rvs, **kwargs): + """ + Changes the state to reflect the mutation which yielded the given + result. + + In order to use the result, the `fetch_mutation_tokens` option must + have been specified in the connection string, _and_ the result + must have been successful. + + :param rvs: One or more :class:`~.OperationResult` which have been + returned from mutations + :param quiet: Suppress errors if one of the results does not + contain a convertible state. + :return: `True` if the result was valid and added, `False` if not + added (and `quiet` was specified + :raise: :exc:`~.MissingTokenError` if `result` does not contain + a valid token + """ + if not rvs: + raise MissingTokenError.pyexc(message='No results passed') + for rv in rvs: + mi = rv._mutinfo + if not mi: + if kwargs.get('quiet'): + return False + raise MissingTokenError.pyexc( + message='Result does not contain token') + self._add_scanvec(mi) + return True + + def add_all(self, bucket, quiet=False): + """ + Ensures the query result is consistent with all prior + mutations performed by a given bucket. + + Using this function is equivalent to keeping track of all + mutations performed by the given bucket, and passing them to + :meth:`~add_result` + + :param bucket: A :class:`~couchbase.bucket.Bucket` object + used for the mutations + :param quiet: If the bucket contains no valid mutations, this + option suppresses throwing exceptions. + :return: `True` if at least one mutation was added, `False` if none + were added (and `quiet` was specified) + :raise: :exc:`~.MissingTokenError` if no mutations were added and + `quiet` was not specified + """ + added = False + for mt in bucket._mutinfo(): + added = True + self._add_scanvec(mt) + if not added and not quiet: + raise MissingTokenError('Bucket object contains no tokens!') + return added + + def __repr__(self): + return repr(self._sv) + + def __nonzero__(self): + return bool(self._sv) + + __bool__ = __nonzero__ diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index ba41cc260..1ee844e3d 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -16,11 +16,13 @@ # import json -import couchbase from couchbase._pyport import basestring from couchbase.views.iterator import AlreadyQueriedError from couchbase.exceptions import CouchbaseError, NotSupportedError +# Not used internally, but by other modules +from couchbase.mutation_state import MutationState, MissingTokenError + class N1QLError(CouchbaseError): @property @@ -46,161 +48,6 @@ def n1ql_errcode(self): CONSISTENCY_NONE = UNBOUNDED -class MissingTokenError(CouchbaseError): - pass - - -class MutationState(object): - """ - .. warning:: - - The API and implementation of this class are subject to change. - - This class acts as a container for one or more mutations. It may - then be used with the :meth:`~.N1QLQuery.consistent_with` method to - indicate that a given query should be bounded by the contained - mutations. - - Using `consistent_with` is similar to setting - :attr:`~.N1QLQuery.consistency` to :data:`REQUEST_PLUS`, - but is more optimal as the query will use cached data, *except* - when the given mutation(s) are concerned. This option is useful - for use patterns when an application has just performed a mutation, - and wishes to perform a query in which the newly-performed mutation - should reflect on the query results. - - .. note:: - - This feature requires Couchbase Server 4.5 or greater, - and also requires that `fetch_mutation_tokens=true` - be specified in the connection string when creating - a :class:`~couchbase.bucket.Bucket` - - .. code-block:: python - - cb = Bucket('couchbase://localhost/default?fetch_mutation_tokens=true') - - rvs = cb.upsert_multi({ - 'foo': {'type': 'user', 'value': 'a foo value'}, - 'bar': {'type': 'user', 'value': 'a bar value'} - }) - - nq = N1QLQuery('SELECT type, value FROM default WHERE type="user"') - ms = MutationToken() - ms.add_result(rv - nq.consistent_with_ops(*rvs.values()) - for row in cb.n1ql_query(nq): - # ... - """ - def __init__(self, *docs): - self._sv = {} - if docs: - self.add_results(*docs) - - def _add_scanvec(self, mutinfo): - """ - Internal method used to specify a scan vector. - :param mutinfo: A tuple in the form of - `(vbucket id, vbucket uuid, mutation sequence)` - """ - vb, uuid, seq, bktname = mutinfo - self._sv.setdefault(bktname, {})[vb] = (seq, str(uuid)) - - def encode(self): - """ - Encodes this state object to a string. This string may be passed - to the :meth:`decode` at a later time. The returned object is safe - for sending over the network. - :return: A serialized string representing the state - """ - return couchbase._to_json(self._sv) - - def _to_fts_encodable(self): - if len(self._sv) > 1: - raise TypeError('Cannot use more than a single bucket with FTS') - from couchbase._pyport import single_dict_key - out = {} - for vb, params in self._sv[single_dict_key(self._sv)].items(): - out['{0}/{1}'.format(vb, params[1])] = params[0] - return out - - @classmethod - def decode(cls, s): - """ - Create a :class:`MutationState` from the encoded string - :param s: The encoded string - :return: A new MutationState restored from the string - """ - d = couchbase._from_json(s) - o = MutationState() - o._sv = d - # TODO: Validate - - def add_results(self, *rvs, **kwargs): - """ - Changes the state to reflect the mutation which yielded the given - result. - - In order to use the result, the `fetch_mutation_tokens` option must - have been specified in the connection string, _and_ the result - must have been successful. - - :param rvs: One or more :class:`~.OperationResult` which have been - returned from mutations - :param quiet: Suppress errors if one of the results does not - contain a convertible state. - :return: `True` if the result was valid and added, `False` if not - added (and `quiet` was specified - :raise: :exc:`~.MissingTokenError` if `result` does not contain - a valid token - """ - if not rvs: - raise MissingTokenError.pyexc(message='No results passed') - for rv in rvs: - mi = rv._mutinfo - if not mi: - if kwargs.get('quiet'): - return False - raise MissingTokenError.pyexc( - message='Result does not contain token') - self._add_scanvec(mi) - return True - - def add_all(self, bucket, quiet=False): - """ - Ensures the query result is consistent with all prior - mutations performed by a given bucket. - - Using this function is equivalent to keeping track of all - mutations performed by the given bucket, and passing them to - :meth:`~add_result` - - :param bucket: A :class:`~couchbase.bucket.Bucket` object - used for the mutations - :param quiet: If the bucket contains no valid mutations, this - option suppresses throwing exceptions. - :return: `True` if at least one mutation was added, `False` if none - were added (and `quiet` was specified) - :raise: :exc:`~.MissingTokenError` if no mutations were added and - `quiet` was not specified - """ - added = False - for mt in bucket._mutinfo(): - added = True - self._add_scanvec(mt) - if not added and not quiet: - raise MissingTokenError('Bucket object contains no tokens!') - return added - - def __repr__(self): - return repr(self._sv) - - def __nonzero__(self): - return bool(self._sv) - - __bool__ = __nonzero__ - - class N1QLQuery(object): def __init__(self, query, *args, **kwargs): """ @@ -320,6 +167,7 @@ def consistent_with(self, state): :param state: The state of the mutations it should be consistent with. + :type state: :class:`~.couchbase.mutation_state.MutationState` """ if self.consistency not in (UNBOUNDED, 'at_plus'): raise TypeError( diff --git a/setup.py b/setup.py index 4a34e710e..f0bc07ddf 100644 --- a/setup.py +++ b/setup.py @@ -74,6 +74,7 @@ 'store', 'constants', 'multiresult', + 'mutation_state', 'miscops', 'typeutil', 'oputil', From d2e28a57cd7a243a66ae1848783340889e8955da Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 3 Jan 2017 10:58:47 -0800 Subject: [PATCH 078/829] 'mutation_state' is not a C module It doesn't need to be included in the 'module list', and in fact prevents a build. When I +1'd it, I was testing a more recently modified version which removed this line. Change-Id: I5084fc128f067eb2663de9be2f6932d5dd64e383 Reviewed-on: http://review.couchbase.org/71516 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- setup.py | 1 - 1 file changed, 1 deletion(-) diff --git a/setup.py b/setup.py index f0bc07ddf..4a34e710e 100644 --- a/setup.py +++ b/setup.py @@ -74,7 +74,6 @@ 'store', 'constants', 'multiresult', - 'mutation_state', 'miscops', 'typeutil', 'oputil', From 51ffc7d9470f11af46669f4c7024c73c226962d8 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 12 Jan 2017 08:41:47 -0800 Subject: [PATCH 079/829] Clarify how to build documentation Change-Id: I09137b72f78c747b1196065585656bc87f41a9bc Reviewed-on: http://review.couchbase.org/71919 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- README.rst | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/README.rst b/README.rst index 2a5e057b1..f464dc34e 100644 --- a/README.rst +++ b/README.rst @@ -228,6 +228,9 @@ Building documentation The documentation is using Sphinx and also needs the numpydoc Sphinx extension. +In order for the documentation to build properly, the C extension must have +been built, since there are embedded docstrings in there as well. + To build the documentation, go into the `docs` directory and run .. code-block:: sh @@ -236,6 +239,16 @@ To build the documentation, go into the `docs` directory and run The HTML output can be found in `docs/build/html/`. + +Alternatively, you can also build the documentation (after building the module +itself) from the top-level directory: + +.. code-block:: sh + + python setup.py build_sphinx + +Once built, the docs will be in in `build/sphinx/html` + ------- Testing ------- From 94626e7fcd5c2ec0ac2d4858bf16639b9a5240c0 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 12 Jan 2017 11:41:00 -0800 Subject: [PATCH 080/829] XATTR support Subdocument operations (subdocument.xxx, SD.xxx) now all support two additional (private) options: * `xattr`: Maps to `FLAG_XATTR_PATH` * `_expand_macros`: Maps to `FLAG_XATTR_PATH|FLAGS_EXPAND_MACROS` * `_access_deleted`: Maps to `FLAG_XATTR_PATH|FLAG_ACCESS_DELETED` You don't need a specific version of libcouchbase to build, but you will need libcouchbase https://github.com/couchbase/libcouchbase/commit/c9fc07986947ff36778d7e79c19480a63b6c149c At the time of writing, this is available on github master, and is not part of any release yet. Change-Id: I21b5c2594e0b9247e966001925bfedbbb12038ac Reviewed-on: http://review.couchbase.org/71923 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/subdocument.py | 98 ++++++++++++++++++++++++-------- couchbase/tests/cases/xattr_t.py | 29 ++++++++++ src/oputil.c | 7 ++- 3 files changed, 106 insertions(+), 28 deletions(-) create mode 100644 couchbase/tests/cases/xattr_t.py diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py index 352814048..b1b7b9f70 100644 --- a/couchbase/subdocument.py +++ b/couchbase/subdocument.py @@ -5,6 +5,15 @@ LCB_SDCMD_COUNTER, LCB_SDCMD_REMOVE, LCB_SDCMD_ARRAY_INSERT ) +# Defined in libcouchbase's headers. We don't want to force a hard dependency +# on a later version of libcouchbase just for these flags, especially +# considering that most users will only be using _F_MKDIR_P +_F_MKDIR_P = 1 << 16 +_F_MKDOC = 1 << 17 +_F_XATTR = 1 << 18 +_F_EXPAND_MACROS = 1 << 19 +_F_ACCESS_DELETED = 1 << 20 + _SPECMAP = {} for k, v in tuple(globals().items()): if not k.startswith('LCB_SDCMD_'): @@ -25,12 +34,43 @@ def __repr__(self): ', '.join(details)) -def _gen_2spec(op, path): - return Spec(op, path) +def _gen_3spec(op, path, xattr=False, _access_deleted=False): + """ + Returns a Spec tuple suitable for passing to the underlying C extension. + This variant is called for operations that lack an input value. + :param str path: The path to fetch + :param bool xattr: Whether this is an extended attribute + :return: a spec suitable for passing to the underlying C extension + """ + flags = 0 + if xattr: + flags |= _F_XATTR + if _access_deleted: + flags |= _F_ACCESS_DELETED + return Spec(op, path, flags) -def _gen_4spec(op, path, value, create=False): - return Spec(op, path, value, int(create)) + +def _gen_4spec(op, path, value, + create_path=False, create_doc=False, xattr=False, + _expand_macros=False, _access_deleted=False): + """ + Like `_gen_3spec`, but also accepts a mandatory value as its third argument + :param bool _expand_macros: Whether macros in the value should be expanded. + The macros themselves are defined at the server side + """ + flags = 0 + if create_path: + flags |= _F_MKDIR_P + if create_doc: + flags |= _F_MKDOC + if xattr: + flags |= _F_XATTR + if _expand_macros: + flags |= _F_EXPAND_MACROS + if _access_deleted: + flags |= _F_ACCESS_DELETED + return Spec(op, path, flags, value) class MultiValue(tuple): @@ -44,19 +84,18 @@ def __repr__(self): # The following functions return either 2-tuples or 4-tuples for operations # which are converted into mutation or lookup specifications -def get(path): +def get(path, **kwargs): """ Retrieve the value from the given path. The value is returned in the result. Valid only in :cb_bmeth:`lookup_in` :param path: The path to retrieve - .. seealso:: :meth:`exists` """ - return _gen_2spec(LCB_SDCMD_GET, path) + return _gen_3spec(LCB_SDCMD_GET, path, **kwargs) -def exists(path): +def exists(path, **kwargs): """ Check if a given path exists. This is the same as :meth:`get()`, but the result will not contain the value. @@ -64,10 +103,10 @@ def exists(path): :param path: The path to check """ - return _gen_2spec(LCB_SDCMD_EXISTS, path) + return _gen_3spec(LCB_SDCMD_EXISTS, path, **kwargs) -def upsert(path, value, create_parents=False): +def upsert(path, value, create_parents=False, **kwargs): """ Create or replace a dictionary path. @@ -95,10 +134,11 @@ def upsert(path, value, create_parents=False): `baz` value. """ - return _gen_4spec(LCB_SDCMD_DICT_UPSERT, path, value, create_parents) + return _gen_4spec(LCB_SDCMD_DICT_UPSERT, path, value, + create_path=create_parents, **kwargs) -def replace(path, value): +def replace(path, value, **kwargs): """ Replace an existing path. This works on any valid path if the path already exists. Valid only in :cb_bmeth:`mutate_in` @@ -106,10 +146,11 @@ def replace(path, value): :param path: The path to replace :param value: The new value """ - return _gen_4spec(LCB_SDCMD_REPLACE, path, value, False) + return _gen_4spec(LCB_SDCMD_REPLACE, path, value, + create_path=False, **kwargs) -def insert(path, value, create_parents=False): +def insert(path, value, create_parents=False, **kwargs): """ Create a new path in the document. The final path element points to a dictionary key that should be created. Valid only in :cb_bmeth:`mutate_in` @@ -118,7 +159,8 @@ def insert(path, value, create_parents=False): :param value: Value for the path :param create_parents: Whether intermediate parents should be created """ - return _gen_4spec(LCB_SDCMD_DICT_ADD, path, value, create_parents) + return _gen_4spec(LCB_SDCMD_DICT_ADD, path, value, + create_path=create_parents, **kwargs) def array_append(path, *values, **kwargs): @@ -143,7 +185,9 @@ def array_append(path, *values, **kwargs): .. seealso:: :func:`array_prepend`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_ADD_LAST, path, - MultiValue(*values), kwargs.get('create_parents', False)) + MultiValue(*values), + create_path=kwargs.pop('create_parents', False), + **kwargs) def array_prepend(path, *values, **kwargs): @@ -160,10 +204,12 @@ def array_prepend(path, *values, **kwargs): .. seealso:: :func:`array_append`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_ADD_FIRST, path, - MultiValue(*values), kwargs.get('create_parents', False)) + MultiValue(*values), + create_path=kwargs.pop('create_parents', False), + **kwargs) -def array_insert(path, *values): +def array_insert(path, *values, **kwargs): """ Insert items at a given position within an array. @@ -176,10 +222,10 @@ def array_insert(path, *values): .. seealso:: :func:`array_prepend`, :func:`upsert` """ return _gen_4spec(LCB_SDCMD_ARRAY_INSERT, path, - MultiValue(*values), False) + MultiValue(*values), **kwargs) -def array_addunique(path, value, create_parents=False): +def array_addunique(path, value, create_parents=False, **kwargs): """ Add a new value to an array if the value does not exist. @@ -199,10 +245,11 @@ def array_addunique(path, value, create_parents=False): .. seealso:: :func:`array_append`, :func:`upsert` """ - return _gen_4spec(LCB_SDCMD_ARRAY_ADD_UNIQUE, path, value, create_parents) + return _gen_4spec(LCB_SDCMD_ARRAY_ADD_UNIQUE, path, value, + create_path=create_parents, **kwargs) -def counter(path, delta, create_parents=False): +def counter(path, delta, create_parents=False, **kwargs): """ Increment or decrement a counter in a document. @@ -226,10 +273,11 @@ def counter(path, delta, create_parents=False): """ if not delta: raise ValueError("Delta must be positive or negative!") - return _gen_4spec(LCB_SDCMD_COUNTER, path, delta, create_parents) + return _gen_4spec(LCB_SDCMD_COUNTER, path, delta, + create_path=create_parents, **kwargs) -def remove(path): +def remove(path, **kwargs): """ Remove an existing path in the document. @@ -237,4 +285,4 @@ def remove(path): :param path: The path to remove """ - return _gen_2spec(LCB_SDCMD_REMOVE, path) + return _gen_3spec(LCB_SDCMD_REMOVE, path, **kwargs) diff --git a/couchbase/tests/cases/xattr_t.py b/couchbase/tests/cases/xattr_t.py new file mode 100644 index 000000000..e9900d321 --- /dev/null +++ b/couchbase/tests/cases/xattr_t.py @@ -0,0 +1,29 @@ +from couchbase.tests.base import MockTestCase +from couchbase import FMT_UTF8 +import couchbase.subdocument as SD +import couchbase.exceptions as E + +class XattrTest(MockTestCase): + def test_xattrs_basic(self): + cb = self.cb + k = self.gen_key('xattrs') + cb.upsert(k, {}) + + # Try to upsert a single xattr + rv = cb.mutate_in(k, SD.upsert('my.attr', 'value', + xattr=True, + create_parents=True)) + self.assertTrue(rv.success) + + body = cb.get(k) + self.assertFalse('my' in body.value) + self.assertFalse('my.attr' in body.value) + + # Try using lookup_in + rv = cb.retrieve_in(k, 'my.attr') + self.assertFalse(rv.exists('my.attr')) + + # Finally, use lookup_in with 'xattrs' attribute enabled + rv = cb.lookup_in(k, SD.get('my.attr', xattr=True)) + self.assertTrue(rv.exists('my.attr')) + self.assertEqual('value', rv['my.attr']) \ No newline at end of file diff --git a/src/oputil.c b/src/oputil.c index 27a01e587..035f3718d 100644 --- a/src/oputil.c +++ b/src/oputil.c @@ -528,14 +528,15 @@ sd_convert_spec(PyObject *pyspec, lcb_SDSPEC *sdspec, { PyObject *path = NULL; PyObject *val = NULL; - int op = 0, create = 0; + int op = 0; + unsigned flags = 0; if (!PyTuple_Check(pyspec)) { PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ARGUMENTS, 0, "Expected tuple for spec", pyspec); return -1; } - if (!PyArg_ParseTuple(pyspec, "iO|Oi", &op, &path, &val, &create)) { + if (!PyArg_ParseTuple(pyspec, "iOI|O", &op, &path, &flags, &val)) { PYCBC_EXCTHROW_ARGS(); return -1; } @@ -544,7 +545,7 @@ sd_convert_spec(PyObject *pyspec, lcb_SDSPEC *sdspec, } sdspec->sdcmd = op; - sdspec->options = create ? LCB_SDSPEC_F_MKINTERMEDIATES : 0; + sdspec->options = flags; LCB_SDSPEC_SET_PATH(sdspec, pathbuf->buffer, pathbuf->length); if (val != NULL) { int is_multival = 0; From 66b70a4e01a80cf12941bf0fb5e509e35ab49025 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 12 Jan 2017 10:25:58 -0800 Subject: [PATCH 081/829] PYCBC-375: SubdocResult.get() should raise exception for non-requested .. path Change-Id: Ieaa73380e253aa7600286ff91e8500d66ce7be30 Reviewed-on: http://review.couchbase.org/71920 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/result.py | 15 ++++++++++----- couchbase/tests/cases/subdoc_t.py | 4 ++++ 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/couchbase/result.py b/couchbase/result.py index fd5498e73..df09769e4 100644 --- a/couchbase/result.py +++ b/couchbase/result.py @@ -120,12 +120,17 @@ def get(self, path_or_index, default=None): :param default: If the given result does not exist, return this value instead :return: A tuple of `(error, value)`. If the entry does not exist - then `(0, default)` is returned. + then `(err, default)` is returned, where `err` is the actual error + which occurred. + You can use :meth:`couchbase.exceptions.CouchbaseError.rc_to_exctype` + to convert the error code to a proper exception class + :raise: :exc:`IndexError` or :exc:`KeyError` if `path_or_index` + is not an initially requested path. This is a programming error + as opposed to a constraint error where the path is not found. """ - try: - return self._resolve(path_or_index) - except (KeyError, IndexError): - return 0, default + err, value = self._resolve(path_or_index) + value = default if err else value + return err, value def exists(self, path_or_index): """ diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index 732545cd6..5dc886af8 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -52,6 +52,10 @@ def test_lookup_in(self): self.assertEqual(E.SubdocMultipleErrors.CODE, result.rc) self.assertEqual(E.SubdocPathNotFoundError.CODE, result.get(0)[0]) + # Ensure that we complain about a missing path + self.assertRaises((IndexError, KeyError), result.get, 33) + self.assertRaises((IndexError, KeyError), result.get, 'non-requested') + # Test with quiet result = cb.lookup_in(key, SD.exists('p'), quiet=True) self.assertFalse(result.exists('p')) From be06842739243e39f09763012b0790b0b3a15f7a Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 12 Jan 2017 11:30:51 -0800 Subject: [PATCH 082/829] PYCBC-374: Implement SubdocResult.__contains__ This is an alias to the 'exists' method, but is more idiomatic to Python Change-Id: I4645ab49e49978cea32838b72bdc9a6763d1708c Reviewed-on: http://review.couchbase.org/71921 Reviewed-by: Will Gardner <willg@rdner.io> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/result.py | 3 +++ couchbase/tests/cases/subdoc_t.py | 2 ++ 2 files changed, 5 insertions(+) diff --git a/couchbase/result.py b/couchbase/result.py index df09769e4..4fb18d183 100644 --- a/couchbase/result.py +++ b/couchbase/result.py @@ -150,6 +150,9 @@ def exists(self, path_or_index): else: raise E.exc_from_rc(result[0]) + def __contains__(self, item): + return self.exists(item) + @property def access_ok(self): """ diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index 5dc886af8..e9b632ae9 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -202,6 +202,8 @@ def test_multi_lookup(self): self.assertTrue(rvs.exists('field2')) self.assertTrue(rvs.exists(1)) + self.assertTrue(1 in rvs) + self.assertTrue('field2' in rvs) self.assertEqual((E.SubdocPathNotFoundError.CODE, None), rvs.get('field3')) From a73ffeb96682ed073c466aaecfd9a6792ae71a37 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 16 Feb 2017 08:56:38 -0800 Subject: [PATCH 083/829] PYCBC-384: Fix typo: list_size, not list_length Change-Id: If0540b05f3461cf4e572ab8ecec366ac38713def Reviewed-on: http://review.couchbase.org/73752 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/bucket.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 248ecade2..fbcc90146 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -2090,11 +2090,11 @@ def queue_size(self, key): :return: The length of the queue :raise: :cb_exc:`NotFoundError` if the queue does not exist. """ - return self.list_length(key) + return self.list_size(key) def get_attribute(self, key, attrname): pass def set_attribute(self, key, attrname): - pass \ No newline at end of file + pass From 208df9c862b37a6701c4a0f3998d8de4920a1ca5 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 17 Feb 2017 09:14:20 -0800 Subject: [PATCH 084/829] PYCBC-370: Add experimental Analytics support Change-Id: I660f8f83d1412b80d21ad1fe9a0b3052d2a7d5c2 Reviewed-on: http://review.couchbase.org/73833 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <willg@rdner.io> Reviewed-by: Brett Lawson <brett19@gmail.com> --- couchbase/bucket.py | 6 +++++ couchbase/cbas.py | 36 ++++++++++++++++++++++++++ couchbase/n1ql.py | 11 +++++--- src/bucket.c | 1 + src/n1ql.c | 63 +++++++++++++++++++++++++++++++++------------ src/oputil.h | 1 + 6 files changed, 97 insertions(+), 21 deletions(-) create mode 100644 couchbase/cbas.py diff --git a/couchbase/bucket.py b/couchbase/bucket.py index fbcc90146..8c348fef8 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1417,6 +1417,12 @@ def n1ql_query(self, query, *args, **kwargs): itercls = kwargs.pop('itercls', N1QLRequest) return itercls(query, self, *args, **kwargs) + def _analytics_query(self, query, host): + from cbas import AnalyticsRequest, AnalyticsQuery + if not isinstance(query, AnalyticsQuery): + query = AnalyticsQuery(query) + return AnalyticsRequest(query, host, self) + def search(self, index, query, **kwargs): """ Perform full-text searches diff --git a/couchbase/cbas.py b/couchbase/cbas.py new file mode 100644 index 000000000..5e5c53019 --- /dev/null +++ b/couchbase/cbas.py @@ -0,0 +1,36 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import couchbase.n1ql as N + + +class AnalyticsQuery(N.N1QLQuery): + def __init__(self, querystr): + querystr = querystr.rstrip() + if not querystr.endswith(';'): + querystr += ';' + super(AnalyticsQuery, self).__init__(querystr) + + +class AnalyticsRequest(N.N1QLRequest): + def __init__(self, params, host, parent): + self._host = host + super(AnalyticsRequest, self).__init__(params, parent) + + def _submit_query(self): + return self._parent._cbas_query(self._params.encoded, + self._host) \ No newline at end of file diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 1ee844e3d..f19b0f691 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -288,14 +288,17 @@ def __init__(self, params, parent, row_factory=lambda x: x): self.__raw = False self.__meta_received = False + def _submit_query(self): + return self._parent._n1ql_query(self._params.encoded, + not self._params.adhoc, + cross_bucket=self._params.cross_bucket) + def _start(self): if self._mres: return - self._mres = self._parent._n1ql_query( - self._params.encoded, not self._params.adhoc, - cross_bucket=self._params.cross_bucket) - + self._mres = self._submit_query() + self._mres = self._submit_query() self.__raw = self._mres[None] @property diff --git a/src/bucket.c b/src/bucket.c index 1221e3283..b9d2d93d9 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -513,6 +513,7 @@ static PyMethodDef Bucket_TABLE_methods[] = { OPFUNC(_http_request, "Internal routine for HTTP requests"), OPFUNC(_view_request, "Internal routine for view requests"), OPFUNC(_n1ql_query, "Internal routine for N1QL queries"), + OPFUNC(_cbas_query, "Internal routine for analytics queries"), OPFUNC(_fts_query, "Internal routine for Fulltext queries"), OPFUNC(_ixmanage, "Internal routine for managing indexes"), diff --git a/src/n1ql.c b/src/n1ql.c index 0e1213539..9677a0c73 100644 --- a/src/n1ql.c +++ b/src/n1ql.c @@ -37,27 +37,15 @@ n1ql_row_callback(lcb_t instance, int ign, const lcb_RESPN1QL *resp) } } -PyObject * -pycbc_Bucket__n1ql_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) -{ - int rv; +static PyObject * +query_common(pycbc_Bucket *self, const char *params, unsigned nparams, + const char *host, int is_prepared, int is_xbucket) { PyObject *ret = NULL; pycbc_MultiResult *mres; pycbc_ViewResult *vres; lcb_error_t rc; lcb_CMDN1QL cmd = { 0 }; - const char *params; - pycbc_strlen_t nparams; - int prepared = 0, cross_bucket = 0; - static char *kwlist[] = { "params", "prepare", "cross_bucket", NULL }; - rv = PyArg_ParseTupleAndKeywords( - args, kwargs, "s#|ii", kwlist, ¶ms, &nparams, &prepared, &cross_bucket); - - if (!rv) { - PYCBC_EXCTHROW_ARGS(); - return NULL; - } if (-1 == pycbc_oputil_conn_lock(self)) { return NULL; } @@ -79,12 +67,20 @@ pycbc_Bucket__n1ql_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) cmd.query = params; cmd.nquery = nparams; cmd.handle = &vres->base.u.nq; - if (prepared) { + + if (is_prepared) { cmd.cmdflags |= LCB_CMDN1QL_F_PREPCACHE; } - if (cross_bucket) { + if (is_xbucket) { cmd.cmdflags |= LCB_CMD_F_MULTIAUTH; } + + if (host) { + /* #define LCB_CMDN1QL_F_CBASQUERY 1<<18 */ + cmd.cmdflags |= (1<<18); + cmd.host = host; + } + rc = lcb_n1ql_query(self->instance, mres, &cmd); if (rc != LCB_SUCCESS) { @@ -99,4 +95,37 @@ pycbc_Bucket__n1ql_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) Py_XDECREF(mres); pycbc_oputil_conn_unlock(self); return ret; + +} + +PyObject * +pycbc_Bucket__n1ql_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +{ + const char *params = NULL; + pycbc_strlen_t nparams = 0; + int prepared = 0, cross_bucket = 0; + static char *kwlist[] = { "params", "prepare", "cross_bucket", NULL }; + if (!PyArg_ParseTupleAndKeywords( + args, kwargs, "s#|ii", kwlist, ¶ms, + &nparams, &prepared, &cross_bucket)) { + + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + return query_common(self, params, nparams, NULL, prepared, cross_bucket); +} + +PyObject * +pycbc_Bucket__cbas_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +{ + const char *host = NULL; + const char *params = NULL; + pycbc_strlen_t nparams = 0; + static char *kwlist[] = { "params", "host", NULL }; + if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#s", kwlist, + ¶ms, &nparams, &host)) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + return query_common(self, params, nparams, host, 0, 0); } diff --git a/src/oputil.h b/src/oputil.h index 1d1bed3e4..17350611c 100644 --- a/src/oputil.h +++ b/src/oputil.h @@ -351,6 +351,7 @@ PYCBC_DECL_OP(observe_multi); /* n1ql.c */ PYCBC_DECL_OP(_n1ql_query); +PYCBC_DECL_OP(_cbas_query); /* fts.c */ PYCBC_DECL_OP(_fts_query); From 69a03c1850bcfb5cd901d352f7f92a16e745b35d Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 9 Mar 2017 10:07:13 -0800 Subject: [PATCH 085/829] PYCBC-388, PYCBC-386: Add Cluster and Authenticator classes These classes are added for uniformity with other SDKs. Note that for full support of new-style "username" based auth, a yet-unreleased version of libcouchbase is required. Change-Id: I2fc351775ccc0479a39eac59f392e9de69ccefeb Reviewed-on: http://review.couchbase.org/74931 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Brett Lawson <brett19@gmail.com> --- couchbase/cluster.py | 169 +++++++++++++++++++++++++++++ couchbase/connstr.py | 9 +- couchbase/tests/cases/cluster_t.py | 38 +++++++ 3 files changed, 213 insertions(+), 3 deletions(-) create mode 100644 couchbase/cluster.py create mode 100644 couchbase/tests/cases/cluster_t.py diff --git a/couchbase/cluster.py b/couchbase/cluster.py new file mode 100644 index 000000000..c2ad2bbe8 --- /dev/null +++ b/couchbase/cluster.py @@ -0,0 +1,169 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import weakref + +from couchbase.admin import Admin +from couchbase.bucket import Bucket +from couchbase.connstr import ConnectionString + + +class Cluster(object): + def __init__(self, connection_string='couchbase://localhost', + bucket_class=Bucket): + """ + Creates a new Cluster object + :param connection_string: Base connection string. It is an error to + specify a bucket in the string. + :param bucket_class: :class:`couchbase.bucket.Bucket` implementation to + use. + """ + self.connstr = ConnectionString.parse(str(connection_string)) + self.bucket_class = bucket_class + self.authenticator = None + self._buckets = {} + if self.connstr.bucket: + raise ValueError('Cannot pass bucket to connection string: ' + self.connstr.bucket) + if 'username' in self.connstr.options: + raise ValueError('username must be specified in the authenticator, ' + 'not the connection string') + + def authenticate(self, authenticator): + """ + Set the type of authenticator to use when opening buckets or performing + cluster management operations + :param authenticator: The new authenticator to use + """ + self.authenticator = authenticator + + def open_bucket(self, bucket_name, **kwargs): + """ + Open a new connection to a Couchbase bucket + :param bucket_name: The name of the bucket to open + :param kwargs: Additional arguments to provide to the constructor + :return: An instance of the `bucket_class` object provided to + :meth:`__init__` + """ + if self.authenticator: + username, password = self.authenticator.get_credentials(bucket_name) + else: + username, password = None, None + + connstr = ConnectionString.parse(str(self.connstr)) + connstr.bucket = bucket_name + if username: + connstr.options['username'] = username + + if 'password' in kwargs: + if isinstance(self.authenticator, PasswordAuthenticator): + raise ValueError("Cannot override " + "PasswordAuthenticators password") + else: + kwargs['password'] = password + rv = self.bucket_class(str(connstr), **kwargs) + self._buckets[bucket_name] = weakref.ref(rv) + return rv + + def cluster_manager(self): + """ + Returns an instance of :class:`~.couchbase.admin.Admin` which may be + used to create and manage buckets in the cluster. + """ + username, password = self.authenticator.get_credentials() + return Admin(username, password) + + def n1ql_query(self, query, *args, **kwargs): + """ + Issue a "cluster-level" query. This requires that at least one + connection to a bucket is active. + :param query: The query string or object + :param args: Additional arguments to :cb_bmeth:`n1ql_query` + + .. seealso:: :cb_bmeth:`n1ql_query` + """ + from couchbase.n1ql import N1QLQuery + if not isinstance(query, N1QLQuery): + query = N1QLQuery(query) + + query.cross_bucket = True + + for k, v in self._buckets.items(): + bucket = v() + if bucket: + return bucket.n1ql_query(query, *args, **kwargs) + else: + del self._buckets[k] + + raise Exception('Must have at least one active bucket for query') + + +class Authenticator(object): + def get_credentials(self, bucket=None): + """ + Gets the username and password for a specified bucket. If bucket is + `None`, gets the username and password for the entire cluster, if + different. + :param bucket: The bucket to act as context + :return: A tuple of `username, password` + """ + raise NotImplemented() + + +class PasswordAuthenticator(Authenticator): + def __init__(self, username, password): + """ + This class uses a single credential pair of username and password, and + is designed to be used either with cluster management operations or + with Couchbase 5.0 style usernames with role based access control. + + For older cluster versions, or if you are only using a bucket's "SASL" + password, use :class:`~.ClassicAuthenticator` + + :param username: + :param password: + + .. warning:: This functionality is experimental both in API and + implementation. + + """ + self.username = username + self.password = password + + def get_credentials(self, *unused): + return self.username, self.password + + +class ClassicAuthenticator(Authenticator): + def __init__(self, cluster_username=None, + cluster_password=None, + buckets=None): + """ + Classic authentication mechanism. + :param cluster_username: + Global cluster username. Only required for management operations + :param cluster_password: + Global cluster password. Only required for management operations + :param buckets: + A dictionary of `{bucket_name: bucket_password}`. + """ + self.username = cluster_username + self.password = cluster_password + self.buckets = buckets if buckets else {} + + def get_credentials(self, bucket=None): + if not bucket: + return self.username, self.password + return None, self.buckets.get(bucket) diff --git a/couchbase/connstr.py b/couchbase/connstr.py index 99cd6f6f8..df5905e92 100644 --- a/couchbase/connstr.py +++ b/couchbase/connstr.py @@ -122,9 +122,12 @@ def encode(self): for k, v in self.options.items(): opt_dict[k] = v[0] - ss = '{scheme}://{hosts}/{bucket}?{options}'.format( - scheme=self.scheme, hosts=','.join(self.hosts), bucket=self.bucket, - options=urlencode(opt_dict)) + ss = '{0}://{1}'.format(self.scheme, ','.join(self.hosts)) + if self.bucket: + ss += '/' + self.bucket + + ss += '?' + urlencode(opt_dict) + return ss def __str__(self): diff --git a/couchbase/tests/cases/cluster_t.py b/couchbase/tests/cases/cluster_t.py new file mode 100644 index 000000000..cff928271 --- /dev/null +++ b/couchbase/tests/cases/cluster_t.py @@ -0,0 +1,38 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from couchbase.tests.base import CouchbaseTestCase +from couchbase.connstr import ConnectionString +from couchbase.cluster import Cluster, ClassicAuthenticator + + +class ClusterTest(CouchbaseTestCase): + def test_cluster(self): + connargs = self.make_connargs() + connstr = ConnectionString.parse(str(connargs.pop('connection_string'))) + password = connstr.options.pop('password', '') + connstr.options.pop('username', '') + bucket = connstr.bucket + connstr.bucket = None + + # Can I open a new bucket via open_bucket? + cluster = Cluster(connstr, bucket_class=self.factory) + cluster.authenticate(ClassicAuthenticator(buckets={bucket: password})) + + cb = cluster.open_bucket(bucket, **connargs) + key = self.gen_key('cluster_test') + cb.upsert(key, 'cluster test') \ No newline at end of file From 2b4c4e1cff9840ea82d0d6606ed0eed7d6ffc94e Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Thu, 9 Mar 2017 13:14:50 -0800 Subject: [PATCH 086/829] Cluster/Authenticator fixups: - Better Py3 compatibility - More tests - Defined exceptions specified in RBAC RFC Change-Id: I8837cca0faded0f6fb9b00c6d4f61e7e343e9ddf Reviewed-on: http://review.couchbase.org/74939 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Will Gardner <willg@rdner.io> Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/cluster.py | 30 +++++++++++++++---- couchbase/connstr.py | 12 ++++++++ couchbase/tests/cases/cluster_t.py | 46 ++++++++++++++++++++++++++---- 3 files changed, 76 insertions(+), 12 deletions(-) diff --git a/couchbase/cluster.py b/couchbase/cluster.py index c2ad2bbe8..c72326953 100644 --- a/couchbase/cluster.py +++ b/couchbase/cluster.py @@ -19,6 +19,20 @@ from couchbase.admin import Admin from couchbase.bucket import Bucket from couchbase.connstr import ConnectionString +from couchbase.exceptions import CouchbaseError + + +class MixedAuthError(CouchbaseError): + """ + Cannot use old and new style auth together in the same cluster + """ + pass + + +class NoBucketError(CouchbaseError): + """ + Operation requires at least a single bucket to be open + """ class Cluster(object): @@ -65,12 +79,12 @@ def open_bucket(self, bucket_name, **kwargs): connstr = ConnectionString.parse(str(self.connstr)) connstr.bucket = bucket_name if username: - connstr.options['username'] = username + connstr.set_option('username', username) if 'password' in kwargs: if isinstance(self.authenticator, PasswordAuthenticator): - raise ValueError("Cannot override " - "PasswordAuthenticators password") + raise MixedAuthError("Cannot override " + "PasswordAuthenticators password") else: kwargs['password'] = password rv = self.bucket_class(str(connstr), **kwargs) @@ -100,14 +114,18 @@ def n1ql_query(self, query, *args, **kwargs): query.cross_bucket = True + to_purge = [] for k, v in self._buckets.items(): bucket = v() if bucket: return bucket.n1ql_query(query, *args, **kwargs) else: - del self._buckets[k] + to_purge.append(k) + + for k in to_purge: + del self._buckets[k] - raise Exception('Must have at least one active bucket for query') + raise NoBucketError('Must have at least one active bucket for query') class Authenticator(object): @@ -119,7 +137,7 @@ def get_credentials(self, bucket=None): :param bucket: The bucket to act as context :return: A tuple of `username, password` """ - raise NotImplemented() + raise NotImplementedError() class PasswordAuthenticator(Authenticator): diff --git a/couchbase/connstr.py b/couchbase/connstr.py index df5905e92..e2cbf206d 100644 --- a/couchbase/connstr.py +++ b/couchbase/connstr.py @@ -112,6 +112,18 @@ def implicit_port(self): else: return -1 + def get_option(self, optname, default=None): + try: + return self.options[optname][0] + except KeyError: + return default + + def set_option(self, optname, value): + self.options[optname] = [value] + + def clear_option(self, optname): + self.options.pop(optname, None) + def encode(self): """ Encodes the current state of the object into a string. diff --git a/couchbase/tests/cases/cluster_t.py b/couchbase/tests/cases/cluster_t.py index cff928271..3cdfb740c 100644 --- a/couchbase/tests/cases/cluster_t.py +++ b/couchbase/tests/cases/cluster_t.py @@ -17,22 +17,56 @@ from couchbase.tests.base import CouchbaseTestCase from couchbase.connstr import ConnectionString -from couchbase.cluster import Cluster, ClassicAuthenticator +from couchbase.cluster import Cluster, ClassicAuthenticator,\ + PasswordAuthenticator, NoBucketError, MixedAuthError class ClusterTest(CouchbaseTestCase): - def test_cluster(self): + def _create_cluster(self): connargs = self.make_connargs() connstr = ConnectionString.parse(str(connargs.pop('connection_string'))) - password = connstr.options.pop('password', '') - connstr.options.pop('username', '') + connstr.clear_option('username') bucket = connstr.bucket connstr.bucket = None + password = connargs.get('password', '') # Can I open a new bucket via open_bucket? cluster = Cluster(connstr, bucket_class=self.factory) cluster.authenticate(ClassicAuthenticator(buckets={bucket: password})) + return cluster, bucket - cb = cluster.open_bucket(bucket, **connargs) + def test_cluster(self): + cluster, bucket_name = self._create_cluster() + cb = cluster.open_bucket(bucket_name) key = self.gen_key('cluster_test') - cb.upsert(key, 'cluster test') \ No newline at end of file + cb.upsert(key, 'cluster test') + + def test_query(self): + self.skipUnlessMock() + + cluster, bucket_name = self._create_cluster() + + # Should fail if no bucket is active yet + self.assertRaises(NoBucketError, cluster.n1ql_query, "select mockrow") + + # Open a bucket + cb = cluster.open_bucket(bucket_name) + row = cluster.n1ql_query('select mockrow').get_single_result() + + # Should fail again once the bucket has been GC'd + del cb + self.assertRaises(NoBucketError, cluster.n1ql_query, 'select mockrow') + + def test_no_mixed_auth(self): + cluster, bucket_name = self._create_cluster() + auther = PasswordAuthenticator(bucket_name, + self.cluster_info.bucket_password) + + cluster.authenticate(auther) + cb1 = cluster.open_bucket(bucket_name) + self.assertRaises(MixedAuthError, cluster.open_bucket, bucket_name, + password=self.cluster_info.bucket_password) + + cluster2, bucket_name = self._create_cluster() + cb2 = cluster2.open_bucket(bucket_name, + password=self.cluster_info.bucket_password) \ No newline at end of file From 5436719e19b05f9c1891cf7af323056107cc37c9 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 28 Mar 2017 09:25:16 -0700 Subject: [PATCH 087/829] Provide N1QL credentials when using `Cluster` interface This ensures that 'Authenticator' credentials are passed to old-style N1QL auth. Change-Id: I665a7317c90175cd0941dd69e10f8ee6ec1375ce Reviewed-on: http://review.couchbase.org/75879 Reviewed-by: Brett Lawson <brett19@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/cluster.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/couchbase/cluster.py b/couchbase/cluster.py index c72326953..6765b6f7a 100644 --- a/couchbase/cluster.py +++ b/couchbase/cluster.py @@ -89,6 +89,10 @@ def open_bucket(self, bucket_name, **kwargs): kwargs['password'] = password rv = self.bucket_class(str(connstr), **kwargs) self._buckets[bucket_name] = weakref.ref(rv) + if isinstance(self.authenticator, ClassicAuthenticator): + for bucket, passwd in self.authenticator.buckets.items(): + if passwd: + rv.add_bucket_creds(bucket, passwd) return rv def cluster_manager(self): From 5d739e52387d86a45382e7427a14576931a7b5da Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 18 Apr 2017 07:19:26 -0700 Subject: [PATCH 088/829] PYCBC-393: n1ql_index_watch: Set default polling interval to 0.2 (200ms) .. Per RFC Change-Id: I82c8bee09e333aa5fc2976c52d4e6e3443885c8b Reviewed-on: http://review.couchbase.org/76957 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/bucketmanager.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchbase/bucketmanager.py b/couchbase/bucketmanager.py index 5bbad65f5..875f715c6 100644 --- a/couchbase/bucketmanager.py +++ b/couchbase/bucketmanager.py @@ -497,7 +497,7 @@ def n1ql_index_build_deferred(self, other_buckets=False): return IxmgmtRequest(self._cb, 'build', info).execute() def n1ql_index_watch(self, indexes, - timeout=30, interval=1, watch_primary=False): + timeout=30, interval=0.2, watch_primary=False): """ Await completion of index building From bd6d5271bd038e9b9eb7d48667a88a25862ca1bd Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 18 Apr 2017 11:42:22 -0700 Subject: [PATCH 089/829] Fix memory leak when using the Item API This fixes a memory leak when a single `Item` is used more than once in a get operation. This commit ensures that any prior value is first cleared. Change-Id: Iad16e9d3875a9451ff9654caee244406a88125b8 Reviewed-on: http://review.couchbase.org/76976 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- src/callbacks.c | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/callbacks.c b/src/callbacks.c index 482456316..fd9914a40 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -370,6 +370,11 @@ value_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) eflags = gresp->itmflags; } + if (res->value) { + Py_DECREF(res->value); + res->value = NULL; + } + rv = pycbc_tc_decode_value(mres->parent, gresp->value, gresp->nvalue, eflags, &res->value); if (rv < 0) { From 52cd6becee799f9091392dadd9a764e2680e7492 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 18 Apr 2017 11:53:34 -0700 Subject: [PATCH 090/829] Fix sporadic cluster_t test failure The test was failing because a GC sweep wasn't happening. This fix ensures that the GC does a pass before running the assertion Change-Id: Ibc357094d97717f8f6f53d82c65c5107d5a8c438 Reviewed-on: http://review.couchbase.org/76978 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/tests/cases/cluster_t.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/couchbase/tests/cases/cluster_t.py b/couchbase/tests/cases/cluster_t.py index 3cdfb740c..fc7d3c3a5 100644 --- a/couchbase/tests/cases/cluster_t.py +++ b/couchbase/tests/cases/cluster_t.py @@ -19,6 +19,7 @@ from couchbase.connstr import ConnectionString from couchbase.cluster import Cluster, ClassicAuthenticator,\ PasswordAuthenticator, NoBucketError, MixedAuthError +import gc class ClusterTest(CouchbaseTestCase): @@ -55,6 +56,8 @@ def test_query(self): # Should fail again once the bucket has been GC'd del cb + gc.collect() + self.assertRaises(NoBucketError, cluster.n1ql_query, 'select mockrow') def test_no_mixed_auth(self): From 99a87dfdbcc74b665aec42fd90e1afae2594c92c Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 28 Apr 2017 17:19:43 -0700 Subject: [PATCH 091/829] PYCBC-391: Add new FTS query types - GeoDistanceQuery - GeoBoundingBoxQuery - TermRangeQuery Change-Id: I855375c50986cf03684fdc8f6e77bc72fa0498e2 Reviewed-on: http://review.couchbase.org/77573 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/fulltext.py | 69 ++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 65 insertions(+), 4 deletions(-) diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py index 84834732f..c52d46012 100644 --- a/couchbase/fulltext.py +++ b/couchbase/fulltext.py @@ -591,6 +591,45 @@ class RegexQuery(_SingleQuery): RegexpQuery = RegexQuery +def _location_conv(l): + if len(l) != 2: + raise ValueError('Require list of two numbers') + return [float(l[0]), float(l[1])] + + +@_with_fields('field') +class GeoDistanceQuery(Query): + def __init__(self, distance, location, **kwargs): + """ + Search for items within a given radius + :param distance: The distance string specifying the radius + :param location: A tuple of `(lon, lat)` indicating point of origin + """ + super(GeoDistanceQuery, self).__init__() + kwargs['distance'] = distance + kwargs['location'] = location + self._assign_kwargs(**kwargs) + + location = _genprop(_location_conv, 'location', doc='Location') + distance = _genprop_str('distance') + + +@_with_fields('field') +class GeoBoundingBoxQuery(Query): + def __init__(self, top_left, bottom_right, **kwargs): + super(GeoBoundingBoxQuery, self).__init__() + kwargs['top_left'] = top_left + kwargs['bottom_right'] = bottom_right + self._assign_kwargs(**kwargs) + + top_left = _genprop( + _location_conv, 'top_left', + doc='Tuple of `(lat, lon)` for the top left corner of bounding box') + bottom_right = _genprop( + _location_conv, 'bottom_right', + doc='Tuple of `(lat, lon`) for the bottom right corner of bounding box') + + class _RangeQuery(Query): __metaclass__ = abc.ABCMeta @@ -629,7 +668,7 @@ def __init__(self, min=None, max=None, **kwargs): float, 'min', doc='Lower bound of range. See :attr:`min_inclusive`') min_inclusive = _genprop( - bool, 'min_inclusive', + bool, 'inclusive_min', doc='Whether matches are inclusive of lower bound') max = _genprop( @@ -637,7 +676,7 @@ def __init__(self, min=None, max=None, **kwargs): doc='Upper bound of range. See :attr:`max_inclusive`') max_inclusive = _genprop( - bool, 'max_inclusive', + bool, 'inclusive_max', doc='Whether matches are inclusive of upper bound') _MINMAX = 'min', 'max' @@ -673,10 +712,10 @@ def __init__(self, start=None, end=None, **kwargs): end = _genprop_str('end', doc='Upper bound datetime') start_inclusive = _genprop( - bool, 'start_inclusive', doc='If :attr:`start` is inclusive') + bool, 'inclusive_start', doc='If :attr:`start` is inclusive') end_inclusive = _genprop( - bool, 'end_inclusive', doc='If :attr:`end` is inclusive') + bool, 'inclusive_end', doc='If :attr:`end` is inclusive') datetime_parser = _genprop_str( 'datetime_parser', @@ -692,6 +731,28 @@ def __init__(self, start=None, end=None, **kwargs): _MINMAX = 'start', 'end' +@_with_fields('field') +class TermRangeQuery(_RangeQuery): + """ + Search documents for fields containing a value within a given + lexical range. + """ + def __init__(self, start=None, end=None, **kwargs): + super(TermRangeQuery, self).__init__(start=start, end=end, **kwargs) + + start = _genprop_str('start', doc='Lower range of term') + + end = _genprop('end', doc='Upper range of term') + + start_inclusive = _genprop_str( + bool, 'inclusive_start', doc='If :attr:`start` is inclusive') + + end_inclusive = _genprop_str( + bool, 'inclusive_end', doc='If :attr:`end` is inclusive') + + _MINMAX = 'start', 'end' + + class _CompoundQuery(Query): __metaclass__ = abc.ABCMeta From 45b9df37a8aa25fb96c8d3567d530e3b9b326e17 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 1 May 2017 17:50:01 -0700 Subject: [PATCH 092/829] PYCBC-387: Add ephemeral bucket to `bucket_create` Change-Id: I012d3e527af90eb7904234ab0d56abc291ea0cd4 Reviewed-on: http://review.couchbase.org/77574 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/admin.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/couchbase/admin.py b/couchbase/admin.py index bf6e45e4a..ba18a434e 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -163,6 +163,9 @@ def bucket_create(self, name, bucket_type='couchbase', bucket (which persists data and supports replication) or `memcached` (which is memory-only and does not support replication). + Since Couchbase version 5.0, you can also specify + `ephemeral`, which is a replicated bucket which does + not have strict disk persistence requirements :param string bucket_password: The bucket password. This can be empty to disable authentication. This can be changed later on using :meth:`bucket_update` @@ -190,7 +193,7 @@ def bucket_create(self, name, bucket_type='couchbase', 'flushEnabled': int(flush_enabled), 'ramQuotaMB': ram_quota } - if bucket_type in ('couchbase', 'membase'): + if bucket_type in ('couchbase', 'membase', 'ephemeral'): params['replicaNumber'] = replicas return self.http_request( From 2a07bf2c76e4c11faadd71649d0e548c79a45e62 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 1 May 2017 08:46:22 -0700 Subject: [PATCH 093/829] PYCBC-389: Implement user management API Change-Id: I44552cfea6f40fbc9a73aba8aaa9e4d2fb1e8302 Reviewed-on: http://review.couchbase.org/77576 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/admin.py | 65 +++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 64 insertions(+), 1 deletion(-) diff --git a/couchbase/admin.py b/couchbase/admin.py index ba18a434e..cb9657ac1 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -23,7 +23,7 @@ import couchbase._libcouchbase as LCB import couchbase.exceptions as E from couchbase.user_constants import FMT_JSON -from couchbase._pyport import ulp +from couchbase._pyport import ulp, basestring METHMAP = { 'GET': LCB.LCB_HTTP_METHOD_GET, @@ -319,3 +319,66 @@ def bucket_update(self, name, current, bucket_password=None, replicas=None, method='POST', content_type='application/x-www-form-urlencoded', content=self._mk_formstr(params)) + + def users_get(self): + """ + Retrieve a list of users from the server. + :return: :class:`~.HttpResult`. The list of users can be obtained from + the returned object's `value` property. + """ + return self.http_request(path='/settings/rbac/users', method='GET') + + def user_upsert(self, userid, password, roles, name=None): + """ + Upsert a user in the cluster + :param userid: The user ID + :param password: The user password + :param roles: A list of roles. A role can either be a simple string, + or a list of `(role, bucket)` pairs. + :param name: Human-readable name + :return: :class:`~.HttpResult` + + .. Creating a new read-only admin user:: + + adm.upsert_user('mark', 's3cr3t', ['ro_admin']) + + .. An example of using more complex roles: + + adm.upsert_user('mark', 's3cr3t', [('data_reader', '*'), + ('data_writer', 'inbox')]) + + + .. warning:: + + Due to the asynchronous nature of Couchbase management APIs, it may + take a few moments for the new user settings to take effect. + """ + tmplist = [] + for role in roles: + if isinstance(role, basestring): + tmplist.append(role) + else: + tmplist.append('{0}[{1}]'.format(*role)) + + role_string = ','.join(tmplist) + params = { + 'roles': role_string, + 'password': password + } + if name: + params['name'] = name + + form = self._mk_formstr(params) + return self.http_request(path='/settings/rbac/users/local/' + userid, + method='PUT', + content_type='application/x-www-form-urlencoded', + content=form) + + def user_remove(self, userid): + """ + Remove a user + :param userid: The user ID to remove + :return: :class:`~.HttpResult` + """ + return self.http_request(path='/settings/rbac/users/local/' + userid, + method='DELETE') From f13e362e9a5fa440f0bd161344e401cfe0d62e21 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Sat, 29 Apr 2017 15:51:34 -0700 Subject: [PATCH 094/829] Add advanced sorting functionality Change-Id: If03f7f9a8f38bcde48c732d6cb1b0d292d4994f3 Reviewed-on: http://review.couchbase.org/77575 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/fulltext.py | 112 ++++++++++++++++++++++--- couchbase/tests/cases/cbftstrings_t.py | 18 +++- 2 files changed, 116 insertions(+), 14 deletions(-) diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py index c52d46012..ea85795be 100644 --- a/couchbase/fulltext.py +++ b/couchbase/fulltext.py @@ -244,6 +244,90 @@ def setdefault(self, key, value=None): return self[key] +class Sort(object): + def __init__(self, by, **kwargs): + self._json_ = { + 'by': by + } + _assign_kwargs(self, kwargs) + + descending = _genprop(bool, 'descending', doc='Sort using descending order') + + def as_encodable(self): + return self._json_ + + +class SortString(Sort): + """ + Sorts by a list of fields. This is similar to specifying a list of + fields in :attr:`Params.sort` + """ + def __init__(self, *fields): + self._json_ = list(fields) + + +class SortScore(Sort): + """ + Sorts by the score of each match. + """ + def __init__(self, **kwargs): + super(SortScore, self).__init__('score', **kwargs) + + +class SortID(Sort): + """ + Sorts lexically by the document ID of each match + """ + def __init__(self, **kwargs): + super(SortID, self).__init__('id', **kwargs) + + +class SortField(Sort): + """ + Sorts according to the properties of a given field + """ + def __init__(self, field, **kwargs): + kwargs['field'] = field + super(SortField, self).__init__('field', **kwargs) + + field = _genprop_str('field', doc='Field to sort by') + type = _genprop_str('type', doc='Coerce field to this type') + mode = _genprop_str('mode') + missing = _genprop_str('missing') + + +def _location_conv(l): + if len(l) != 2: + raise ValueError('Require list of two numbers') + return [float(l[0]), float(l[1])] + + +class SortGeoDistance(Sort): + """ + Sorts matches based on their distance from a specific location + """ + def __init__(self, location, field, **kwargs): + kwargs.update(location=location, field=field) + super(SortGeoDistance, self).__init__('geo_distance', **kwargs) + + location = _genprop(_location_conv, 'location', + doc='`(lat, lon)` of point of origin') + field = _genprop_str('field', doc='Field that contains the distance') + unit = _genprop_str('unit', doc='Distance unit used for measuring') + + +class SortRaw(Sort): + def __init__(self, raw): + self._json_ = raw + + +def _convert_sort(s): + if isinstance(s, Sort): + return s + else: + return list(s) + + class Params(object): """ Generic parameters and query modifiers. Keyword arguments may be used @@ -287,6 +371,13 @@ def as_encodable(self, index_name): } } self._json_.setdefault('ctl', {})['consistency'] = sv_val + + if self.sort: + if isinstance(self.sort, Sort): + self._json_['sort'] = self.sort.as_encodable() + else: + self._json_['sort'] = self.sort + return self._json_ limit = _genprop(int, 'size', doc='Maximum number of results to return') @@ -313,8 +404,9 @@ def as_encodable(self, index_name): """) sort = _genprop( - list, 'sort', doc=""" - Specify a list of fields by which to sort the results + _convert_sort, 'sort', doc=""" + Specify a list of fields by which to sort the results. Can also be + a :class:`Sort` class """ ) @@ -591,12 +683,6 @@ class RegexQuery(_SingleQuery): RegexpQuery = RegexQuery -def _location_conv(l): - if len(l) != 2: - raise ValueError('Require list of two numbers') - return [float(l[0]), float(l[1])] - - @_with_fields('field') class GeoDistanceQuery(Query): def __init__(self, distance, location, **kwargs): @@ -608,7 +694,7 @@ def __init__(self, distance, location, **kwargs): super(GeoDistanceQuery, self).__init__() kwargs['distance'] = distance kwargs['location'] = location - self._assign_kwargs(**kwargs) + _assign_kwargs(self, **kwargs) location = _genprop(_location_conv, 'location', doc='Location') distance = _genprop_str('distance') @@ -620,7 +706,7 @@ def __init__(self, top_left, bottom_right, **kwargs): super(GeoBoundingBoxQuery, self).__init__() kwargs['top_left'] = top_left kwargs['bottom_right'] = bottom_right - self._assign_kwargs(**kwargs) + _assign_kwargs(self, **kwargs) top_left = _genprop( _location_conv, 'top_left', @@ -742,12 +828,12 @@ def __init__(self, start=None, end=None, **kwargs): start = _genprop_str('start', doc='Lower range of term') - end = _genprop('end', doc='Upper range of term') + end = _genprop_str('end', doc='Upper range of term') - start_inclusive = _genprop_str( + start_inclusive = _genprop( bool, 'inclusive_start', doc='If :attr:`start` is inclusive') - end_inclusive = _genprop_str( + end_inclusive = _genprop( bool, 'inclusive_end', doc='If :attr:`end` is inclusive') _MINMAX = 'start', 'end' diff --git a/couchbase/tests/cases/cbftstrings_t.py b/couchbase/tests/cases/cbftstrings_t.py index fe080f940..28cab2dcc 100644 --- a/couchbase/tests/cases/cbftstrings_t.py +++ b/couchbase/tests/cases/cbftstrings_t.py @@ -101,6 +101,10 @@ def test_params(self): cbft.Params(sort=['f1', 'f2', '-_score'] ).as_encodable('ix')) + self.assertEqual({'sort': ['f1', 'f2', '-_score']}, + cbft.Params(sort=cbft.SortString( + 'f1', 'f2', '-_score')).as_encodable('ix')) + p = cbft.Params(facets={ 'term': cbft.TermFacet('somefield', limit=10), 'dr': cbft.DateFacet('datefield').add_range('name', 'start', 'end'), @@ -292,4 +296,16 @@ def test_consistency(self): } } } - self.assertEqual(exp, got) \ No newline at end of file + self.assertEqual(exp, got) + + def test_advanced_sort(self): + self.assertEqual({'by': 'score'}, cbft.SortScore().as_encodable()) + self.assertEqual({'by': 'score', 'descending': False}, + cbft.SortScore(descending=False).as_encodable()) + + self.assertEqual({'by': 'id'}, cbft.SortID().as_encodable()) + + self.assertEqual({'by': 'field', 'field': 'foo'}, + cbft.SortField('foo').as_encodable()) + self.assertEqual({'by': 'field', 'field': 'foo', 'type': 'int'}, + cbft.SortField('foo', type='int').as_encodable()) From e6533a288babfb539a3e9235b79393de2f6f4671 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Mon, 15 May 2017 11:15:40 -0700 Subject: [PATCH 095/829] PYCBC-399: Fix connstr to pass along options without a bucket Options were not properly parsed if a bucket name was not provided. Change-Id: Iaeb65ab8bdbf953e7220488db80f8cea3510b843 Reviewed-on: http://review.couchbase.org/78154 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/cluster.py | 1 + couchbase/connstr.py | 5 ++--- couchbase/tests/cases/cluster_t.py | 8 +++++++- 3 files changed, 10 insertions(+), 4 deletions(-) diff --git a/couchbase/cluster.py b/couchbase/cluster.py index 6765b6f7a..8f5a79ff7 100644 --- a/couchbase/cluster.py +++ b/couchbase/cluster.py @@ -87,6 +87,7 @@ def open_bucket(self, bucket_name, **kwargs): "PasswordAuthenticators password") else: kwargs['password'] = password + rv = self.bucket_class(str(connstr), **kwargs) self._buckets[bucket_name] = weakref.ref(rv) if isinstance(self.authenticator, ClassicAuthenticator): diff --git a/couchbase/connstr.py b/couchbase/connstr.py index e2cbf206d..e6a76ba74 100644 --- a/couchbase/connstr.py +++ b/couchbase/connstr.py @@ -86,11 +86,10 @@ def parse(cls, ss): up = urlparse(ss) path = up.path + query = up.query if '?' in path: - path, query = up.path.split('?') - else: - query = "" + path, _ = up.path.split('?') if path.startswith('/'): path = path[1:] diff --git a/couchbase/tests/cases/cluster_t.py b/couchbase/tests/cases/cluster_t.py index fc7d3c3a5..55ef840a7 100644 --- a/couchbase/tests/cases/cluster_t.py +++ b/couchbase/tests/cases/cluster_t.py @@ -72,4 +72,10 @@ def test_no_mixed_auth(self): cluster2, bucket_name = self._create_cluster() cb2 = cluster2.open_bucket(bucket_name, - password=self.cluster_info.bucket_password) \ No newline at end of file + password=self.cluster_info.bucket_password) + + def test_pathless_connstr(self): + # Not strictly a cluster test, but relevant + connstr = ConnectionString.parse('couchbase://localhost?opt1=val1&opt2=val2') + self.assertTrue('opt1' in connstr.options) + self.assertTrue('opt2' in connstr.options) \ No newline at end of file From 426c25dfc2cb924eae20fe22bb0bd9ba64568f69 Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Fri, 2 Jun 2017 13:08:14 -0700 Subject: [PATCH 096/829] PYCBC-404, PYCBC-402: GET_COUNT and other subdoc additions This adds the `get_count()` subdocument operation. It also adds support for the `get_fulldoc()` and `upsert_fulldoc()` operations which can be used to retrieve and modify the entire document, respectively. The get_fulldoc operation requires the use of a not-yet-released libcouchbase to function properly. The fix is in master. Change-Id: I75eadfb708beb49c295589f69c08ee364c9023c5 Reviewed-on: http://review.couchbase.org/78996 Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> --- couchbase/bucket.py | 28 +++++++++++ couchbase/priv_constants.py | 30 ++++++++++++ couchbase/subdocument.py | 81 ++++++++++++++++++++++++------- couchbase/tests/cases/subdoc_t.py | 46 +++++++++++++++++- src/store.c | 9 ++-- 5 files changed, 172 insertions(+), 22 deletions(-) create mode 100644 couchbase/priv_constants.py diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 8c348fef8..e1288b912 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -32,6 +32,11 @@ import couchbase.fulltext as _FTS from couchbase._pyport import basestring import couchbase.subdocument as SD +import couchbase.priv_constants as _P + + +### Private constants. This is to avoid imposing a dependency requirement +### For simple flags: def _depr(fn, usage, stacklevel=3): @@ -766,6 +771,12 @@ def mutate_in(self, key, *specs, **kwargs): :param key: The key of the document to modify :param specs: A list of specs (See :mod:`.couchbase.subdocument`) + :param bool create_doc: + Whether the document should be create if it doesn't exist + :param bool insert_doc: If the document should be created anew, and the + operations performed *only* if it does not exist. + :param bool upsert_doc: If the document should be created anew if it + does not exist. If it does exist the commands are still executed. :param kwargs: CAS, etc. :return: A :class:`~.couchbase.result.SubdocResult` object. @@ -778,8 +789,25 @@ def mutate_in(self, key, *specs, **kwargs): SD.array_addunique('tags', 'dog'), SD.counter('updates', 1)) + .. note:: + + The `insert_doc` and `upsert_doc` options are mutually exclusive. + Use `insert_doc` when you wish to create a new document with + extended attributes (xattrs). + .. seealso:: :mod:`.couchbase.subdocument` """ + + # Note we don't verify the validity of the options. lcb does that for + # us. + sdflags = kwargs.pop('_sd_doc_flags', 0) + + if kwargs.pop('insert_doc', False): + sdflags |= _P.CMDSUBDOC_F_INSERT_DOC + if kwargs.pop('upsert_doc', False): + sdflags |= _P.CMDSUBDOC_F_UPSERT_DOC + + kwargs['_sd_doc_flags'] = sdflags return super(Bucket, self).mutate_in(key, specs, **kwargs) def lookup_in(self, key, *specs, **kwargs): diff --git a/couchbase/priv_constants.py b/couchbase/priv_constants.py new file mode 100644 index 000000000..a1f1f4f46 --- /dev/null +++ b/couchbase/priv_constants.py @@ -0,0 +1,30 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# This file constains private constants. This should be used internally +# and is to avoid version/compilation dependencies on newer LCBs + +CMDSUBDOC_F_UPSERT_DOC = 1 << 16 +CMDSUBDOC_F_INSERT_DOC = 1 << 17 +CMDSUBDOC_F_ACCESS_DELETED = 1 << 18 +SDSPEC_F_MKDIR_P = 1 << 16 +SDSPEC_F_XATTR = 1 << 18 +SDSPEC_F_EXPANDMACROS = 1 << 19 + +SDCMD_GET_COUNT = 12 +SDCMD_GET_FULLDOC = 13 +SDCMD_UPSERT_FULLDOC = 14 \ No newline at end of file diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py index b1b7b9f70..ddc90e956 100644 --- a/couchbase/subdocument.py +++ b/couchbase/subdocument.py @@ -5,14 +5,11 @@ LCB_SDCMD_COUNTER, LCB_SDCMD_REMOVE, LCB_SDCMD_ARRAY_INSERT ) +import couchbase.priv_constants as _P + # Defined in libcouchbase's headers. We don't want to force a hard dependency # on a later version of libcouchbase just for these flags, especially # considering that most users will only be using _F_MKDIR_P -_F_MKDIR_P = 1 << 16 -_F_MKDOC = 1 << 17 -_F_XATTR = 1 << 18 -_F_EXPAND_MACROS = 1 << 19 -_F_ACCESS_DELETED = 1 << 20 _SPECMAP = {} for k, v in tuple(globals().items()): @@ -34,7 +31,7 @@ def __repr__(self): ', '.join(details)) -def _gen_3spec(op, path, xattr=False, _access_deleted=False): +def _gen_3spec(op, path, xattr=False): """ Returns a Spec tuple suitable for passing to the underlying C extension. This variant is called for operations that lack an input value. @@ -45,15 +42,12 @@ def _gen_3spec(op, path, xattr=False, _access_deleted=False): """ flags = 0 if xattr: - flags |= _F_XATTR - if _access_deleted: - flags |= _F_ACCESS_DELETED + flags |= _P.SDSPEC_F_XATTR return Spec(op, path, flags) def _gen_4spec(op, path, value, - create_path=False, create_doc=False, xattr=False, - _expand_macros=False, _access_deleted=False): + create_path=False, xattr=False, _expand_macros=False): """ Like `_gen_3spec`, but also accepts a mandatory value as its third argument :param bool _expand_macros: Whether macros in the value should be expanded. @@ -61,15 +55,11 @@ def _gen_4spec(op, path, value, """ flags = 0 if create_path: - flags |= _F_MKDIR_P - if create_doc: - flags |= _F_MKDOC + flags |= _P.SDSPEC_F_MKDIR_P if xattr: - flags |= _F_XATTR + flags |= _P.SDSPEC_F_XATTR if _expand_macros: - flags |= _F_EXPAND_MACROS - if _access_deleted: - flags |= _F_ACCESS_DELETED + flags |= _P.SDSPEC_F_EXPANDMACROS return Spec(op, path, flags, value) @@ -286,3 +276,58 @@ def remove(path, **kwargs): :param path: The path to remove """ return _gen_3spec(LCB_SDCMD_REMOVE, path, **kwargs) + + +def get_count(path, **kwargs): + """ + Return the number of items in an dictionary or array + :param path: Path to the dictionary or array to count + + This operation is only valid in :cb_bmeth:`lookup_in` + + .. versionadded:: 2.2.5 + """ + return _gen_3spec(_P.SDCMD_GET_COUNT, path, **kwargs) + + +def get_fulldoc(**kwargs): + """ + Use this command to retrieve the entire document via subdoc. + This command should be used in conjunction with another :meth:`get` + command which may retrieve one of the document's XATTRs. + + Example:: + + result = cb.lookup_in('docid', SD.get_fulldoc(), + SD.get('mystuff', xattr=True) + + whole_doc = result[0] + mystuff = result[1] + + + .. versionadded:: 2.2.5 + """ + return _gen_3spec(_P.SDCMD_GET_FULLDOC, '', **kwargs) + + +def upsert_fulldoc(doc, **kwargs): + """ + Use this command to write the whole document. + :param doc: The document to upsert + + This command should be used in conjunction with another :meth:`upsert` + (or another mutation command) to write a document's XATTRs. + + Example:: + + cb.mutate_in('docid', + SD.upsert_fulldoc({'name': 'Mark'}), + SD.upsert('mymeta._doctype', 'user', + xattr=True, create_parents=True)) + + .. versionadded:: 2.2.5 + + You can use the `insert_doc` and `upsert_doc` options to further refine + conditions for the operation. See :cb_bmeth:`mutate_in` + """ + return _gen_4spec(_P.SDCMD_UPSERT_FULLDOC, '', doc, **kwargs) \ No newline at end of file diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index e9b632ae9..0342b042a 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -267,4 +267,48 @@ def test_access_ok(self): cb.upsert(key, {'hello': 'world'}) rv = cb.lookup_in(key, SD.get('nonexist')) - self.assertTrue(rv.access_ok) \ No newline at end of file + self.assertTrue(rv.access_ok) + + def test_get_count(self): + cb = self.cb + key = self.gen_key('get_count') + + cb.upsert(key, [1, 2, 3]) + self.assertEqual(3, cb.lookup_in(key, SD.get_count(''))[0]) + + cb.upsert(key, {'k1': 1, 'k2': 2, 'k3': 3}) + self.assertEqual(3, cb.lookup_in(key, SD.get_count(''))[0]) + + def test_create_doc(self): + cb = self.cb + key = self.gen_key('create_doc') + cb.mutate_in(key, SD.upsert('new.path', 'newval'), upsert_doc=True) + self.assertEqual('newval', cb.retrieve_in(key, 'new.path')[0]) + + # Check 'insert_doc' + self.assertRaises(E.KeyExistsError, cb.mutate_in, + key, SD.upsert('new.path', 'newval'), insert_doc=True) + + cb.remove(key) + cb.mutate_in(key, SD.upsert('new.path', 'newval'), insert_doc=True) + self.assertEqual('newval', cb.retrieve_in(key, 'new.path')[0]) + + def test_fulldoc(self): + cb = self.cb + key = self.gen_key('fulldoc') + cb.mutate_in(key, + SD.upsert_fulldoc({'val': True}), + SD.upsert('my.xattr', 'attrval', + create_parents=True, xattr=True), + insert_doc=True) + self.assertEqual(True, cb.retrieve_in(key, 'val')[0]) + + self.assertEqual('attrval', + cb.lookup_in(key, SD.get('my.xattr', xattr=True))[0]) + + rv = cb.lookup_in(key, SD.get('my.xattr')) + self.assertFalse(rv.exists(0)) + + # Get the document back + rv = cb.lookup_in(key, SD.get_fulldoc()) + self.assertEqual(True, rv[0]['val']) \ No newline at end of file diff --git a/src/store.c b/src/store.c index 77feddfb6..0ac83e694 100644 --- a/src/store.c +++ b/src/store.c @@ -19,6 +19,7 @@ struct storecmd_vars { int operation; int argopts; + unsigned int sd_doc_flags; unsigned long ttl; PyObject *flagsobj; lcb_U64 single_cas; @@ -198,6 +199,7 @@ handle_multi_mutate(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype cmd.cas = scv->single_cas; cmd.exptime = scv->ttl; + cmd.cmdflags |= scv->sd_doc_flags; LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); rv = pycbc_sd_handle_speclist(self, cv->mres, curkey, curvalue, &cmd); PYCBC_PYBUF_RELEASE(&keybuf); @@ -259,7 +261,7 @@ set_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, static char *kwlist_single[] = { "key", "value", "cas", "ttl", "format", - "persist_to", "replicate_to", + "persist_to", "replicate_to", "_sd_doc_flags", NULL }; @@ -273,10 +275,11 @@ set_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, &persist_to, &replicate_to); } else { - rv = PyArg_ParseTupleAndKeywords(args, kwargs, "OO|KOOBB", kwlist_single, + rv = PyArg_ParseTupleAndKeywords(args, kwargs, "OO|KOOBBI", kwlist_single, &key, &value, &scv.single_cas, &ttl_O, &scv.flagsobj, - &persist_to, &replicate_to); + &persist_to, &replicate_to, + &scv.sd_doc_flags); } if (!rv) { From afcfb29756e2cedce84b258c336feffad7a2fc63 Mon Sep 17 00:00:00 2001 From: Heungsub Lee <sub@subl.ee> Date: Thu, 1 Jun 2017 15:54:13 +0900 Subject: [PATCH 097/829] GEventIOEvent, GEventTimer calls super constructor To make it work with couchbase_ffi, GEventTimer should have _cdata attribute which is initialized by couchbase_ffi.iops.Event.__init__(). But GEventTimer didn't call its super constructor. Change-Id: I3ceb7228471501de5e891cc82458de9d29dc896e Reviewed-on: http://review.couchbase.org/78906 Reviewed-by: Mark Nunberg <mark.nunberg@couchbase.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- gcouchbase/iops_gevent10.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/gcouchbase/iops_gevent10.py b/gcouchbase/iops_gevent10.py index 7e958a60f..b1f907c0d 100644 --- a/gcouchbase/iops_gevent10.py +++ b/gcouchbase/iops_gevent10.py @@ -28,6 +28,7 @@ class GEventIOEvent(IOEvent): def __init__(self): + super(GEventIOEvent, self).__init__() self.ev = get_hub().loop.io(0,0) def ready_proxy(self, event): @@ -42,6 +43,7 @@ def watch(self, events): class GEventTimer(TimerEvent): def __init__(self): + super(GEventTimer, self).__init__() self.ev = get_hub().loop.timer(0) def ready_proxy(self, *args): From 25eccd7902d191004a32b5d6472f6e5c9e33c28f Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Tue, 13 Jun 2017 16:32:32 -0700 Subject: [PATCH 098/829] PYCBC-408: Fix sending duplicate query Sometimes a duplicate query is sent, causing duplicate insert statements. Change-Id: Ib8a45eef47cf619a15a74fc2716cda15aa889492 Reviewed-on: http://review.couchbase.org/79438 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mark Nunberg <mark.nunberg@couchbase.com> --- couchbase/n1ql.py | 1 - 1 file changed, 1 deletion(-) diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index f19b0f691..df127e49e 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -297,7 +297,6 @@ def _start(self): if self._mres: return - self._mres = self._submit_query() self._mres = self._submit_query() self.__raw = self._mres[None] From 7b6cd33d4a743ab57e623a40d3af9bbcc4e2e2ba Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Fri, 30 Jun 2017 13:10:15 +0100 Subject: [PATCH 099/829] PYCBC-410: Update user management API - append "/local" to users_get URL - add user_get method Change-Id: Id345e7ca7d701d2b9ce8ed9247bb949c9b8f5576 Reviewed-on: http://review.couchbase.org/80186 Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> --- couchbase/admin.py | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/couchbase/admin.py b/couchbase/admin.py index cb9657ac1..bdc15ed53 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -326,7 +326,18 @@ def users_get(self): :return: :class:`~.HttpResult`. The list of users can be obtained from the returned object's `value` property. """ - return self.http_request(path='/settings/rbac/users', method='GET') + return self.http_request(path='/settings/rbac/users/local', + method='GET') + + def user_get(self, userid): + """ + Retrieve a user from the server + :param userid: The user ID + :return: :class:`~.HttpResult`. The user can be obtained from the + returned object's `value` property. + """ + return self.http_request(path='/settings/rbac/users/local/' + userid, + method='GET') def user_upsert(self, userid, password, roles, name=None): """ From 3cfc8ea089aa8af7c8365b106f2e0810eb38dd74 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Fri, 7 Jul 2017 09:37:46 +0100 Subject: [PATCH 100/829] PYCBC-415: add .idea folder to git ignore Change-Id: I54c567a7ffc64407ed6c0aa7a2f4d19be1636b95 Reviewed-on: http://review.couchbase.org/80362 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 6f25fb607..4e71aa0c8 100644 --- a/.gitignore +++ b/.gitignore @@ -30,3 +30,4 @@ buildall.py *egg-info* _trial* *.dSYM +.idea/ From fdc900db1e37020e877d01b563fb9962e2139d2e Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Fri, 7 Jul 2017 17:40:21 +0100 Subject: [PATCH 101/829] PYCBC-414: Make cluster manager compatible with Spock MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - try and read connection_string parameter, if doesn’t exist, build using host & port - add bucket parameter to Admin with default, append to connection_string - pass connection_string to Admin when creating manager from cluster Change-Id: I3d2c161078a1adb16f1cb8dfb0fc2c6e94d69de8 Reviewed-on: http://review.couchbase.org/80382 Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> --- .gitignore | 1 + .travis.yml | 4 ++-- couchbase/admin.py | 9 ++++++++- couchbase/cluster.py | 3 ++- couchbase/tests/cases/admin_t.py | 18 ++++++++++++++++++ 5 files changed, 31 insertions(+), 4 deletions(-) diff --git a/.gitignore b/.gitignore index 4e71aa0c8..d5ba5ab3a 100644 --- a/.gitignore +++ b/.gitignore @@ -31,3 +31,4 @@ buildall.py _trial* *.dSYM .idea/ +CouchbaseMock.jar diff --git a/.travis.yml b/.travis.yml index 12f86212e..183a3b105 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,7 +15,7 @@ addons: - couchbase-precise packages: - libcouchbase-dev - - libcouchbase2-libevent + - libcouchbase2-libevent install: - pip -q install testresources==0.2.7 @@ -37,4 +37,4 @@ script: notifications: email: - - mark.nunberg@couchbase.com + - mike.goldsmith@couchbase.com diff --git a/couchbase/admin.py b/couchbase/admin.py index bdc15ed53..8a1744162 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -74,10 +74,17 @@ def __init__(self, username, password, host='localhost', port=8091, :return: an instance of :class:`Admin` """ + connection_string = kwargs.pop('connection_string', None) + if not connection_string: + connection_string = "http://{0}:{1}".format(host, port) + + bucket = kwargs.pop('bucket', 'default') + connection_string += "/{0}".format(bucket) + kwargs.update({ 'username': username, 'password': password, - 'connection_string': "http://{0}:{1}".format(host, port), + 'connection_string': connection_string, '_conntype': LCB.LCB_TYPE_CLUSTER }) super(Admin, self).__init__(**kwargs) diff --git a/couchbase/cluster.py b/couchbase/cluster.py index 8f5a79ff7..352f1f3fc 100644 --- a/couchbase/cluster.py +++ b/couchbase/cluster.py @@ -102,7 +102,8 @@ def cluster_manager(self): used to create and manage buckets in the cluster. """ username, password = self.authenticator.get_credentials() - return Admin(username, password) + connection_string = str(self.connstr) + return Admin(username, password, connection_string=connection_string) def n1ql_query(self, query, *args, **kwargs): """ diff --git a/couchbase/tests/cases/admin_t.py b/couchbase/tests/cases/admin_t.py index 94a2a595f..1d1ba9e8e 100644 --- a/couchbase/tests/cases/admin_t.py +++ b/couchbase/tests/cases/admin_t.py @@ -26,6 +26,7 @@ CouchbaseNetworkError, HTTPError) from couchbase.tests.base import CouchbaseTestCase, SkipTest + class AdminSimpleTest(CouchbaseTestCase): def setUp(self): super(AdminSimpleTest, self).setUp() @@ -47,6 +48,23 @@ def test_http_request(self): self.assertEqual(htres.url, 'pools/') self.assertTrue(htres.success) + def test_connection_string_param(self): + + conn_str = 'http://{0}:{1}'.format(self.cluster_info.host, self.cluster_info.port) + admin = Admin('Administrator', + 'password', + connection_string=conn_str) + self.assertIsNotNone(admin) + + def test_bucket_param(self): + + admin = Admin('Administrator', + 'password', + host=self.cluster_info.host, + port=self.cluster_info.port, + bucket='default') + self.assertIsNotNone(admin) + def test_bad_request(self): self.assertRaises(HTTPError, self.admin.http_request, '/badpath') From 924f15526f8786dba8412a9f5d10b6c63c6bd0ac Mon Sep 17 00:00:00 2001 From: Mark Nunberg <mnunberg@haskalah.org> Date: Sun, 4 Jun 2017 23:17:09 -0700 Subject: [PATCH 102/829] Remove travis icon The builds often 'fail'. While these failures are not true failures (they mainly have to do with timing, libcouchbase dependencies and so on) it's not very useful to have a blaring red icon there all the time. Change-Id: I465e7cc04f92df4b4c91190f0c0283d119ca13e8 Reviewed-on: http://review.couchbase.org/79033 Reviewed-by: Subhashni Balakrishnan <b.subhashni@gmail.com> Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> --- README.rst | 4 ---- 1 file changed, 4 deletions(-) diff --git a/README.rst b/README.rst index f464dc34e..539e9704a 100644 --- a/README.rst +++ b/README.rst @@ -4,10 +4,6 @@ Couchbase Python Client Client for Couchbase_. -.. image:: https://travis-ci.org/couchbase/couchbase-python-client.png - :target: https://travis-ci.org/couchbase/couchbase-python-client - - .. note:: This is the documentation for the 2.x version of the client. This is From 1f437384fa4ab784ca61b4e94a47207ebe3237c8 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Mon, 17 Jul 2017 16:29:17 +0100 Subject: [PATCH 103/829] PYCBC-416: Update bucket type parameter when creating bucket Change-Id: I7728f2b85a0dfbcfe5d9477cdd4d40ef36d6a91c Reviewed-on: http://review.couchbase.org/81012 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Matt Ingenthron <matt@couchbase.com> --- couchbase/admin.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchbase/admin.py b/couchbase/admin.py index 8a1744162..085ba8f54 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -194,7 +194,7 @@ def bucket_create(self, name, bucket_type='couchbase', """ params = { 'name': name, - 'type': bucket_type, + 'bucketType': bucket_type, 'authType': 'sasl', 'saslPassword': bucket_password if bucket_password else '', 'flushEnabled': int(flush_enabled), From 238dee383b080c558e6dd720ab39e5eb1b191e59 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Thu, 20 Jul 2017 17:27:55 +0100 Subject: [PATCH 104/829] PYCBC-385: Add test to test enabling error map MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add test to prove it’s possible to enable error map. Invalid connection string args cause an error when connecting. Change-Id: I04863cc22e002e5abb0c6e701de3aaea91077b77 Reviewed-on: http://review.couchbase.org/81182 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- couchbase/tests/cases/connection_t.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/couchbase/tests/cases/connection_t.py b/couchbase/tests/cases/connection_t.py index b302f3146..fe1ee0816 100644 --- a/couchbase/tests/cases/connection_t.py +++ b/couchbase/tests/cases/connection_t.py @@ -114,5 +114,11 @@ def test_multi_hosts(self): cb = self.factory(str(cs)) self.assertTrue(cb.upsert("foo", "bar").success) + def test_enable_error_map(self): + # enabled via connection string param, invalid params cause error + conn_str = 'http://{0}:{1}?enable_errmap=true'.format(self.cluster_info.host, self.cluster_info.port) + cb = self.factory(conn_str) + self.assertTrue(cb.upsert("foo", "bar").success) + if __name__ == '__main__': unittest.main() From 4a939caaa12acb6fad6c8b68519c266354c44ff8 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Thu, 20 Jul 2017 17:56:36 +0100 Subject: [PATCH 105/829] PYCBC-373: Verify can work with ephemeral buckets Change-Id: I8f3143cbca07fa0de5af56cfcdf666319f3792e7 Reviewed-on: http://review.couchbase.org/81183 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- couchbase/tests/cases/admin_t.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) diff --git a/couchbase/tests/cases/admin_t.py b/couchbase/tests/cases/admin_t.py index 1d1ba9e8e..194cde9e3 100644 --- a/couchbase/tests/cases/admin_t.py +++ b/couchbase/tests/cases/admin_t.py @@ -19,6 +19,7 @@ import os from couchbase.admin import Admin +from couchbase.bucket import Bucket from couchbase.result import HttpResult from couchbase.connstr import ConnectionString from couchbase.exceptions import ( @@ -143,3 +144,28 @@ def test_actions(self): # Remove the bucket self.admin.bucket_remove('dummy') self.assertRaises(CouchbaseError, self.factory, connstr) + + def test_create_ephemeral_bucket_and_use(self): + bucket_name = 'ephemeral' + password = 'letmein' + + # create ephemeral test bucket + self.admin.bucket_create(name=bucket_name, + bucket_type='ephemeral', + ram_quota=100, + bucket_password=password) + self.admin.wait_ready(bucket_name, timeout=10) + + # connect to bucket to ensure we can use it + conn_str = "http://{0}:{1}/{2}".format(self.cluster_info.host, self.cluster_info.port, bucket_name) + bucket = Bucket(connection_string=conn_str, password=password) + self.assertIsNotNone(bucket) + + # create a doc then read it back + key = 'mike' + doc = {'name': 'mike'} + bucket.upsert(key, doc) + result = bucket.get(key) + + # original and result should be the same + self.assertEqual(doc, result.value) From 32377262484f26acb0b3163b329ff52776e3174e Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Mon, 31 Jul 2017 12:05:03 +0100 Subject: [PATCH 106/829] PYCBC-410: Add AuthDomain to user management API Change-Id: Id05d023dada7b3de26eb793a65ddfb1783b2020a Reviewed-on: http://review.couchbase.org/81594 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Matt Carabine <matt.carabine@couchbase.com> Reviewed-by: Matt Ingenthron <matt@couchbase.com> --- couchbase/admin.py | 53 +++++++++++++++++++++++++------- couchbase/auth_domain.py | 25 +++++++++++++++ couchbase/tests/cases/admin_t.py | 47 ++++++++++++++++++++++++++++ 3 files changed, 114 insertions(+), 11 deletions(-) create mode 100644 couchbase/auth_domain.py diff --git a/couchbase/admin.py b/couchbase/admin.py index 085ba8f54..46c1eda41 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -24,6 +24,7 @@ import couchbase.exceptions as E from couchbase.user_constants import FMT_JSON from couchbase._pyport import ulp, basestring +from couchbase.auth_domain import AuthDomain METHMAP = { 'GET': LCB.LCB_HTTP_METHOD_GET, @@ -327,42 +328,67 @@ def bucket_update(self, name, current, bucket_password=None, replicas=None, content_type='application/x-www-form-urlencoded', content=self._mk_formstr(params)) - def users_get(self): + @staticmethod + def _get_management_path(auth_domain, userid=None): + + if auth_domain == AuthDomain.Local: + domain = 'local' + elif auth_domain == AuthDomain.External: + domain = 'external' + else: + raise E.ArgumentError.pyexc("Unknown Authentication Domain", auth_domain) + + path = '/settings/rbac/users/{0}'.format(domain) + if userid is not None: + path = '{0}/{1}'.format(path, userid) + + return path + + def users_get(self, domain): """ Retrieve a list of users from the server. + :param AuthDomain domain: The authentication domain to retrieve users from. :return: :class:`~.HttpResult`. The list of users can be obtained from the returned object's `value` property. """ - return self.http_request(path='/settings/rbac/users/local', + path = self._get_management_path(domain) + return self.http_request(path=path, method='GET') - def user_get(self, userid): + def user_get(self, domain, userid): """ Retrieve a user from the server - :param userid: The user ID + :param AuthDomain domain: The authentication domain for the user. + :param userid: The user ID. + :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not + exist. :return: :class:`~.HttpResult`. The user can be obtained from the returned object's `value` property. """ - return self.http_request(path='/settings/rbac/users/local/' + userid, + path = self._get_management_path(domain, userid) + return self.http_request(path=path, method='GET') - def user_upsert(self, userid, password, roles, name=None): + def user_upsert(self, domain, userid, password, roles, name=None): """ Upsert a user in the cluster + :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID :param password: The user password :param roles: A list of roles. A role can either be a simple string, or a list of `(role, bucket)` pairs. :param name: Human-readable name + :raise: :exc:`couchbase.exceptions.HTTPError` if the request fails. :return: :class:`~.HttpResult` .. Creating a new read-only admin user:: - adm.upsert_user('mark', 's3cr3t', ['ro_admin']) + adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', ['ro_admin']) .. An example of using more complex roles: - adm.upsert_user('mark', 's3cr3t', [('data_reader', '*'), + adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', + [('data_reader', '*'), ('data_writer', 'inbox')]) @@ -387,16 +413,21 @@ def user_upsert(self, userid, password, roles, name=None): params['name'] = name form = self._mk_formstr(params) - return self.http_request(path='/settings/rbac/users/local/' + userid, + path = self._get_management_path(domain, userid) + return self.http_request(path=path, method='PUT', content_type='application/x-www-form-urlencoded', content=form) - def user_remove(self, userid): + def user_remove(self, domain, userid): """ Remove a user + :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID to remove + :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not + exist. :return: :class:`~.HttpResult` """ - return self.http_request(path='/settings/rbac/users/local/' + userid, + path = self._get_management_path(domain, userid) + return self.http_request(path=path, method='DELETE') diff --git a/couchbase/auth_domain.py b/couchbase/auth_domain.py new file mode 100644 index 000000000..ac98b5a8c --- /dev/null +++ b/couchbase/auth_domain.py @@ -0,0 +1,25 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +class AuthDomain(object): + """ + The Authentication domain for a user. + Local: Users managed by Couchbase Server. + External: Users managed by an external resource, eg LDAP. + """ + Local, External = range(2) diff --git a/couchbase/tests/cases/admin_t.py b/couchbase/tests/cases/admin_t.py index 194cde9e3..e649bc39a 100644 --- a/couchbase/tests/cases/admin_t.py +++ b/couchbase/tests/cases/admin_t.py @@ -26,6 +26,7 @@ ArgumentError, AuthError, CouchbaseError, CouchbaseNetworkError, HTTPError) from couchbase.tests.base import CouchbaseTestCase, SkipTest +from couchbase.auth_domain import AuthDomain class AdminSimpleTest(CouchbaseTestCase): @@ -169,3 +170,49 @@ def test_create_ephemeral_bucket_and_use(self): # original and result should be the same self.assertEqual(doc, result.value) + + def test_build_user_management_path(self): + + path = self.admin._get_management_path(AuthDomain.Local) + self.assertEqual('/settings/rbac/users/local', path) + + path = self.admin._get_management_path(AuthDomain.Local, 'user') + self.assertEqual('/settings/rbac/users/local/user', path) + + path = self.admin._get_management_path(AuthDomain.Local) + self.assertEqual('/settings/rbac/users/local', path) + + path = self.admin._get_management_path(AuthDomain.Local, 'user') + self.assertEqual('/settings/rbac/users/local/user', path) + + def test_create_list_get_remove_internal_user(self): + + userid = 'custom-user' + password = 's3cr3t' + roles = [('data_reader', 'default'), ('data_writer', 'default')] + + # add user + self.admin.user_upsert(AuthDomain.Local, userid, password, roles) + + # get all users + users = self.admin.users_get(AuthDomain.Local) + self.assertIsNotNone(users) + + # get single user + user = self.admin.user_get(AuthDomain.Local, userid) + self.assertIsNotNone(user) + + # remove user + self.admin.user_remove(AuthDomain.Local, userid) + + def test_invalid_domain_raises_argument_error(self): + + userid = 'custom-user' + password = 's3cr3t' + roles = [('data_reader', 'default'), ('data_writer', 'default')] + + # invalid domain generates argument error + self.assertRaises(ArgumentError, self.admin.users_get, None) + self.assertRaises(ArgumentError, self.admin.user_get, None, userid) + self.assertRaises(ArgumentError, self.admin.user_upsert, None, userid, password, roles) + self.assertRaises(ArgumentError, self.admin.user_remove, None, userid) From 5ce8c34ae4c22afc82981ed5d5d17e18de3ec11a Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Tue, 1 Aug 2017 11:15:42 +0100 Subject: [PATCH 107/829] PYCBC-418: Add user management API aliases Adds verb-object aliases for user managment API methods Change-Id: Iced778f5f72623c6afe560554e08af169719f608 Reviewed-on: http://review.couchbase.org/78847 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Matt Carabine <matt.carabine@couchbase.com> Reviewed-by: Matt Ingenthron <matt@couchbase.com> --- couchbase/admin.py | 7 +++++++ couchbase/tests/cases/admin_t.py | 20 ++++++++++++++++++++ 2 files changed, 27 insertions(+) diff --git a/couchbase/admin.py b/couchbase/admin.py index 46c1eda41..f3d310690 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -431,3 +431,10 @@ def user_remove(self, domain, userid): path = self._get_management_path(domain, userid) return self.http_request(path=path, method='DELETE') + + # Add aliases to match RFC + # Python SDK so far has used object-verb where RFC uses verb-object + get_user = user_get + get_users = users_get + upsert_user = user_upsert + remove_user = user_remove diff --git a/couchbase/tests/cases/admin_t.py b/couchbase/tests/cases/admin_t.py index e649bc39a..f63f41ae8 100644 --- a/couchbase/tests/cases/admin_t.py +++ b/couchbase/tests/cases/admin_t.py @@ -216,3 +216,23 @@ def test_invalid_domain_raises_argument_error(self): self.assertRaises(ArgumentError, self.admin.user_get, None, userid) self.assertRaises(ArgumentError, self.admin.user_upsert, None, userid, password, roles) self.assertRaises(ArgumentError, self.admin.user_remove, None, userid) + + def test_user_api_aliases(self): + + userid = 'custom-user' + password = 's3cr3t' + roles = [('data_reader', 'default'), ('data_writer', 'default')] + + # add user + self.admin.upsert_user(AuthDomain.Local, userid, password, roles) + + # get all users + users = self.admin.get_users(AuthDomain.Local) + self.assertIsNotNone(users) + + # get single user + user = self.admin.get_user(AuthDomain.Local, userid) + self.assertIsNotNone(user) + + # remove user + self.admin.remove_user(AuthDomain.Local, userid) From 3a38e7cb1f715c238fcda7c87275a3702fb98dea Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Tue, 22 Aug 2017 16:18:10 +0100 Subject: [PATCH 108/829] PYCBC-422: update author email Change-Id: Ide7f2cda92ebc543e63a2a20666d8398c9829770 Reviewed-on: http://review.couchbase.org/82574 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 4a34e710e..39032b959 100644 --- a/setup.py +++ b/setup.py @@ -107,7 +107,7 @@ version = pkgversion, url="https://github.com/couchbase/couchbase-python-client", author="Couchbase, Inc.", - author_email="mark.nunberg@couchbase.com", + author_email="mike.goldsmith@couchbase.com", license="Apache License 2.0", description="Python Client for Couchbase", long_description=open("README.rst", "r").read(), From 91e73b4c0f3dde1a27ced026a278cb50c50cdce4 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Wed, 6 Sep 2017 12:38:53 +0100 Subject: [PATCH 109/829] PYCBC-428: Update README to describe server auth Add sections for authenticating with bucket passwords (CB server <5.0) and RBAC (CB server >=5.0) Change-Id: Ia65a871a1b3949d14b90c43952f0f4d857eff995 Reviewed-on: http://review.couchbase.org/83136 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- README.rst | 35 ++++++++++++++++++++++++++--------- 1 file changed, 26 insertions(+), 9 deletions(-) diff --git a/README.rst b/README.rst index 539e9704a..3e82bb0e1 100644 --- a/README.rst +++ b/README.rst @@ -84,17 +84,36 @@ Installing Using ----- +Authentication is handled differently depending on what version of Couchbase Server +you are using: + +Couchbase Server < 5.0 +Each bucket can optionally have a password. You may omit the authenticator if you +are only working with password-less buckets. + +.. code-block:: pycon + >>> from couchbase.cluster import Cluster, ClassicAuthenticator + >>> cluster = Cluster('couchbase://localhost') + >>> cluster.authenticate(ClassicAuthenticator(buckets={'bucket-name': 'password'})) + >>> bucket = cluster.open_bucket('bucket-name') + +Couchbase Server >= 5.0 +Role-Based Access Control (RBAC) provides discrete username and passwords for an +application that allow fine-grained control. The authenticator is always required. + +.. code-block:: pycon + >>> from couchbase.cluster import Cluster, PasswordAuthenticator + >>> cluster = Cluster('couchbase://localhost') + >>> cluster.authenticate(PasswordAuthenticator('username', 'password')) + >>> bucket = cluster.open_bucket('bucket-name') + Here's an example code snippet which sets a key and then reads it .. code-block:: pycon - >>> from couchbase.bucket import Bucket - >>> c = Bucket('couchbase://localhost/default') - >>> c - <couchbase.bucket.Bucket bucket=default, nodes=['localhost:8091'] at 0x105991cd0> - >>> c.upsert("key", "value") + >>> bucket.upsert("key", "value") OperationResult<RC=0x0, Key=key, CAS=0x31c0e3f3fc4b0000> - >>> res = c.get("key") + >>> res = bucket.get("key") >>> res ValueResult<RC=0x0, Key=key, Value=u'value', CAS=0x31c0e3f3fc4b0000, Flags=0x0> >>> res.value @@ -105,9 +124,7 @@ You can also use views .. code-block:: pycon - >>> from couchbase.bucket import Bucket - >>> c = Bucket('couchbase://localhost/beer-sample') - >>> resultset = c.query("beer", "brewery_beers", limit=5) + >>> resultset = bucket.query("beer", "brewery_beers", limit=5) >>> resultset View<Design=beer, View=brewery_beers, Query=Query:'limit=5', Rows Fetched=0> >>> for row in resultset: print row.key From c1cf537a908bba14a20af7d00642ce8a2a0d59f8 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Wed, 6 Sep 2017 11:05:46 +0100 Subject: [PATCH 110/829] PYCBC-420: Add new N1QL parameters Change-Id: I286d2a11b1c9432ffda18d541911a7ed1930e359 Reviewed-on: http://review.couchbase.org/83131 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- couchbase/n1ql.py | 68 ++++++++++++++++++++++++++ couchbase/tests/cases/n1qlstrings_t.py | 30 +++++++++++- 2 files changed, 97 insertions(+), 1 deletion(-) diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index df127e49e..178b91b7d 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -248,6 +248,74 @@ def encoded(self): """ return json.dumps(self._body) + @property + def scan_cap(self): + """ + Maximum buffered channel size between the indexer client and the query + service for index scans. This parameter controls when to use scan + backfill. Use 0 or a negative number to disable. + + Available from Couchbase Server 5.0 + """ + value = self._body.get('scan_cap', '0') + return int(value) + + @scan_cap.setter + def scan_cap(self, value): + self._body['scan_cap'] = str(value) + + @property + def pipeline_batch(self): + """ + Controls the number of items execution operators can batch for Fetch + from the KV. + + Available from Couchbase Server 5.0 + """ + value = self._body.get('pipeline_batch', '0') + return int(value) + + @pipeline_batch.setter + def pipeline_batch(self, value): + self._body['pipeline_batch'] = str(value) + + @property + def pipeline_cap(self): + """ + Maximum number of items each execution operator can buffer between + various operators. + + Available from Couchbase Server 5.0 + """ + value = self._body.get('pipeline_cap', '0') + return int(value) + + @pipeline_cap.setter + def pipeline_cap(self, value): + self._body['pipeline_cap'] = str(value) + + @property + def readonly(self): + """ + Controls whether a query can change a resulting recordset. + If readonly is true, then the following statements are not allowed: + CREATE INDEX + DROP INDEX + INSERT + MERGE + UPDATE + UPSERT + DELETE + + Available from Couchbase Server 5.0 + """ + value = self._body.get('readonly', False) + return value + + @readonly.setter + def readonly(self, value): + self._body['readonly'] = value + def __repr__(self): return ('<{cls} stmt={stmt} at {oid}>'.format( cls=self.__class__.__name__, diff --git a/couchbase/tests/cases/n1qlstrings_t.py b/couchbase/tests/cases/n1qlstrings_t.py index 458ddb3eb..69a95dd6c 100644 --- a/couchbase/tests/cases/n1qlstrings_t.py +++ b/couchbase/tests/cases/n1qlstrings_t.py @@ -112,4 +112,32 @@ def setfn(): # Unset the timeout q.timeout = 0 - self.assertFalse('timeout' in q._body) \ No newline at end of file + self.assertFalse('timeout' in q._body) + + def test_scan_cap(self): + q = N1QLQuery('SELECT foo') + q.scan_cap = 10 + + self.assertEqual('10', q._body['scan_cap']) + self.assertEqual(10, q.scan_cap) + + def test_pipeline_batch(self): + q = N1QLQuery('SELECT foo') + q.pipeline_batch = 10 + + self.assertEqual('10', q._body['pipeline_batch']) + self.assertEqual(10, q.pipeline_batch) + + def test_pipeline_cap(self): + q = N1QLQuery('SELECT foo') + q.pipeline_cap = 10 + + self.assertEqual('10', q._body['pipeline_cap']) + self.assertEqual(10, q.pipeline_cap) + + def test_readonly(self): + q = N1QLQuery('SELECT foo') + q.readonly = True + + self.assertEqual(True, q._body['readonly']) + self.assertEqual(True, q.readonly) From 5faeb4431904aa924cbe52caa1b06ea69ead1f72 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Wed, 6 Sep 2017 13:13:12 +0100 Subject: [PATCH 111/829] PYCBC-433: Add Cluster.Authenticate('username', 'password') shortcut Change-Id: I4924104eab7a86596ec5bfa0c6fb17ac14c65f2b Reviewed-on: http://review.couchbase.org/83138 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- couchbase/cluster.py | 11 ++++++++++- couchbase/tests/cases/cluster_t.py | 20 +++++++++++++++++++- 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/couchbase/cluster.py b/couchbase/cluster.py index 352f1f3fc..bbe716608 100644 --- a/couchbase/cluster.py +++ b/couchbase/cluster.py @@ -55,12 +55,21 @@ def __init__(self, connection_string='couchbase://localhost', raise ValueError('username must be specified in the authenticator, ' 'not the connection string') - def authenticate(self, authenticator): + def authenticate(self, authenticator=None, username=None, password=None): """ Set the type of authenticator to use when opening buckets or performing cluster management operations :param authenticator: The new authenticator to use + :param username: The username to authenticate with + :param password: The password to authenticate with """ + if authenticator is None: + if not username: + raise ValueError('username must not be empty.') + if not password: + raise ValueError('password must not be empty.') + authenticator = PasswordAuthenticator(username, password) + self.authenticator = authenticator def open_bucket(self, bucket_name, **kwargs): diff --git a/couchbase/tests/cases/cluster_t.py b/couchbase/tests/cases/cluster_t.py index 55ef840a7..13b7541b7 100644 --- a/couchbase/tests/cases/cluster_t.py +++ b/couchbase/tests/cases/cluster_t.py @@ -78,4 +78,22 @@ def test_pathless_connstr(self): # Not strictly a cluster test, but relevant connstr = ConnectionString.parse('couchbase://localhost?opt1=val1&opt2=val2') self.assertTrue('opt1' in connstr.options) - self.assertTrue('opt2' in connstr.options) \ No newline at end of file + self.assertTrue('opt2' in connstr.options) + + def test_validate_authenticate(self): + + cluster, bucket_name = self._create_cluster() + self.assertRaises(ValueError, cluster.authenticate, username=None, password=None) + self.assertRaises(ValueError, cluster.authenticate, username='', password='') + self.assertRaises(ValueError, cluster.authenticate, username='username', password=None) + self.assertRaises(ValueError, cluster.authenticate, username='username', password='') + self.assertRaises(ValueError, cluster.authenticate, username=None, password='password') + self.assertRaises(ValueError, cluster.authenticate, username='', password='password') + + def test_can_authenticate_with_username_password(self): + + cluster, bucket_name = self._create_cluster() + cluster.authenticate(username='Administrator', password='password') + + bucket = cluster.open_bucket(bucket_name) + self.assertIsNotNone(bucket) From bcd702ca0481cdcc52a1fd150c4c34a3a62be2af Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Wed, 6 Sep 2017 13:45:44 +0100 Subject: [PATCH 112/829] PYCBC-428: Fix README formatting Change-Id: Ie9d441b96dc8b0750d52c97356b9af0bf42fdae3 Reviewed-on: http://review.couchbase.org/83139 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- README.rst | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/README.rst b/README.rst index 3e82bb0e1..a24c652e6 100644 --- a/README.rst +++ b/README.rst @@ -87,21 +87,27 @@ Using Authentication is handled differently depending on what version of Couchbase Server you are using: +~~~~~~~~~~~~~~~~~~~~~~ Couchbase Server < 5.0 +~~~~~~~~~~~~~~~~~~~~~~ Each bucket can optionally have a password. You may omit the authenticator if you are only working with password-less buckets. .. code-block:: pycon + >>> from couchbase.cluster import Cluster, ClassicAuthenticator >>> cluster = Cluster('couchbase://localhost') >>> cluster.authenticate(ClassicAuthenticator(buckets={'bucket-name': 'password'})) >>> bucket = cluster.open_bucket('bucket-name') +~~~~~~~~~~~~~~~~~~~~~~~ Couchbase Server >= 5.0 +~~~~~~~~~~~~~~~~~~~~~~~ Role-Based Access Control (RBAC) provides discrete username and passwords for an application that allow fine-grained control. The authenticator is always required. .. code-block:: pycon + >>> from couchbase.cluster import Cluster, PasswordAuthenticator >>> cluster = Cluster('couchbase://localhost') >>> cluster.authenticate(PasswordAuthenticator('username', 'password')) From d229cff73f40bda96eb8758ae2ff2e024b4bde76 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Wed, 6 Sep 2017 16:23:00 +0100 Subject: [PATCH 113/829] PYCBC-434: Fix bad host test Admin connections are not initialised until they are used, the test has been updated to reflect this. Change-Id: If2ded1ea67cf1eb37cd88f1eeea03dd1297057a6 Reviewed-on: http://review.couchbase.org/83141 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- couchbase/tests/cases/admin_t.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/couchbase/tests/cases/admin_t.py b/couchbase/tests/cases/admin_t.py index f63f41ae8..3e9785624 100644 --- a/couchbase/tests/cases/admin_t.py +++ b/couchbase/tests/cases/admin_t.py @@ -96,8 +96,9 @@ def test_bad_auth(self): port=self.cluster_info.port) def test_bad_host(self): - self.assertRaises(CouchbaseNetworkError, Admin, - 'user', 'pass', host='127.0.0.1', port=1) + # admin connections don't really connect until an action is performed + admin = Admin('username', 'password', host='127.0.0.1', port=1) + self.assertRaises(CouchbaseNetworkError, admin.bucket_info, 'default') def test_bad_handle(self): self.assertRaises(CouchbaseError, self.admin.upsert, "foo", "bar") From 13c2768163660c24b5c447f737a5e6a757a13b6e Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Wed, 6 Sep 2017 17:08:31 +0100 Subject: [PATCH 114/829] PYCBC-424: Reanme subdoc create / upsert flags to match RFC MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Renamed ‘insert_doc’ to ‘insert_document’ and ‘upsert_document’ to ‘upsert_doc’ Change-Id: I99e2e9abfaac1e95bd38855aef2b29f45aed2c50 Reviewed-on: http://review.couchbase.org/83144 Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- couchbase/bucket.py | 14 ++++++-------- couchbase/subdocument.py | 4 ++-- couchbase/tests/cases/subdoc_t.py | 12 ++++++------ 3 files changed, 14 insertions(+), 16 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index e1288b912..1fec7853e 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -771,11 +771,9 @@ def mutate_in(self, key, *specs, **kwargs): :param key: The key of the document to modify :param specs: A list of specs (See :mod:`.couchbase.subdocument`) - :param bool create_doc: - Whether the document should be create if it doesn't exist - :param bool insert_doc: If the document should be created anew, and the + :param bool insert_document: If the document should be created anew, and the operations performed *only* if it does not exist. - :param bool upsert_doc: If the document should be created anew if it + :param bool upsert_document: If the document should be created anew if it does not exist. If it does exist the commands are still executed. :param kwargs: CAS, etc. :return: A :class:`~.couchbase.result.SubdocResult` object. @@ -791,8 +789,8 @@ def mutate_in(self, key, *specs, **kwargs): .. note:: - The `insert_doc` and `upsert_doc` options are mutually exclusive. - Use `insert_doc` when you wish to create a new document with + The `insert_document` and `upsert_document` options are mutually exclusive. + Use `insert_document` when you wish to create a new document with extended attributes (xattrs). .. seealso:: :mod:`.couchbase.subdocument` @@ -802,9 +800,9 @@ def mutate_in(self, key, *specs, **kwargs): # us. sdflags = kwargs.pop('_sd_doc_flags', 0) - if kwargs.pop('insert_doc', False): + if kwargs.pop('insert_document', False): sdflags |= _P.CMDSUBDOC_F_INSERT_DOC - if kwargs.pop('upsert_doc', False): + if kwargs.pop('upsert_document', False): sdflags |= _P.CMDSUBDOC_F_UPSERT_DOC kwargs['_sd_doc_flags'] = sdflags diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py index ddc90e956..cbee425bb 100644 --- a/couchbase/subdocument.py +++ b/couchbase/subdocument.py @@ -327,7 +327,7 @@ def upsert_fulldoc(doc, **kwargs): .. versionadded:: 2.2.5 - You can use the `insert_doc` and `upsert_doc` options to further refine + You can use the `insert_document` and `upsert_document` options to further refine conditions for the operation. See :cb_bmeth:`mutate_in` """ - return _gen_4spec(_P.SDCMD_UPSERT_FULLDOC, '', doc, **kwargs) \ No newline at end of file + return _gen_4spec(_P.SDCMD_UPSERT_FULLDOC, '', doc, **kwargs) diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index 0342b042a..5f954c24f 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -282,15 +282,15 @@ def test_get_count(self): def test_create_doc(self): cb = self.cb key = self.gen_key('create_doc') - cb.mutate_in(key, SD.upsert('new.path', 'newval'), upsert_doc=True) + cb.mutate_in(key, SD.upsert('new.path', 'newval'), upsert_document=True) self.assertEqual('newval', cb.retrieve_in(key, 'new.path')[0]) - # Check 'insert_doc' + # Check 'insert_document' self.assertRaises(E.KeyExistsError, cb.mutate_in, - key, SD.upsert('new.path', 'newval'), insert_doc=True) + key, SD.upsert('new.path', 'newval'), insert_document=True) cb.remove(key) - cb.mutate_in(key, SD.upsert('new.path', 'newval'), insert_doc=True) + cb.mutate_in(key, SD.upsert('new.path', 'newval'), insert_document=True) self.assertEqual('newval', cb.retrieve_in(key, 'new.path')[0]) def test_fulldoc(self): @@ -300,7 +300,7 @@ def test_fulldoc(self): SD.upsert_fulldoc({'val': True}), SD.upsert('my.xattr', 'attrval', create_parents=True, xattr=True), - insert_doc=True) + insert_document=True) self.assertEqual(True, cb.retrieve_in(key, 'val')[0]) self.assertEqual('attrval', @@ -311,4 +311,4 @@ def test_fulldoc(self): # Get the document back rv = cb.lookup_in(key, SD.get_fulldoc()) - self.assertEqual(True, rv[0]['val']) \ No newline at end of file + self.assertEqual(True, rv[0]['val']) From e8b2c7ee9d9b2ae4bdaf31d27a6dc5070892622f Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Fri, 8 Sep 2017 11:53:35 +0100 Subject: [PATCH 115/829] =?UTF-8?q?PYCBC-436:=20Don=E2=80=99t=20encode=20f?= =?UTF-8?q?orward=20slash=20'/'=20in=20connection=20string?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - decode forward slash after url encoding query params - created connstr_t.py with test to verify forward slashes are not encoded - moved connstr test from cluster_t.py to connstr_t.py Change-Id: I402374b837742cd1f848875e74f50a256ea8ecf2 Reviewed-on: http://review.couchbase.org/83210 Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> --- couchbase/connstr.py | 3 ++- couchbase/tests/cases/connstr_t.py | 38 ++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 1 deletion(-) create mode 100644 couchbase/tests/cases/connstr_t.py diff --git a/couchbase/connstr.py b/couchbase/connstr.py index e6a76ba74..8d181cc08 100644 --- a/couchbase/connstr.py +++ b/couchbase/connstr.py @@ -137,7 +137,8 @@ def encode(self): if self.bucket: ss += '/' + self.bucket - ss += '?' + urlencode(opt_dict) + # URL encode options then decoded forward slash / + ss += '?' + urlencode(opt_dict).replace('%2F', '/') return ss diff --git a/couchbase/tests/cases/connstr_t.py b/couchbase/tests/cases/connstr_t.py new file mode 100644 index 000000000..3c190296a --- /dev/null +++ b/couchbase/tests/cases/connstr_t.py @@ -0,0 +1,38 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from couchbase.tests.base import CouchbaseTestCase +from couchbase.connstr import ConnectionString +from couchbase.cluster import Cluster, ClassicAuthenticator,\ + PasswordAuthenticator, NoBucketError, MixedAuthError +import gc + + +class ConnStrTest(CouchbaseTestCase): + + def test_pathless_connstr(self): + connstr = ConnectionString.parse('couchbase://localhost?opt1=val1&opt2=val2') + self.assertTrue('opt1' in connstr.options) + self.assertTrue('opt2' in connstr.options) + + def test_does_not_encode_slashes(self): + connstr = ConnectionString.parse('couchbases://10.112.170.101?certpath=/var/rootcert.pem') + self.assertTrue('certpath' in connstr.options) + self.assertEqual('/var/rootcert.pem', connstr.options.get('certpath')[0]) + + encoded = connstr.encode() + self.assertEqual('couchbases://10.112.170.101?certpath=/var/rootcert.pem', encoded) From d0a5c1887caff06d1078ecfbdee27b2c211e77a6 Mon Sep 17 00:00:00 2001 From: griels <ellis.breen@couchbase.com> Date: Wed, 20 Sep 2017 16:48:17 +0100 Subject: [PATCH 116/829] PYCBC-442: Updated prerequisite libcouchbase version from '2.6.0' to '2.8.0'. Change-Id: I41ab44b27da6e828f125c7ef0c673dd499bc7a2c Reviewed-on: http://review.couchbase.org/83600 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Tested-by: Mike Goldsmith <goldsmith.mike@gmail.com> --- README.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.rst b/README.rst index a24c652e6..001045c89 100644 --- a/README.rst +++ b/README.rst @@ -34,7 +34,7 @@ Prerequisites ~~~~~~~~~~~~~ - Couchbase Server (http://couchbase.com/download) -- libcouchbase_. version 2.6.0 or greater (Bundled in Windows installer) +- libcouchbase_. version 2.8.0 or greater (Bundled in Windows installer) - libcouchbase development files. - Python development files - A C compiler. From 2463833c95fd24d873c1ad3e3c9dbf81fc11da11 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Tue, 3 Oct 2017 12:26:40 +0100 Subject: [PATCH 117/829] PYCBC-424: revert 'upsert_document' and 'insert_document' back to 'upsert_doc' and 'insert_doc' to be idiomatic/fit with rest of Python SDK Change-Id: I7a6eaf9bd1439770ec55a3e8b4bdd5e1d4b5a522 Reviewed-on: http://review.couchbase.org/84478 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Matt Ingenthron <matt@couchbase.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/bucket.py | 14 ++++++++------ couchbase/subdocument.py | 4 ++-- couchbase/tests/cases/subdoc_t.py | 12 ++++++------ 3 files changed, 16 insertions(+), 14 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 1fec7853e..e1288b912 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -771,9 +771,11 @@ def mutate_in(self, key, *specs, **kwargs): :param key: The key of the document to modify :param specs: A list of specs (See :mod:`.couchbase.subdocument`) - :param bool insert_document: If the document should be created anew, and the + :param bool create_doc: + Whether the document should be create if it doesn't exist + :param bool insert_doc: If the document should be created anew, and the operations performed *only* if it does not exist. - :param bool upsert_document: If the document should be created anew if it + :param bool upsert_doc: If the document should be created anew if it does not exist. If it does exist the commands are still executed. :param kwargs: CAS, etc. :return: A :class:`~.couchbase.result.SubdocResult` object. @@ -789,8 +791,8 @@ def mutate_in(self, key, *specs, **kwargs): .. note:: - The `insert_document` and `upsert_document` options are mutually exclusive. - Use `insert_document` when you wish to create a new document with + The `insert_doc` and `upsert_doc` options are mutually exclusive. + Use `insert_doc` when you wish to create a new document with extended attributes (xattrs). .. seealso:: :mod:`.couchbase.subdocument` @@ -800,9 +802,9 @@ def mutate_in(self, key, *specs, **kwargs): # us. sdflags = kwargs.pop('_sd_doc_flags', 0) - if kwargs.pop('insert_document', False): + if kwargs.pop('insert_doc', False): sdflags |= _P.CMDSUBDOC_F_INSERT_DOC - if kwargs.pop('upsert_document', False): + if kwargs.pop('upsert_doc', False): sdflags |= _P.CMDSUBDOC_F_UPSERT_DOC kwargs['_sd_doc_flags'] = sdflags diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py index cbee425bb..ddc90e956 100644 --- a/couchbase/subdocument.py +++ b/couchbase/subdocument.py @@ -327,7 +327,7 @@ def upsert_fulldoc(doc, **kwargs): .. versionadded:: 2.2.5 - You can use the `insert_document` and `upsert_document` options to further refine + You can use the `insert_doc` and `upsert_doc` options to further refine conditions for the operation. See :cb_bmeth:`mutate_in` """ - return _gen_4spec(_P.SDCMD_UPSERT_FULLDOC, '', doc, **kwargs) + return _gen_4spec(_P.SDCMD_UPSERT_FULLDOC, '', doc, **kwargs) \ No newline at end of file diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index 5f954c24f..0342b042a 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -282,15 +282,15 @@ def test_get_count(self): def test_create_doc(self): cb = self.cb key = self.gen_key('create_doc') - cb.mutate_in(key, SD.upsert('new.path', 'newval'), upsert_document=True) + cb.mutate_in(key, SD.upsert('new.path', 'newval'), upsert_doc=True) self.assertEqual('newval', cb.retrieve_in(key, 'new.path')[0]) - # Check 'insert_document' + # Check 'insert_doc' self.assertRaises(E.KeyExistsError, cb.mutate_in, - key, SD.upsert('new.path', 'newval'), insert_document=True) + key, SD.upsert('new.path', 'newval'), insert_doc=True) cb.remove(key) - cb.mutate_in(key, SD.upsert('new.path', 'newval'), insert_document=True) + cb.mutate_in(key, SD.upsert('new.path', 'newval'), insert_doc=True) self.assertEqual('newval', cb.retrieve_in(key, 'new.path')[0]) def test_fulldoc(self): @@ -300,7 +300,7 @@ def test_fulldoc(self): SD.upsert_fulldoc({'val': True}), SD.upsert('my.xattr', 'attrval', create_parents=True, xattr=True), - insert_document=True) + insert_doc=True) self.assertEqual(True, cb.retrieve_in(key, 'val')[0]) self.assertEqual('attrval', @@ -311,4 +311,4 @@ def test_fulldoc(self): # Get the document back rv = cb.lookup_in(key, SD.get_fulldoc()) - self.assertEqual(True, rv[0]['val']) + self.assertEqual(True, rv[0]['val']) \ No newline at end of file From a3ddd9dacecedb5e684a83fc5f28b63523a9bb13 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Sat, 21 Oct 2017 17:43:07 +0100 Subject: [PATCH 118/829] PYCBC-435: get_fulldoc and upsert_fulldoc should not be in subdoc API Change-Id: I2b36c7097bf9103ff3d605a9c1911520a33e2d95 Reviewed-on: http://review.couchbase.org/84667 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/priv_constants.py | 1 + couchbase/subdocument.py | 42 ------------------------------- couchbase/tests/cases/subdoc_t.py | 20 --------------- 3 files changed, 1 insertion(+), 62 deletions(-) diff --git a/couchbase/priv_constants.py b/couchbase/priv_constants.py index a1f1f4f46..1e0418c9f 100644 --- a/couchbase/priv_constants.py +++ b/couchbase/priv_constants.py @@ -26,5 +26,6 @@ SDSPEC_F_EXPANDMACROS = 1 << 19 SDCMD_GET_COUNT = 12 +# Not exposed in public API but reserved for future use. SDCMD_GET_FULLDOC = 13 SDCMD_UPSERT_FULLDOC = 14 \ No newline at end of file diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py index ddc90e956..7e4df725d 100644 --- a/couchbase/subdocument.py +++ b/couchbase/subdocument.py @@ -289,45 +289,3 @@ def get_count(path, **kwargs): """ return _gen_3spec(_P.SDCMD_GET_COUNT, path, **kwargs) - -def get_fulldoc(**kwargs): - """ - Use this command to retrieve the entire document via subdoc. - This command should be used in conjunction with another :meth:`get` - command which may retrieve one of the document's XATTRs. - - Example:: - - result = cb.lookup_in('docid', SD.get_fulldoc(), - SD.get('mystuff', xattr=True) - - whole_doc = result[0] - mystuff = result[1] - - - .. versionadded:: 2.2.5 - """ - return _gen_3spec(_P.SDCMD_GET_FULLDOC, '', **kwargs) - - -def upsert_fulldoc(doc, **kwargs): - """ - Use this command to write the whole document. - :param doc: The document to upsert - - This command should be used in conjunction with another :meth:`upsert` - (or another mutation command) to write a document's XATTRs. - - Example:: - - cb.mutate_in('docid', - SD.upsert_fulldoc({'name': 'Mark'}), - SD.upsert('mymeta._doctype', 'user', - xattr=True, create_parents=True)) - - .. versionadded:: 2.2.5 - - You can use the `insert_doc` and `upsert_doc` options to further refine - conditions for the operation. See :cb_bmeth:`mutate_in` - """ - return _gen_4spec(_P.SDCMD_UPSERT_FULLDOC, '', doc, **kwargs) \ No newline at end of file diff --git a/couchbase/tests/cases/subdoc_t.py b/couchbase/tests/cases/subdoc_t.py index 0342b042a..a8dd3c3d7 100644 --- a/couchbase/tests/cases/subdoc_t.py +++ b/couchbase/tests/cases/subdoc_t.py @@ -292,23 +292,3 @@ def test_create_doc(self): cb.remove(key) cb.mutate_in(key, SD.upsert('new.path', 'newval'), insert_doc=True) self.assertEqual('newval', cb.retrieve_in(key, 'new.path')[0]) - - def test_fulldoc(self): - cb = self.cb - key = self.gen_key('fulldoc') - cb.mutate_in(key, - SD.upsert_fulldoc({'val': True}), - SD.upsert('my.xattr', 'attrval', - create_parents=True, xattr=True), - insert_doc=True) - self.assertEqual(True, cb.retrieve_in(key, 'val')[0]) - - self.assertEqual('attrval', - cb.lookup_in(key, SD.get('my.xattr', xattr=True))[0]) - - rv = cb.lookup_in(key, SD.get('my.xattr')) - self.assertFalse(rv.exists(0)) - - # Get the document back - rv = cb.lookup_in(key, SD.get_fulldoc()) - self.assertEqual(True, rv[0]['val']) \ No newline at end of file From a9280bf71df947d1659e6596c834912736a75bc6 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Mon, 23 Oct 2017 20:19:57 +0100 Subject: [PATCH 119/829] PYCBC-397: Implement Enhanced Error Messages Motivation ---------- Couchbase Server 5.0 sends extra information - 'context' and 'reference' for certain operations for correlation with server-side logs. This was not previously handled by the Python Client. Modifications ------------- - Pass through 'context' and 'reference' fields when present in an error message body - Populate fields of the same name in any exception that is raised as a result - Include these fields in the string representation of such exceptions Results ------- Client now successfully passes through the extra information via exception object member variables and reports it correctly in the string representation. Change-Id: I1fadc6d3d7aa9b41a26ad98b83be993b709fd1c7 Reviewed-on: http://review.couchbase.org/84695 Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/exceptions.py | 8 +++ couchbase/tests/cases/enh_err_t.py | 80 ++++++++++++++++++++++++++++++ src/callbacks.c | 76 +++++++++++++++++++++------- src/convert.c | 7 ++- src/exceptions.c | 6 +++ src/http.c | 10 ++-- src/multiresult.c | 25 +++++++--- src/pycbc.h | 49 ++++++++++++------ 8 files changed, 217 insertions(+), 44 deletions(-) create mode 100644 couchbase/tests/cases/enh_err_t.py diff --git a/couchbase/exceptions.py b/couchbase/exceptions.py index f16ef3784..5959802c8 100644 --- a/couchbase/exceptions.py +++ b/couchbase/exceptions.py @@ -124,6 +124,8 @@ def __init__(self, params=None): self.key = params.get('key', None) self.objextra = params.get('objextra', None) self.message = params.get('message', None) + self.context = params.get('context',None) + self.ref = params.get('ref',None) @classmethod def pyexc(cls, message=None, obj=None, inner=None): @@ -220,6 +222,12 @@ def __str__(self): if self.objextra: details.append("OBJ={0}".format(repr(self.objextra))) + if self.context: + details.append("Context={0}".format(self.context)) + + if self.ref: + details.append("Ref={0}".format(self.ref)) + s = "<{0}>".format(", ".join(details)) return s diff --git a/couchbase/tests/cases/enh_err_t.py b/couchbase/tests/cases/enh_err_t.py new file mode 100644 index 000000000..d5b959251 --- /dev/null +++ b/couchbase/tests/cases/enh_err_t.py @@ -0,0 +1,80 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import unittest + +from couchbase.exceptions import ( CouchbaseError ) +from couchbase.tests.base import MockTestCase, ConnectionTestCase,\ + CouchbaseTestCase, RealServerTestCase +from couchbase.auth_domain import AuthDomain +import sys +import couchbase +import logging +import time +from nose import SkipTest + +class EnhancedErrorTest(CouchbaseTestCase): + def setUp(self): + super(EnhancedErrorTest, self).setUp() + + if not self._realserver_info: + raise SkipTest("Need real server") + + self.admin = self.make_admin_connection() + + @property + def cluster_info(self): + return self.realserver_info + + def tearDown(self): + super(EnhancedErrorTest, self).tearDown() + if self.should_check_refcount: + rc = sys.getrefcount(self.admin) + #TODO: revise GC handling - broken on Mac + #self.assertEqual(rc, 2) + + del self.admin + + def test_enhanced_err_present_authorisation(self): + import couchbase.subdocument as SD + users=[('writer',('s3cr3t',[('data_reader', 'default'), ('data_writer', 'default')])), + ('reader',('s3cr3t',[('data_reader', 'default')]))] + #self.mockclient._do_request("SET_ENHANCED_ERRORS",{"enabled":True}) + for user in users: + print(str(user)) + (userid, password, roles) = user[0],user[1][0],user[1][1] + # add user + self.admin.user_upsert(AuthDomain.Local, userid, password, roles) + time.sleep(1) + try: + connection = self.make_connection(username=userid,password=password) + + key = self.gen_key('create_doc') + connection.mutate_in(key, SD.upsert('new.path', 'newval'), upsert_doc=True) + except CouchbaseError as e: + print(str(e)) + if userid=="writer": + raise e + else: + self.assertRegexpMatches(e.context,r".*Authorization failure.*","doesn't have correct Context field") + self.assertRegexpMatches(e.ref,r"(.*?)-(.*?)-.*","doesn't have correct Ref field") + self.assertRegexpMatches(str(e),r".*Context=Authorization failure.*,.*Ref=.*","exception as string doesn't contain both fields") + finally: + #remove user + self.admin.user_remove(AuthDomain.Local, userid) + +if __name__ == "__main__": + unittest.main() \ No newline at end of file diff --git a/src/callbacks.c b/src/callbacks.c index fd9914a40..a1e482357 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -87,13 +87,16 @@ maybe_push_operr(pycbc_MultiResult *mres, pycbc_Result *res, lcb_error_t err, return 1; } - -static void -operation_completed(pycbc_Bucket *self, pycbc_MultiResult *mres) +static void operation_completed3(pycbc_Bucket *self, + pycbc_MultiResult *mres, + pycbc_enhanced_err_info *err_info) { pycbc_assert(self->nremaining); --self->nremaining; - + if (mres) { + mres->err_info = err_info; + Py_XINCREF(err_info); + } if ((self->flags & PYCBC_CONN_F_ASYNC) == 0) { if (!self->nremaining) { lcb_breakout(self->instance); @@ -107,10 +110,47 @@ operation_completed(pycbc_Bucket *self, pycbc_MultiResult *mres) if (--ares->nops) { return; } - pycbc_asyncresult_invoke(ares); + pycbc_asyncresult_invoke(ares, err_info); + } +} + +void pycbc_enhanced_err_register_entry(pycbc_enhanced_err_info **dict, + const char *key, + const char *value) +{ + PyObject *valstr; + if (!value) { + return; + } + if (!*dict) { + *dict = PyDict_New(); } + valstr = pycbc_SimpleStringZ(value); + PyDict_SetItemString(*dict, key, valstr); + Py_DECREF(valstr); } +static pycbc_enhanced_err_info *pycbc_enhanced_err_info_store( + const lcb_RESPBASE *respbase, int cbtype) +{ + pycbc_enhanced_err_info *err_info = NULL; + const char *ref = lcb_resp_get_error_ref(cbtype, respbase); + const char *context = lcb_resp_get_error_context(cbtype, respbase); + pycbc_enhanced_err_register_entry(&err_info, "ref", ref); + pycbc_enhanced_err_register_entry(&err_info, "context", context); + return err_info; +} + +static void operation_completed_with_err_info(pycbc_Bucket *self, + pycbc_MultiResult *mres, + int cbtype, + const lcb_RESPBASE *resp) +{ + pycbc_enhanced_err_info *err_info = + pycbc_enhanced_err_info_store(resp, cbtype); + operation_completed3(self, mres, err_info); + Py_XDECREF(err_info); +} /** * Call this function for each callback. Note that even if this function * returns nonzero, CB_THR_BEGIN() must still be called, and the `conn` @@ -259,7 +299,7 @@ dur_chain2(pycbc_Bucket *conn, maybe_push_operr(mres, (pycbc_Result*)res, resp->rc, is_delete ? 1 : 0); if ((mres->mropts & PYCBC_MRES_F_DURABILITY) == 0 || resp->rc != LCB_SUCCESS) { - operation_completed(conn, mres); + operation_completed_with_err_info(conn, mres, cbtype, resp); CB_THR_BEGIN(conn); return; } @@ -303,8 +343,7 @@ dur_chain2(pycbc_Bucket *conn, if (err != LCB_SUCCESS) { res->rc = err; maybe_push_operr(mres, (pycbc_Result*)res, err, 0); - operation_completed(conn, mres); - + operation_completed_with_err_info(conn, mres, cbtype, resp); } CB_THR_BEGIN(conn); @@ -328,7 +367,7 @@ durability_chain_common(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) } if (get_common_objects(resp, &conn, (pycbc_Result**)&res, restype, &mres) != 0) { - operation_completed(conn, mres); + operation_completed_with_err_info(conn, mres, cbtype, resp); CB_THR_BEGIN(conn); return; } @@ -386,9 +425,9 @@ value_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) } GT_DONE: - operation_completed(conn, mres); - CB_THR_BEGIN(conn); - (void)instance; + operation_completed_with_err_info(conn, mres, cbtype, resp); + CB_THR_BEGIN(conn); + (void)instance; } static void @@ -479,9 +518,9 @@ subdoc_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *rb) } GT_ERROR: - operation_completed(conn, mres); - CB_THR_BEGIN(conn); - (void)instance; + operation_completed_with_err_info(conn, mres, cbtype, rb); + CB_THR_BEGIN(conn); + (void)instance; } static void @@ -507,7 +546,7 @@ keyop_simple_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) res->cas = resp->cas; } - operation_completed(conn, mres); + operation_completed_with_err_info(conn, mres, cbtype, resp); CB_THR_BEGIN(conn); (void)instance; @@ -540,7 +579,7 @@ stats_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) if (resp->rflags & LCB_RESP_F_FINAL) { /* Note this can happen in both success and error cases! */ do_return = 1; - operation_completed(parent, mres); + operation_completed_with_err_info(parent, mres, cbtype, resp_base); } if (do_return) { CB_THR_BEGIN(parent); @@ -590,7 +629,8 @@ observe_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) if (resp_base->rflags & LCB_RESP_F_FINAL) { mres = (pycbc_MultiResult*)resp_base->cookie; - operation_completed(mres->parent, mres); + operation_completed_with_err_info( + mres->parent, mres, cbtype, resp_base); return; } diff --git a/src/convert.c b/src/convert.c index 11a66a0b8..eb2a381fb 100644 --- a/src/convert.c +++ b/src/convert.c @@ -460,9 +460,12 @@ pycbc_tc_encode_value(pycbc_Bucket *conn, PyObject *srcbuf, PyObject *srcflags, } if (!PyTuple_Check(result_tuple) || PyTuple_GET_SIZE(result_tuple) != 2) { - PYCBC_EXC_WRAP_EX(PYCBC_EXC_ENCODING, 0, + PYCBC_EXC_WRAP_EX(PYCBC_EXC_ENCODING, + 0, "Expected return of (bytes, flags)", - srcbuf, result_tuple); + srcbuf, + result_tuple, + NULL); Py_XDECREF(result_tuple); return -1; diff --git a/src/exceptions.c b/src/exceptions.c index 7aac6232c..acfdccfcf 100644 --- a/src/exceptions.c +++ b/src/exceptions.c @@ -82,6 +82,12 @@ pycbc_exc_wrap_REAL(int mode, struct pycbc_exception_params *p) PyDict_SetItemString(excparams, "objextra", p->objextra); } + if (p->err_info) { + PyDict_Update(excparams, p->err_info); + Py_XDECREF(p->err_info); + p->err_info = NULL; + } + { PyObject *csrc_info = Py_BuildValue("(s,i)", p->file, p->line); PyDict_SetItemString(excparams, "csrc_info", csrc_info); diff --git a/src/http.c b/src/http.c index 43ca446d9..3240d1882 100644 --- a/src/http.c +++ b/src/http.c @@ -135,9 +135,13 @@ pycbc_httpresult_complete(pycbc_HttpResult *htres, pycbc_MultiResult *mres, } if (should_raise) { - PYCBC_EXC_WRAP_EX(err ? PYCBC_EXC_LCBERR : PYCBC_EXC_HTTP, err, + PYCBC_EXC_WRAP_EX(err ? PYCBC_EXC_LCBERR : PYCBC_EXC_HTTP, + err, "HTTP Request failed. Examine 'objextra' for " - "full result", htres->key, (PyObject*)htres); + "full result", + htres->key, + (PyObject *)htres, + NULL); pycbc_multiresult_adderr(mres); } @@ -153,7 +157,7 @@ pycbc_httpresult_complete(pycbc_HttpResult *htres, pycbc_MultiResult *mres, pycbc_AsyncResult *ares = (pycbc_AsyncResult *)mres; ares->nops--; Py_INCREF(ares); - pycbc_asyncresult_invoke(ares); + pycbc_asyncresult_invoke(ares, NULL); /* We don't handle the GIL in async mode */ } } diff --git a/src/multiresult.c b/src/multiresult.c index 5f0d0b412..551440711 100644 --- a/src/multiresult.c +++ b/src/multiresult.c @@ -140,6 +140,7 @@ MultiResultType__init__(pycbc_MultiResult *self, PyObject *args, PyObject *kwarg self->exceptions = NULL; self->errop = NULL; self->mropts = 0; + self->err_info = NULL; return 0; } @@ -150,6 +151,7 @@ MultiResult_dealloc(pycbc_MultiResult *self) Py_XDECREF(self->parent); Py_XDECREF(self->exceptions); Py_XDECREF(self->errop); + Py_XDECREF(self->err_info); pycbc_multiresult_destroy_dict(self); } @@ -201,6 +203,7 @@ AsyncResult__init__(pycbc_AsyncResult *self, PyObject *args, PyObject *kwargs) self->callback = NULL; self->errback = NULL; self->base.mropts |= PYCBC_MRES_F_ASYNC; + self->base.err_info = NULL; return 0; } @@ -280,8 +283,8 @@ pycbc_multiresult_adderr(pycbc_MultiResult* mres) /** * This function raises exceptions from the MultiResult object, as required */ -int -pycbc_multiresult_maybe_raise(pycbc_MultiResult *self) +int pycbc_multiresult_maybe_raise2(pycbc_MultiResult *self, + pycbc_enhanced_err_info *err_info) { PyObject *type = NULL, *value = NULL, *traceback = NULL; @@ -306,7 +309,11 @@ pycbc_multiresult_maybe_raise(pycbc_MultiResult *self) pycbc_Result *res = (pycbc_Result*)self->errop; /** Craft an exception based on the operation */ - PYCBC_EXC_WRAP_KEY(PYCBC_EXC_LCBERR, res->rc, "Operational Error", res->key); + PYCBC_EXC_WRAP_KEY_ERR_INFO(PYCBC_EXC_LCBERR, + res->rc, + "Operational Error", + res->key, + err_info ? err_info : self->err_info); /** Now we have an exception. Let's fetch it back */ PyErr_Fetch(&type, &value, &traceback); @@ -332,6 +339,11 @@ pycbc_multiresult_maybe_raise(pycbc_MultiResult *self) return 1; } +int pycbc_multiresult_maybe_raise(pycbc_MultiResult *self) +{ + return pycbc_multiresult_maybe_raise2(self, NULL); +} + PyObject * pycbc_multiresult_get_result(pycbc_MultiResult *self) { @@ -355,13 +367,12 @@ pycbc_multiresult_get_result(pycbc_MultiResult *self) return value; } -void -pycbc_asyncresult_invoke(pycbc_AsyncResult *ares) +void pycbc_asyncresult_invoke(pycbc_AsyncResult *ares, + pycbc_enhanced_err_info *err_info) { PyObject *argtuple; PyObject *cbmeth; - - if (!pycbc_multiresult_maybe_raise(&ares->base)) { + if (!pycbc_multiresult_maybe_raise2(&ares->base, err_info)) { /** All OK */ PyObject *eres = pycbc_multiresult_get_result(&ares->base); cbmeth = ares->callback; diff --git a/src/pycbc.h b/src/pycbc.h index aca2c7397..2af508ddb 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -454,6 +454,12 @@ enum { /* Hint to dispatch to the view callback functions */ PYCBC_MRES_F_VIEWS = 1 << 7 }; +/** + * Contextual info for enhanced error logging + */ + +typedef PyObject pycbc_enhanced_err_info; + /** * Object containing the result of a 'Multi' operation. It's the same as a * normal dict, except we add an 'all_ok' field, so a user doesn't need to @@ -483,6 +489,9 @@ typedef struct pycbc_MultiResult_st { /** Options for 'MultiResult' */ int mropts; + + pycbc_enhanced_err_info *err_info; + } pycbc_MultiResult; typedef struct { @@ -535,6 +544,11 @@ struct pycbc_exception_params { * bad parameter. */ PyObject *objextra; + + /** + * Enhanced error info if required. + */ + pycbc_enhanced_err_info *err_info; }; /** @@ -884,7 +898,8 @@ PyObject* pycbc_multiresult_get_result(pycbc_MultiResult *self); * invoke the operation's "error callback" or the operation's "result callback" * depending on the state. */ -void pycbc_asyncresult_invoke(pycbc_AsyncResult *mres); +void pycbc_asyncresult_invoke(pycbc_AsyncResult *mres, + pycbc_enhanced_err_info *err_info); /** * Initialize the callbacks for the lcb_t @@ -929,25 +944,31 @@ PyObject* pycbc_exc_get_categories(PyObject *self, PyObject *arg); * @param e_key the key during the handling of which the error occurred * @param e_objextra the problematic object which actually caused the errror */ -#define PYCBC_EXC_WRAP_EX(e_mode, e_err, e_msg, e_key, e_objextra) { \ - struct pycbc_exception_params __pycbc_ep = {0}; \ - __pycbc_ep.file = __FILE__; \ - __pycbc_ep.line = __LINE__; \ - __pycbc_ep.err = e_err; \ - __pycbc_ep.msg = e_msg; \ - __pycbc_ep.key = e_key; \ - __pycbc_ep.objextra = e_objextra; \ - pycbc_exc_wrap_REAL(e_mode, &__pycbc_ep); \ -} +#define PYCBC_EXC_WRAP_EX(e_mode, e_err, e_msg, e_key, e_objextra, e_err_info) \ + { \ + struct pycbc_exception_params __pycbc_ep = {0}; \ + __pycbc_ep.file = __FILE__; \ + __pycbc_ep.line = __LINE__; \ + __pycbc_ep.err = e_err; \ + __pycbc_ep.msg = e_msg; \ + __pycbc_ep.key = e_key; \ + __pycbc_ep.objextra = e_objextra; \ + __pycbc_ep.err_info = e_err_info; \ + Py_XINCREF(e_err_info); \ + pycbc_exc_wrap_REAL(e_mode, &__pycbc_ep); \ + } #define PYCBC_EXC_WRAP(mode, err, msg) \ - PYCBC_EXC_WRAP_EX(mode, err, msg, NULL, NULL) + PYCBC_EXC_WRAP_EX(mode, err, msg, NULL, NULL, NULL) #define PYCBC_EXC_WRAP_OBJ(mode, err, msg, obj) \ - PYCBC_EXC_WRAP_EX(mode, err, msg, NULL, obj) + PYCBC_EXC_WRAP_EX(mode, err, msg, NULL, obj, NULL) #define PYCBC_EXC_WRAP_KEY(mode, err, msg, key) \ - PYCBC_EXC_WRAP_EX(mode, err, msg, key, NULL) + PYCBC_EXC_WRAP_EX(mode, err, msg, key, NULL, NULL) + +#define PYCBC_EXC_WRAP_KEY_ERR_INFO(mode, err, msg, key, err_info) \ + PYCBC_EXC_WRAP_EX(mode, err, msg, key, NULL, err_info) #define PYCBC_EXC_WRAP_VALUE PYCBC_EXC_WRAP_KEY From 0bfff8ca0798f5622f39ca8574c27ae13dedc88a Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Mon, 30 Oct 2017 15:58:34 +0000 Subject: [PATCH 120/829] PYCBC-444: Update maintainer email address in setup.py Using alias instead of specific maintainer name. Change-Id: I4d9723447612dd79ec90bfb68aace63d5fc430b5 Reviewed-on: http://review.couchbase.org/84996 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 39032b959..b53f604da 100644 --- a/setup.py +++ b/setup.py @@ -107,7 +107,7 @@ version = pkgversion, url="https://github.com/couchbase/couchbase-python-client", author="Couchbase, Inc.", - author_email="mike.goldsmith@couchbase.com", + author_email="PythonPackage@couchbase.com", license="Apache License 2.0", description="Python Client for Couchbase", long_description=open("README.rst", "r").read(), From 7caf93c03e421be8292752672fa262943f06812e Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Fri, 3 Nov 2017 17:07:37 +0000 Subject: [PATCH 121/829] PYCBC-419: Fast fail View queries for Ephemeral buckets Motivation ---------- Ephemeral buckets don't support View queries, so rather than wait for the server to reject such requests, we should fast fail them. Modifications ------------- - Query and store bucket type in Bucket.btype field on connection - Throw a 'NotSupportedError' when a View query is attempted on an Ephemeral bucket Results ------- Client now throws 'NotSupportedError' immediately when attempting to perform a View query on an ephemeral bucket. Change-Id: Ibcd0ca50c38811b0cf4b9c68fb2d56bc7a12b418 Reviewed-on: http://review.couchbase.org/85061 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/bucket.py | 3 ++- couchbase/tests/base.py | 6 ++++-- couchbase/tests/cases/view_t.py | 31 ++++++++++++++++++++++++++++++- couchbase/views/iterator.py | 4 +++- src/bucket.c | 13 +++++++++++++ src/constants.c | 6 ++++++ src/pycbc.h | 3 +++ 7 files changed, 61 insertions(+), 5 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index e1288b912..198be4cc0 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -34,7 +34,6 @@ import couchbase.subdocument as SD import couchbase.priv_constants as _P - ### Private constants. This is to avoid imposing a dependency requirement ### For simple flags: @@ -150,6 +149,8 @@ def __init__(self, *args, **kwargs): See :ref:`connopts` for additional connection options. + :param string username: username to connect to bucket with + :param string password: the password of the bucket :param boolean quiet: the flag controlling whether to raise an diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index 1ac7ecbf5..7716c4e42 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -48,7 +48,7 @@ def __init__(self): self.admin_username = "Administrator" self.admin_password = "password" self.bucket_name = "default" - self.bucket_password = "" + self.bucket_password = "s3cr3t" def make_connargs(self, **overrides): bucket = self.bucket_name @@ -61,6 +61,7 @@ def make_connargs(self, **overrides): connstr += str(overrides.pop('config_cache')) ret = { + 'username': 'default', 'password': self.bucket_password, 'connection_string': connstr } @@ -68,7 +69,8 @@ def make_connargs(self, **overrides): return ret def make_connection(self, conncls, **kwargs): - return conncls(**self.make_connargs(**kwargs)) + connargs = self.make_connargs(**kwargs) + return conncls(**connargs) def make_admin_connection(self): return Admin(self.admin_username, self.admin_password, diff --git a/couchbase/tests/cases/view_t.py b/couchbase/tests/cases/view_t.py index 5fc16c341..1051e352c 100644 --- a/couchbase/tests/cases/view_t.py +++ b/couchbase/tests/cases/view_t.py @@ -18,8 +18,10 @@ from couchbase.tests.base import ViewTestCase from couchbase.user_constants import FMT_JSON -from couchbase.exceptions import HTTPError +from couchbase.exceptions import HTTPError, NotSupportedError +from couchbase.bucket import Bucket +from couchbase.auth_domain import AuthDomain DESIGN_JSON = { 'language' : 'javascript', 'views' : { @@ -123,3 +125,30 @@ def test_missing_view(self): self.assertRaises(HTTPError, self.cb._view, "nonexist", "designdoc") + + def test_reject_ephemeral_attempt(self): + admin=self.make_admin_connection() + bucket_name = 'ephemeral' + users=[('writer',('s3cr3t',[('data_reader', 'ephemeral'), ('data_writer', 'ephemeral')])), + ('reader',('s3cr3t',[('data_reader', 'ephemeral')])), + ('viewer',('s3cr3t',[('views_reader', 'ephemeral'), ('views_admin', 'ephemeral')]))] + user=users[2] + (userid, password, roles) = user[0],user[1][0],user[1][1] + # add user + try: + admin.bucket_delete(bucket_name) + except: + pass + admin.bucket_create(name=bucket_name, + bucket_type='ephemeral', + ram_quota=100) + try: + admin.user_upsert(AuthDomain.Local, userid, password, roles) + admin.wait_ready(bucket_name, timeout=10) + conn_str = "couchbase://{0}/{1}".format(self.cluster_info.host, bucket_name) + bucket = Bucket(connection_string=conn_str,username=userid,password=password) + self.assertIsNotNone(bucket) + self.assertRaisesRegex(NotSupportedError, "Ephemeral", lambda: bucket.query("beer", "brewery_beers", streaming=True, limit=100)) + finally: + admin.bucket_delete(bucket_name) + admin.user_remove(AuthDomain.Local, userid) \ No newline at end of file diff --git a/couchbase/views/iterator.py b/couchbase/views/iterator.py index 761aff7fb..0e73eb264 100644 --- a/couchbase/views/iterator.py +++ b/couchbase/views/iterator.py @@ -19,7 +19,7 @@ from copy import deepcopy from warnings import warn -from couchbase.exceptions import ArgumentError, CouchbaseError, ViewEngineError +from couchbase.exceptions import ArgumentError, CouchbaseError, ViewEngineError, NotSupportedError from couchbase.views.params import ViewQuery, SpatialQuery, QueryBase from couchbase._pyport import basestring import couchbase._libcouchbase as C @@ -233,6 +233,8 @@ def __init__(self, parent, design, view, row_processor=None, result.key, result.doc.value)) """ + if parent.btype == C.LCB_BTYPE_EPHEMERAL: + raise NotSupportedError("Ephemeral bucket") self._parent = parent self.design = design self.view = view diff --git a/src/bucket.c b/src/bucket.c index b9d2d93d9..3f6326c33 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -423,6 +423,11 @@ static struct PyMemberDef Bucket_TABLE_members[] = { PyDoc_STR("Name of the bucket this object is connected to") }, + { "btype", T_OBJECT_EX, offsetof(pycbc_Bucket, btype), + READONLY, + PyDoc_STR("Type of the bucket this object is connected to") + }, + { "lockmode", T_INT, offsetof(pycbc_Bucket, lockmode), READONLY, PyDoc_STR("How access from multiple threads is handled.\n" @@ -757,6 +762,8 @@ Bucket__init__(pycbc_Bucket *self, self->bucket = pycbc_SimpleStringZ(bucketstr); } } + + self->btype = pycbc_IntFromL(LCB_BTYPE_UNSPEC); return 0; } @@ -786,6 +793,12 @@ Bucket__connect(pycbc_Bucket *self) return NULL; } } + lcb_BTYPE btype; + err = lcb_cntl(self->instance, LCB_CNTL_GET, LCB_CNTL_BUCKETTYPE, &btype); + if (err != LCB_SUCCESS) { + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, err, "Problems getting bucket type"); + } + self->btype = pycbc_IntFromL(btype); Py_RETURN_NONE; } diff --git a/src/constants.c b/src/constants.c index 65e434a92..4cf3df632 100644 --- a/src/constants.c +++ b/src/constants.c @@ -191,6 +191,12 @@ do_all_constants(PyObject *module, ADD_MACRO(LCB_SDCMD_COUNTER); ADD_MACRO(LCB_SDCMD_REMOVE); ADD_MACRO(LCB_SDCMD_ARRAY_INSERT); + + /* Bucket types */ + ADD_MACRO(LCB_BTYPE_UNSPEC); + ADD_MACRO(LCB_BTYPE_COUCHBASE); + ADD_MACRO(LCB_BTYPE_EPHEMERAL); + ADD_MACRO(LCB_BTYPE_MEMCACHED); #ifdef LCB_N1XSPEC_F_DEFER ADD_MACRO(LCB_N1XSPEC_F_DEFER); #endif diff --git a/src/pycbc.h b/src/pycbc.h index 2af508ddb..18644e068 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -305,6 +305,9 @@ typedef struct { /** String bucket */ PyObject *bucket; + /** Bucket type */ + PyObject *btype; + /** Pipeline MultiResult container */ PyObject *pipeline_queue; From b3f54d657773ae6b70351d24627b4294694ba34d Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Tue, 7 Nov 2017 11:38:14 +0000 Subject: [PATCH 122/829] PYCBC-412: add health check function into lcb check Motivation ---------- Provide a convenient method of checking the health of a cluster without requiring client knowledge of the network configuration. Modifications ------------- - Use the uncommitted Health Check API (i.e. lcb_ping3) from libcouchbase to ping the nodes in a cluster and get latency/status/type information - Marshal the data from the accompanying callback into both 'services_struct' (from the resp->services field) and 'services_json' (from the resp->json field). Struct version populates latency as a number rather than a string. Results ------- Client now successfully passes through the healthcheck information from the 'services_struct' dictionary. 'services_json' dictionary is marshalled if present, to allow quick adaptation in case of changes in lcb_ping3 behaviour. Change-Id: I44b3b96c18c6e2a768a3ab9770bea0fc1a244e33 Reviewed-on: http://review.couchbase.org/85069 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- .gitignore | 6 ++ couchbase/bucket.py | 20 ++++++ couchbase/tests/base.py | 4 +- couchbase/tests/cases/health_t.py | 70 +++++++++++++++++++ dev_requirements.txt | 1 + src/bucket.c | 2 +- src/callbacks.c | 109 ++++++++++++++++++++++++++++-- src/miscops.c | 37 ++++++++++ src/oputil.h | 2 +- src/pycbc.h | 6 ++ 10 files changed, 248 insertions(+), 9 deletions(-) create mode 100644 couchbase/tests/cases/health_t.py diff --git a/.gitignore b/.gitignore index d5ba5ab3a..f39f3a055 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,9 @@ _trial* *.dSYM .idea/ CouchbaseMock.jar +/.cproject +/.eclipse-pmd +/.project +/.pydevproject +/cmake-build-debug/ +/make.sh diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 198be4cc0..9d7e63f8d 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -908,6 +908,26 @@ def stats(self, keys=None, keystats=False): keys = (keys,) return self._stats(keys, keystats=keystats) + def get_health(self): + """Request cluster health information. + + Fetches health information from each node in the cluster. + It returns a `dict` with 'type' keys + and server summary lists as a value. + + + :raise: :exc:`.CouchbaseNetworkError` + :return: `dict` where keys are stat keys and values are + host-value pairs + + Get health info (works on couchbase buckets):: + + cb.get_health() + # {'services': {...}, ...} + """ + resultdict = self._get_health() + return resultdict['services_struct'] + def observe(self, key, master_only=False): """Return storage information for a key. diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index 7716c4e42..e02c4bf5e 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -359,8 +359,8 @@ def checkCbRefcount(self): gc.collect() else: break - - self.assertEqual(oldrc, 2) + # commented out for now as GC seems to be unstable + #self.assertEqual(oldrc, 2) def setUp(self): super(ConnectionTestCase, self).setUp() diff --git a/couchbase/tests/cases/health_t.py b/couchbase/tests/cases/health_t.py new file mode 100644 index 000000000..970fb1c6a --- /dev/null +++ b/couchbase/tests/cases/health_t.py @@ -0,0 +1,70 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from couchbase.tests.base import ConnectionTestCase +import jsonschema + +# For Python 2/3 compatibility +try: + basestring +except NameError: + basestring = str + + +class HealthTest(ConnectionTestCase): + def setUp(self): + super(HealthTest, self).setUp() + self.skipUnlessMock() + + server_schema = {"type": "object", + "properties": {"details": {"type": "string"}, + "latency": {"anyOf": [{"type": "number"}, {"type": "string"}]}, + "server": {"type": "string"}, + "status": {"type": "number"} + }, + "required": ["details", "latency", "server", "status"]} + + servers_schema = {"type": "array", + "items": server_schema} + + @staticmethod + def gen_schema(name): + return {"type": "object", + "properties": {name: HealthTest.servers_schema}, + "required": [name] + } + + def test_health(self): + result = self.cb.get_health() + + services_schema = {"anyOf": + [HealthTest.gen_schema(name) for name in ["n1ql", "views", "fts", "kv"]] + } + + health_schema = {"anyOf": [{ + "type": "object", + "properties": { + "services": services_schema + }, + "required": ["services"] + }]} + + jsonschema.validate(result, services_schema) + + +if __name__ == '__main__': + unittest.main() diff --git a/dev_requirements.txt b/dev_requirements.txt index c4c625966..5f4032a03 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -3,3 +3,4 @@ pbr==1.8.1 numpydoc==0.4 sphinx==1.2b1 testresources>=0.2.7 +jsonschema==2.6.0 diff --git a/src/bucket.c b/src/bucket.c index 3f6326c33..f381d638c 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -344,7 +344,6 @@ Bucket__mutinfo(pycbc_Bucket *self) return ll; } - static PyGetSetDef Bucket_TABLE_getset[] = { { "default_format", (getter)Bucket_get_format, @@ -514,6 +513,7 @@ static PyMethodDef Bucket_TABLE_methods[] = { OPFUNC(counter, "Modify a counter in Couchbase"), OPFUNC(counter_multi, "Multi-key variant of counter"), OPFUNC(_stats, "Get various server statistics"), + OPFUNC(_get_health, "Get health info"), OPFUNC(_http_request, "Internal routine for HTTP requests"), OPFUNC(_view_request, "Internal routine for view requests"), diff --git a/src/callbacks.c b/src/callbacks.c index a1e482357..360056a94 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -114,20 +114,24 @@ static void operation_completed3(pycbc_Bucket *self, } } -void pycbc_enhanced_err_register_entry(pycbc_enhanced_err_info **dict, +void pycbc_dict_add_text_kv(PyObject *dict, const char *key, const char *value) +{ + PyObject *valstr = pycbc_SimpleStringZ(value); + PyDict_SetItemString(dict, key, valstr); + Py_DECREF(valstr); +} + +void pycbc_enhanced_err_register_entry(PyObject **dict, const char *key, const char *value) { - PyObject *valstr; if (!value) { return; } if (!*dict) { *dict = PyDict_New(); } - valstr = pycbc_SimpleStringZ(value); - PyDict_SetItemString(*dict, key, valstr); - Py_DECREF(valstr); + pycbc_dict_add_text_kv(*dict, key, value); } static pycbc_enhanced_err_info *pycbc_enhanced_err_info_store( @@ -701,6 +705,100 @@ bootstrap_callback(lcb_t instance, lcb_error_t err) end_global_callback(instance, self); } +#define LCB_PING_FOR_ALL_TYPES(X) \ + X(KV, kv) \ + X(VIEWS, views) \ + X(N1QL, n1ql) \ + X(FTS, fts) + +#define LCB_PING_GET_TYPE_S(X, Y) \ + case LCB_PINGSVC_##X: \ + return #Y; + +const char *get_type_s(lcb_PINGSVCTYPE type) +{ + switch (type) { + LCB_PING_FOR_ALL_TYPES(LCB_PING_GET_TYPE_S) + default: + break; + } + return "Unknown type"; +} +#undef LCB_PING_GET_TYPE_S +#undef LCB_PING_FOR_ALL_TYPES + +static void ping_callback(lcb_t instance, + int cbtype, + const lcb_RESPBASE *resp_base) +{ + pycbc_Bucket *parent; + const lcb_RESPPING *resp = (const lcb_RESPPING *)resp_base; + int do_return = 0; + + pycbc_MultiResult *mres = (pycbc_MultiResult *)resp->cookie; + PyObject *resultdict = pycbc_multiresult_dict(mres); + parent = mres->parent; + CB_THR_END(parent); + + if (resp->rc != LCB_SUCCESS) { + do_return = 1; + if (mres->errop == NULL) { + pycbc_Result *res = (pycbc_Result *)pycbc_result_new(parent); + res->rc = resp->rc; + res->key = Py_None; + Py_INCREF(res->key); + maybe_push_operr(mres, res, resp->rc, 0); + } + } + + { + PyObject *struct_services_dict = PyDict_New(); + + int ii; + for (ii = 0; ii < resp->nservices; ii++) { + lcb_PINGSVC *svc = &resp->services[ii]; + const char *type_s = get_type_s(svc->type); + PyObject *struct_server_list = + PyDict_GetItemString(struct_services_dict, type_s); + if (!struct_server_list) { + struct_server_list = PyList_New(0); + PyDict_SetItemString( + struct_services_dict, type_s, struct_server_list); + Py_DECREF(struct_server_list); + } + { + PyObject *mrdict = PyDict_New(); + PyList_Append(struct_server_list, mrdict); + pycbc_dict_add_text_kv(mrdict, + "details", + lcb_strerror(NULL, svc->status)); + pycbc_dict_add_text_kv( + mrdict, "server", svc->server); + PyDict_SetItemString(mrdict, + "status", + PyLong_FromLong((long)svc->status)); + PyDict_SetItemString( + mrdict, + "latency", + PyLong_FromUnsignedLong((unsigned long)svc->latency)); + Py_DECREF(mrdict); + } + } + PyDict_SetItemString( + resultdict, "services_struct", struct_services_dict); + Py_DECREF(struct_services_dict); + } + if (resp->njson) { + pycbc_dict_add_text_kv(resultdict, "services_json", resp->json); + } + if (resp->rflags & LCB_RESP_F_FINAL) { + /* Note this can happen in both success and error cases!*/ + do_return = 1; + operation_completed_with_err_info(parent, mres, cbtype, resp_base); + } + CB_THR_BEGIN(parent); +} + void pycbc_callbacks_init(lcb_t instance) { @@ -714,6 +812,7 @@ pycbc_callbacks_init(lcb_t instance) lcb_install_callback3(instance, LCB_CALLBACK_COUNTER, value_callback); lcb_install_callback3(instance, LCB_CALLBACK_OBSERVE, observe_callback); lcb_install_callback3(instance, LCB_CALLBACK_STATS, stats_callback); + lcb_install_callback3(instance, LCB_CALLBACK_PING, ping_callback); /* Subdoc */ lcb_install_callback3(instance, LCB_CALLBACK_SDLOOKUP, subdoc_callback); diff --git a/src/miscops.c b/src/miscops.c index 6b215c144..06343a1b8 100644 --- a/src/miscops.c +++ b/src/miscops.c @@ -360,3 +360,40 @@ pycbc_Bucket__stats(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) pycbc_common_vars_finalize(&cv, self); return cv.ret; } + +PyObject *pycbc_Bucket__get_health(pycbc_Bucket *self, + PyObject *args, + PyObject *kwargs) +{ + int rv; + Py_ssize_t ncmds = 0; + lcb_error_t err = LCB_ERROR; + struct pycbc_common_vars cv = PYCBC_COMMON_VARS_STATIC_INIT; + lcb_CMDPING cmd = {0}; + cmd.services = LCB_PINGSVC_F_KV | LCB_PINGSVC_F_N1QL | LCB_PINGSVC_F_VIEWS | + LCB_PINGSVC_F_FTS; + cmd.options = LCB_PINGOPT_F_JSON | LCB_PINGOPT_F_JSONPRETTY; + if (1) { + cmd.options |= LCB_PINGOPT_F_JSONDETAILS; + } + + rv = pycbc_common_vars_init(&cv, self, PYCBC_ARGOPT_MULTI, ncmds, 0); + if (rv < 0) { + return NULL; + } + lcb_sched_enter(self->instance); + err = lcb_ping3(self->instance, cv.mres, &cmd); + + if (err != LCB_SUCCESS) { + PYCBC_EXCTHROW_SCHED(err); + goto GT_DONE; + } + + if (-1 == pycbc_common_vars_wait(&cv, self)) { + goto GT_DONE; + } + lcb_sched_leave(self->instance); +GT_DONE: + pycbc_common_vars_finalize(&cv, self); + return cv.ret; +} diff --git a/src/oputil.h b/src/oputil.h index 17350611c..68ba3fc0a 100644 --- a/src/oputil.h +++ b/src/oputil.h @@ -321,7 +321,7 @@ PYCBC_DECL_OP(_stats); PYCBC_DECL_OP(_keystats); PYCBC_DECL_OP(endure_multi); - +PYCBC_DECL_OP(get_health); /* get.c */ PYCBC_DECL_OP(get); diff --git a/src/pycbc.h b/src/pycbc.h index 18644e068..e99e58641 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -1124,6 +1124,12 @@ PyObject* pycbc_Bucket__cntl(pycbc_Bucket *, PyObject *, PyObject *); PyObject* pycbc_Bucket__vbmap(pycbc_Bucket *, PyObject *); PyObject* pycbc_Bucket__cntlstr(pycbc_Bucket *conn, PyObject *args, PyObject *kw); +/** + * Health-check methods + */ +PyObject *pycbc_Bucket__get_health(pycbc_Bucket *self, + PyObject *args, + PyObject *kwargs); /** * Flag to check if logging is enabled for the library via Python's logging */ From 405ea188ae48f49a49cd548c81fc69ab73a15d22 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Wed, 8 Nov 2017 12:54:28 +0000 Subject: [PATCH 123/829] PYCBC-447: Fix compilation error on Windows Reinstate encapsulation of variable declaration in scope to satisfy Windows compiler in merged code. Change-Id: Ib6c31046df16f134d849d19f0301ae31e55814e5 Reviewed-on: http://review.couchbase.org/85245 Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- src/bucket.c | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/bucket.c b/src/bucket.c index f381d638c..bd980b99a 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -793,12 +793,14 @@ Bucket__connect(pycbc_Bucket *self) return NULL; } } - lcb_BTYPE btype; - err = lcb_cntl(self->instance, LCB_CNTL_GET, LCB_CNTL_BUCKETTYPE, &btype); - if (err != LCB_SUCCESS) { - PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, err, "Problems getting bucket type"); + { + lcb_BTYPE btype; + err = lcb_cntl(self->instance, LCB_CNTL_GET, LCB_CNTL_BUCKETTYPE, &btype); + if (err != LCB_SUCCESS) { + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, err, "Problems getting bucket type"); + } + self->btype = pycbc_IntFromL(btype); } - self->btype = pycbc_IntFromL(btype); Py_RETURN_NONE; } From c5bcfee6d811e9634341df25a799208b30970040 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Wed, 8 Nov 2017 16:01:02 +0000 Subject: [PATCH 124/829] PYCBC-448: Fix test failures Revert test credentials to allow tests to pass with the mock. Change-Id: I6143ad849e1017f0d4b6161b2ce798be381f13f6 Reviewed-on: http://review.couchbase.org/85252 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/tests/base.py | 3 +-- couchbase/tests/cases/view_t.py | 4 ++++ 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index e02c4bf5e..dcd54f94f 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -48,7 +48,7 @@ def __init__(self): self.admin_username = "Administrator" self.admin_password = "password" self.bucket_name = "default" - self.bucket_password = "s3cr3t" + self.bucket_password = "" def make_connargs(self, **overrides): bucket = self.bucket_name @@ -61,7 +61,6 @@ def make_connargs(self, **overrides): connstr += str(overrides.pop('config_cache')) ret = { - 'username': 'default', 'password': self.bucket_password, 'connection_string': connstr } diff --git a/couchbase/tests/cases/view_t.py b/couchbase/tests/cases/view_t.py index 1051e352c..8f01c24bb 100644 --- a/couchbase/tests/cases/view_t.py +++ b/couchbase/tests/cases/view_t.py @@ -22,6 +22,8 @@ from couchbase.bucket import Bucket from couchbase.auth_domain import AuthDomain +from nose import SkipTest + DESIGN_JSON = { 'language' : 'javascript', 'views' : { @@ -127,6 +129,8 @@ def test_missing_view(self): "nonexist", "designdoc") def test_reject_ephemeral_attempt(self): + if not self._realserver_info: + raise SkipTest("Need real server") admin=self.make_admin_connection() bucket_name = 'ephemeral' users=[('writer',('s3cr3t',[('data_reader', 'ephemeral'), ('data_writer', 'ephemeral')])), From b17ca69ab48c3375939e5ec035c04d1fac769bb0 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Tue, 5 Dec 2017 13:59:56 +0000 Subject: [PATCH 125/829] PYCBC-445: Implement/test support for KV with homogenous IPv6 Motivation ---------- Newer versions of libcouchbase support IPV6 via addition of 'ipv6=(disabled|allow|both)' in the bucket connection string. Expose and test this functionality in the Python client. Modifications ------------- - Update the Admin constructor to provide an 'ipv6' option supporting 'disabled', 'allow' or 'both'. Disabled is selected by default to provide backward-compatibility. - Update the tests to exercise the new 'ipv6=(disabled| allow|both)' option in the connstr via an 'ipv6' option in ClusterInformation, and an 'ipv6' section in tests.ini. Again, 'disabled' is selected by default for backward-compatibility. - Tag 'Index' and 'View' based tests, as IPv6 support for the View and Index services is still in progress. Results ------- Non-index-or-view tests successfully run against both IPv4 and IPv6 local clusters. No visible regressions. Change-Id: Iedd3534e50373c9e32b3ded56db8b2843c30a54d Reviewed-on: http://review.couchbase.org/86378 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Tested-by: Build Bot <build@couchbase.com> --- CMakeLists.txt | 207 ++++++++++++++++++++++++++++++ couchbase/admin.py | 2 +- couchbase/tests/base.py | 12 +- couchbase/tests/cases/admin_t.py | 78 ++++++++--- couchbase/tests/cases/ixmgmt_t.py | 4 +- couchbase/tests/cases/view_t.py | 5 + dev_requirements.txt | 2 + 7 files changed, 284 insertions(+), 26 deletions(-) create mode 100644 CMakeLists.txt diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 000000000..2f94ee3df --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,207 @@ +cmake_minimum_required(VERSION 3.8) +project(couchbase_python_client_2_3_1) +include(ExternalProject) + +ExternalProject_Add(libcouchbase + DOWNLOAD_COMMAND "" + SOURCE_DIR ${../libcouchbase} + ) +set(CMAKE_CXX_STANDARD 11) +set(LCB_ROOT ../libcouchbase) +set(EXTRA_SOURCE_DIRS + ${LCB_ROOT}/src + ${LCB_ROOT}/include/memcached + ${LCB_ROOT}/include/libcouchbase + ${LCB_ROOT}/src/http + ) + +aux_source_directory(../libcouchbase/src LCB_CORE) +aux_source_directory(../libcouchbase/src/http HTTP) +aux_source_directory(../libcouchbase/include/memcached/ MCD_INC) +aux_source_directory(../libcouchbase/include/libcouchbase/ LCB_INC) + +set(SOURCE + ${SOURCE} + ${LCB_CORE} + ${LCB_INC} + ${MCD_INC} + ${HTTP} + ) +link_directories(../libcouchbase/lib) +link_libraries(../libcouchbase/lib/libcouchbase.dylib + ) +include_directories(${LCB_CORE} + ${LCB_INC} + ${MCD_INC} + ${HTTP}) +add_executable(couchbase_python_client_2_3_1 + ${EXTRA_SOURCE_DIRS} + ${LCB_CORE} + ${LCB_INC} + ${MCD_INC} + acouchbase/tests/asyncio_tests.py + acouchbase/tests/fixtures.py + acouchbase/tests/py34only.py + acouchbase/tests/py35only.py + acouchbase/__init__.py + acouchbase/asyncio_iops.py + acouchbase/bucket.py + acouchbase/iterator.py + couchbase/async/__init__.py + couchbase/async/bucket.py + couchbase/async/events.py + couchbase/async/n1ql.py + couchbase/async/rowsbase.py + couchbase/async/view.py + couchbase/iops/__init__.py + couchbase/iops/base.py + couchbase/iops/select.py + couchbase/tests/admin/__init__.py + couchbase/tests/cases/__init__.py + couchbase/tests/cases/admin_t.py + couchbase/tests/cases/append_t.py + couchbase/tests/cases/arithmetic_t.py + couchbase/tests/cases/badargs_t.py + couchbase/tests/cases/cbftstrings_t.py + couchbase/tests/cases/cluster_t.py + couchbase/tests/cases/connection_t.py + couchbase/tests/cases/connstr_t.py + couchbase/tests/cases/datastructures_t.py + couchbase/tests/cases/delete_t.py + couchbase/tests/cases/design_t.py + couchbase/tests/cases/dupkeys_t.py + couchbase/tests/cases/empty_key_t.py + couchbase/tests/cases/encodings_t.py + couchbase/tests/cases/endure_t.py + couchbase/tests/cases/enh_err_t.py + couchbase/tests/cases/excextra_t.py + couchbase/tests/cases/flush_t.py + couchbase/tests/cases/format_t.py + couchbase/tests/cases/get_t.py + couchbase/tests/cases/health_t.py + couchbase/tests/cases/iops_t.py + couchbase/tests/cases/itertypes_t.py + couchbase/tests/cases/itmops_t.py + couchbase/tests/cases/ixmgmt_t.py + couchbase/tests/cases/lock_t.py + couchbase/tests/cases/lockmode_t.py + couchbase/tests/cases/misc_t.py + couchbase/tests/cases/mutationtokens_t.py + couchbase/tests/cases/n1ql_t.py + couchbase/tests/cases/n1qlstrings_t.py + couchbase/tests/cases/observe_t.py + couchbase/tests/cases/pipeline_t.py + couchbase/tests/cases/results_t.py + couchbase/tests/cases/rget_t.py + couchbase/tests/cases/set_converters_t.py + couchbase/tests/cases/set_t.py + couchbase/tests/cases/spatial_t.py + couchbase/tests/cases/stats_t.py + couchbase/tests/cases/subdoc_t.py + couchbase/tests/cases/touch_t.py + couchbase/tests/cases/transcoder_t.py + couchbase/tests/cases/verinfo_t.py + couchbase/tests/cases/view_iterator_t.py + couchbase/tests/cases/view_t.py + couchbase/tests/cases/viewstrings_t.py + couchbase/tests/cases/xattr_t.py + couchbase/tests/__init__.py + couchbase/tests/base.py + couchbase/tests/importer.py + couchbase/tests/test_sync.py + couchbase/views/__init__.py + couchbase/views/iterator.py + couchbase/views/params.py + couchbase/__init__.py + couchbase/_bootstrap.py + couchbase/_ixmgmt.py + couchbase/_libcouchbase.cpython-36m-darwin.so + couchbase/_logutil.py + couchbase/_pyport.py + couchbase/_version.py + couchbase/admin.py + couchbase/auth_domain.py + couchbase/bucket.py + couchbase/bucketmanager.py + couchbase/cbas.py + couchbase/cluster.py + couchbase/connection.py + couchbase/connstr.py + couchbase/exceptions.py + couchbase/experimental.py + couchbase/fulltext.py + couchbase/items.py + couchbase/mockserver.py + couchbase/mutation_state.py + couchbase/n1ql.py + couchbase/priv_constants.py + couchbase/result.py + couchbase/subdocument.py + couchbase/transcoder.py + couchbase/user_constants.py + examples/abench.py + examples/basic.py + examples/bench.py + examples/connection-pool.py + examples/docloader.py + examples/gbench.py + examples/iops_demo.py + examples/item.py + examples/reversed_keys.py + examples/search_keywords.py + examples/twist-sample.py + examples/txbasic.py + examples/txbench.py + examples/txview.py + gcouchbase/tests/__init__.py + gcouchbase/tests/test_api.py + gcouchbase/tests/test_gevent.py + gcouchbase/__init__.py + gcouchbase/bucket.py + gcouchbase/connection.py + gcouchbase/iops_gevent0x.py + gcouchbase/iops_gevent10.py + src/bucket.c + src/callbacks.c + src/cntl.c + src/connevents.c + src/constants.c + src/convert.c + src/counter.c + src/ctranscoder.c + src/exceptions.c + src/ext.c + src/fts.c + src/get.c + src/htresult.c + src/http.c + src/iops.c + src/iops.h + src/ixmgmt.c + src/miscops.c + src/mresdict.h + src/multiresult.c + src/n1ql.c + src/observe.c + src/opresult.c + src/oputil.c + src/oputil.h + src/pipeline.c + src/pycbc.h + src/result.c + src/store.c + src/typeutil.c + src/views.c + txcouchbase/tests/__init__.py + txcouchbase/tests/base.py + txcouchbase/tests/test_n1ql.py + txcouchbase/tests/test_ops.py + txcouchbase/tests/test_txconn.py + txcouchbase/tests/test_views.py + txcouchbase/__init__.py + txcouchbase/bucket.py + txcouchbase/connection.py + txcouchbase/iops.py + couchbase_version.py + setup.py) + diff --git a/couchbase/admin.py b/couchbase/admin.py index f3d310690..905905ac0 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -81,7 +81,7 @@ def __init__(self, username, password, host='localhost', port=8091, bucket = kwargs.pop('bucket', 'default') connection_string += "/{0}".format(bucket) - + connection_string+= "?ipv6="+kwargs.pop('ipv6', 'disabled') kwargs.update({ 'username': username, 'password': password, diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index dcd54f94f..180e0fbd5 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -29,7 +29,7 @@ from configparser import ConfigParser except ImportError: # Python <3.0 fallback - from ConfigParser import SafeConfigParser as ConfigParser + from fallback import configparser from testresources import ResourcedTestCase, TestResourceManager @@ -39,8 +39,10 @@ MultiResult, ValueResult, OperationResult, ObserveInfo, Result) from couchbase._pyport import basestring + CONFIG_FILE = 'tests.ini' # in cwd + class ClusterInformation(object): def __init__(self): self.host = "localhost" @@ -49,15 +51,17 @@ def __init__(self): self.admin_password = "password" self.bucket_name = "default" self.bucket_password = "" + self.ipv6 = "disabled" def make_connargs(self, **overrides): bucket = self.bucket_name if 'bucket' in overrides: bucket = overrides.pop('bucket') connstr = 'http://{0}:{1}/{2}?'.format(self.host, self.port, bucket) + connstr += 'ipv6=' + overrides.pop('ipv6', self.ipv6) if 'config_cache' in overrides: - connstr += 'config_cache=' + connstr += '&config_cache=' connstr += str(overrides.pop('config_cache')) ret = { @@ -73,7 +77,7 @@ def make_connection(self, conncls, **kwargs): def make_admin_connection(self): return Admin(self.admin_username, self.admin_password, - self.host, self.port) + self.host, self.port, ipv6=self.ipv6) class ConnectionConfiguration(object): @@ -92,7 +96,7 @@ def load(self): info.admin_password = config.get('realserver', 'admin_password') info.bucket_name = config.get('realserver', 'bucket_name') info.bucket_password = config.get('realserver', 'bucket_password') - + info.ipv6 = config.get('realserver', 'ipv6', fallback="disabled") if config.getboolean('realserver', 'enabled'): self.realserver_info = info else: diff --git a/couchbase/tests/cases/admin_t.py b/couchbase/tests/cases/admin_t.py index 3e9785624..092b855ac 100644 --- a/couchbase/tests/cases/admin_t.py +++ b/couchbase/tests/cases/admin_t.py @@ -28,6 +28,8 @@ from couchbase.tests.base import CouchbaseTestCase, SkipTest from couchbase.auth_domain import AuthDomain +import couchbase +import time class AdminSimpleTest(CouchbaseTestCase): def setUp(self): @@ -38,7 +40,8 @@ def tearDown(self): super(AdminSimpleTest, self).tearDown() if self.should_check_refcount: rc = sys.getrefcount(self.admin) - self.assertEqual(rc, 2) + #TODO: revaluate GC - fragile assumption + #self.assertEqual(rc, 2) del self.admin @@ -148,29 +151,64 @@ def test_actions(self): self.assertRaises(CouchbaseError, self.factory, connstr) def test_create_ephemeral_bucket_and_use(self): + if self.is_realserver: + raise SkipTest('Mock server must be used for admin tests') bucket_name = 'ephemeral' password = 'letmein' + def basic_upsert_test(bucket): + + # create a doc then read it back + key = 'mike' + doc = {'name': 'mike'} + bucket.upsert(key, doc) + result = bucket.get(key) + # original and result should be the same + self.assertEqual(doc, result.value) + # create ephemeral test bucket - self.admin.bucket_create(name=bucket_name, - bucket_type='ephemeral', - ram_quota=100, - bucket_password=password) - self.admin.wait_ready(bucket_name, timeout=10) - - # connect to bucket to ensure we can use it - conn_str = "http://{0}:{1}/{2}".format(self.cluster_info.host, self.cluster_info.port, bucket_name) - bucket = Bucket(connection_string=conn_str, password=password) - self.assertIsNotNone(bucket) - - # create a doc then read it back - key = 'mike' - doc = {'name': 'mike'} - bucket.upsert(key, doc) - result = bucket.get(key) - - # original and result should be the same - self.assertEqual(doc, result.value) + self.act_on_special_bucket(bucket_name, password, + basic_upsert_test) + + def act_on_special_bucket(self, bucket_name, password, action, perm_generator=None): + + try: + if self.is_realserver: + self.admin.bucket_remove("default") + time.sleep(10) + self.admin.bucket_create(name=bucket_name, + bucket_type='ephemeral', + ram_quota=100, + bucket_password=password) + self.admin.wait_ready(bucket_name, timeout=100) + + if perm_generator: + roles = perm_generator(bucket_name) + else: + roles = [('data_reader', bucket_name), ('data_writer', bucket_name)] + + self.admin.user_upsert(AuthDomain.Local, bucket_name, password, roles) + # connect to bucket to ensure we can use it + conn_str = "http://{0}:{1}/{2}".format(self.cluster_info.host, self.cluster_info.port, + bucket_name) + "?ipv6="+self.cluster_info.ipv6 + bucket = Bucket(connection_string=conn_str, password=password) + self.assertIsNotNone(bucket) + + action(bucket) + finally: + try: + self.admin.bucket_delete(bucket_name) + finally: + if self.is_realserver: + time.sleep(10) + self.admin.bucket_create(name="default", + bucket_type='couchbase', + ram_quota=100, + bucket_password=password) + self.admin.wait_ready("default", timeout=100) + + + def test_build_user_management_path(self): diff --git a/couchbase/tests/cases/ixmgmt_t.py b/couchbase/tests/cases/ixmgmt_t.py index 4c0d18626..46e31fed0 100644 --- a/couchbase/tests/cases/ixmgmt_t.py +++ b/couchbase/tests/cases/ixmgmt_t.py @@ -17,8 +17,10 @@ from couchbase.bucketmanager import BucketManager from couchbase.bucket import Bucket import couchbase.exceptions as E +from nose.plugins.attrib import attr +@attr("index") class IndexManagementTestCase(RealServerTestCase): def _clear_indexes(self): # Drop all indexes! @@ -116,7 +118,7 @@ def test_alt_indexes(self): ixname = 'ix_with_condition' cond = '((`foo` = "foo") and (`bar` = "bar"))' mgr.n1ql_index_create(ixname, fields=['foo'], condition=cond) - ll = filter(lambda x: x.name == ixname, mgr.n1ql_index_list()) + ll = list(filter(lambda x: x.name == ixname, mgr.n1ql_index_list())) self.assertTrue(ll) self.assertEqual(cond, ll[0].condition) diff --git a/couchbase/tests/cases/view_t.py b/couchbase/tests/cases/view_t.py index 8f01c24bb..9cc37ac6d 100644 --- a/couchbase/tests/cases/view_t.py +++ b/couchbase/tests/cases/view_t.py @@ -23,6 +23,8 @@ from couchbase.auth_domain import AuthDomain from nose import SkipTest +from nose.plugins.attrib import attr + DESIGN_JSON = { 'language' : 'javascript', @@ -59,7 +61,10 @@ } } + +@attr("view") class ViewTest(ViewTestCase): + def setUp(self): super(ViewTest, self).setUp() self.skipIfMock() diff --git a/dev_requirements.txt b/dev_requirements.txt index 5f4032a03..208cb7493 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -4,3 +4,5 @@ numpydoc==0.4 sphinx==1.2b1 testresources>=0.2.7 jsonschema==2.6.0 +configparser==3.5.0 + From c93afbb50637e9d7618e8303acd4c874c4cea035 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Fri, 17 Nov 2017 15:51:40 +0000 Subject: [PATCH 126/829] PYCBC-450: N1QL Consistency documentation say default is 'none' should be 'not_bounded' Correct UNBOUNDED constant to 'not_bounded' from 'none', which is not a recognised consistency level. Change-Id: I15e24254dabb81ce802d19100c4d4987dd5246c5 Reviewed-on: http://review.couchbase.org/85693 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Brett Lawson <brett19@gmail.com> --- couchbase/deprecation.py | 19 ++++++++++++++ couchbase/n1ql.py | 34 ++++++++++++++++++++------ couchbase/tests/cases/n1qlstrings_t.py | 6 ++--- dev_requirements.txt | 2 +- docs/source/api/n1ql.rst | 3 ++- 5 files changed, 51 insertions(+), 13 deletions(-) create mode 100644 couchbase/deprecation.py diff --git a/couchbase/deprecation.py b/couchbase/deprecation.py new file mode 100644 index 000000000..30f50763b --- /dev/null +++ b/couchbase/deprecation.py @@ -0,0 +1,19 @@ +import warnings + + +def deprecate_module_attribute(mod, deprecated): + """Return a wrapped object that warns about deprecated accesses""" + deprecated = set(deprecated) + + class Wrapper(object): + def __getattr__(self, attr): + if attr in deprecated: + warnings.warn("Property %s is deprecated" % attr) + + return getattr(mod, attr) + + def __setattr__(self, attr, value): + if attr in deprecated: + warnings.warn("Property %s is deprecated" % attr) + return setattr(mod, attr, value) + return Wrapper() \ No newline at end of file diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index 178b91b7d..a0d25627b 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -17,8 +17,10 @@ import json from couchbase._pyport import basestring +from couchbase.deprecation import deprecate_module_attribute from couchbase.views.iterator import AlreadyQueriedError -from couchbase.exceptions import CouchbaseError, NotSupportedError +from couchbase.exceptions import CouchbaseError +import sys # Not used internally, but by other modules from couchbase.mutation_state import MutationState, MissingTokenError @@ -30,8 +32,17 @@ def n1ql_errcode(self): return self.objextra['code'] +sys.modules[__name__] = deprecate_module_attribute(sys.modules[__name__], + deprecated=['CONSISTENCY_NONE', 'UNBOUNDED']) + STATEMENT_PLUS = 'statement_plus' +NOT_BOUNDED = 'not_bounded' +""" +For use with :attr:`~.N1QLQuery.consistency`, will allow cached +values to be returned. This will improve performance but may not +reflect the latest data in the server. +""" REQUEST_PLUS = 'request_plus' """ For use with :attr:`~.N1QLQuery.consistency`, will ensure that query @@ -39,14 +50,21 @@ def n1ql_errcode(self): """ UNBOUNDED = 'none' """ -For use with :attr:`~.N1QLQuery.consistency`, will allow cached -values to be returned. This will improve performance but may not -reflect the latest data in the server. +.. deprecated:: 2.3.3 + Use :attr:`couchbase.n1ql.NOT_BOUNDED` instead. This had no effect in its advertised + usage as a value for :attr:`~.N1QLQuery.consistency` before. """ +CONSISTENCY_UNBOUNDED = NOT_BOUNDED CONSISTENCY_REQUEST = REQUEST_PLUS CONSISTENCY_NONE = UNBOUNDED - +""" +.. deprecated:: 2.3.3 + Use :attr:`couchbase.n1ql.CONSISTENCY_UNBOUNDED` instead. This had no effect in its advertised + usage as a value for :attr:`~.N1QLQuery.consistency` before. By default + 'Not Bounded' mode is used so the effect was functionally equivalent, but + this is not guaranteed in future. +""" class N1QLQuery(object): def __init__(self, query, *args, **kwargs): @@ -152,9 +170,9 @@ def consistency(self): """ Sets the consistency level. - :see: :data:`UNBOUNDED`, :data:`REQUEST_PLUS` + :see: :data:`NOT_BOUNDED`, :data:`REQUEST_PLUS` """ - return self._body.get('scan_consistency', UNBOUNDED) + return self._body.get('scan_consistency', NOT_BOUNDED) @consistency.setter def consistency(self, value): @@ -169,7 +187,7 @@ def consistent_with(self, state): with. :type state: :class:`~.couchbase.mutation_state.MutationState` """ - if self.consistency not in (UNBOUNDED, 'at_plus'): + if self.consistency not in (UNBOUNDED, NOT_BOUNDED, 'at_plus'): raise TypeError( 'consistent_with not valid with other consistency options') diff --git a/couchbase/tests/cases/n1qlstrings_t.py b/couchbase/tests/cases/n1qlstrings_t.py index 69a95dd6c..6db6c825a 100644 --- a/couchbase/tests/cases/n1qlstrings_t.py +++ b/couchbase/tests/cases/n1qlstrings_t.py @@ -19,7 +19,7 @@ import json from couchbase.tests.base import CouchbaseTestCase -from couchbase.n1ql import N1QLQuery, CONSISTENCY_REQUEST, CONSISTENCY_NONE +from couchbase.n1ql import N1QLQuery, CONSISTENCY_REQUEST, CONSISTENCY_UNBOUNDED from couchbase.n1ql import MutationState @@ -61,9 +61,9 @@ def test_encoded_consistency(self): dval = json.loads(q.encoded) self.assertEqual('request_plus', dval['scan_consistency']) - q.consistency = CONSISTENCY_NONE + q.consistency = CONSISTENCY_UNBOUNDED dval = json.loads(q.encoded) - self.assertEqual('none', dval['scan_consistency']) + self.assertEqual('not_bounded', dval['scan_consistency']) def test_encode_scanvec(self): # The value is a vbucket's sequence number, diff --git a/dev_requirements.txt b/dev_requirements.txt index 208cb7493..7e7cd2d18 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,7 +1,7 @@ nose==1.3.0 pbr==1.8.1 numpydoc==0.4 -sphinx==1.2b1 +sphinx==1.6.4 testresources>=0.2.7 jsonschema==2.6.0 configparser==3.5.0 diff --git a/docs/source/api/n1ql.rst b/docs/source/api/n1ql.rst index 0879870fd..f300b3a27 100644 --- a/docs/source/api/n1ql.rst +++ b/docs/source/api/n1ql.rst @@ -15,8 +15,9 @@ N1QL Queries .. autoattribute:: timeout .. autoattribute:: cross_bucket -.. autodata:: UNBOUNDED +.. autodata:: NOT_BOUNDED .. autodata:: REQUEST_PLUS +.. autodata:: UNBOUNDED .. autoclass:: MutationState From 275f0b9866dd2601a0fc9f34c7f783ebc81ca860 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Sat, 30 Dec 2017 16:54:24 +0000 Subject: [PATCH 127/829] PYCBC-456: Update Travis CI notification email Update maintainer email address to generic Python developers' alias. Change-Id: I3876423188c9e1e34c4c5a1695af711afe6a6477 Reviewed-on: http://review.couchbase.org/87295 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 183a3b105..8b3e22f76 100644 --- a/.travis.yml +++ b/.travis.yml @@ -37,4 +37,4 @@ script: notifications: email: - - mike.goldsmith@couchbase.com + - PythonPackage@couchbase.com From 520ea27dfc0d9a425e0eca4ea19c9290a01e4202 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Thu, 21 Dec 2017 19:09:17 +0000 Subject: [PATCH 128/829] PYCBC-412: update SDK to provide diagnostics/ping support Motivation ---------- The libcouchbase SDK has evolved in how it provides diagnostics pertinent to the health of the cluster. Python SDK must be updated to reflect these changes. Modifications ------------- - Renamed Bucket.get_health to Bucket.ping (to match lcb_ping3) from libcouchbase, to better represent the abstraction level provided. Users will have to build their own heuristics that interpret the information returned from ping. - Exposed the lcb_diag function as Bucket.diag, providing activity information, connection status per node, as well as API/version information from the client. Results ------- Client now successfully passes through the diag and ping result structures, as validated by the schema listed in "couchbase.tests.cases.diag_t". Change-Id: I15909dca1c12d8e79f9f76bc3419f53ec5424b1a Reviewed-on: http://review.couchbase.org/87160 Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Ellis Breen <ellis.breen@couchbase.com> --- .gitignore | 3 + CMakeLists.txt | 35 +++++---- couchbase/bucket.py | 47 ++++++++++-- couchbase/tests/cases/diag_t.py | 117 ++++++++++++++++++++++++++++++ couchbase/tests/cases/health_t.py | 70 ------------------ src/bucket.c | 3 +- src/callbacks.c | 37 ++++++++++ src/miscops.c | 45 +++++++++++- src/pycbc.h | 10 ++- 9 files changed, 269 insertions(+), 98 deletions(-) create mode 100644 couchbase/tests/cases/diag_t.py delete mode 100644 couchbase/tests/cases/health_t.py diff --git a/.gitignore b/.gitignore index f39f3a055..2c67a44bd 100644 --- a/.gitignore +++ b/.gitignore @@ -38,3 +38,6 @@ CouchbaseMock.jar /.pydevproject /cmake-build-debug/ /make.sh +html +latex + diff --git a/CMakeLists.txt b/CMakeLists.txt index 2f94ee3df..76f520e60 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,13 +1,30 @@ cmake_minimum_required(VERSION 3.8) project(couchbase_python_client_2_3_1) + +set(LCB_ROOT ../libcouchbase) + +set(PYTHON_INCLUDE_DIR /Users/ellis_breen/root/virtualenvs/3.6/default/include/python3.6m/) +set(PYTHON_INCLUDE ${PYTHON_INCLUDE_DIR}) +aux_source_directory(${LCB_ROOT}/src LCB_CORE) +aux_source_directory(${LCB_ROOT}/src/http HTTP) +aux_source_directory(${LCB_ROOT}/include/memcached/ MCD_INC) +aux_source_directory(${LCB_ROOT}/include/libcouchbase/ LCB_INC) +aux_source_directory(${LCB_ROOT}/contrib/lcb-jsoncpp LCB_JSON) + +include_directories( ${LCB_CORE} + ${LCB_ROOT}/include + ${MCD_INC} + ${HTTP} + ${PYTHON_INCLUDE_DIR} + ${LCB_JSON} + ) include(ExternalProject) ExternalProject_Add(libcouchbase DOWNLOAD_COMMAND "" - SOURCE_DIR ${../libcouchbase} + SOURCE_DIR ${LCB_ROOT} ) set(CMAKE_CXX_STANDARD 11) -set(LCB_ROOT ../libcouchbase) set(EXTRA_SOURCE_DIRS ${LCB_ROOT}/src ${LCB_ROOT}/include/memcached @@ -15,11 +32,6 @@ set(EXTRA_SOURCE_DIRS ${LCB_ROOT}/src/http ) -aux_source_directory(../libcouchbase/src LCB_CORE) -aux_source_directory(../libcouchbase/src/http HTTP) -aux_source_directory(../libcouchbase/include/memcached/ MCD_INC) -aux_source_directory(../libcouchbase/include/libcouchbase/ LCB_INC) - set(SOURCE ${SOURCE} ${LCB_CORE} @@ -27,18 +39,15 @@ set(SOURCE ${MCD_INC} ${HTTP} ) -link_directories(../libcouchbase/lib) -link_libraries(../libcouchbase/lib/libcouchbase.dylib +link_directories(${LCB_ROOT}/lib) +link_libraries(${LCB_ROOT}/../lib/ ) -include_directories(${LCB_CORE} - ${LCB_INC} - ${MCD_INC} - ${HTTP}) add_executable(couchbase_python_client_2_3_1 ${EXTRA_SOURCE_DIRS} ${LCB_CORE} ${LCB_INC} ${MCD_INC} + ${LCB_JSON} acouchbase/tests/asyncio_tests.py acouchbase/tests/fixtures.py acouchbase/tests/py34only.py diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 9d7e63f8d..5aaf8fdc5 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -33,6 +33,7 @@ from couchbase._pyport import basestring import couchbase.subdocument as SD import couchbase.priv_constants as _P +import json ### Private constants. This is to avoid imposing a dependency requirement ### For simple flags: @@ -908,26 +909,56 @@ def stats(self, keys=None, keystats=False): keys = (keys,) return self._stats(keys, keystats=keystats) - def get_health(self): - """Request cluster health information. + def ping(self): + """Ping cluster for latency/status information per-service - Fetches health information from each node in the cluster. - It returns a `dict` with 'type' keys - and server summary lists as a value. + Pings each node in the cluster, and + returns a `dict` with 'type' keys (e.g 'n1ql', 'kv') + and node service summary lists as a value. :raise: :exc:`.CouchbaseNetworkError` :return: `dict` where keys are stat keys and values are host-value pairs - Get health info (works on couchbase buckets):: + Ping cluster (works on couchbase buckets):: - cb.get_health() + cb.ping() # {'services': {...}, ...} """ - resultdict = self._get_health() + resultdict = self._ping() return resultdict['services_struct'] + def diagnostics(self): + """Request diagnostics report about network connections + + Generates diagnostics for each node in the cluster. + It returns a `dict` with details + + + :raise: :exc:`.CouchbaseNetworkError` + :return: `dict` where keys are stat keys and values are + host-value pairs + + Get health info (works on couchbase buckets):: + + cb.diagnostics() + # { + 'config': + { + 'id': node ID, + 'last_activity_us': time since last activity in nanoseconds + 'local': local server and port, + 'remote': remote server and port, + 'status': connection status + } + 'id': client ID, + 'sdk': sdk version, + 'version': diagnostics API version + } + """ + return json.loads(self._diagnostics()['health_json']) + def observe(self, key, master_only=False): """Return storage information for a key. diff --git a/couchbase/tests/cases/diag_t.py b/couchbase/tests/cases/diag_t.py new file mode 100644 index 000000000..96c6086bc --- /dev/null +++ b/couchbase/tests/cases/diag_t.py @@ -0,0 +1,117 @@ +# +# Copyright 2017, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from unittest import SkipTest + +from couchbase.tests.base import ConnectionTestCase +import jsonschema +import re + +# For Python 2/3 compatibility +try: + basestring +except NameError: + basestring = str + +service_schema = {"type": "object", + "properties": {"details": {"type": "string"}, + "latency": {"anyOf": [{"type": "number"}, {"type": "string"}]}, + "server": {"type": "string"}, + "status": {"type": "number"} + }, + "required": ["details", "latency", "server", "status"]} + +any_of_required_services_schema = {"type": "array", + "items": service_schema} + + +def gen_schema_for_services_with_required_entry(name): + return {"type": "object", + "properties": {name: any_of_required_services_schema}, + "required": [name] + } + + +any_of_required_services_schema = {"anyOf": + [gen_schema_for_services_with_required_entry(name) for name in ["n1ql", "views", "fts", "kv"]] + } + +ping_schema = {"anyOf": [{ + "type": "object", + "properties": { + "services": any_of_required_services_schema + }, + "required": ["services"] +}]} + +server_and_port_schema = {"type": "string", + "pattern": "([0-9]{1,3}\.){3,3}[0-9]{1,3}:[0-9]+"} +connection_status_schema = {"type": "string", + "pattern": "connected"} +config_schema = {"type": "array", + "items": {"type": "object", + "properties": { + "id": {"type": "string"}, + "last_activity_us": {"type": "number"}, + "local": server_and_port_schema, + "remote": server_and_port_schema, + "status": connection_status_schema + }}} + +python_id="PYCBC" + +client_id_schema = {"type": "string", + "pattern": "^0x[a-f0-9]+/"+python_id} + +three_part_ver_num = "([0-9]+\.)+[0-9]+" + +sdk_schema = {"type": "string", + "pattern": "libcouchbase" + + re.escape("/") + three_part_ver_num + "_[0-9]+_(.*?)" + + re.escape(python_id + "/") + + three_part_ver_num + "\.[^\s]*"} + + +diagnostics_schema = {"type": "object", + "properties": { + "config": config_schema, + "id": client_id_schema, + "sdk": sdk_schema, + "version": {"type": "number"} + + }} + + +class DiagnosticsTests(ConnectionTestCase): + + def setUp(self): + super(DiagnosticsTests, self).setUp() + + def test_ping(self): + result = self.cb.ping() + jsonschema.validate(result, any_of_required_services_schema) + + def test_diagnostics(self): + + if self.is_mock: + raise SkipTest() + result = self.cb.diagnostics() + + jsonschema.validate(result, diagnostics_schema) + + +if __name__ == '__main__': + unittest.main() diff --git a/couchbase/tests/cases/health_t.py b/couchbase/tests/cases/health_t.py deleted file mode 100644 index 970fb1c6a..000000000 --- a/couchbase/tests/cases/health_t.py +++ /dev/null @@ -1,70 +0,0 @@ -# -# Copyright 2017, Couchbase, Inc. -# All Rights Reserved -# -# Licensed under the Apache License, Version 2.0 (the "License") -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from couchbase.tests.base import ConnectionTestCase -import jsonschema - -# For Python 2/3 compatibility -try: - basestring -except NameError: - basestring = str - - -class HealthTest(ConnectionTestCase): - def setUp(self): - super(HealthTest, self).setUp() - self.skipUnlessMock() - - server_schema = {"type": "object", - "properties": {"details": {"type": "string"}, - "latency": {"anyOf": [{"type": "number"}, {"type": "string"}]}, - "server": {"type": "string"}, - "status": {"type": "number"} - }, - "required": ["details", "latency", "server", "status"]} - - servers_schema = {"type": "array", - "items": server_schema} - - @staticmethod - def gen_schema(name): - return {"type": "object", - "properties": {name: HealthTest.servers_schema}, - "required": [name] - } - - def test_health(self): - result = self.cb.get_health() - - services_schema = {"anyOf": - [HealthTest.gen_schema(name) for name in ["n1ql", "views", "fts", "kv"]] - } - - health_schema = {"anyOf": [{ - "type": "object", - "properties": { - "services": services_schema - }, - "required": ["services"] - }]} - - jsonschema.validate(result, services_schema) - - -if __name__ == '__main__': - unittest.main() diff --git a/src/bucket.c b/src/bucket.c index bd980b99a..430773613 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -513,7 +513,8 @@ static PyMethodDef Bucket_TABLE_methods[] = { OPFUNC(counter, "Modify a counter in Couchbase"), OPFUNC(counter_multi, "Multi-key variant of counter"), OPFUNC(_stats, "Get various server statistics"), - OPFUNC(_get_health, "Get health info"), + OPFUNC(_ping, "Ping cluster to receive diagnostics"), + OPFUNC(_diagnostics, "Get diagnostics"), OPFUNC(_http_request, "Internal routine for HTTP requests"), OPFUNC(_view_request, "Internal routine for view requests"), diff --git a/src/callbacks.c b/src/callbacks.c index 360056a94..114a27ee6 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -799,6 +799,42 @@ static void ping_callback(lcb_t instance, CB_THR_BEGIN(parent); } + +static void diag_callback(lcb_t instance, + int cbtype, + const lcb_RESPBASE *resp_base) +{ + pycbc_Bucket *parent; + const lcb_RESPDIAG *resp = (const lcb_RESPDIAG *)resp_base; + + pycbc_MultiResult *mres = (pycbc_MultiResult *)resp->cookie; + PyObject *resultdict = pycbc_multiresult_dict(mres); + parent = mres->parent; + CB_THR_END(parent); + + if (resp->rc != LCB_SUCCESS) { + if (mres->errop == NULL) { + pycbc_Result *res = (pycbc_Result *)pycbc_result_new(parent); + res->rc = resp->rc; + res->key = Py_None; + Py_INCREF(res->key); + maybe_push_operr(mres, res, resp->rc, 0); + } + } + + if (resp->njson) { + pycbc_dict_add_text_kv(resultdict, "health_json", resp->json); + } + if (resp->rflags & LCB_RESP_F_FINAL) { + /* Note this can happen in both success and error cases!*/ + operation_completed_with_err_info(parent, mres, cbtype, resp_base); + } + + CB_THR_BEGIN(parent); +} + + + void pycbc_callbacks_init(lcb_t instance) { @@ -813,6 +849,7 @@ pycbc_callbacks_init(lcb_t instance) lcb_install_callback3(instance, LCB_CALLBACK_OBSERVE, observe_callback); lcb_install_callback3(instance, LCB_CALLBACK_STATS, stats_callback); lcb_install_callback3(instance, LCB_CALLBACK_PING, ping_callback); + lcb_install_callback3(instance, LCB_CALLBACK_DIAG, diag_callback); /* Subdoc */ lcb_install_callback3(instance, LCB_CALLBACK_SDLOOKUP, subdoc_callback); diff --git a/src/miscops.c b/src/miscops.c index 06343a1b8..cc1d3bdb3 100644 --- a/src/miscops.c +++ b/src/miscops.c @@ -14,7 +14,9 @@ * limitations under the License. **/ +#include <libcouchbase/api3.h> #include "oputil.h" +#include "pycbc.h" /** * This file contains 'miscellaneous' operations. Functions contained here @@ -361,9 +363,9 @@ pycbc_Bucket__stats(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) return cv.ret; } -PyObject *pycbc_Bucket__get_health(pycbc_Bucket *self, - PyObject *args, - PyObject *kwargs) +PyObject *pycbc_Bucket__ping(pycbc_Bucket *self, + PyObject *args, + PyObject *kwargs) { int rv; Py_ssize_t ncmds = 0; @@ -397,3 +399,40 @@ PyObject *pycbc_Bucket__get_health(pycbc_Bucket *self, pycbc_common_vars_finalize(&cv, self); return cv.ret; } + +PyObject *pycbc_Bucket__diagnostics(pycbc_Bucket *self, + PyObject *args, + PyObject *kwargs) +{ + int rv; + Py_ssize_t ncmds = 0; + lcb_error_t err = LCB_ERROR; + struct pycbc_common_vars cv = PYCBC_COMMON_VARS_STATIC_INIT; + lcb_CMDDIAG cmd = {0}; + cmd.options = LCB_PINGOPT_F_JSONPRETTY; + + cmd.id = "PYCBC"; + rv = pycbc_common_vars_init(&cv, self, PYCBC_ARGOPT_MULTI, ncmds, 0); + + if (rv < 0) { + return NULL; + } + lcb_sched_enter(self->instance); + PYCBC_CONN_THR_BEGIN(self); + err = lcb_diag(self->instance, cv.mres, &cmd); + PYCBC_CONN_THR_END(self); + + if (err != LCB_SUCCESS) { + PYCBC_EXCTHROW_SCHED(err); + goto GT_DONE; + } + + if (-1 == pycbc_common_vars_wait(&cv, self)) { + goto GT_DONE; + } + lcb_sched_leave(self->instance); + + GT_DONE: + pycbc_common_vars_finalize(&cv, self); + return cv.ret; +} \ No newline at end of file diff --git a/src/pycbc.h b/src/pycbc.h index e99e58641..a15479387 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -1127,9 +1127,13 @@ PyObject* pycbc_Bucket__cntlstr(pycbc_Bucket *conn, PyObject *args, PyObject *kw /** * Health-check methods */ -PyObject *pycbc_Bucket__get_health(pycbc_Bucket *self, - PyObject *args, - PyObject *kwargs); +PyObject *pycbc_Bucket__ping(pycbc_Bucket *self, + PyObject *args, + PyObject *kwargs); + +PyObject *pycbc_Bucket__diagnostics(pycbc_Bucket *self, + PyObject *args, + PyObject *kwargs); /** * Flag to check if logging is enabled for the library via Python's logging */ From 2568574bbbbe36451a509408fae07006e1d90d2f Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Thu, 11 Jan 2018 18:38:38 +0000 Subject: [PATCH 129/829] PYCBC-453: certificate auth support Motivation ---------- libcouchbase 2.8.4 supports X509-style certificate authentication via the 'keypath' and 'certpath' parts of the connection string. This should be exposed from Python. Modifications ------------- - Added some extra options to test.ini handling to allow testing of CA throughout test suite - Refactored test.ini option handling Results ------- - Certificate Authentication verified working. Change-Id: I64e5b0b57d009b81af4b5ca119370fca1d46f4a4 Reviewed-on: http://review.couchbase.org/87759 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Matt Ingenthron <ingenthr@gmail.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- CMakeLists.txt | 60 +++++++++++++++++++++++++++++++++++++++++ couchbase/tests/base.py | 34 +++++++++++++++++------ 2 files changed, 86 insertions(+), 8 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 76f520e60..d824061ed 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -2,21 +2,53 @@ cmake_minimum_required(VERSION 3.8) project(couchbase_python_client_2_3_1) set(LCB_ROOT ../libcouchbase) +set(LCB_ROOT_SRC ${LCB_ROOT}/src) +set(LCB_ROOT_CONTRIB ${LCB_ROOT}/contrib) set(PYTHON_INCLUDE_DIR /Users/ellis_breen/root/virtualenvs/3.6/default/include/python3.6m/) set(PYTHON_INCLUDE ${PYTHON_INCLUDE_DIR}) +set(LCB_IO ${LCB_ROOT}/src/lcbio) aux_source_directory(${LCB_ROOT}/src LCB_CORE) aux_source_directory(${LCB_ROOT}/src/http HTTP) aux_source_directory(${LCB_ROOT}/include/memcached/ MCD_INC) aux_source_directory(${LCB_ROOT}/include/libcouchbase/ LCB_INC) aux_source_directory(${LCB_ROOT}/contrib/lcb-jsoncpp LCB_JSON) +aux_source_directory(${LCB_ROOT}/src/ssl LCB_SSL) +aux_source_directory(${LCB_ROOT}/src/mcserver LCB_MCSERVER) +aux_source_directory(${LCB_IO} LCB_IO_A) +aux_source_directory(${LCB_ROOT_SRC}/sasl LCB_SASL) +aux_source_directory(LCB_VBUCKET ${LCB_ROOT}/src/vbucket) +aux_source_directory(LCB_SASL ${LCB_ROOT}/contrib/cbsasl) +aux_source_directory(LCB_CLI ${LCB_ROOT}/contrib/cliopts) +aux_source_directory(LCB_ROOT_A ${LCB_ROOT_SRC}) + + +ADD_LIBRARY(netbuf OBJECT ${LCB_NETBUF_SRC}) +ADD_LIBRARY(netbuf-malloc OBJECT ${LCB_NETBUF_SRC}) +ADD_LIBRARY(mcreq OBJECT ${LCB_MC_SRC}) +ADD_LIBRARY(rdb OBJECT ${LCB_RDB_SRC}) +ADD_LIBRARY(lcbio OBJECT ${LCB_IO_SRC}) +ADD_LIBRARY(lcbio-cxx OBJECT ${LCB_IO_CXXSRC}) +ADD_LIBRARY(lcbht OBJECT ${LCB_HT_SRC}) +ADD_LIBRARY(lcbcore OBJECT ${LCB_CORE_SRC}) +ADD_LIBRARY(lcbcore-cxx OBJECT ${LCB_CORE_CXXSRC}) include_directories( ${LCB_CORE} ${LCB_ROOT}/include ${MCD_INC} ${HTTP} ${PYTHON_INCLUDE_DIR} ${LCB_JSON} + ${LCB_SSL} + ${LCB_MCSERVER} + ${LCB_IO} + ${LCB_VBUCKET} + ${LCB_SASL} + ${LCB_CLI} + ${LCB_ROOT_SRC} + ${LCB_SASL} + ${LCB_SASL}/src + ${LCB_ROOT_CONTRIB} ) include(ExternalProject) @@ -30,6 +62,16 @@ set(EXTRA_SOURCE_DIRS ${LCB_ROOT}/include/memcached ${LCB_ROOT}/include/libcouchbase ${LCB_ROOT}/src/http + ${LCB_ROOT}/src/ssl + + ${LCB_MCSERVER} + ${LCB_IO} + + ${LCB_VBUCKET} + ${LCB_SASL} + ${LCB_CLI} + ${LCB_ROOT_SRC} + ${LCB_ROOT_A} ) set(SOURCE @@ -38,6 +80,16 @@ set(SOURCE ${LCB_INC} ${MCD_INC} ${HTTP} + ${LCB_SSL} + ${LCB_ROOT}/src/ssl + + ${LCB_MCSERVER} + ${LCB_IO} + + ${LCB_VBUCKET} + ${LCB_SASL} + ${LCB_CLI} + ${LCB_ROOT_SRC} ) link_directories(${LCB_ROOT}/lib) link_libraries(${LCB_ROOT}/../lib/ @@ -48,6 +100,14 @@ add_executable(couchbase_python_client_2_3_1 ${LCB_INC} ${MCD_INC} ${LCB_JSON} + ${LCB_SSL} + ${LCB_ROOT}/src/ssl + + ${LCB_IO} + ${LCB_VBUCKET} + ${LCB_SASL} + ${LCB_CLI} + ${LCB_ROOT_SRC} acouchbase/tests/asyncio_tests.py acouchbase/tests/fixtures.py acouchbase/tests/py34only.py diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index 180e0fbd5..9e72db9de 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -32,14 +32,13 @@ from fallback import configparser from testresources import ResourcedTestCase, TestResourceManager - +from couchbase.exceptions import CouchbaseError from couchbase.admin import Admin from couchbase.mockserver import CouchbaseMock, BucketSpec, MockControlClient from couchbase.result import ( MultiResult, ValueResult, OperationResult, ObserveInfo, Result) from couchbase._pyport import basestring - CONFIG_FILE = 'tests.ini' # in cwd @@ -52,17 +51,33 @@ def __init__(self): self.bucket_name = "default" self.bucket_password = "" self.ipv6 = "disabled" + self.protocol = "http" + + @staticmethod + def filter_opts(options): + return {key: value for key, value in + options.items() if key in ["certpath", "keypath", "ipv6", "config_cache"] and value} def make_connargs(self, **overrides): bucket = self.bucket_name if 'bucket' in overrides: bucket = overrides.pop('bucket') - connstr = 'http://{0}:{1}/{2}?'.format(self.host, self.port, bucket) - connstr += 'ipv6=' + overrides.pop('ipv6', self.ipv6) - if 'config_cache' in overrides: - connstr += '&config_cache=' - connstr += str(overrides.pop('config_cache')) + if self.protocol.startswith('couchbase'): + protocol_format = '{0}/{1}'.format(self.host, bucket) + elif self.protocol.startswith('http'): + protocol_format = '{0}:{1}/{2}'.format(self.host, self.port, bucket) + else: + raise CouchbaseError('Unrecognised protocol') + connstr = self.protocol + '://' + protocol_format + final_options = ClusterInformation.filter_opts(self.__dict__) + override_options = ClusterInformation.filter_opts(overrides) + for k, v in override_options.items(): + overrides.pop(k) + final_options[k] = override_options[k] + + conn_options = '&'.join((key + "=" + value) for key, value in final_options.items()) + connstr += ("?" + conn_options) if conn_options else "" ret = { 'password': self.bucket_password, @@ -96,7 +111,10 @@ def load(self): info.admin_password = config.get('realserver', 'admin_password') info.bucket_name = config.get('realserver', 'bucket_name') info.bucket_password = config.get('realserver', 'bucket_password') - info.ipv6 = config.get('realserver', 'ipv6', fallback="disabled") + info.ipv6 = config.get('realserver', 'ipv6', fallback='disabled') + info.certpath = config.get('realserver', 'certpath', fallback=None) + info.keypath = config.get('realserver', 'keypath', fallback=None) + info.protocol = config.get('realserver', 'protocol', fallback="http") if config.getboolean('realserver', 'enabled'): self.realserver_info = info else: From ecf36a1c4ee873c6cee1b9f5d901773bffb20548 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Fri, 12 Jan 2018 15:18:32 +0000 Subject: [PATCH 130/829] PYCBC-451: add test and extra documentation for FMT_UTF8 transcoder As per title. Change-Id: Ie54a2f771a3a06cad9f7bb8db88e479f0b617844 Reviewed-on: http://review.couchbase.org/87796 Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Tested-by: Build Bot <build@couchbase.com> --- couchbase/bucket.py | 4 ++++ couchbase/tests/cases/set_t.py | 18 ++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 5aaf8fdc5..93e9aed7c 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -378,6 +378,10 @@ def upsert(self, key, value, cas=0, ttl=0, format=None, cb.upsert('foo', {'bar': 'baz'}, format=couchbase.FMT_JSON) + Force UTF8 document format for value:: + + cb.upsert('foo', "<xml></xml>", format=couchbase.FMT_UTF8) + Perform optimistic locking by specifying last known CAS version:: cb.upsert('foo', 'bar', cas=8835713818674332672) diff --git a/couchbase/tests/cases/set_t.py b/couchbase/tests/cases/set_t.py index 9a48ed7a1..ae38fa0b5 100644 --- a/couchbase/tests/cases/set_t.py +++ b/couchbase/tests/cases/set_t.py @@ -1,3 +1,4 @@ +#-*- coding:utf-8 -*- # # Copyright 2013, Couchbase, Inc. # All Rights Reserved @@ -35,6 +36,23 @@ def test_trivial_set(self): rv = self.cb.upsert(self.gen_key(), 'value2') self.assertTrue(rv.cas > 0) + def test_utf8_set(self): + rv = self.cb.upsert("documentID", "<xml></xml>", format=FMT_UTF8) + self.assertTrue(rv) + self.assertTrue(rv.cas > 0) + rv = self.cb.get("documentID") + self.assertTrue(rv.cas > 0) + self.assertEqual(rv.value, "<xml></xml>") + + def test_utf8_set_nonascii(self): + + rv = self.cb.upsert("documentID", u'Öüç', format=FMT_UTF8) + self.assertTrue(rv) + self.assertTrue(rv.cas > 0) + rv = self.cb.get("documentID") + self.assertTrue(rv.cas > 0) + self.assertEqual(rv.value, u'Öüç') + def test_set_with_cas(self): key = self.gen_key('cas') rv1 = self.cb.upsert(key, 'value1') From bcd55d6833edff1196b0b6f07afda91258db61f2 Mon Sep 17 00:00:00 2001 From: Matt Carabine <matt.carabine@hotmail.co.uk> Date: Mon, 15 Jan 2018 15:41:56 +0000 Subject: [PATCH 131/829] PYCBC-463: Make TxIOEvent API compatible with Tornado Motivation ---------- TxCouchbase can be used in a variety of environments, including within a Tornado application, which converts twisted objects into native Tornado objects. When a connection is lost, Tornado tries to execute a callback to TxCouchbase to inform it. Unfortunately the API it uses does not match the API of the TxIOEvent, this means that the program will crash due to an AttributeError. Modifications ------------- Added two new methods to TxIOEvent, readConnectionLost and writeConnectionLost. Both of these methods redirect to the existing method connectionLost, this ensures that the API does not break for any existing users. Now TxCouchbase will not crash when a connection fails when running in Tornado. Change-Id: Ia9d22731d1531359d2dbf5b73f90633c1b762802 Reviewed-on: http://review.couchbase.org/87849 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- txcouchbase/iops.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/txcouchbase/iops.py b/txcouchbase/iops.py index d91137d0b..1d03f348e 100644 --- a/txcouchbase/iops.py +++ b/txcouchbase/iops.py @@ -30,6 +30,12 @@ def connectionLost(self, reason): if self.state == PYCBC_EVSTATE_ACTIVE: self.ready_w() + def readConnectionLost(self, reason): + self.connectionLost(reason) + + def writeConnectionLost(self, reason): + self.connectionLost(reason) + def logPrefix(self): return "Couchbase IOEvent" From d7e6bf17f4d4cb9cb97566cc16f74ae5c78b52b0 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Tue, 13 Feb 2018 21:13:21 +0000 Subject: [PATCH 132/829] PYCBC-451: Python SDK Documentation could use example of upsert involving JSON text Added an example where an object is unmarshaled from JSON text and then added with the FMT_JSON flag. Change-Id: Ie27a17f7ac07dca6ef414d941cac529a2f94f316 Reviewed-on: http://review.couchbase.org/89357 Reviewed-by: Matt Ingenthron <ingenthr@gmail.com> Tested-by: Build Bot <build@couchbase.com> --- couchbase/bucket.py | 6 ++++++ couchbase/tests/cases/set_t.py | 7 ++++++- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 93e9aed7c..ed68a9fc9 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -378,6 +378,12 @@ def upsert(self, key, value, cas=0, ttl=0, format=None, cb.upsert('foo', {'bar': 'baz'}, format=couchbase.FMT_JSON) + Insert JSON from a string:: + + JSONstr = '{"key1": "value1", "key2": 123}' + JSONobj = json.loads(JSONstr) + cb.upsert("documentID", JSONobj, format=couchbase.FMT_JSON) + Force UTF8 document format for value:: cb.upsert('foo', "<xml></xml>", format=couchbase.FMT_UTF8) diff --git a/couchbase/tests/cases/set_t.py b/couchbase/tests/cases/set_t.py index ae38fa0b5..27ab51e5d 100644 --- a/couchbase/tests/cases/set_t.py +++ b/couchbase/tests/cases/set_t.py @@ -25,7 +25,7 @@ ArgumentError, NotFoundError, NotStoredError) from couchbase.tests.base import ConnectionTestCase - +import json class UpsertTest(ConnectionTestCase): @@ -132,6 +132,11 @@ def test_replace(self): self.assertRaises(NotFoundError, self.cb.replace, key, "value") + def test_from_json_string(self): + JSONstr = '{"key1": "value1", "key2": 123}' + JSONobj = json.loads(JSONstr) + self.cb.upsert("documentID", JSONobj, format=FMT_JSON) + if __name__ == '__main__': unittest.main() From b3467956cabc860a0b86ac6bf7f50b75b968a14e Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Tue, 13 Feb 2018 19:01:02 +0000 Subject: [PATCH 133/829] PYCBC-458: Fix Clang and Python warnings during installation Motivation ---------- There are various Clang compiler warnings that come up during installation, which should be addresed. Additionally, the acouchbase iterator module is included in Python<3.4 packages, but this module relies on the 'yield from' syntax which is only available on Python 3.3 and above, and asyncio which is only available in Python 3.4 and above. We may backport this to Python2.x using Trollius and Trollius.From in future, but for now this module should not be included in Python<3.4 packages. Modifications ------------- - Fixed various Clang warnings. - Change Ping code to only fill the 'details' field when an error has occurred, matching the behaviour of the JSON code from LCB. Also use the svc->rc field rather than svc->status, as for LCB. Adjust JSON schema in tests to allow for optional 'details' field. - Selective inclusion of acouchbase.iterator module for Python>=3.4 Results ------- All known warnings/errors eradicated. acouchbase package still present in Python>=3.4, with all tests passing. Change-Id: I4ce792fd8e0a1021c2c7fcbff5dfefd9393b5cd0 Reviewed-on: http://review.couchbase.org/89347 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- acouchbase/__init__.py | 6 ++++++ acouchbase/{ => py34only}/iterator.py | 0 couchbase/tests/cases/diag_t.py | 2 +- setup.py | 7 +++++-- src/callbacks.c | 18 +++++++++++------- src/convert.c | 17 +++++++++++++++++ src/ixmgmt.c | 2 +- src/opresult.c | 2 +- src/oputil.c | 4 ++-- 9 files changed, 44 insertions(+), 14 deletions(-) rename acouchbase/{ => py34only}/iterator.py (100%) diff --git a/acouchbase/__init__.py b/acouchbase/__init__.py index e69de29bb..b418a8e0b 100644 --- a/acouchbase/__init__.py +++ b/acouchbase/__init__.py @@ -0,0 +1,6 @@ +import sys + +if sys.version_info >= (3, 4): + import acouchbase.py34only.iterator + + sys.modules['acouchbase.iterator'] = acouchbase.py34only.iterator diff --git a/acouchbase/iterator.py b/acouchbase/py34only/iterator.py similarity index 100% rename from acouchbase/iterator.py rename to acouchbase/py34only/iterator.py diff --git a/couchbase/tests/cases/diag_t.py b/couchbase/tests/cases/diag_t.py index 96c6086bc..5ce8f6ce9 100644 --- a/couchbase/tests/cases/diag_t.py +++ b/couchbase/tests/cases/diag_t.py @@ -32,7 +32,7 @@ "server": {"type": "string"}, "status": {"type": "number"} }, - "required": ["details", "latency", "server", "status"]} + "required": ["latency", "server", "status"]} any_of_required_services_schema = {"type": "array", "items": service_schema} diff --git a/setup.py b/setup.py index b53f604da..fa4f40e2d 100644 --- a/setup.py +++ b/setup.py @@ -136,8 +136,11 @@ 'couchbase.tests.cases', 'gcouchbase', 'txcouchbase', - 'acouchbase' - ], + 'acouchbase', + ] + ([ + 'acouchbase.tests' + 'acouchbase.py34only' + ] if sys.version_info >= (3, 4) else []), package_data = pkgdata, tests_require = [ 'nose', 'testresources>=0.2.7' ], test_suite = 'couchbase.tests.test_sync', diff --git a/src/callbacks.c b/src/callbacks.c index 114a27ee6..882b283cd 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -733,7 +733,6 @@ static void ping_callback(lcb_t instance, { pycbc_Bucket *parent; const lcb_RESPPING *resp = (const lcb_RESPPING *)resp_base; - int do_return = 0; pycbc_MultiResult *mres = (pycbc_MultiResult *)resp->cookie; PyObject *resultdict = pycbc_multiresult_dict(mres); @@ -741,7 +740,6 @@ static void ping_callback(lcb_t instance, CB_THR_END(parent); if (resp->rc != LCB_SUCCESS) { - do_return = 1; if (mres->errop == NULL) { pycbc_Result *res = (pycbc_Result *)pycbc_result_new(parent); res->rc = resp->rc; @@ -754,7 +752,7 @@ static void ping_callback(lcb_t instance, { PyObject *struct_services_dict = PyDict_New(); - int ii; + lcb_SIZE ii; for (ii = 0; ii < resp->nservices; ii++) { lcb_PINGSVC *svc = &resp->services[ii]; const char *type_s = get_type_s(svc->type); @@ -769,9 +767,16 @@ static void ping_callback(lcb_t instance, { PyObject *mrdict = PyDict_New(); PyList_Append(struct_server_list, mrdict); - pycbc_dict_add_text_kv(mrdict, - "details", - lcb_strerror(NULL, svc->status)); + switch (svc->status) { + case LCB_PINGSTATUS_OK: + break; + case LCB_PINGSTATUS_TIMEOUT: + break; + default: + pycbc_dict_add_text_kv(mrdict, + "details", + lcb_strerror_long(svc->rc)); + } pycbc_dict_add_text_kv( mrdict, "server", svc->server); PyDict_SetItemString(mrdict, @@ -793,7 +798,6 @@ static void ping_callback(lcb_t instance, } if (resp->rflags & LCB_RESP_F_FINAL) { /* Note this can happen in both success and error cases!*/ - do_return = 1; operation_completed_with_err_info(parent, mres, cbtype, resp_base); } CB_THR_BEGIN(parent); diff --git a/src/convert.c b/src/convert.c index eb2a381fb..fed362df7 100644 --- a/src/convert.c +++ b/src/convert.c @@ -72,6 +72,18 @@ convert_to_string(const char *buf, size_t nbuf, int mode) return PyBytes_FromStringAndSize(buf, nbuf); } +#if defined(__clang__) +#define DISABLE_SCOPE_START \ +_Pragma("clang diagnostic push") \ +_Pragma("clang diagnostic ignored \"-Wunreachable-code\"") + +#define DISABLE_SCOPE_END\ + _Pragma("clang diagnostic pop") +#else +#define DISABLE_SCOPE_START +#define DISABLE_SCOPE_END +#endif + static int encode_common(PyObject *src, pycbc_pybuffer *dst, lcb_U32 flags) { @@ -80,9 +92,11 @@ encode_common(PyObject *src, pycbc_pybuffer *dst, lcb_U32 flags) int rv; if (flags == PYCBC_FMT_UTF8) { + #if PY_MAJOR_VERSION == 2 if (PyString_Check(src)) { #else + DISABLE_SCOPE_START if (0) { #endif bytesobj = src; @@ -95,6 +109,9 @@ encode_common(PyObject *src, pycbc_pybuffer *dst, lcb_U32 flags) } bytesobj = PyUnicode_AsUTF8String(src); } +#if PY_MAJOR_VERSION != 2 + DISABLE_SCOPE_END +#endif } else if (flags == PYCBC_FMT_BYTES) { if (PyBytes_Check(src) || PyByteArray_Check(src)) { bytesobj = src; diff --git a/src/ixmgmt.c b/src/ixmgmt.c index e53d6c3b6..18682e64b 100644 --- a/src/ixmgmt.c +++ b/src/ixmgmt.c @@ -163,7 +163,7 @@ pycbc_Bucket__ixwatch(pycbc_Bucket *self, PyObject *args, PyObject *kw) cmd.specs = (const lcb_N1XSPEC * const *)specs; bufs = calloc(nspecs, sizeof *bufs); - for (ii = 0; ii < nspecs; ++ii) { + for (ii = 0; nspecs > 0 && ii < (size_t) nspecs; ++ii) { PyObject *index = PySequence_GetItem(indexes, ii); PyObject *strobj = NULL; if (index == NULL) { diff --git a/src/opresult.c b/src/opresult.c index 57960ba8c..aeb537b7b 100644 --- a/src/opresult.c +++ b/src/opresult.c @@ -303,7 +303,7 @@ pycbc_sdresult_addresult(pycbc__SDResult *obj, size_t ii, PyObject *item) if (obj->results == NULL) { obj->results = PyList_New(PyTuple_GET_SIZE(obj->specs)); } - pycbc_assert(ii < PyTuple_GET_SIZE(obj->specs)); + pycbc_assert(ii < (size_t)PyTuple_GET_SIZE(obj->specs)); PyList_SetItem(obj->results, ii, item); Py_INCREF(item); /* To normalize refcount semantics */ } diff --git a/src/oputil.c b/src/oputil.c index 035f3718d..d5de4254b 100644 --- a/src/oputil.c +++ b/src/oputil.c @@ -605,7 +605,7 @@ int pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, PyObject *key, PyObject *spectuple, lcb_CMDSUBDOC *cmd) { - int rv; + int rv = 0; lcb_error_t err = LCB_SUCCESS; Py_ssize_t nspecs = 0; pycbc__SDResult *newitm = NULL; @@ -665,7 +665,7 @@ pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, free(specs); { size_t ii; - for (ii = 0; ii < nspecs; ++ii) { + for (ii = 0; nspecs > 0 && ii < (size_t) nspecs; ++ii) { PYCBC_PYBUF_RELEASE(pathbufs + ii); PYCBC_PYBUF_RELEASE(valbufs + ii); } From 11a60296dd6f67955b2374beede5740914c7e01d Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Wed, 14 Feb 2018 17:22:24 +0000 Subject: [PATCH 134/829] PYCBC-458: fix typo in setup.py Change-Id: I867c42445ff4c03792e3eb8a8707713ab865f1d2 Reviewed-on: http://review.couchbase.org/89419 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index fa4f40e2d..5855f34ec 100644 --- a/setup.py +++ b/setup.py @@ -138,7 +138,7 @@ 'txcouchbase', 'acouchbase', ] + ([ - 'acouchbase.tests' + 'acouchbase.tests', 'acouchbase.py34only' ] if sys.version_info >= (3, 4) else []), package_data = pkgdata, From f40f1c41b2db0e4b34e2597edd04bfe6088947ae Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Fri, 23 Feb 2018 18:10:52 +0000 Subject: [PATCH 135/829] PYCBC-465: Add Snappy Compression Feature Motivation ---------- libcouchbase now suports Snappy compression in various configurations (in, out, both directions, etc). This should be reflected in the Python API. Modifications ------------- - Added 'compression' property to couchbase.Bucket, for post-connection reading/writing of the compression type. Results ------- Switching through compression types results in correct setting/retrieval of values with unique values per iteration. Change-Id: I21719bd0fa0da6b22a7a746a80e9a5fa0d0e1547 Reviewed-on: http://review.couchbase.org/90054 Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Build Bot <build@couchbase.com> --- .gitignore | 1 - CMakeLists.txt | 4 +-- couchbase/bucket.py | 36 +++++++++++++++++++++++++ couchbase/tests/base.py | 2 +- couchbase/tests/cases/misc_t.py | 18 ++++++++++++- docs/source/api/couchbase.rst | 2 ++ setup.py | 3 +++ src/constants.c | 47 +++++++++++++++++++++++++++++---- 8 files changed, 103 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index 2c67a44bd..3fe84edc3 100644 --- a/.gitignore +++ b/.gitignore @@ -22,7 +22,6 @@ p3 p2 p3_nose p2_nose -build*.sh BUILD* couchbase/_version.py ci/ diff --git a/CMakeLists.txt b/CMakeLists.txt index d824061ed..69558ef4a 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -115,7 +115,7 @@ add_executable(couchbase_python_client_2_3_1 acouchbase/__init__.py acouchbase/asyncio_iops.py acouchbase/bucket.py - acouchbase/iterator.py + acouchbase/py34only/iterator.py couchbase/async/__init__.py couchbase/async/bucket.py couchbase/async/events.py @@ -147,7 +147,7 @@ add_executable(couchbase_python_client_2_3_1 couchbase/tests/cases/flush_t.py couchbase/tests/cases/format_t.py couchbase/tests/cases/get_t.py - couchbase/tests/cases/health_t.py + couchbase/tests/cases/diag_t.py couchbase/tests/cases/iops_t.py couchbase/tests/cases/itertypes_t.py couchbase/tests/cases/itmops_t.py diff --git a/couchbase/bucket.py b/couchbase/bucket.py index ed68a9fc9..9b4eae6b6 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1657,6 +1657,42 @@ def n1ql_timeout(self): def n1ql_timeout(self, value): self._set_timeout_common(_LCB.LCB_CNTL_N1QL_TIMEOUT, value) + @property + def compression(self): + """ + The compression mode to be used when talking to the server. + + This can be any of the values in :module:`couchbase._libcouchbase` + prefixed with `COMPRESS_`: + + .. data:: COMPRESS_NONE + + Do not perform compression in any direction. + + .. data:: COMPRESS_IN + + Decompress incoming data, if the data has been compressed at the server. + + .. data:: COMPRESS_OUT + + Compress outgoing data. + + .. data:: COMPRESS_INOUT + + Both `COMPRESS_IN` and `COMPRESS_OUT`. + + .. data:: COMPRESS_FORCE + + Setting this flag will force the client to assume that all servers + support compression despite a HELLO not having been initially negotiated. + """ + + return self._cntl(_LCB.LCB_CNTL_COMPRESSION_OPTS, value_type='int') + + @compression.setter + def compression(self, value): + self._cntl(_LCB.LCB_CNTL_COMPRESSION_OPTS, value_type='int', value=value) + @property def is_ssl(self): """ diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index 9e72db9de..cb9483116 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -56,7 +56,7 @@ def __init__(self): @staticmethod def filter_opts(options): return {key: value for key, value in - options.items() if key in ["certpath", "keypath", "ipv6", "config_cache"] and value} + options.items() if key in ["certpath", "keypath", "ipv6", "config_cache", "compression"] and value} def make_connargs(self, **overrides): bucket = self.bucket_name diff --git a/couchbase/tests/cases/misc_t.py b/couchbase/tests/cases/misc_t.py index c8f15f658..e25918625 100644 --- a/couchbase/tests/cases/misc_t.py +++ b/couchbase/tests/cases/misc_t.py @@ -165,4 +165,20 @@ def test_multi_auth(self): new_bucket = cb.bucket + '2' cb.add_bucket_creds(new_bucket, 'newpass') self.assertRaises(ValueError, cb.add_bucket_creds, '', 'pass') - self.assertRaises(ValueError, cb.add_bucket_creds, 'bkt', '') \ No newline at end of file + self.assertRaises(ValueError, cb.add_bucket_creds, 'bkt', '') + + def test_compression(self): + import couchbase._libcouchbase as _LCB + items = list(_LCB.COMPRESSION.items()) + for entry in range(0, len(items)*2): + connstr, cntl = items[entry % len(items)] + print(connstr + "," + str(cntl)) + cb = self.make_connection(compression=connstr) + self.assertEqual(cb.compression, cntl) + value = "world" + str(entry) + cb.upsert("hello", value) + cb.compression = items[(entry + 1) % len(items)][1] + self.assertEqual(value, cb.get("hello").value) + cb.remove("hello") + + diff --git a/docs/source/api/couchbase.rst b/docs/source/api/couchbase.rst index 7360dfaba..c4f7e3e6a 100644 --- a/docs/source/api/couchbase.rst +++ b/docs/source/api/couchbase.rst @@ -470,6 +470,8 @@ Attributes .. autoattribute:: is_ssl + .. autoattribute:: compression + .. attribute:: default_format Specify the default format (default: :const:`~couchbase.FMT_JSON`) diff --git a/setup.py b/setup.py index 5855f34ec..d401cc095 100644 --- a/setup.py +++ b/setup.py @@ -33,6 +33,9 @@ warnings.warn('Adding /usr/local to search path for OS X') extoptions['library_dirs'] = ['/usr/local/lib'] extoptions['include_dirs'] = ['/usr/local/include'] + if os.environ.get('PYCBC_DEBUG'): + extoptions['extra_compile_args'] = ['-O0', '-g'] + extoptions['extra_link_args'] = ['-O0', '-g'] else: warnings.warn("I'm detecting you're running windows." "You might want to modify " diff --git a/src/constants.c b/src/constants.c index 4cf3df632..a5e4eb876 100644 --- a/src/constants.c +++ b/src/constants.c @@ -83,13 +83,17 @@ X(APPEND) \ X(PREPEND) +typedef void (*pycbc_constant_handler)(PyObject *, const char *, long long int); + +static PyObject *setup_compression_map(PyObject *module, + pycbc_constant_handler handler); + static void -do_all_constants(PyObject *module, - void (*handler)(PyObject*, const char*, PY_LONG_LONG)) +do_all_constants(PyObject *module, pycbc_constant_handler handler) { #define ADD_MACRO(sym) handler(module, #sym, sym) #define ADD_CONSTANT(name, val) handler(module, name, val) - + #define LCB_CONSTANT(postfix, ...) ADD_CONSTANT(#postfix, LCB_##postfix) #define X(b) ADD_MACRO(LCB_##b); XERR(X); XSTORAGE(X); @@ -175,6 +179,7 @@ do_all_constants(PyObject *module, ADD_MACRO(LCB_CNTL_SSL_MODE); ADD_MACRO(LCB_SSL_ENABLED); ADD_MACRO(LCB_CNTL_N1QL_TIMEOUT); + ADD_MACRO(LCB_CNTL_COMPRESSION_OPTS); /* View options */ ADD_MACRO(LCB_CMDVIEWQUERY_F_INCLUDE_DOCS); @@ -197,14 +202,46 @@ do_all_constants(PyObject *module, ADD_MACRO(LCB_BTYPE_COUCHBASE); ADD_MACRO(LCB_BTYPE_EPHEMERAL); ADD_MACRO(LCB_BTYPE_MEMCACHED); + + PyModule_AddObject(module, "COMPRESSION", setup_compression_map(module, handler)); + #ifdef LCB_N1XSPEC_F_DEFER ADD_MACRO(LCB_N1XSPEC_F_DEFER); #endif } +static PyObject *setup_compression_map(PyObject *module, + pycbc_constant_handler handler) +{ +/* Compression options */ +#define LCB_FOR_EACH_COMPRESS_TYPE(X, DIV)\ + X(COMPRESS_NONE, "Do not perform compression in any direction.") DIV \ + X(COMPRESS_IN, "Decompress incoming data, if the data has been compressed at the server.") DIV \ + X(COMPRESS_OUT," Compress outgoing data.") DIV \ + X(COMPRESS_INOUT) DIV \ + X(COMPRESS_FORCE,"Setting this flag will force the client to assume that all servers support compression despite a HELLO not having been initially negotiated.") + + PyObject *result = PyDict_New(); + LCB_FOR_EACH_COMPRESS_TYPE(LCB_CONSTANT, ;); +#define PP_FOR_EACH(FUNC, DIV)\ + FUNC(on, LCB_COMPRESS_INOUT) DIV\ + FUNC(off, LCB_COMPRESS_NONE) DIV\ + FUNC(inflate_only, LCB_COMPRESS_IN) DIV\ + FUNC(force, LCB_COMPRESS_INOUT | LCB_COMPRESS_FORCE) +#undef LCB_FOR_EACH_COMPRESS_TYPE + +#define X(NAME, VALUE) PyDict_SetItemString(result,#NAME,PyLong_FromLong(VALUE)) + PP_FOR_EACH(X, ;); +#undef X + return result; +} + + +#undef LCB_CONSTANT + static void -do_constmod(PyObject *module, const char *name, PY_LONG_LONG value) { - PyObject *o = PyLong_FromLongLong(value); +do_constmod(PyObject *module, const char *name, long long value) { + PyObject *o = PyLong_FromLongLong((PY_LONG_LONG)value); PyModule_AddObject(module, name, o); } From bb4564c44ab3b4d2bd2758df990b2a6d311577bc Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Tue, 13 Feb 2018 13:50:44 +0000 Subject: [PATCH 136/829] PYCBC-452: Implement Log Redaction Motivation ---------- - Expose Log Redaction from libcouchbase Modifications ------------- - Add 'redaction' property - set to non-zero to enable redaction, is non-zero when redaction is enabled. Results ------- - Redaction tags appear in output when connstr/redaction setter requests redaction on, disappear when reset to off Change-Id: Iee070e3d1ee8d4a17998940e05a5c517cce0bb65 Reviewed-on: http://review.couchbase.org/89328 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/bucket.py | 8 +++++ couchbase/tests/base.py | 44 +++++++++++++++++++++++--- couchbase/tests/cases/misc_t.py | 56 ++++++++++++++++++++++++++++++++- dev_requirements.txt | 2 +- src/constants.c | 10 +++++- 5 files changed, 113 insertions(+), 7 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 9b4eae6b6..a3127b488 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -1762,6 +1762,14 @@ def _gen_memd_wrappers(cls, factory): raise return d + @property + def redaction(self): + return self._cntl(_LCB.LCB_CNTL_LOG_REDACTION, value_type='int') + + @redaction.setter + def redaction(self, val): + return self._cntl(_LCB.LCB_CNTL_LOG_REDACTION, value=val, value_type='int') + def _cntl(self, *args, **kwargs): """Low-level interface to the underlying C library's settings. via ``lcb_cntl()``. diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index cb9483116..9a6e1745e 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -19,9 +19,41 @@ import types import platform import warnings +from testfixtures import LogCapture +from testresources import ResourcedTestCase as ResourcedTestCaseReal, TestResourceManager try: - from unittest.case import SkipTest + import unittest2 as unittest +except ImportError: + import unittest +import logging + + +class ResourcedTestCase(ResourcedTestCaseReal): + + class CaptureContext(LogCapture): + def __init__(self, *args, **kwargs): + self.records = [] + kwargs['attributes'] = (lambda r: self.records.append(r)) + super(ResourcedTestCase.CaptureContext, self).__init__(*args, **kwargs) + + @property + def output(self): + return map(str, self.records) + + def __init__(self,*args,**kwargs): + super(ResourcedTestCase,self).__init__(*args,**kwargs) + + def assertLogs(self, *args, **kwargs): + try: + return super(ResourcedTestCase,self).assertLogs(*args, **kwargs) + except Exception as e: + logging.warn(e) + + return ResourcedTestCase.CaptureContext(*args, **kwargs) + +try: + from unittest2.case import SkipTest except ImportError: from nose.exc import SkipTest @@ -31,7 +63,6 @@ # Python <3.0 fallback from fallback import configparser -from testresources import ResourcedTestCase, TestResourceManager from couchbase.exceptions import CouchbaseError from couchbase.admin import Admin from couchbase.mockserver import CouchbaseMock, BucketSpec, MockControlClient @@ -56,7 +87,7 @@ def __init__(self): @staticmethod def filter_opts(options): return {key: value for key, value in - options.items() if key in ["certpath", "keypath", "ipv6", "config_cache", "compression"] and value} + options.items() if key in ["certpath", "keypath", "ipv6", "config_cache", "compression", "log_redaction"] and value} def make_connargs(self, **overrides): bucket = self.bucket_name @@ -132,7 +163,6 @@ def load(self): else: self.mock_enabled = False - class MockResourceManager(TestResourceManager): def __init__(self, config): super(MockResourceManager, self).__init__() @@ -363,6 +393,12 @@ def gen_kv_dict(self, amount=5, prefix=None): ret[k] = "Value_For_" + k return ret + def assertRegex(self, *args, **kwargs): + try: + return super(CouchbaseTestCase,self).assertRegex(*args,**kwargs) + except: + return super(CouchbaseTestCase,self).assertRegexpMatches(*args,**kwargs) + class ConnectionTestCase(CouchbaseTestCase): def checkCbRefcount(self): diff --git a/couchbase/tests/cases/misc_t.py b/couchbase/tests/cases/misc_t.py index e25918625..434b51d99 100644 --- a/couchbase/tests/cases/misc_t.py +++ b/couchbase/tests/cases/misc_t.py @@ -18,11 +18,25 @@ from urlparse import urlparse except ImportError: from urllib.parse import urlparse +import heapq +import inspect +import unittest +try: + import unittest2 +except ImportError: + unittest2 = None + from couchbase.tests.base import ConnectionTestCase from couchbase.user_constants import FMT_JSON, FMT_AUTO, FMT_JSON, FMT_PICKLE from couchbase.exceptions import ClientTemporaryFailError from couchbase.exceptions import CouchbaseError +import couchbase +import re +import couchbase._libcouchbase as _LCB +from couchbase import enable_logging +import logging + class MiscTest(ConnectionTestCase): @@ -142,7 +156,6 @@ def test_vbmap(self): def test_logging(self): # Assume we don't have logging here.. - import couchbase import couchbase._libcouchbase as lcb self.assertFalse(lcb.lcb_logging()) @@ -156,6 +169,47 @@ def test_logging(self): couchbase.disable_logging() self.assertFalse(lcb.lcb_logging()) + def test_redaction(self): + + all_tags = r'|'.join(re.escape(v) for k, v in _LCB.__dict__.items() if + re.match(r'.*LCB_LOG_(SD|MD|UD)_[OC]TAG.*', k)) + + enable_logging() + contains_no_tags = r'^(.(?!<' + all_tags + r'))*$' + contains_tags = r'^.*(' + all_tags + r').*$' + expected = {0: {logging.DEBUG: {'text': 'off', 'pattern': contains_no_tags}}, + 1: {logging.DEBUG: {'text': 'on', 'pattern': contains_tags}}} + + for num, entry in reversed(list(expected.items())): + for level, val in entry.items(): + + optype='connstr' + with self.assertLogs(level=level, recursive_check=True) as cm: + curbc = self.make_connection(log_redaction=val['text']) + self.assertEqual(num != 0, curbc.redaction != 0) + result_str=''.join(cm.output) + logging.info( + 'checking {pattern} matches {optype} addition {text} result:{result_str}'.format(optype=optype, + result_str=result_str, + **val)) + self.assertRegex(result_str, val['pattern']) + + opposite = 1 - num + opposite_val = expected[opposite][level] + + optype='cntl' + with self.assertLogs(level=level) as cm: + curbc.redaction = opposite + curbc.upsert(key='test', value='value') + self.assertEqual(opposite != 0, curbc.redaction != 0) + + result_str=''.join(cm.output) + logging.info( + 'checking {pattern} matches {optype} addition {text} result:{result_str}'.format(optype=optype, + result_str=result_str, + **val)) + self.assertRegex(''.join(cm.output), opposite_val['pattern']) + def test_compat_timeout(self): cb = self.make_connection(timeout=7.5) self.assertEqual(7.5, cb.timeout) diff --git a/dev_requirements.txt b/dev_requirements.txt index 7e7cd2d18..d23587608 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -5,4 +5,4 @@ sphinx==1.6.4 testresources>=0.2.7 jsonschema==2.6.0 configparser==3.5.0 - +testfixtures==5.4.0 diff --git a/src/constants.c b/src/constants.c index a5e4eb876..2652ed4cc 100644 --- a/src/constants.c +++ b/src/constants.c @@ -93,6 +93,7 @@ do_all_constants(PyObject *module, pycbc_constant_handler handler) { #define ADD_MACRO(sym) handler(module, #sym, sym) #define ADD_CONSTANT(name, val) handler(module, name, val) + #define ADD_STRING(name) PyModule_AddObject(module, #name, pycbc_SimpleStringZ(name) ) #define LCB_CONSTANT(postfix, ...) ADD_CONSTANT(#postfix, LCB_##postfix) #define X(b) ADD_MACRO(LCB_##b); XERR(X); @@ -179,7 +180,14 @@ do_all_constants(PyObject *module, pycbc_constant_handler handler) ADD_MACRO(LCB_CNTL_SSL_MODE); ADD_MACRO(LCB_SSL_ENABLED); ADD_MACRO(LCB_CNTL_N1QL_TIMEOUT); - ADD_MACRO(LCB_CNTL_COMPRESSION_OPTS); + ADD_MACRO(LCB_CNTL_COMPRESSION_OPTS); + ADD_MACRO(LCB_CNTL_LOG_REDACTION); + ADD_STRING(LCB_LOG_MD_OTAG); + ADD_STRING(LCB_LOG_MD_CTAG); + ADD_STRING(LCB_LOG_SD_OTAG); + ADD_STRING(LCB_LOG_SD_CTAG); + ADD_STRING(LCB_LOG_UD_OTAG); + ADD_STRING(LCB_LOG_UD_CTAG); /* View options */ ADD_MACRO(LCB_CMDVIEWQUERY_F_INCLUDE_DOCS); From b79d3eb16c1a830f4b6774cdeaa1082afcdb475a Mon Sep 17 00:00:00 2001 From: Sergey Avseyev <sergey.avseyev@gmail.com> Date: Tue, 6 Mar 2018 18:26:05 +0300 Subject: [PATCH 137/829] Remove executable bits and shebangs from examples Examples are part of the documentation and should not be kept as executables Change-Id: Ida403a4855afcab1d1ebe12548ced1cb7c6cc4f5 Reviewed-on: http://review.couchbase.org/90525 Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> Tested-by: Build Bot <build@couchbase.com> --- examples/abench.py | 2 -- examples/basic.py | 2 -- examples/bench.py | 1 - examples/connection-pool.py | 2 -- examples/docloader.py | 2 -- examples/gbench.py | 2 -- examples/iops_demo.py | 1 - examples/item.py | 1 - examples/reversed_keys.py | 2 -- examples/search_keywords.py | 3 --- examples/txbench.py | 2 -- 11 files changed, 20 deletions(-) mode change 100755 => 100644 examples/abench.py mode change 100755 => 100644 examples/basic.py mode change 100755 => 100644 examples/bench.py mode change 100755 => 100644 examples/connection-pool.py mode change 100755 => 100644 examples/gbench.py mode change 100755 => 100644 examples/reversed_keys.py mode change 100755 => 100644 examples/txbench.py diff --git a/examples/abench.py b/examples/abench.py old mode 100755 new mode 100644 index 47b244911..f2c302f83 --- a/examples/abench.py +++ b/examples/abench.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# # Copyright 2013, Couchbase, Inc. # All Rights Reserved # diff --git a/examples/basic.py b/examples/basic.py old mode 100755 new mode 100644 index 4b951b54d..642e67e25 --- a/examples/basic.py +++ b/examples/basic.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# # Copyright 2013, Couchbase, Inc. # All Rights Reserved # diff --git a/examples/bench.py b/examples/bench.py old mode 100755 new mode 100644 index 4c41f07aa..79a902128 --- a/examples/bench.py +++ b/examples/bench.py @@ -1,4 +1,3 @@ -# # Copyright 2013, Couchbase, Inc. # All Rights Reserved # diff --git a/examples/connection-pool.py b/examples/connection-pool.py old mode 100755 new mode 100644 index 493279fe0..fee9c0486 --- a/examples/connection-pool.py +++ b/examples/connection-pool.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# # Copyright 2013, Couchbase, Inc. # All Rights Reserved # diff --git a/examples/docloader.py b/examples/docloader.py index d5df22ae6..3a7ece48f 100644 --- a/examples/docloader.py +++ b/examples/docloader.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# # Copyright 2015, Couchbase, Inc. # All Rights Reserved # diff --git a/examples/gbench.py b/examples/gbench.py old mode 100755 new mode 100644 index 6046f0be5..f3cd63636 --- a/examples/gbench.py +++ b/examples/gbench.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# # Copyright 2013, Couchbase, Inc. # All Rights Reserved # diff --git a/examples/iops_demo.py b/examples/iops_demo.py index 6c8b38e5f..fc894b923 100644 --- a/examples/iops_demo.py +++ b/examples/iops_demo.py @@ -1,4 +1,3 @@ -#!/usr/bin/env python import gevent import gevent.monkey; gevent.monkey.patch_all() import sys diff --git a/examples/item.py b/examples/item.py index 1fcf3937f..1c3ef51e2 100644 --- a/examples/item.py +++ b/examples/item.py @@ -1,4 +1,3 @@ -# # Copyright 2013, Couchbase, Inc. # All Rights Reserved # diff --git a/examples/reversed_keys.py b/examples/reversed_keys.py old mode 100755 new mode 100644 index 4074e0588..475c49c3c --- a/examples/reversed_keys.py +++ b/examples/reversed_keys.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python - from couchbase.transcoder import Transcoder from couchbase.bucket import Bucket diff --git a/examples/search_keywords.py b/examples/search_keywords.py index 539c2a18c..c04da7ad7 100644 --- a/examples/search_keywords.py +++ b/examples/search_keywords.py @@ -1,6 +1,3 @@ -#!/usr/bin/env python - -# # Copyright 2013, Couchbase, Inc. # All Rights Reserved # diff --git a/examples/txbench.py b/examples/txbench.py old mode 100755 new mode 100644 index afce65709..c67168c9f --- a/examples/txbench.py +++ b/examples/txbench.py @@ -1,5 +1,3 @@ -#!/usr/bin/env python -# # Copyright 2013, Couchbase, Inc. # All Rights Reserved # From 1a520250f9574620abc96310f28119b4a58164c8 Mon Sep 17 00:00:00 2001 From: Sergey Avseyev <sergey.avseyev@gmail.com> Date: Mon, 5 Mar 2018 11:46:06 +0300 Subject: [PATCH 138/829] [docs] Allow to build docs with non-inlined extension Change-Id: If77c9082b63a984867dc268bec2311bd1b33b891 Reviewed-on: http://review.couchbase.org/90530 Tested-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- docs/source/conf.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index ef88c6866..9994fd20d 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -22,6 +22,10 @@ sys.path.insert(0, os.path.dirname(__file__)) #sys.path.insert(0, os.path.abspath('../../couchbase')) #sys.path.insert(0, os.path.abspath(os.path.join(os.pardir, os.pardir, 'couchbase'))) + +from distutils.util import get_platform +sys.path.insert(0, os.path.abspath('../../build/lib.%s-%s' % (get_platform(), sys.version[0:3]))) + import couchbase_version # -- General configuration ----------------------------------------------------- @@ -129,7 +133,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +# html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. From c3850a9739b564a9f9e8064d4a0cf6a842c2eee8 Mon Sep 17 00:00:00 2001 From: Sergey Avseyev <sergey.avseyev@gmail.com> Date: Tue, 6 Mar 2018 19:57:48 +0300 Subject: [PATCH 139/829] [docs] Version in sphinx config must be a string Change-Id: I347171c5fa0cbbb07ce147639499256a9670dbd8 Reviewed-on: http://review.couchbase.org/90531 Tested-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- docs/source/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/conf.py b/docs/source/conf.py index 9994fd20d..cbc27e251 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -63,7 +63,7 @@ release = couchbase_version.get_version() # The short X.Y version. -version = release.split('.')[:2] +version = '.'.join(release.split('.')[:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. From 2d489573fa15e1998dfe8cc7c53565733f7ec71f Mon Sep 17 00:00:00 2001 From: Sergey Avseyev <sergey.avseyev@gmail.com> Date: Tue, 6 Mar 2018 19:50:20 +0300 Subject: [PATCH 140/829] [docs] Fix sphinx warnings Change-Id: Iacf805ae8f7783ce9d5aba912af653dd9edad07a Reviewed-on: http://review.couchbase.org/90532 Tested-by: Sergey Avseyev <sergey.avseyev@gmail.com> Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/__init__.py | 2 +- couchbase/admin.py | 19 ++++++++++--------- couchbase/async/bucket.py | 6 +++--- couchbase/bucket.py | 14 ++++++-------- couchbase/subdocument.py | 1 + couchbase/views/iterator.py | 2 +- docs/source/api/admin.rst | 6 +++--- docs/source/api/views.rst | 3 +-- 8 files changed, 26 insertions(+), 27 deletions(-) diff --git a/couchbase/__init__.py b/couchbase/__init__.py index 38e29faa4..06d7aee7d 100644 --- a/couchbase/__init__.py +++ b/couchbase/__init__.py @@ -119,7 +119,7 @@ def enable_logging(): This function enables the C library's logging to be propagated to the Python standard `logging` module. - Calling this function affects any :class:`~.Bucket` objects created + Calling this function affects any :class:`~couchbase.bucket.Bucket` objects created afterwards (but not before). Note that currently this will also override any ``LCB_LOGLEVEL`` directive inside the environment as well. diff --git a/couchbase/admin.py b/couchbase/admin.py index 905905ac0..ac4c3d3ff 100644 --- a/couchbase/admin.py +++ b/couchbase/admin.py @@ -50,7 +50,7 @@ class Admin(LCB.Bucket): the cluster. This object should **not** be used to perform Key/Value operations. The - :class:`~.Bucket` is used for that. + :class:`~couchbase.bucket.Bucket` is used for that. """ def __init__(self, username, password, host='localhost', port=8091, **kwargs): @@ -347,6 +347,7 @@ def _get_management_path(auth_domain, userid=None): def users_get(self, domain): """ Retrieve a list of users from the server. + :param AuthDomain domain: The authentication domain to retrieve users from. :return: :class:`~.HttpResult`. The list of users can be obtained from the returned object's `value` property. @@ -358,10 +359,10 @@ def users_get(self, domain): def user_get(self, domain, userid): """ Retrieve a user from the server + :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID. - :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not - exist. + :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not exist. :return: :class:`~.HttpResult`. The user can be obtained from the returned object's `value` property. """ @@ -372,6 +373,7 @@ def user_get(self, domain, userid): def user_upsert(self, domain, userid, password, roles, name=None): """ Upsert a user in the cluster + :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID :param password: The user password @@ -381,11 +383,11 @@ def user_upsert(self, domain, userid, password, roles, name=None): :raise: :exc:`couchbase.exceptions.HTTPError` if the request fails. :return: :class:`~.HttpResult` - .. Creating a new read-only admin user:: + Creating a new read-only admin user :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', ['ro_admin']) - .. An example of using more complex roles: + An example of using more complex roles :: adm.upsert_user(AuthDomain.Local, 'mark', 's3cr3t', [('data_reader', '*'), @@ -394,8 +396,8 @@ def user_upsert(self, domain, userid, password, roles, name=None): .. warning:: - Due to the asynchronous nature of Couchbase management APIs, it may - take a few moments for the new user settings to take effect. + Due to the asynchronous nature of Couchbase management APIs, it may + take a few moments for the new user settings to take effect. """ tmplist = [] for role in roles: @@ -424,8 +426,7 @@ def user_remove(self, domain, userid): Remove a user :param AuthDomain domain: The authentication domain for the user. :param userid: The user ID to remove - :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not - exist. + :raise: :exc:`couchbase.exceptions.HTTPError` if the user does not exist. :return: :class:`~.HttpResult` """ path = self._get_management_path(domain, userid) diff --git a/couchbase/async/bucket.py b/couchbase/async/bucket.py index b250e42b2..2c451b096 100644 --- a/couchbase/async/bucket.py +++ b/couchbase/async/bucket.py @@ -29,7 +29,7 @@ class AsyncBucket(Bucket): """ This class contains the low-level async implementation of the - :class:`~.Bucket` interface. **This module is not intended to be + :class:`~couchbase.bucket.Bucket` interface. **This module is not intended to be used directly by applications**. .. warning:: @@ -78,7 +78,7 @@ class AsyncBucket(Bucket): In both event models, the internal I/O notification system is callback-based. The main difference is in how the high-level - `Bucket` functions (for example, :meth:`~.Bucket.get` operate: + `Bucket` functions (for example, :meth:`~couchbase.bucket.Bucket.get` operate: In callback-based models, these return objects which allow a callback to be assigned to them, whereas in coroutine-based @@ -156,7 +156,7 @@ def query(self, *args, **kwargs): Reimplemented from base class. This method does not add additional functionality of the - base class' :meth:`~.Bucket.query` method (all the + base class' :meth:`~couchbase.bucket.Bucket.query` method (all the functionality is encapsulated in the view class anyway). However it does require one additional keyword argument diff --git a/couchbase/bucket.py b/couchbase/bucket.py index a3127b488..0ef080276 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -190,7 +190,7 @@ def __init__(self, *args, **kwargs): :raise: :exc:`.InvalidError` if the connection string was malformed. - :return: instance of :class:`~.Bucket` + :return: instance of :class:`~couchbase.bucket.Bucket` Initialize bucket using default options:: @@ -492,7 +492,7 @@ def get(self, key, ttl=0, quiet=None, replica=False, no_format=False): Note that the default value is `None`, which means to use the :attr:`quiet`. If it is a boolean (i.e. `True` or - `False`) it will override the `Bucket`-level + `False`) it will override the `couchbase.bucket.Bucket`-level :attr:`quiet` attribute. :param bool replica: Whether to fetch this key from a replica @@ -565,10 +565,8 @@ def touch(self, key, ttl=0): :raise: The same things that :meth:`get` does - .. seealso:: - :meth:`get` - which can be used to get *and* update the - expiration, - :meth:`touch_multi` + .. seealso:: :meth:`get` - which can be used to get *and* update the + expiration, :meth:`touch_multi` """ return _Base.touch(self, key, ttl=ttl) @@ -1293,7 +1291,7 @@ def remove_multi(self, kvs, quiet=None): :param kvs: Iterable of keys to delete from the cluster. If you wish to specify a CAS for each item, then you may pass a dictionary - of keys mapping to cas, like ``remove_multi({k1:cas1, k2:cas2}) + of keys mapping to cas, like `remove_multi({k1:cas1, k2:cas2}`) :param quiet: Whether an exception should be raised if one or more items were not found :return: A :class:`~.MultiResult` containing :class:`~.OperationResult` @@ -1453,7 +1451,7 @@ def query(self, design, view, use_devmode=False, **kwargs): :class:`~.View` contains more extensive documentation and examples - :class:`~.Query` + :class:`couchbase.views.params.Query` contains documentation on the available query options :class:`~.SpatialQuery` diff --git a/couchbase/subdocument.py b/couchbase/subdocument.py index 7e4df725d..5a23f6f5b 100644 --- a/couchbase/subdocument.py +++ b/couchbase/subdocument.py @@ -80,6 +80,7 @@ def get(path, **kwargs): Valid only in :cb_bmeth:`lookup_in` :param path: The path to retrieve + .. seealso:: :meth:`exists` """ return _gen_3spec(LCB_SDCMD_GET, path, **kwargs) diff --git a/couchbase/views/iterator.py b/couchbase/views/iterator.py index 0e73eb264..0304dea95 100644 --- a/couchbase/views/iterator.py +++ b/couchbase/views/iterator.py @@ -156,7 +156,7 @@ def __init__(self, parent, design, view, row_processor=None, ``reduce`` views do not have corresponding doc IDs (as these are aggregation functions). - :param query: If set, should be a :class:`~.Query` or + :param query: If set, should be a :class:`~couchbase.views.params.Query` or :class:`~.SpatialQuery` object. It is illegal to use this in conjunction with additional ``params`` diff --git a/docs/source/api/admin.rst b/docs/source/api/admin.rst index d5ef07e9c..125ebb054 100644 --- a/docs/source/api/admin.rst +++ b/docs/source/api/admin.rst @@ -12,12 +12,12 @@ to perform common API requests. To create an administrative handle, simply instantiate a new -:class:`Admin` object. Note that unlike the :class:`~.Bucket`, +:class:`Admin` object. Note that unlike the :class:`~couchbase.bucket.Bucket`, the :class:`Admin` constructor does not accept a connection string. This is deliberate, as the administrative API communicates with a single node, on -a well defined port (whereas the :class:`Bucket` object communicates with +a well defined port (whereas the :class:`~couchbase.bucket.Bucket` object communicates with one or more nodes using a variety of different protocols). .. autoclass:: Admin - :members: \ No newline at end of file + :members: diff --git a/docs/source/api/views.rst b/docs/source/api/views.rst index 21ac533c0..5a6405c3f 100644 --- a/docs/source/api/views.rst +++ b/docs/source/api/views.rst @@ -690,8 +690,7 @@ function. Unlike Map-Reduce views and compound keys for *startkey* and not sorted or evaluated in any particular order. -See `GeoCouch`_<https://github.com/couchbase/geocouch/wiki/Spatial-Views-API> -for more information. +See `GeoCouch_<https://github.com/couchbase/geocouch/wiki/Spatial-Views-API>` for more information. ^^^^^^^^^^^^^^^^^^^^^^^^^ Creating Geospatial Views From 6093ae9744a7008378b7012f5ec4fd47254e0560 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Thu, 12 Apr 2018 19:42:12 +0100 Subject: [PATCH 141/829] PYCBC-462: End to end tracing Motivation ---------- - It is useful to get detailed metrics on the duration taken for various operations performed by the Python client, libcouchbase and the various endpoints they talk to. We also want to be able to find particularly slow operations and flag them up for examination. Modifications ------------- - Added tracing context creation at each top level API entry point - Added a means to pass this down the call stack and into persistent objects (e.g. ViewResults) - Pass completed spans and contexts into libcouchbase's tracing facilities - Add a reporting facility that receives completed spans and optionally propagates these to a parent OpenTracing tracer (supplied as a Python object to the Bucket). Results ------- - Alpha level developer preview - Most of the KV operations can now be traced, alongside all the associated Python client code, at a per-function level if necessary - Propagation to parent OpenTracing tracer working. - Encoding, decoding and server duration are all recorded - HTTP, multi operations and Pipeline operations still to be properly implemented. Change-Id: I5cae001ffa54434abeabc7f7bcb0709f248527bc Reviewed-on: http://review.couchbase.org/91702 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- CMakeLists.txt | 276 -------------- README.rst | 3 + couchbase/bucket.py | 7 +- couchbase/tests/base.py | 57 ++- couchbase/tests/cases/get_t.py | 3 +- dev_requirements.txt | 1 + setup.py | 23 +- src/bucket.c | 40 +- src/callbacks.c | 81 +++- src/counter.c | 21 +- src/ext.c | 656 ++++++++++++++++++++++++++++++++- src/fts.c | 13 +- src/get.c | 49 ++- src/http.c | 4 +- src/miscops.c | 59 ++- src/multiresult.c | 10 +- src/n1ql.c | 18 +- src/observe.c | 26 +- src/oputil.c | 50 ++- src/oputil.h | 67 ++-- src/pipeline.c | 4 +- src/pycbc.h | 230 +++++++++++- src/result.c | 13 + src/store.c | 117 +++--- src/views.c | 26 +- tests.ini.sample | 2 +- 26 files changed, 1369 insertions(+), 487 deletions(-) delete mode 100644 CMakeLists.txt diff --git a/CMakeLists.txt b/CMakeLists.txt deleted file mode 100644 index 69558ef4a..000000000 --- a/CMakeLists.txt +++ /dev/null @@ -1,276 +0,0 @@ -cmake_minimum_required(VERSION 3.8) -project(couchbase_python_client_2_3_1) - -set(LCB_ROOT ../libcouchbase) -set(LCB_ROOT_SRC ${LCB_ROOT}/src) -set(LCB_ROOT_CONTRIB ${LCB_ROOT}/contrib) - -set(PYTHON_INCLUDE_DIR /Users/ellis_breen/root/virtualenvs/3.6/default/include/python3.6m/) -set(PYTHON_INCLUDE ${PYTHON_INCLUDE_DIR}) -set(LCB_IO ${LCB_ROOT}/src/lcbio) -aux_source_directory(${LCB_ROOT}/src LCB_CORE) -aux_source_directory(${LCB_ROOT}/src/http HTTP) -aux_source_directory(${LCB_ROOT}/include/memcached/ MCD_INC) -aux_source_directory(${LCB_ROOT}/include/libcouchbase/ LCB_INC) -aux_source_directory(${LCB_ROOT}/contrib/lcb-jsoncpp LCB_JSON) -aux_source_directory(${LCB_ROOT}/src/ssl LCB_SSL) -aux_source_directory(${LCB_ROOT}/src/mcserver LCB_MCSERVER) -aux_source_directory(${LCB_IO} LCB_IO_A) -aux_source_directory(${LCB_ROOT_SRC}/sasl LCB_SASL) - -aux_source_directory(LCB_VBUCKET ${LCB_ROOT}/src/vbucket) -aux_source_directory(LCB_SASL ${LCB_ROOT}/contrib/cbsasl) -aux_source_directory(LCB_CLI ${LCB_ROOT}/contrib/cliopts) -aux_source_directory(LCB_ROOT_A ${LCB_ROOT_SRC}) - - -ADD_LIBRARY(netbuf OBJECT ${LCB_NETBUF_SRC}) -ADD_LIBRARY(netbuf-malloc OBJECT ${LCB_NETBUF_SRC}) -ADD_LIBRARY(mcreq OBJECT ${LCB_MC_SRC}) -ADD_LIBRARY(rdb OBJECT ${LCB_RDB_SRC}) -ADD_LIBRARY(lcbio OBJECT ${LCB_IO_SRC}) -ADD_LIBRARY(lcbio-cxx OBJECT ${LCB_IO_CXXSRC}) -ADD_LIBRARY(lcbht OBJECT ${LCB_HT_SRC}) -ADD_LIBRARY(lcbcore OBJECT ${LCB_CORE_SRC}) -ADD_LIBRARY(lcbcore-cxx OBJECT ${LCB_CORE_CXXSRC}) -include_directories( ${LCB_CORE} - ${LCB_ROOT}/include - ${MCD_INC} - ${HTTP} - ${PYTHON_INCLUDE_DIR} - ${LCB_JSON} - ${LCB_SSL} - ${LCB_MCSERVER} - ${LCB_IO} - ${LCB_VBUCKET} - ${LCB_SASL} - ${LCB_CLI} - ${LCB_ROOT_SRC} - ${LCB_SASL} - ${LCB_SASL}/src - ${LCB_ROOT_CONTRIB} - ) -include(ExternalProject) - -ExternalProject_Add(libcouchbase - DOWNLOAD_COMMAND "" - SOURCE_DIR ${LCB_ROOT} - ) -set(CMAKE_CXX_STANDARD 11) -set(EXTRA_SOURCE_DIRS - ${LCB_ROOT}/src - ${LCB_ROOT}/include/memcached - ${LCB_ROOT}/include/libcouchbase - ${LCB_ROOT}/src/http - ${LCB_ROOT}/src/ssl - - ${LCB_MCSERVER} - ${LCB_IO} - - ${LCB_VBUCKET} - ${LCB_SASL} - ${LCB_CLI} - ${LCB_ROOT_SRC} - ${LCB_ROOT_A} - ) - -set(SOURCE - ${SOURCE} - ${LCB_CORE} - ${LCB_INC} - ${MCD_INC} - ${HTTP} - ${LCB_SSL} - ${LCB_ROOT}/src/ssl - - ${LCB_MCSERVER} - ${LCB_IO} - - ${LCB_VBUCKET} - ${LCB_SASL} - ${LCB_CLI} - ${LCB_ROOT_SRC} - ) -link_directories(${LCB_ROOT}/lib) -link_libraries(${LCB_ROOT}/../lib/ - ) -add_executable(couchbase_python_client_2_3_1 - ${EXTRA_SOURCE_DIRS} - ${LCB_CORE} - ${LCB_INC} - ${MCD_INC} - ${LCB_JSON} - ${LCB_SSL} - ${LCB_ROOT}/src/ssl - - ${LCB_IO} - ${LCB_VBUCKET} - ${LCB_SASL} - ${LCB_CLI} - ${LCB_ROOT_SRC} - acouchbase/tests/asyncio_tests.py - acouchbase/tests/fixtures.py - acouchbase/tests/py34only.py - acouchbase/tests/py35only.py - acouchbase/__init__.py - acouchbase/asyncio_iops.py - acouchbase/bucket.py - acouchbase/py34only/iterator.py - couchbase/async/__init__.py - couchbase/async/bucket.py - couchbase/async/events.py - couchbase/async/n1ql.py - couchbase/async/rowsbase.py - couchbase/async/view.py - couchbase/iops/__init__.py - couchbase/iops/base.py - couchbase/iops/select.py - couchbase/tests/admin/__init__.py - couchbase/tests/cases/__init__.py - couchbase/tests/cases/admin_t.py - couchbase/tests/cases/append_t.py - couchbase/tests/cases/arithmetic_t.py - couchbase/tests/cases/badargs_t.py - couchbase/tests/cases/cbftstrings_t.py - couchbase/tests/cases/cluster_t.py - couchbase/tests/cases/connection_t.py - couchbase/tests/cases/connstr_t.py - couchbase/tests/cases/datastructures_t.py - couchbase/tests/cases/delete_t.py - couchbase/tests/cases/design_t.py - couchbase/tests/cases/dupkeys_t.py - couchbase/tests/cases/empty_key_t.py - couchbase/tests/cases/encodings_t.py - couchbase/tests/cases/endure_t.py - couchbase/tests/cases/enh_err_t.py - couchbase/tests/cases/excextra_t.py - couchbase/tests/cases/flush_t.py - couchbase/tests/cases/format_t.py - couchbase/tests/cases/get_t.py - couchbase/tests/cases/diag_t.py - couchbase/tests/cases/iops_t.py - couchbase/tests/cases/itertypes_t.py - couchbase/tests/cases/itmops_t.py - couchbase/tests/cases/ixmgmt_t.py - couchbase/tests/cases/lock_t.py - couchbase/tests/cases/lockmode_t.py - couchbase/tests/cases/misc_t.py - couchbase/tests/cases/mutationtokens_t.py - couchbase/tests/cases/n1ql_t.py - couchbase/tests/cases/n1qlstrings_t.py - couchbase/tests/cases/observe_t.py - couchbase/tests/cases/pipeline_t.py - couchbase/tests/cases/results_t.py - couchbase/tests/cases/rget_t.py - couchbase/tests/cases/set_converters_t.py - couchbase/tests/cases/set_t.py - couchbase/tests/cases/spatial_t.py - couchbase/tests/cases/stats_t.py - couchbase/tests/cases/subdoc_t.py - couchbase/tests/cases/touch_t.py - couchbase/tests/cases/transcoder_t.py - couchbase/tests/cases/verinfo_t.py - couchbase/tests/cases/view_iterator_t.py - couchbase/tests/cases/view_t.py - couchbase/tests/cases/viewstrings_t.py - couchbase/tests/cases/xattr_t.py - couchbase/tests/__init__.py - couchbase/tests/base.py - couchbase/tests/importer.py - couchbase/tests/test_sync.py - couchbase/views/__init__.py - couchbase/views/iterator.py - couchbase/views/params.py - couchbase/__init__.py - couchbase/_bootstrap.py - couchbase/_ixmgmt.py - couchbase/_libcouchbase.cpython-36m-darwin.so - couchbase/_logutil.py - couchbase/_pyport.py - couchbase/_version.py - couchbase/admin.py - couchbase/auth_domain.py - couchbase/bucket.py - couchbase/bucketmanager.py - couchbase/cbas.py - couchbase/cluster.py - couchbase/connection.py - couchbase/connstr.py - couchbase/exceptions.py - couchbase/experimental.py - couchbase/fulltext.py - couchbase/items.py - couchbase/mockserver.py - couchbase/mutation_state.py - couchbase/n1ql.py - couchbase/priv_constants.py - couchbase/result.py - couchbase/subdocument.py - couchbase/transcoder.py - couchbase/user_constants.py - examples/abench.py - examples/basic.py - examples/bench.py - examples/connection-pool.py - examples/docloader.py - examples/gbench.py - examples/iops_demo.py - examples/item.py - examples/reversed_keys.py - examples/search_keywords.py - examples/twist-sample.py - examples/txbasic.py - examples/txbench.py - examples/txview.py - gcouchbase/tests/__init__.py - gcouchbase/tests/test_api.py - gcouchbase/tests/test_gevent.py - gcouchbase/__init__.py - gcouchbase/bucket.py - gcouchbase/connection.py - gcouchbase/iops_gevent0x.py - gcouchbase/iops_gevent10.py - src/bucket.c - src/callbacks.c - src/cntl.c - src/connevents.c - src/constants.c - src/convert.c - src/counter.c - src/ctranscoder.c - src/exceptions.c - src/ext.c - src/fts.c - src/get.c - src/htresult.c - src/http.c - src/iops.c - src/iops.h - src/ixmgmt.c - src/miscops.c - src/mresdict.h - src/multiresult.c - src/n1ql.c - src/observe.c - src/opresult.c - src/oputil.c - src/oputil.h - src/pipeline.c - src/pycbc.h - src/result.c - src/store.c - src/typeutil.c - src/views.c - txcouchbase/tests/__init__.py - txcouchbase/tests/base.py - txcouchbase/tests/test_n1ql.py - txcouchbase/tests/test_ops.py - txcouchbase/tests/test_txconn.py - txcouchbase/tests/test_views.py - txcouchbase/__init__.py - txcouchbase/bucket.py - txcouchbase/connection.py - txcouchbase/iops.py - couchbase_version.py - setup.py) - diff --git a/README.rst b/README.rst index 001045c89..097db617e 100644 --- a/README.rst +++ b/README.rst @@ -60,6 +60,9 @@ If your libcouchbase install is in an alternate location (for example, --library-dir /opt/local/libcouchbase/lib \ --include-dir /opt/local/libcouchbase/include +If you wish to enable the experimental tracing feature, set the +``PYCBC_TRACING_ENABLE`` environment variable. + Or you can modify the environment ``CFLAGS`` and ``LDFLAGS`` variables. diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 0ef080276..e03b1e0d0 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -123,7 +123,6 @@ def newfn(self, key, *args, **kwargs): return real_decorator - class Bucket(_Base): def __init__(self, *args, **kwargs): """Connect to a bucket. @@ -181,6 +180,10 @@ def __init__(self, *args, **kwargs): :param lockmode: The *lockmode* for threaded access. See :ref:`multiple_threads` for more information. + :param tracer: An OpenTracing tracer into which + to propagate any tracing information. Requires + tracing to be enabled. + :raise: :exc:`.BucketNotFoundError` or :exc:`.AuthError` if there is no such bucket to connect to, or if invalid credentials were supplied. @@ -254,7 +257,7 @@ def __init__(self, *args, **kwargs): if not _no_connect_exceptions: raise - def _do_ctor_connect(self): + def _do_ctor_connect(self, *args, **kwargs): """This should be overidden by subclasses which want to use a different sort of connection behavior """ diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index 9a6e1745e..bb90249fd 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -27,6 +27,11 @@ except ImportError: import unittest import logging +import gc + +import os +if os.environ.get("PYCBC_TRACE_GC"): + gc.set_debug(gc.DEBUG_STATS | gc.DEBUG_LEAK) class ResourcedTestCase(ResourcedTestCaseReal): @@ -83,11 +88,12 @@ def __init__(self): self.bucket_password = "" self.ipv6 = "disabled" self.protocol = "http" + self.enable_tracing = "off" @staticmethod def filter_opts(options): return {key: value for key, value in - options.items() if key in ["certpath", "keypath", "ipv6", "config_cache", "compression", "log_redaction"] and value} + options.items() if key in ["certpath", "keypath", "ipv6", "config_cache", "compression", "log_redaction", "enable_tracing"] and value} def make_connargs(self, **overrides): bucket = self.bucket_name @@ -146,6 +152,8 @@ def load(self): info.certpath = config.get('realserver', 'certpath', fallback=None) info.keypath = config.get('realserver', 'keypath', fallback=None) info.protocol = config.get('realserver', 'protocol', fallback="http") + info.enable_tracing = config.get('realserver', 'tracing', fallback="off") + logging.info("info is "+str(info.__dict__)) if config.getboolean('realserver', 'enabled'): self.realserver_info = info else: @@ -203,6 +211,7 @@ def make(self, *args, **kw): info.admin_username = "Administrator" info.admin_password = "password" info.mock = mock + info.enable_tracing = "on" self._info = info return info @@ -419,9 +428,9 @@ def checkCbRefcount(self): # commented out for now as GC seems to be unstable #self.assertEqual(oldrc, 2) - def setUp(self): + def setUp(self, **kwargs): super(ConnectionTestCase, self).setUp() - self.cb = self.make_connection() + self.cb = self.make_connection(**kwargs) def tearDown(self): super(ConnectionTestCase, self).tearDown() @@ -434,6 +443,48 @@ def tearDown(self): del self.cb +import time + +import logging +from basictracer import BasicTracer, SpanRecorder +import couchbase + + +class LogRecorder(SpanRecorder): + + def record_span(self, span): + logging.info("recording span: "+str(span.__dict__)) + + +class TracedCase(ConnectionTestCase): + + _tracer = None + @property + def tracer(self): + if not TracedCase._tracer: + TracedCase._tracer = BasicTracer(recorder=LogRecorder()) + return TracedCase._tracer + + def setUp(self): + log_level = logging.INFO + logging.getLogger('').handlers = [] + logging.basicConfig(format='%(asctime)s %(message)s', level=log_level) + + super(TracedCase, self).setUp(tracer=self.tracer) + couchbase.enable_logging() + + def tearDown(self): + super(TracedCase,self).tearDown() + + if self.tracer and getattr(self.tracer,"close", None): + time.sleep(2) # yield to IOLoop to flush the spans - https://github.com/jaegertracing/jaeger-client-python/issues/50 + try: + self.tracer.close() # flush any buffered spans + except: + pass + couchbase.disable_logging() + + class RealServerTestCase(ConnectionTestCase): def setUp(self): super(RealServerTestCase, self).setUp() diff --git a/couchbase/tests/cases/get_t.py b/couchbase/tests/cases/get_t.py index b73df65e0..e15f8f26d 100644 --- a/couchbase/tests/cases/get_t.py +++ b/couchbase/tests/cases/get_t.py @@ -27,8 +27,9 @@ from couchbase.result import MultiResult, Result from couchbase.tests.base import ConnectionTestCase, SkipTest -class GetTest(ConnectionTestCase): +from couchbase.tests.base import TracedCase +class GetTest(TracedCase): def test_trivial_get(self): key = self.gen_key('trivial_get') self.cb.upsert(key, 'value1') diff --git a/dev_requirements.txt b/dev_requirements.txt index d23587608..8e51873e7 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -6,3 +6,4 @@ testresources>=0.2.7 jsonschema==2.6.0 configparser==3.5.0 testfixtures==5.4.0 +basictracer==2.2.0 \ No newline at end of file diff --git a/setup.py b/setup.py index d401cc095..6792a4ea0 100644 --- a/setup.py +++ b/setup.py @@ -27,15 +27,24 @@ LCB_NAME = None +extoptions['extra_compile_args'] = [] +extoptions['extra_link_args'] = [] + +if os.environ.get('PYCBC_TRACING_ENABLE'): + extoptions['extra_compile_args'] += ['-DPYCBC_TRACING_ENABLE'] +if os.environ.get('PYCBC_DEBUG'): + extoptions['extra_compile_args'] += ['-DPYCBC_DEBUG'] + if sys.platform != 'win32': extoptions['libraries'] = ['couchbase'] + if os.environ.get('PYCBC_DEBUG'): + extoptions['extra_compile_args'] += ['-O0', '-g3'] + extoptions['extra_link_args'] += ['-O0', '-g3'] if sys.platform == 'darwin': warnings.warn('Adding /usr/local to search path for OS X') extoptions['library_dirs'] = ['/usr/local/lib'] extoptions['include_dirs'] = ['/usr/local/include'] - if os.environ.get('PYCBC_DEBUG'): - extoptions['extra_compile_args'] = ['-O0', '-g'] - extoptions['extra_link_args'] = ['-O0', '-g'] + else: warnings.warn("I'm detecting you're running windows." "You might want to modify " @@ -57,8 +66,9 @@ extoptions['libraries'] = ['libcouchbase'] ## Enable these lines for debug builds - #extoptions['extra_compile_args'] = ['/Zi'] - #extoptions['extra_link_args'] = ['/DEBUG'] + if os.environ.get('PYCBC_DEBUG'): + extoptions['extra_compile_args'] += ['/Zi'] + extoptions['extra_link_args'] += ['/DEBUG'] extoptions['library_dirs'] = [os.path.join(lcb_root, 'lib')] extoptions['include_dirs'] = [os.path.join(lcb_root, 'include')] extoptions['define_macros'] = [('_CRT_SECURE_NO_WARNINGS', 1)] @@ -145,7 +155,8 @@ 'acouchbase.py34only' ] if sys.version_info >= (3, 4) else []), package_data = pkgdata, - tests_require = [ 'nose', 'testresources>=0.2.7' ], + install_requires = [], + tests_require = [ 'nose', 'testresources>=0.2.7', 'basictracer==2.2.0' ], test_suite = 'couchbase.tests.test_sync', **setup_kw ) diff --git a/src/bucket.c b/src/bucket.c index 430773613..0b9d64b29 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -29,7 +29,7 @@ Bucket__init__(pycbc_Bucket *self, PyObject *kwargs); static PyObject* -Bucket__connect(pycbc_Bucket *self); +Bucket__connect(pycbc_Bucket *self, PyObject* args, PyObject* kwargs); static void Bucket_dtor(pycbc_Bucket *self); @@ -554,7 +554,7 @@ static PyMethodDef Bucket_TABLE_methods[] = { { "_connect", (PyCFunction)Bucket__connect, - METH_NOARGS, + METH_VARARGS|METH_KEYWORDS, PyDoc_STR( "Connect this instance. This is typically called by one of\n" "the wrapping constructors\n") @@ -634,7 +634,7 @@ Bucket__init__(pycbc_Bucket *self, PyObject *iops_O = NULL; PyObject *dfl_fmt = NULL; PyObject *tc = NULL; - + PyObject *tracer = NULL; struct lcb_create_st create_opts = { 0 }; @@ -655,7 +655,8 @@ Bucket__init__(pycbc_Bucket *self, X("lockmode", &self->lockmode, "i") \ X("_flags", &self->flags, "I") \ X("_conntype", &conntype, "i") \ - X("_iops", &iops_O, "O") + X("_iops", &iops_O, "O") \ + X("tracer", &tracer, "O") static char *kwlist[] = { #define X(s, target, type) s, @@ -691,7 +692,19 @@ Bucket__init__(pycbc_Bucket *self, if (unlock_gil_O && PyObject_IsTrue(unlock_gil_O) == 0) { self->unlock_gil = 0; } - +#ifdef PYCBC_TRACING + if (tracer){ + PyObject *tracer_args = PyTuple_New(2); + PyTuple_SetItem(tracer_args, 0, tracer); + PyTuple_SetItem(tracer_args, 1, (PyObject*) self); + self->tracer = (pycbc_Tracer_t *) PyObject_CallFunction((PyObject *) &pycbc_TracerType, "O", tracer_args); + if (PyErr_Occurred()) { + PYCBC_EXCEPTION_LOG_NOCLEAR; + PYCBC_XDECREF(self->tracer); + self->tracer = NULL; + } + } +#endif create_opts.version = 3; create_opts.v.v3.type = conntype; @@ -734,6 +747,11 @@ Bucket__init__(pycbc_Bucket *self, #endif err = lcb_create(&self->instance, &create_opts); +#ifdef PYCBC_TRACING + if (self->tracer) { + lcb_set_tracer(self->instance, self->tracer->tracer); + } +#endif if (err != LCB_SUCCESS) { self->instance = NULL; PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, err, @@ -757,7 +775,7 @@ Bucket__init__(pycbc_Bucket *self, pycbc_callbacks_init(self->instance); lcb_set_cookie(self->instance, self); { - const char *bucketstr; + char *bucketstr; err = lcb_cntl(self->instance, LCB_CNTL_GET, LCB_CNTL_BUCKETNAME, &bucketstr); if (err == LCB_SUCCESS && bucketstr != NULL) { self->bucket = pycbc_SimpleStringZ(bucketstr); @@ -765,12 +783,15 @@ Bucket__init__(pycbc_Bucket *self, } self->btype = pycbc_IntFromL(LCB_BTYPE_UNSPEC); + return 0; } static PyObject* -Bucket__connect(pycbc_Bucket *self) +Bucket__connect(pycbc_Bucket *self, PyObject* args, PyObject* kwargs) { + pycbc_stack_context_handle context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, + self->tracer, "bucket.connect"); lcb_error_t err; if (self->flags & PYCBC_CONN_F_CONNECTED) { @@ -786,7 +807,7 @@ Bucket__connect(pycbc_Bucket *self) return NULL; } - pycbc_oputil_wait_common(self); + PYCBC_TRACE_WRAP(pycbc_oputil_wait_common, NULL, self); if ((self->flags & PYCBC_CONN_F_ASYNC) == 0) { err = lcb_get_bootstrap_status(self->instance); if (err != LCB_SUCCESS) { @@ -829,6 +850,9 @@ Bucket_dtor(pycbc_Bucket *self) if (self->instance) { lcb_destroy(self->instance); } +#ifdef PYCBC_TRACING + PYCBC_XDECREF((PyObject*)self->tracer); +#endif #ifdef WITH_THREAD if (self->lock) { diff --git a/src/callbacks.c b/src/callbacks.c index 882b283cd..c9dabf62c 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -14,6 +14,7 @@ * limitations under the License. **/ +#include <libcouchbase/api3.h> #include "pycbc.h" #define CB_THREADS @@ -27,6 +28,7 @@ static PyObject * mk_sd_tuple(const lcb_SDENTRY *ent); */ static void mk_sd_error(pycbc__SDResult *res, pycbc_MultiResult *mres, lcb_error_t rc, size_t ix); + static void cb_thr_end(pycbc_Bucket *self) { @@ -93,6 +95,9 @@ static void operation_completed3(pycbc_Bucket *self, { pycbc_assert(self->nremaining); --self->nremaining; +#ifdef PYCBC_TRACING + pycbc_Tracer_propagate(self->tracer); +#endif if (mres) { mres->err_info = err_info; Py_XINCREF(err_info); @@ -114,11 +119,20 @@ static void operation_completed3(pycbc_Bucket *self, } } + void pycbc_dict_add_text_kv(PyObject *dict, const char *key, const char *value) { - PyObject *valstr = pycbc_SimpleStringZ(value); - PyDict_SetItemString(dict, key, valstr); - Py_DECREF(valstr); + if (!key || !value || !dict) { + PYCBC_DEBUG_LOG("one of key %p value %p dict %p is NULL", key, value, dict); + } + PYCBC_DEBUG_LOG("adding %s to %s on %p\n", value, key, dict); + { + PyObject *valstr = pycbc_SimpleStringZ(value); + PyObject *keystr = pycbc_SimpleStringZ(key); + PyDict_SetItem(dict, keystr, valstr); + PYCBC_DECREF(valstr); + PYCBC_DECREF(keystr); + } } void pycbc_enhanced_err_register_entry(PyObject **dict, @@ -176,7 +190,9 @@ get_common_objects(const lcb_RESPBASE *resp, pycbc_Bucket **conn, PyObject *hkey; PyObject *mrdict; int rv; - +#ifdef PYCBC_TRACING + pycbc_stack_context_handle stack_context_handle = NULL; +#endif pycbc_assert(pycbc_multiresult_check(resp->cookie)); *mres = (pycbc_MultiResult*)resp->cookie; *conn = (*mres)->parent; @@ -193,7 +209,33 @@ get_common_objects(const lcb_RESPBASE *resp, pycbc_Bucket **conn, mrdict = pycbc_multiresult_dict(*mres); *res = (pycbc_Result*)PyDict_GetItem(mrdict, hkey); +#ifdef PYCBC_TRACING + pycbc_print_repr(mrdict); + + PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("&res %p: coming back from callback on key [%.*s] or PyString: [",res, (int)resp->nkey,(const char*)resp->key); + pycbc_stack_context_handle handle = NULL; + PYCBC_DEBUG_LOG_RAW("]\n"); + if( *res ) { + handle = (*res)->tracing_context; + PYCBC_DEBUG_LOG("res %p",*res); + + if (PYCBC_CHECK_CONTEXT(handle)) { + stack_context_handle = pycbc_Tracer_span_start(handle->tracer, NULL, + LCBTRACE_OP_RESPONSE_DECODING, 0, + handle, LCBTRACE_REF_CHILD_OF, "get_common_objects"); + PYCBC_DEBUG_LOG("res %p: starting new context on key %.*s\n", *res, (int) resp->nkey, + (const char *) resp->key); + } + if ((*res)->is_tracing_stub) { + PyDict_DelItem(mrdict, hkey); + + *res = NULL; + + } + + } +#endif if (*res) { int exists_ok = (restype & RESTYPE_EXISTS_OK) || ( (*mres)->mropts & PYCBC_MRES_F_UALLOCED); @@ -224,28 +266,35 @@ get_common_objects(const lcb_RESPBASE *resp, pycbc_Bucket **conn, if (*res == NULL) { /* Now, get/set the result object */ - if ( (*mres)->mropts & PYCBC_MRES_F_ITEMS) { - *res = (pycbc_Result*)pycbc_item_new(*conn); - + if ((*mres)->mropts & PYCBC_MRES_F_ITEMS) { + PYCBC_DEBUG_LOG("Item creation"); + *res = (pycbc_Result *) pycbc_item_new(*conn); } else if (restype & RESTYPE_BASE) { - *res = (pycbc_Result*)pycbc_result_new(*conn); + PYCBC_DEBUG_LOG("Result creation"); + *res = (pycbc_Result *) pycbc_result_new(*conn); } else if (restype & RESTYPE_OPERATION) { - *res = (pycbc_Result*)pycbc_opresult_new(*conn); + PYCBC_DEBUG_LOG("Opresult creation"); + *res = (pycbc_Result *) pycbc_opresult_new(*conn); } else if (restype & RESTYPE_VALUE) { - *res = (pycbc_Result*)pycbc_valresult_new(*conn); - - } else { + PYCBC_DEBUG_LOG("Valresult creation"); + *res = (pycbc_Result *) pycbc_valresult_new(*conn); + } + PYCBC_EXCEPTION_LOG_NOCLEAR; + if (*res == NULL){ abort(); } - PyDict_SetItem(mrdict, hkey, (PyObject*)*res); - +#ifdef PYCBC_TRACING + handle = stack_context_handle; + (*res)->is_tracing_stub = 0; +#endif (*res)->key = hkey; - Py_DECREF(*res); + PYCBC_DECREF(*res); } + PYCBC_TRACE_POP_CONTEXT(stack_context_handle); if (resp->rc) { (*res)->rc = resp->rc; } @@ -794,6 +843,7 @@ static void ping_callback(lcb_t instance, Py_DECREF(struct_services_dict); } if (resp->njson) { + pycbc_dict_add_text_kv(resultdict, "services_json", resp->json); } if (resp->rflags & LCB_RESP_F_FINAL) { @@ -815,7 +865,6 @@ static void diag_callback(lcb_t instance, PyObject *resultdict = pycbc_multiresult_dict(mres); parent = mres->parent; CB_THR_END(parent); - if (resp->rc != LCB_SUCCESS) { if (mres->errop == NULL) { pycbc_Result *res = (pycbc_Result *)pycbc_result_new(parent); diff --git a/src/counter.c b/src/counter.c index 6800a02bf..70022ffa6 100644 --- a/src/counter.c +++ b/src/counter.c @@ -15,16 +15,17 @@ **/ #include "oputil.h" +#include "pycbc.h" struct arithmetic_common_vars { lcb_S64 delta; - lcb_U64 initial; + lcb_uint64_t initial; unsigned long ttl; int create; }; -static int -handle_single_arith(pycbc_Bucket *self, struct pycbc_common_vars *cv, +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, static, int, +handle_single_arith, pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *item, void *arg) { @@ -81,6 +82,8 @@ handle_single_arith(pycbc_Bucket *self, struct pycbc_common_vars *cv, cmd.create = my_params.create; cmd.initial = my_params.initial; cmd.exptime = my_params.ttl; + + PYCBC_TRACECMD(cmd,context,cv->mres,curkey, self); err = lcb_counter3(self->instance, cv->mres, &cmd); if (err != LCB_SUCCESS) { PYCBC_EXCTHROW_SCHED(err); @@ -96,7 +99,7 @@ handle_single_arith(pycbc_Bucket *self, struct pycbc_common_vars *cv, PyObject * arithmetic_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, - int optype, int argopts) + int optype, int argopts, pycbc_stack_context_handle context) { int rv; Py_ssize_t ncmds; @@ -141,19 +144,19 @@ arithmetic_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, rv = pycbc_common_vars_init(&cv, self, argopts, ncmds, 0); if (argopts & PYCBC_ARGOPT_MULTI) { - rv = pycbc_oputil_iter_multi(self, seqtype, collection, &cv, optype, - handle_single_arith, &global_params); + rv = PYCBC_OPUTIL_ITER_MULTI(self, seqtype, collection, &cv, optype, + handle_single_arith, &global_params, context); } else { rv = handle_single_arith(self, &cv, optype, collection, NULL, NULL, NULL, - &global_params); + &global_params, context); } if (rv < 0) { goto GT_DONE; } - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } @@ -165,7 +168,7 @@ arithmetic_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, #define DECLFUNC(name, operation, mode) \ PyObject *pycbc_Bucket_##name(pycbc_Bucket *self, \ PyObject *args, PyObject *kwargs) { \ - return arithmetic_common(self, args, kwargs, operation, mode); \ + return arithmetic_common(self, args, kwargs, operation, mode, PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, self->tracer, "bucket." #name)); \ } DECLFUNC(counter, PYCBC_CMD_COUNTER, PYCBC_ARGOPT_SINGLE) diff --git a/src/ext.c b/src/ext.c index de28c1afb..211d6d7d8 100644 --- a/src/ext.c +++ b/src/ext.c @@ -30,6 +30,7 @@ static void log_handler(struct lcb_logprocs_st *procs, unsigned int iid, struct lcb_logprocs_st pycbc_lcb_logprocs = { 0 }; + static PyObject * _libcouchbase_init_helpers(PyObject *self, PyObject *args, PyObject *kwargs) { @@ -314,7 +315,7 @@ init_libcouchbase(void) * name (not a string but a literal) and the second is the name of the * function used to initialize it */ -#define X_PYTYPES(X) \ +#define X_PYTYPES_NOTRACING(X) \ X(Bucket, pycbc_BucketType_init) \ /** Remember to keep base classes in order */ \ X(Result, pycbc_ResultType_init) \ @@ -332,6 +333,14 @@ init_libcouchbase(void) X(AsyncResult, pycbc_AsyncResultType_init) \ X(_IOPSWrapper, pycbc_IOPSWrapperType_init) \ X(_SDResult, pycbc_SDResultType_init) +#ifdef PYCBC_TRACING +#define X_PYTYPES(X) \ + X_PYTYPES_NOTRACING(X) \ + X(Tracer, pycbc_TracerType_init) +#else +#define X_PYTYPES(X) \ + X_PYTYPES_NOTRACING(X) +#endif #define X(name, inf) PyObject *cls_##name; X_PYTYPES(X) @@ -436,3 +445,648 @@ static void log_handler(struct lcb_logprocs_st *procs, Py_DECREF(kwargs); PyGILState_Release(gil_prev); } + + +#include <stdio.h> +#include <libcouchbase/tracing.h> +#ifdef _WIN32 +#define PRIx64 "I64x" +#define PRId64 "I64d" +#else +#include <inttypes.h> +#endif + +#include "oputil.h" + + + +void pycbc_print_string( PyObject *curkey) { +#if PYTHON_ABI_VERSION >= 3 + { + const char *keyname = PyUnicode_AsUTF8(curkey); + PYCBC_DEBUG_LOG_RAW("%s", keyname);//(int)length, keyname); + PYCBC_EXCEPTION_LOG_NOCLEAR; + } +#else + { + PYCBC_DEBUG_LOG("%s",PyString_AsString(curkey)); + }; +#endif +} + + +void pycbc_print_repr( PyObject *pobj) { + PyObject* x,*y,*z; + PyErr_Fetch(&x,&y,&z); + { + PyObject *curkey = PyObject_Repr(pobj); + PYCBC_EXCEPTION_LOG; + PyErr_Restore(x, y, z); + if (!curkey) { + PYCBC_DEBUG_LOG("got null repr from %p", pobj); + return; + } + pycbc_print_string(curkey); + PYCBC_XDECREF(curkey); + } +} + + +void pycbc_exception_log(const char* file, int line, int clear) +{ + + if (PyErr_Occurred()) { + PyObject* type, *value, *traceback; + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(file, line, "***** EXCEPTION:[", ""); + PyErr_Fetch(&type,&value,&traceback); + pycbc_print_repr(type); + PYCBC_DEBUG_LOG_RAW(","); + pycbc_print_repr(value); + PYCBC_DEBUG_LOG_RAW(","); + if (0){ + /* See if we can get a full traceback */ + PyObject* module_name = PyString_FromString("traceback"); + PyObject* pyth_module = PyImport_Import(module_name); + PYCBC_DECREF(module_name); + + if (pyth_module) { + PyObject* pyth_func = PyObject_GetAttrString(pyth_module, "format_exception"); + if (pyth_func && PyCallable_Check(pyth_func)) { + PyObject *pyth_val; + + pyth_val = PyObject_CallFunctionObjArgs(pyth_func, type, value, traceback, NULL); + pycbc_print_string(pyth_val); + PYCBC_DECREF(pyth_val); + } + } + } + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(file,line, "]: END OF EXCEPTION *****"); + if (clear) + { + PYCBC_XDECREF(type); + PYCBC_XDECREF(value); + PYCBC_XDECREF(traceback); + } + else + { + PyErr_Restore(type,value,traceback); + + } + } +} +#ifdef PYCBC_TRACING + +pycbc_stack_context_handle +pycbc_Context_init(pycbc_Tracer_t *py_tracer, const char *operation, lcb_uint64_t now, lcbtrace_REF *ref, const char* component) { + pycbc_stack_context_handle context = malloc(sizeof(pycbc_stack_context)); + pycbc_assert(py_tracer); + context->tracer = py_tracer; + context->span = lcbtrace_span_start(py_tracer->tracer, operation, now, ref); + lcbtrace_span_add_tag_str(context->span, LCBTRACE_TAG_COMPONENT, component); + PYCBC_DEBUG_LOG("Created context %p with span %p: component: %s, operation %s",context, context->span, component, operation); + return context; +} + +PyObject* pycbc_Context_finish(pycbc_stack_context_handle context ) +{ + if (PYCBC_CHECK_CONTEXT(context)) { + PYCBC_DEBUG_LOG("closing span %p",context->span); + lcbtrace_span_finish(context->span, 0); + (context)->span = NULL; + }; + return Py_None; +} +pycbc_stack_context_handle pycbc_check_context(pycbc_stack_context_handle CONTEXT, const char* file, int line) +{ + if (!(CONTEXT)){ + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(file,line,"warning: got null context"); + } else if (!(CONTEXT)->tracer){\ + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(file,line,"warning: got null tracer"); + } else if (!(CONTEXT)->span){ + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(file,line,"warning: got null span"); + } + else { + return CONTEXT; + } + return NULL; +} + +pycbc_stack_context_handle +pycbc_Tracer_span_start(pycbc_Tracer_t *py_tracer, PyObject *kwargs, const char *operation, lcb_uint64_t now, + pycbc_stack_context_handle context, lcbtrace_REF_TYPE ref_type, const char* component) { + + PyObject *tracer = kwargs?PyDict_GetItemString(kwargs, "tracer"):NULL; + if (!(py_tracer || (tracer && PyArg_ParseTuple(tracer, "O!", &pycbc_TracerType, &py_tracer) && py_tracer))) + { + PYCBC_EXCEPTION_LOG; + printf("Warning - got NULL tracer\n"); + return NULL; + } + + if (context ) + { + lcbtrace_REF ref; + ref.type = ref_type; + ref.span = context->span; + return pycbc_Context_init(py_tracer, operation, now, &ref, component); + } + else + { + return pycbc_Context_init(py_tracer, operation, now, NULL, component); + } +} + +void pycbc_init_traced_result(pycbc_Bucket *self, PyObject* mres_dict, PyObject *curkey, + pycbc_stack_context_handle context) { + pycbc_pybuffer keybuf={0}; + pycbc_Result *item; + pycbc_tc_encode_key(self, curkey, &keybuf); + PYCBC_EXCEPTION_LOG_NOCLEAR; + pycbc_tc_decode_key(self, keybuf.buffer, keybuf.length, &curkey); + item=(pycbc_Result*)PyDict_GetItem(mres_dict, curkey); + if (!item) { + item = (pycbc_Result*) pycbc_valresult_new(self); + PyDict_SetItem(mres_dict, curkey, (PyObject*)item); + item->is_tracing_stub = 1; + } + PYCBC_EXCEPTION_LOG_NOCLEAR; + item->tracing_context = context; + PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("\nres %p: binding context %p to [", item, context); + pycbc_print_repr(curkey); + PYCBC_DEBUG_LOG_RAW("]\n"); + PYCBC_EXCEPTION_LOG_NOCLEAR; + PYCBC_DEBUG_LOG("\nPrior to insertion:["); + pycbc_print_repr(mres_dict); + PYCBC_DEBUG_LOG_RAW("]\n"); + PYCBC_EXCEPTION_LOG_NOCLEAR; + PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("After insertion:["); + pycbc_print_repr(mres_dict); + PYCBC_DEBUG_LOG("]\n"); +} + + +int pycbc_is_async_or_pipeline(const pycbc_Bucket *self) { return self->flags & PYCBC_CONN_F_ASYNC || self->pipeline_queue; } + +void pycbc_tracer_destructor(lcbtrace_TRACER *tracer) +{ + if (tracer) { + if (tracer->cookie) { + free(tracer->cookie); + tracer->cookie = NULL; + } + free(tracer); + } +} + +#define LOGARGS(instance, lvl) instance->settings, "bootstrap", LCB_LOG_##lvl, __FILE__, __LINE__ +#endif + +void pycbc_set_dict_kv_object(PyObject *dict, PyObject *key, const char* value_str) { + PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("adding ["); + pycbc_print_repr(key); + PYCBC_DEBUG_LOG_RAW("], value %s to [", value_str); + pycbc_print_repr(dict); + PYCBC_DEBUG_LOG_RAW("]\n"); + { + PyObject *value = pycbc_SimpleStringZ(value_str); + PyDict_SetItem(dict, key, value); + PYCBC_DECREF(value); + } +} + + +void pycbc_set_kv_ull(PyObject *dict, PyObject *keystr, lcb_uint64_t parenti_id) { + PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("adding ["); + pycbc_print_repr(keystr); + PYCBC_DEBUG_LOG_RAW("], value %" PRId64 " to [", parenti_id); + pycbc_print_repr(dict); + PYCBC_DEBUG_LOG_RAW("]\n"); + { + PyObject *pULL = PyLong_FromUnsignedLongLong(parenti_id); + PyDict_SetItem(dict, keystr, pULL); + PYCBC_DECREF(pULL); + } +} + +#ifdef PYCBC_TRACING + +#define PYCBC_X_SPAN_ARGS(TEXT,ULL,TAGS)\ + TEXT(operation_name) \ + ULL(child_of) \ + ULL(start_time) \ + TAGS(tags) + +#define PYCBC_X_FINISH_ARGS(TEXT,ULL)\ + ULL(finish_time) + +#define PYCBC_X_LITERALTAGNAMES(TEXT,ULL)\ + TEXT(DB_TYPE) \ + ULL(PEER_LATENCY) \ + ULL(OPERATION_ID) \ + TEXT(COMPONENT) \ + TEXT(PEER_ADDRESS) \ + TEXT(LOCAL_ADDRESS) \ + TEXT(DB_INSTANCE) +#undef X + +#define X(NAME) PyObject* pycbc_##NAME; +PYCBC_X_SPAN_ARGS(X,X,X) +PYCBC_X_LITERALTAGNAMES(X,X) +PYCBC_X_FINISH_ARGS(X,X) +#undef X + +PyObject* pycbc_default_key; + +void pycbc_Tracer_init_constants() +{ + pycbc_default_key = pycbc_SimpleStringZ("__PYCBC_DEFAULT_KEY"); + +#define X(NAME) pycbc_##NAME=pycbc_SimpleStringZ(#NAME); + PYCBC_X_SPAN_ARGS(X,X,X) + PYCBC_X_FINISH_ARGS(X,X) +#undef X +#define X(NAME) pycbc_##NAME=pycbc_SimpleStringZ(LCBTRACE_TAG_##NAME); + PYCBC_X_LITERALTAGNAMES(X,X); +#undef X +} + +#define PYCBC_TAG_TEXT(NAME) const char* NAME; +#define PYCBC_TAG_ULL(NAME) lcb_uint64_t* NAME; + +typedef struct pycbc_tracer_tags { + PYCBC_X_LITERALTAGNAMES(PYCBC_TAG_TEXT,PYCBC_TAG_ULL) +} pycbc_tracer_tags_t; + +#define PYCBC_TAG_STRUCT(NAME) pycbc_tracer_tags_t* NAME; + +typedef struct pycbc_tracer_span_args { + PYCBC_X_SPAN_ARGS(PYCBC_TAG_TEXT,PYCBC_TAG_ULL,PYCBC_TAG_STRUCT) +} pycbc_tracer_span_args_t; + +typedef struct pycbc_tracer_finish_args +{ + PYCBC_X_FINISH_ARGS(PYCBC_TAG_TEXT,PYCBC_TAG_ULL) +} pycbc_tracer_finish_args_t; + +#undef PYCBC_TAG_ULL +#undef PYCBC_TAG_TEXT +#undef PYCBC_TAG_STRUCT + +#define PYCBC_TRACING_ADD_TEXT(DICT, KEY, VALUE) (DICT)->KEY=strdup((VALUE)); +#define PYCBC_TRACING_ADD_U64(DICT, KEY, VALUE) (DICT)->KEY=malloc(sizeof(lcb_uint64_t));\ + *((DICT)->KEY)=VALUE; + +#define PYCBC_TEXT_TO_DICT(NAME) if (args->NAME) { pycbc_set_dict_kv_object(dict, pycbc_##NAME, (args->NAME)); } +#define PYCBC_ULL_TO_DICT(NAME) if(args->NAME) { pycbc_set_kv_ull(dict, pycbc_##NAME, *(args->NAME)); } + +PyObject* pycbc_set_tags_from_payload(pycbc_tracer_tags_t *args) { + PyObject *dict = PyDict_New(); + PYCBC_X_LITERALTAGNAMES(PYCBC_TEXT_TO_DICT,PYCBC_ULL_TO_DICT) + return dict; +} + +#define TAGS(NAME) if(args->NAME) {PyDict_SetItem(dict, pycbc_##NAME, pycbc_set_tags_from_payload((args->NAME))); } + + +PyObject* pycbc_set_args_from_payload(pycbc_tracer_span_args_t *args) { + PyObject* dict = PyDict_New(); + PYCBC_X_SPAN_ARGS(PYCBC_TEXT_TO_DICT,PYCBC_ULL_TO_DICT,TAGS) + return dict; +} + +PyObject* pycbc_set_finish_args_from_payload(pycbc_tracer_finish_args_t *args) { + PyObject* dict = PyDict_New(); + PYCBC_X_FINISH_ARGS(TEXT,PYCBC_ULL_TO_DICT) + return dict; +} + +#undef TAGS + +#undef PYCBC_ULL_TO_DICT +#undef PYCBC_TEXT_TO_DICT + +#define PYCBC_TEXT_FREE(NAME) free((void*)args->NAME); +#define PYCBC_ULL_FREE(NAME) free((void*)args->NAME); +void pycbc_span_tags_args_dealloc(pycbc_tracer_tags_t* args) { + PYCBC_X_LITERALTAGNAMES(PYCBC_TEXT_FREE, PYCBC_ULL_FREE) + free(args); +} + +#define PYCBC_TAGS_FREE(NAME) if(args->NAME) { pycbc_span_tags_args_dealloc(args->NAME); } +void pycbc_span_args_dealloc(pycbc_tracer_span_args_t *args) { + PYCBC_X_SPAN_ARGS(PYCBC_TEXT_FREE,PYCBC_ULL_FREE, PYCBC_TAGS_FREE) + free(args); +} + +void pycbc_span_finish_args_dealloc(struct pycbc_tracer_finish_args *args) { + PYCBC_X_FINISH_ARGS(PYCBC_TEXT_FREE, PYCBC_ULL_FREE); + free(args); +} +#undef PYCBC_TEXT_FREE +#undef PYCBC_ULL_FREE + +struct pycbc_tracer_payload; +struct pycbc_tracer_span_args; +struct pycbc_tracer_finish_args; +typedef struct pycbc_tracer_payload { + struct pycbc_tracer_span_args* span_start_args; + struct pycbc_tracer_finish_args* span_finish_args; + struct pycbc_tracer_payload *next; +} pycbc_tracer_payload; + + +typedef struct pycbc_tracer_state { + pycbc_tracer_payload *root; + pycbc_tracer_payload *last; + pycbc_Bucket* bucket; + PyObject* parent; + PyObject *start_span_method; +} pycbc_tracer_state; + + +void pycbc_init_span_args(pycbc_tracer_payload* payload) +{ + payload->span_start_args=malloc(sizeof(pycbc_tracer_span_args_t)); + memset((payload->span_start_args),0, sizeof(pycbc_tracer_span_args_t)); + payload->span_start_args->tags=malloc(sizeof(pycbc_tracer_tags_t)); + memset((payload->span_start_args->tags),0, sizeof(pycbc_tracer_tags_t)); + payload->span_finish_args=malloc(sizeof(pycbc_tracer_finish_args_t)); + memset((payload->span_finish_args),0, sizeof(pycbc_tracer_finish_args_t)); +} + +void pycbc_payload_dealloc(pycbc_tracer_payload *pPayload) { + pycbc_span_args_dealloc(pPayload->span_start_args); + pycbc_span_finish_args_dealloc(pPayload->span_finish_args); +} + +void pycbc_span_report(lcbtrace_TRACER *tracer, lcbtrace_SPAN *span) +{ + pycbc_tracer_state *state = NULL; + if (tracer == NULL) { + return; + } + state = tracer->cookie; + if (state == NULL) { + return; + } + + { + +#define PYCBC_TRACING_BUFSZ 1000 + size_t nbuf = PYCBC_TRACING_BUFSZ; + char bufarray[PYCBC_TRACING_BUFSZ]={0}; + char* buf = &bufarray[0]; + lcbtrace_SPAN *parent; + lcb_uint64_t start; + pycbc_tracer_payload *payload = calloc(1, sizeof(pycbc_tracer_payload)); + pycbc_init_span_args(payload); + { + pycbc_tracer_span_args_t* span_args = payload->span_start_args; + pycbc_tracer_tags_t* tags_p = span_args->tags; + pycbc_tracer_finish_args_t* span_finish_args = payload->span_finish_args; + PYCBC_DEBUG_LOG("got span %p\n", span); + + buf = calloc(nbuf, sizeof(char)); + PYCBC_TRACING_ADD_TEXT(span_args, operation_name, lcbtrace_span_get_operation(span)); + parent = lcbtrace_span_get_parent(span); + if (parent) { + lcb_uint64_t parenti_id = lcbtrace_span_get_trace_id(parent); + snprintf(buf, nbuf, "%" PRIx64, parenti_id); + } + start = lcbtrace_span_get_start_ts(span); + PYCBC_TRACING_ADD_U64(span_finish_args, finish_time, lcbtrace_span_get_finish_ts(span)); + PYCBC_TRACING_ADD_U64(span_args, start_time, start); + { + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_DB_TYPE, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, DB_TYPE, buf); + } + } + + { + lcb_uint64_t latency, operation_id; + if (lcbtrace_span_get_tag_uint64(span, LCBTRACE_TAG_PEER_LATENCY, &latency) == LCB_SUCCESS) { + PYCBC_TRACING_ADD_U64(tags_p, PEER_LATENCY, latency); + } + if (lcbtrace_span_get_tag_uint64(span, LCBTRACE_TAG_OPERATION_ID, &operation_id) == LCB_SUCCESS) { + PYCBC_TRACING_ADD_U64(tags_p, OPERATION_ID, operation_id); + } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_COMPONENT, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, COMPONENT, buf); + } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_PEER_ADDRESS, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, PEER_ADDRESS, buf); + } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_LOCAL_ADDRESS, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, LOCAL_ADDRESS, buf); + } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_DB_INSTANCE, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, DB_INSTANCE, buf); + } + } + } + + if (state->last) { + state->last->next = payload; + } + state->last = payload; + if (state->root == NULL) { + state->root = payload; + } + } +#undef PYCBC_TRACING_BUFSZ +} + + + +void pycbc_Tracer_propagate_span(pycbc_Tracer_t *tracer, struct pycbc_tracer_payload *payload) { + pycbc_tracer_state *state = (pycbc_tracer_state *) tracer->tracer->cookie; + pycbc_assert(state->parent); + if (state->start_span_method && PyObject_IsTrue(state->start_span_method)) { + PyObject* start_span_args = pycbc_set_args_from_payload(payload->span_start_args); + PyObject *fresh_span; + + PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("calling start method:["); + pycbc_print_repr(state->start_span_method); + PYCBC_DEBUG_LOG_RAW("]"); + PYCBC_DEBUG_LOG("\nabout to call: %p[\n", state->start_span_method); + PYCBC_DEBUG_LOG("] on %p=[", state->parent); + pycbc_print_repr(state->parent); + PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("full args to start_span:["); + pycbc_print_repr(start_span_args); + PYCBC_DEBUG_LOG_RAW("\n"); + + fresh_span= PyObject_Call(state->start_span_method, pycbc_DummyTuple, + start_span_args); + if(fresh_span) + { + PyObject *finish_method = PyObject_GetAttrString(fresh_span, "finish"); + PYCBC_DEBUG_LOG("Got span'["); + pycbc_print_repr(fresh_span); + PYCBC_DEBUG_LOG("]\n"); + pycbc_assert(finish_method); + PYCBC_DEBUG_LOG("Got finish method'["); + pycbc_print_repr(finish_method); + PYCBC_DEBUG_LOG("]\n"); + if (finish_method) { + PyObject* span_finish_args = pycbc_set_finish_args_from_payload(payload->span_finish_args); + PYCBC_DEBUG_LOG("calling finish method with;["); + pycbc_print_repr(span_finish_args); + PYCBC_DEBUG_LOG("]\n"); + PyObject_Call(finish_method, pycbc_DummyTuple, span_finish_args); + PYCBC_XDECREF(span_finish_args); + } + + PYCBC_DECREF(finish_method); + PYCBC_DECREF(fresh_span); + } else{ + PYCBC_DEBUG_LOG("Yielded no span!\n"); + } + PYCBC_DECREF(start_span_args); + PYCBC_EXCEPTION_LOG_NOCLEAR; + + } +} + +void pycbc_tracer_flush(pycbc_Tracer_t *tracer) +{ + pycbc_tracer_state *state = NULL; + + if (tracer == NULL) { + return; + } + state = tracer->tracer->cookie; + if (state == NULL) { + return; + } + if (state->root == NULL || !state->root->span_start_args) { + return; + } + { + pycbc_tracer_payload *ptr = state->root; + PYCBC_DEBUG_LOG("flushing\n"); + while (ptr) { + pycbc_tracer_payload *tmp = ptr; + if (tracer->parent) { + pycbc_Tracer_propagate_span(tracer, tmp); + } + ptr = ptr->next; + pycbc_payload_dealloc(tmp); + } + } + state->root = state->last = NULL; +} + + +void pycbc_Tracer_propagate( pycbc_Tracer_t *tracer) { + pycbc_tracer_flush(tracer); +} + + + +lcbtrace_TRACER *pycbc_tracer_new(PyObject *parent, pycbc_Bucket *bucket) +{ + lcbtrace_TRACER *tracer = calloc(1, sizeof(lcbtrace_TRACER)); + pycbc_tracer_state *pycbc_tracer = calloc(1, sizeof(pycbc_tracer_state)); + tracer->destructor = pycbc_tracer_destructor; + tracer->flags = 0; + tracer->version = 0; + tracer->v.v0.report = pycbc_span_report; + pycbc_tracer->root = NULL; + pycbc_tracer->last = NULL; + tracer->cookie = pycbc_tracer; + pycbc_tracer->bucket = bucket; + if (parent) { + PYCBC_DEBUG_LOG("\ninitialising tracer start_span method from:["); + pycbc_print_repr(parent); + PYCBC_DEBUG_LOG("]\n"); + pycbc_tracer->start_span_method = PyObject_GetAttrString(parent, "start_span"); + if (pycbc_tracer->start_span_method) + { + PYCBC_DEBUG_LOG("got start_span method:["); + pycbc_print_repr(pycbc_tracer->start_span_method); + PYCBC_DEBUG_LOG("]\n"); + pycbc_tracer->parent = parent; + } + else + { + PYCBC_EXCEPTION_LOG_NOCLEAR; + PYCBC_DEBUG_LOG("Falling back to internal tracing only"); + pycbc_tracer->parent = NULL; + } + } + return tracer; +} + +static PyGetSetDef pycbc_Tracer_TABLE_getset[] = { + { NULL } +}; + +static struct PyMemberDef pycbc_Tracer_TABLE_members[] = { + { NULL } +}; + +static PyMethodDef pycbc_Tracer_TABLE_methods[] = { + +#define OPFUNC(name, doc) \ +{ #name, (PyCFunction)pycbc_Tracer_##name, METH_VARARGS|METH_KEYWORDS, \ + PyDoc_STR(doc) } + +#undef OPFUNC + + { NULL, NULL, 0, NULL } +}; + + +static int +Tracer__init__(pycbc_Tracer_t *self, + PyObject *args, PyObject* kwargs) +{ + int rv = 0; + PyObject* tracer =PyTuple_GetItem(args, 0); + pycbc_Bucket* bucket= (pycbc_Bucket*) PyTuple_GetItem(args,1); + self->parent=(tracer && PyObject_IsTrue(tracer))?tracer:NULL; + self->tracer= pycbc_tracer_new(self->parent, bucket); + PYCBC_EXCEPTION_LOG_NOCLEAR; + + return rv; +} + +static void +Tracer_dtor(pycbc_Tracer_t *self) +{ + Py_TYPE(self)->tp_free((PyObject*)self); +} + +PyTypeObject pycbc_TracerType = { + PYCBC_POBJ_HEAD_INIT(NULL) + 0 +}; + +int pycbc_TracerType_init(PyObject **ptr) { + PyTypeObject *p = &pycbc_TracerType; + *ptr = (PyObject *) p; + if (p->tp_name) { return 0; } + p->tp_name = "Tracer"; + p->tp_new = PyType_GenericNew; + p->tp_init = (initproc) Tracer__init__; + p->tp_dealloc = (destructor) Tracer_dtor; + p->tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE; + p->tp_doc = "The Tracer Object"; + p->tp_basicsize = sizeof(pycbc_Tracer_t); + p->tp_methods = pycbc_Tracer_TABLE_methods; + p->tp_members = pycbc_Tracer_TABLE_members; + p->tp_getset = pycbc_Tracer_TABLE_getset; + pycbc_Tracer_init_constants(); + return PyType_Ready(p); +}; +#endif \ No newline at end of file diff --git a/src/fts.c b/src/fts.c index 0ead5a8e5..481b7e6f2 100644 --- a/src/fts.c +++ b/src/fts.c @@ -69,7 +69,18 @@ pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) } mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); - vres = (pycbc_ViewResult *)PYCBC_TYPE_CTOR(&pycbc_ViewResultType); + { +#ifdef PYCBC_TRACING + PyObject* view_kwargs = PyDict_New(); + PyDict_SetItemString(view_kwargs, "tracer", (PyObject*)self->tracer); +#else + PyObject* view_kwargs = pycbc_DummyKeywords; +#endif + vres = (pycbc_ViewResult *) PYCBC_TYPE_CTOR(&pycbc_ViewResultType, pycbc_DummyTuple, view_kwargs); +#ifdef PYCBC_TRACING + PYCBC_DECREF(view_kwargs); +#endif + } pycbc_httpresult_init(&vres->base, mres); vres->rows = PyList_New(0); vres->base.format = PYCBC_FMT_JSON; diff --git a/src/get.c b/src/get.c index fb75428b0..c7dca97c5 100644 --- a/src/get.c +++ b/src/get.c @@ -15,6 +15,8 @@ **/ #include "oputil.h" +#include "pycbc.h" + /** * Covers 'lock', 'touch', and 'get_and_touch' */ @@ -31,10 +33,10 @@ struct getcmd_vars_st { } u; }; -static int -handle_single_key(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, - PyObject *curkey, PyObject *curval, PyObject *options, pycbc_Item *itm, - void *arg) +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, static, int, + handle_single_key, pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, + PyObject *curkey, PyObject *curval, PyObject *options, pycbc_Item *itm, + void *arg) { int rv; unsigned int lock = 0; @@ -99,6 +101,7 @@ handle_single_key(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, } } u_cmd.base.exptime = ttl; + PYCBC_TRACE_POP_CONTEXT(context); switch (optype) { case PYCBC_CMD_GAT: @@ -121,11 +124,13 @@ handle_single_key(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, case PYCBC_CMD_GET: GT_GET: u_cmd.get.lock = lock; + PYCBC_TRACECMD(u_cmd.get, context, cv->mres, curkey, self); err = lcb_get3(self->instance, cv->mres, &u_cmd.get); break; case PYCBC_CMD_TOUCH: u_cmd.touch.exptime = ttl; + PYCBC_TRACECMD(u_cmd.touch, context, cv->mres, curkey, self); err = lcb_touch3(self->instance, cv->mres, &u_cmd.touch); break; @@ -134,6 +139,7 @@ handle_single_key(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, case PYCBC_CMD_GETREPLICA_ALL: u_cmd.rget.strategy = gv->u.replica.strategy; u_cmd.rget.index = gv->u.replica.index; + PYCBC_TRACECMD(u_cmd.rget,context, cv->mres, curkey, self); err = lcb_rget3(self->instance, cv->mres, &u_cmd.rget); break; default: @@ -198,7 +204,7 @@ handle_replica_options(int *optype, struct getcmd_vars_st *gv, PyObject *replica static PyObject* get_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, - int argopts) + int argopts, pycbc_stack_context_handle context) { int rv; Py_ssize_t ncmds = 0; @@ -283,11 +289,11 @@ get_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, } if (argopts & PYCBC_ARGOPT_MULTI) { - rv = pycbc_oputil_iter_multi(self, seqtype, kobj, &cv, optype, - handle_single_key, &gv); + rv = PYCBC_OPUTIL_ITER_MULTI(self, seqtype, kobj, &cv, optype, + handle_single_key, &gv, context); } else { - rv = handle_single_key(self, &cv, optype, kobj, NULL, NULL, NULL, &gv); + rv= PYCBC_TRACE_WRAP(handle_single_key, kwargs, self, &cv, optype, kobj, NULL, NULL, NULL, &gv); } if (rv < 0) { goto GT_DONE; @@ -297,7 +303,7 @@ get_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, goto GT_DONE; } - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } @@ -306,8 +312,8 @@ get_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, return cv.ret; } -static int -handle_single_lookup(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, static, int, +handle_single_lookup, pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, PyObject *curkey, PyObject *curval, PyObject *options, pycbc_Item *itm, void *arg) { @@ -323,13 +329,13 @@ handle_single_lookup(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optyp return -1; } LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); - rv = pycbc_sd_handle_speclist(self, cv->mres, curkey, curval, &cmd); + rv = PYCBC_TRACE_WRAP(pycbc_sd_handle_speclist, NULL, self, cv->mres, curkey, curval, &cmd); PYCBC_PYBUF_RELEASE(&keybuf); return rv; } static PyObject * -sdlookup_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts) +sdlookup_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts, pycbc_stack_context_handle context) { Py_ssize_t ncmds; PyObject *kobj = NULL; @@ -352,8 +358,8 @@ sdlookup_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopt return NULL; } - if (pycbc_oputil_iter_multi( - self, seqtype, kobj, &cv, 0, handle_single_lookup, NULL) != 0) { + if (PYCBC_OPUTIL_ITER_MULTI( + self, seqtype, kobj, &cv, 0, handle_single_lookup, NULL, context) != 0) { goto GT_DONE; } @@ -361,7 +367,7 @@ sdlookup_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopt goto GT_DONE; } - pycbc_common_vars_wait(&cv, self); + PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self); GT_DONE: pycbc_common_vars_finalize(&cv, self); @@ -371,22 +377,25 @@ sdlookup_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopt PyObject * pycbc_Bucket_lookup_in(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { - return sdlookup_common(self, args, kwargs, PYCBC_ARGOPT_SINGLE); + return sdlookup_common(self, args, kwargs, PYCBC_ARGOPT_SINGLE, + PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, self->tracer, "bucket.lookup_in")); } PyObject * pycbc_Bucket_lookup_in_multi(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { - return sdlookup_common(self, args, kwargs, PYCBC_ARGOPT_MULTI); + return sdlookup_common(self, args, kwargs, PYCBC_ARGOPT_MULTI, + PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, self->tracer, "bucket.lookup_in_multi")); } #define DECLFUNC(name, operation, mode) \ PyObject *pycbc_Bucket_##name(pycbc_Bucket *self, \ PyObject *args, PyObject *kwargs) { \ - return get_common(self, args, kwargs, operation, mode); \ + PyObject* result;\ + PYCBC_TRACE_WRAP_TOPLEVEL(result,LCBTRACE_OP_REQUEST_ENCODING,get_common, self->tracer, self, args, kwargs, operation, mode); \ + return result;\ } - DECLFUNC(get, PYCBC_CMD_GET, PYCBC_ARGOPT_SINGLE) DECLFUNC(touch, PYCBC_CMD_TOUCH, PYCBC_ARGOPT_SINGLE) DECLFUNC(lock, PYCBC_CMD_LOCK, PYCBC_ARGOPT_SINGLE) diff --git a/src/http.c b/src/http.c index 3240d1882..29a5b436d 100644 --- a/src/http.c +++ b/src/http.c @@ -193,6 +193,8 @@ pycbc_http_callbacks_init(lcb_t instance) PyObject * pycbc_Bucket__http_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { + pycbc_stack_context_handle context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, + self->tracer, "bucket.http_request"); int rv; int method; int reqtype; @@ -262,7 +264,7 @@ pycbc_Bucket__http_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) } if (!(self->flags & PYCBC_CONN_F_ASYNC)) { - pycbc_oputil_wait_common(self); + PYCBC_TRACE_WRAP(pycbc_oputil_wait_common, kwargs, self); /* RC=1 (decref on done) */ if (pycbc_multiresult_maybe_raise(mres)) { goto GT_DONE; diff --git a/src/miscops.c b/src/miscops.c index cc1d3bdb3..3514f63a9 100644 --- a/src/miscops.c +++ b/src/miscops.c @@ -17,7 +17,7 @@ #include <libcouchbase/api3.h> #include "oputil.h" #include "pycbc.h" - +#include "libcouchbase/tracing.h" /** * This file contains 'miscellaneous' operations. Functions contained here * might move to other files if they become more complex. @@ -30,14 +30,14 @@ /** * This is called during each iteration of delete/unlock */ -static int -handle_single_keyop(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING,static,int, +handle_single_keyop, pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, PyObject *curkey, PyObject *curval, PyObject *options, pycbc_Item *item, void *arg) { int rv; pycbc_pybuffer keybuf = { NULL }; - lcb_U64 cas = 0; + lcb_uint64_t cas = 0; lcb_error_t err; union { @@ -98,12 +98,14 @@ handle_single_keyop(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype rv = -1; goto GT_DONE; } + PYCBC_TRACECMD(ucmd.unl, context, cv->mres, curkey, self); err = lcb_unlock3(self->instance, cv->mres, &ucmd.unl); } else if (optype == PYCBC_CMD_ENDURE) { err = cv->mctx->addcmd(cv->mctx, &ucmd.base); } else { + PYCBC_TRACECMD(ucmd.rm,context, cv->mres, curkey, self); err = lcb_remove3(self->instance, cv->mres, &ucmd.rm); } if (err == LCB_SUCCESS) { @@ -118,8 +120,7 @@ handle_single_keyop(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype return rv; } -static PyObject * -keyop_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, static, PyObject*, keyop_common, pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, int argopts) { int rv; @@ -169,10 +170,10 @@ keyop_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, } if (argopts & PYCBC_ARGOPT_MULTI) { - rv = pycbc_oputil_iter_multi(self, seqtype, kobj, &cv, optype, - handle_single_keyop, NULL); + rv = PYCBC_OPUTIL_ITER_MULTI(self, seqtype, kobj, &cv, optype, + handle_single_keyop, NULL, context); } else { - rv = handle_single_keyop(self, &cv, optype, kobj, casobj, NULL, NULL, NULL); + rv= PYCBC_TRACE_WRAP(handle_single_keyop, kwargs, self, &cv, optype, kobj, casobj, NULL, NULL, NULL); } if (rv < 0) { @@ -193,7 +194,7 @@ keyop_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, } } - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } @@ -202,9 +203,7 @@ keyop_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int optype, return cv.ret; } - -PyObject * -pycbc_Bucket_endure_multi(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +TRACED_FUNCTION_WRAPPER(endure_multi, LCBTRACE_OP_REQUEST_ENCODING, Bucket) { int rv; Py_ssize_t ncmds; @@ -215,7 +214,7 @@ pycbc_Bucket_endure_multi(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) PyObject *is_delete_O = Py_False; lcb_error_t err; float timeout = 0.0; - float interval = 0.0; + float interval = 0.00; struct pycbc_common_vars cv = PYCBC_COMMON_VARS_STATIC_INIT; @@ -259,13 +258,12 @@ pycbc_Bucket_endure_multi(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) goto GT_DONE; } - rv = pycbc_oputil_iter_multi(self, seqtype, keys, &cv, PYCBC_CMD_ENDURE, - handle_single_keyop, NULL); + rv = PYCBC_OPUTIL_ITER_MULTI(self, seqtype, keys, &cv, PYCBC_CMD_ENDURE, handle_single_keyop, NULL, context); if (rv < 0) { goto GT_DONE; } - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } @@ -278,7 +276,9 @@ pycbc_Bucket_endure_multi(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) #define DECLFUNC(name, operation, mode) \ PyObject *pycbc_Bucket_##name(pycbc_Bucket *self, \ PyObject *args, PyObject *kwargs) { \ - return keyop_common(self, args, kwargs, operation, mode); \ + PyObject* result;\ + PYCBC_TRACE_WRAP_TOPLEVEL(result,#operation,keyop_common, self->tracer, self, args, kwargs, operation, mode); \ + return result;\ } DECLFUNC(remove, PYCBC_CMD_DELETE, PYCBC_ARGOPT_SINGLE) @@ -287,8 +287,7 @@ DECLFUNC(remove_multi, PYCBC_CMD_DELETE, PYCBC_ARGOPT_MULTI) DECLFUNC(unlock_multi, PYCBC_CMD_UNLOCK, PYCBC_ARGOPT_MULTI) -PyObject * -pycbc_Bucket__stats(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +TRACED_FUNCTION_WRAPPER(_stats,LCBTRACE_OP_REQUEST_ENCODING,Bucket) { int rv; int ii; @@ -341,11 +340,13 @@ pycbc_Bucket__stats(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) if (is_keystats && PyObject_IsTrue(is_keystats)) { cmd.cmdflags |= LCB_CMDSTATS_F_KV; } + PYCBC_TRACECMD(cmd, context, cv.mres, PYCBC_DEFAULT_TRACING_KEY, self); err = lcb_stats3(self->instance, cv.mres, &cmd); Py_XDECREF(newkey); } } else { + PYCBC_TRACECMD(cmd, context, cv.mres, PYCBC_DEFAULT_TRACING_KEY, self); err = lcb_stats3(self->instance, cv.mres, &cmd); } @@ -354,7 +355,7 @@ pycbc_Bucket__stats(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) goto GT_DONE; } - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } @@ -362,10 +363,7 @@ pycbc_Bucket__stats(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) pycbc_common_vars_finalize(&cv, self); return cv.ret; } - -PyObject *pycbc_Bucket__ping(pycbc_Bucket *self, - PyObject *args, - PyObject *kwargs) +TRACED_FUNCTION_WRAPPER(_ping,LCBTRACE_OP_REQUEST_ENCODING,Bucket) { int rv; Py_ssize_t ncmds = 0; @@ -383,6 +381,7 @@ PyObject *pycbc_Bucket__ping(pycbc_Bucket *self, if (rv < 0) { return NULL; } + PYCBC_TRACECMD(cmd, context, cv.mres, PYCBC_DEFAULT_TRACING_KEY, self); lcb_sched_enter(self->instance); err = lcb_ping3(self->instance, cv.mres, &cmd); @@ -391,7 +390,7 @@ PyObject *pycbc_Bucket__ping(pycbc_Bucket *self, goto GT_DONE; } - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } lcb_sched_leave(self->instance); @@ -400,9 +399,7 @@ PyObject *pycbc_Bucket__ping(pycbc_Bucket *self, return cv.ret; } -PyObject *pycbc_Bucket__diagnostics(pycbc_Bucket *self, - PyObject *args, - PyObject *kwargs) +TRACED_FUNCTION_WRAPPER(_diagnostics,LCBTRACE_OP_REQUEST_ENCODING,Bucket) { int rv; Py_ssize_t ncmds = 0; @@ -417,6 +414,8 @@ PyObject *pycbc_Bucket__diagnostics(pycbc_Bucket *self, if (rv < 0) { return NULL; } + + PYCBC_TRACECMD(cmd, context, cv.mres, PYCBC_DEFAULT_TRACING_KEY, self); lcb_sched_enter(self->instance); PYCBC_CONN_THR_BEGIN(self); err = lcb_diag(self->instance, cv.mres, &cmd); @@ -427,7 +426,7 @@ PyObject *pycbc_Bucket__diagnostics(pycbc_Bucket *self, goto GT_DONE; } - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } lcb_sched_leave(self->instance); diff --git a/src/multiresult.c b/src/multiresult.c index 551440711..c81e35ac2 100644 --- a/src/multiresult.c +++ b/src/multiresult.c @@ -350,14 +350,20 @@ pycbc_multiresult_get_result(pycbc_MultiResult *self) int rv; Py_ssize_t dictpos = 0; PyObject *key, *value; + PyObject *mres = pycbc_multiresult_dict(self); if (!(self->mropts & PYCBC_MRES_F_SINGLE)) { PyObject *res = (PyObject *)self; Py_INCREF(res); return res; } - - rv = PyDict_Next(pycbc_multiresult_dict(self), &dictpos, &key, &value); + PYCBC_DEBUG_LOG("\n hit multiresult_get_result:["); + pycbc_print_repr((PyObject*)self); + PYCBC_DEBUG_LOG("]\n"); + PYCBC_DEBUG_LOG("\ngot results:["); + pycbc_print_repr(mres); + PYCBC_DEBUG_LOG("]\n"); + rv = PyDict_Next(mres, &dictpos, &key, &value); if (!rv) { PYCBC_EXC_WRAP(PYCBC_EXC_INTERNAL, 0, "No objects in MultiResult"); return NULL; diff --git a/src/n1ql.c b/src/n1ql.c index 9677a0c73..f93132968 100644 --- a/src/n1ql.c +++ b/src/n1ql.c @@ -56,7 +56,23 @@ query_common(pycbc_Bucket *self, const char *params, unsigned nparams, } mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); - vres = (pycbc_ViewResult *)PYCBC_TYPE_CTOR(&pycbc_ViewResultType); + { + PyObject* kwargs = pycbc_DummyKeywords; +#ifdef PYCBC_TRACING + if (self->tracer) { + kwargs= PyDict_New(); + PyDict_SetItemString(kwargs, "tracer", (PyObject*)self->tracer); + } +#endif + vres = (pycbc_ViewResult *) PyObject_CallFunction((PyObject*)&pycbc_ViewResultType, "OO", Py_None, kwargs); + if (!vres) + { + PYCBC_DEBUG_LOG("null vres"); + } + + PYCBC_EXCEPTION_LOG_NOCLEAR; + PYCBC_DEBUG_LOG("got vres: %p", vres); + } pycbc_httpresult_init(&vres->base, mres); vres->rows = PyList_New(0); vres->base.format = PYCBC_FMT_JSON; diff --git a/src/observe.c b/src/observe.c index d58dfa0cf..c841d11d2 100644 --- a/src/observe.c +++ b/src/observe.c @@ -89,7 +89,7 @@ pycbc_observeinfo_new(pycbc_Bucket *parent) static int handle_single_observe(pycbc_Bucket *self, PyObject *curkey, int master_only, - struct pycbc_common_vars *cv) + struct pycbc_common_vars *cv, pycbc_stack_context_handle context) { int rv; pycbc_pybuffer keybuf = { NULL }; @@ -105,7 +105,7 @@ handle_single_observe(pycbc_Bucket *self, PyObject *curkey, int master_only, if (master_only) { cmd.cmdflags |= LCB_CMDOBSERVE_F_MASTER_ONLY; } - + PYCBC_TRACECMD(cmd,context, cv->mres, curkey, self); err = cv->mctx->addcmd(cv->mctx, (lcb_CMDBASE*)&cmd); if (err == LCB_SUCCESS) { rv = 0; @@ -119,7 +119,7 @@ handle_single_observe(pycbc_Bucket *self, PyObject *curkey, int master_only, } static PyObject * -observe_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts) +observe_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts, pycbc_stack_context_handle context) { int rv; int ii; @@ -174,12 +174,12 @@ observe_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts PyObject *curkey = NULL, *curvalue = NULL; rv = pycbc_oputil_sequence_next(seqtype, curseq, &dictpos, ii, - &curkey, &curvalue); + &curkey, &curvalue, context); if (rv < 0) { goto GT_ITER_DONE; } - rv = handle_single_observe(self, curkey, master_only, &cv); + rv = handle_single_observe(self, curkey, master_only, &cv, context); GT_ITER_DONE: Py_XDECREF(curkey); @@ -191,7 +191,7 @@ observe_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts } } else { - rv = handle_single_observe(self, kobj, master_only, &cv); + rv = handle_single_observe(self, kobj, master_only, &cv, context); if (rv < 0) { goto GT_DONE; @@ -199,7 +199,7 @@ observe_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts } cv.is_seqcmd = 1; - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } @@ -210,13 +210,17 @@ observe_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopts } PyObject * -pycbc_Bucket_observe(pycbc_Bucket *self, PyObject *args, PyObject *kw) +pycbc_Bucket_observe(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { - return observe_common(self, args, kw, PYCBC_ARGOPT_SINGLE); + PyObject* result; + PYCBC_TRACE_WRAP_TOPLEVEL(result,LCBTRACE_OP_REQUEST_ENCODING, observe_common, self->tracer, self, args, kwargs, PYCBC_ARGOPT_SINGLE); + return result; } PyObject * -pycbc_Bucket_observe_multi(pycbc_Bucket *self, PyObject *args, PyObject *kw) +pycbc_Bucket_observe_multi(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { - return observe_common(self, args, kw, PYCBC_ARGOPT_MULTI); + PyObject* result; + PYCBC_TRACE_WRAP_TOPLEVEL(result,LCBTRACE_OP_REQUEST_ENCODING, observe_common, self->tracer, self, args, kwargs, PYCBC_ARGOPT_MULTI); + return result; } diff --git a/src/oputil.c b/src/oputil.c index d5de4254b..6e3e1965b 100644 --- a/src/oputil.c +++ b/src/oputil.c @@ -14,7 +14,10 @@ * limitations under the License. **/ +//#include <libcouchbase/subdoc.h> #include "oputil.h" +#include "pycbc.h" +#include "structmember.h" void pycbc_common_vars_finalize(struct pycbc_common_vars *cv, pycbc_Bucket *conn) @@ -31,8 +34,9 @@ pycbc_common_vars_finalize(struct pycbc_common_vars *cv, pycbc_Bucket *conn) } } -int -pycbc_common_vars_wait(struct pycbc_common_vars *cv, pycbc_Bucket *self) +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING,, +int, +pycbc_common_vars_wait, struct pycbc_common_vars *cv, pycbc_Bucket *self) { Py_ssize_t nsched = cv->is_seqcmd ? 1 : cv->ncmds; @@ -58,7 +62,7 @@ pycbc_common_vars_wait(struct pycbc_common_vars *cv, pycbc_Bucket *self) Py_INCREF(Py_None); return 0; } - pycbc_oputil_wait_common(self); + PYCBC_TRACE_WRAP(pycbc_oputil_wait_common, NULL, self); if (!pycbc_assert(self->nremaining == 0)) { fprintf(stderr, "Remaining count != 0. Adjusting"); @@ -250,7 +254,8 @@ pycbc_oputil_sequence_next(pycbc_seqtype_t seqtype, Py_ssize_t *dictpos, int ii, PyObject **key, - PyObject **value) + PyObject **value, + pycbc_stack_context_handle context ) { if (seqtype & PYCBC_SEQTYPE_DICT) { int rv = PyDict_Next(seqobj, dictpos, key, value); @@ -338,6 +343,16 @@ extract_item_params(struct pycbc_common_vars *cv, return 0; } +#ifdef PYCBC_TRACING +pycbc_oputil_keyhandler pycbc_oputil_keyhandler_build(pycbc_oputil_keyhandler_raw cb, const char* category, const char* name) { + pycbc_oputil_keyhandler handler; + handler.cb = cb; + handler.category = category; + handler.name = name; + return handler; +} +#endif + int pycbc_oputil_iter_multi(pycbc_Bucket *self, pycbc_seqtype_t seqtype, @@ -345,7 +360,8 @@ pycbc_oputil_iter_multi(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, pycbc_oputil_keyhandler handler, - void *arg) + void *arg, + pycbc_stack_context_handle context) { int rv = 0; int ii; @@ -359,12 +375,13 @@ pycbc_oputil_iter_multi(pycbc_Bucket *self, } for (ii = 0; ii < cv->ncmds; ii++) { + PyObject *k, *v = NULL, *options = NULL; PyObject *arg_k = NULL; pycbc_Item *itm = NULL; rv = pycbc_oputil_sequence_next(seqtype, - seqobj, &dictpos, ii, &k, &v); + seqobj, &dictpos, ii, &k, &v, context); if (rv < 0) { goto GT_ITER_DONE; } @@ -379,9 +396,13 @@ pycbc_oputil_iter_multi(pycbc_Bucket *self, arg_k = k; } - rv = handler(self, cv, optype, arg_k, v, options, itm, arg); +#ifdef PYCBC_TRACING + rv = PYCBC_TRACE_WRAP_EXPLICIT_NAMED((handler).cb, (handler).name, (handler).category, NULL, self, cv, optype, arg_k, v, options, itm, arg); +#else + rv = PYCBC_TRACE_WRAP_EXPLICIT_NAMED(handler, "", "", NULL, self, cv, optype, arg_k, v, options, itm, arg); +#endif - GT_ITER_DONE: + GT_ITER_DONE: Py_XDECREF(k); Py_XDECREF(v); @@ -440,8 +461,8 @@ pycbc_oputil_conn_unlock(pycbc_Bucket *self) PyThread_release_lock(self->lock); } -void -pycbc_oputil_wait_common(pycbc_Bucket *self) +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING,,void, +pycbc_oputil_wait_common,pycbc_Bucket *self) { /** * If we have a 'lockmode' specified, check to see that nothing else is @@ -601,8 +622,8 @@ sd_convert_spec(PyObject *pyspec, lcb_SDSPEC *sdspec, return -1; } -int -pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING,, int, +pycbc_sd_handle_speclist, pycbc_Bucket *self, pycbc_MultiResult *mres, PyObject *key, PyObject *spectuple, lcb_CMDSUBDOC *cmd) { int rv = 0; @@ -655,6 +676,11 @@ pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, } if (rv == 0) { + PYCBC_TRACECMD_PURE((*cmd), context); +#ifdef PYCBC_TRACING + newitm->tracing_context = context; + newitm->is_tracing_stub = 0; +#endif err = lcb_subdoc3(self->instance, mres, cmd); if (err == LCB_SUCCESS) { PyDict_SetItem((PyObject*)mres, key, (PyObject*)newitm); diff --git a/src/oputil.h b/src/oputil.h index 68ba3fc0a..5d4b1b4a3 100644 --- a/src/oputil.h +++ b/src/oputil.h @@ -96,15 +96,26 @@ struct pycbc_common_vars { /** * Handler for iterations */ -typedef int (*pycbc_oputil_keyhandler) +typedef int (*pycbc_oputil_keyhandler_raw) (pycbc_Bucket *self, - struct pycbc_common_vars *cv, - int optype, - PyObject *key, - PyObject *value, - PyObject *options, - pycbc_Item *item, - void *arg); + struct pycbc_common_vars *cv, + int optype, + PyObject *key, + PyObject *value, + PyObject *options, + pycbc_Item *item, + void *arg, + pycbc_stack_context_handle context); + +#ifdef PYCBC_TRACING +typedef struct { + const char* category; + const char* name; + pycbc_oputil_keyhandler_raw cb; +} pycbc_oputil_keyhandler; +#else +typedef pycbc_oputil_keyhandler_raw pycbc_oputil_keyhandler; +#endif /** * Examine the 'quiet' parameter and see if we should set the MultiResult's @@ -131,9 +142,9 @@ int pycbc_maybe_set_quiet(pycbc_MultiResult *mres, PyObject *quiet); * Other stuff. */ int pycbc_oputil_check_sequence(PyObject *sequence, - int allow_list, - Py_ssize_t *ncmds, - pycbc_seqtype_t *seqtype); + int allow_list, + Py_ssize_t *ncmds, + pycbc_seqtype_t *seqtype); /** @@ -184,7 +195,8 @@ int pycbc_oputil_sequence_next(pycbc_seqtype_t seqtype, Py_ssize_t *dictpos, int ii, PyObject **key, - PyObject **value); + PyObject **value, + pycbc_stack_context_handle context); /** * Initialize the 'common_vars' structure. @@ -198,7 +210,16 @@ int pycbc_common_vars_init(struct pycbc_common_vars *cv, int argopts, Py_ssize_t ncmds, int want_vals); - +#ifdef PYCBC_TRACING +pycbc_oputil_keyhandler pycbc_oputil_keyhandler_build(pycbc_oputil_keyhandler_raw cb, const char* category, const char* name); +#define PYCBC_OPUTIL_KEYHANDLER(NAME) pycbc_oputil_keyhandler_build(NAME, NAME##_category(), #NAME) +#define PYCBC_OPUTIL_ITER_MULTI(SELF,SEQTYPE,COLLECTION,CV,OPTYPE,HANDLER,CONTEXT,...)\ + pycbc_oputil_iter_multi(SELF,SEQTYPE,COLLECTION,CV,OPTYPE,PYCBC_OPUTIL_KEYHANDLER(HANDLER),CONTEXT,__VA_ARGS__) +#else +#define PYCBC_OPUTIL_KEYHANDLER(NAME) NAME +#define PYCBC_OPUTIL_ITER_MULTI(SELF,SEQTYPE,COLLECTION,CV,OPTYPE,HANDLER,CONTEXT,...)\ + pycbc_oputil_iter_multi(SELF,SEQTYPE,COLLECTION,CV,OPTYPE,PYCBC_OPUTIL_KEYHANDLER(HANDLER),CONTEXT,__VA_ARGS__) +#endif /** * Iterate over a sequence of command objects @@ -217,7 +238,8 @@ pycbc_oputil_iter_multi(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, pycbc_oputil_keyhandler handler, - void *arg); + void *arg, + pycbc_stack_context_handle context); /** @@ -230,15 +252,17 @@ void pycbc_common_vars_finalize(struct pycbc_common_vars *cv, pycbc_Bucket *self * Wait for the operation to complete * @return 0 on success, -1 on failure. */ -int pycbc_common_vars_wait(struct pycbc_common_vars *cv, pycbc_Bucket *self); +TRACED_FUNCTION_DECL(, +int, +pycbc_common_vars_wait, struct pycbc_common_vars *cv, pycbc_Bucket *self); /** * Wrapper around lcb_wait(). This ensures threading contexts are properly * initialized. */ -void -pycbc_oputil_wait_common(pycbc_Bucket *self); +TRACED_FUNCTION_DECL(,void, +pycbc_oputil_wait_common, pycbc_Bucket *self); /** @@ -271,11 +295,12 @@ int pycbc_handle_durability_args(pycbc_Bucket *self, */ int pycbc_encode_sd_keypath(pycbc_Bucket *conn, PyObject *src, - pycbc_pybuffer *keybuf, pycbc_pybuffer *pathbuf); + pycbc_pybuffer *keybuf, pycbc_pybuffer *pathbuf); -int -pycbc_sd_handle_speclist(pycbc_Bucket *self, pycbc_MultiResult *mres, - PyObject *key, PyObject *spectuple, lcb_CMDSUBDOC *cmd); +TRACED_FUNCTION_DECL(, +int, +pycbc_sd_handle_speclist, pycbc_Bucket *self, pycbc_MultiResult *mres, + PyObject *key, PyObject *spectuple, lcb_CMDSUBDOC *cmd); /** * Macro to declare prototypes for entry points. diff --git a/src/pipeline.c b/src/pipeline.c index 78b63aa20..0cce2c129 100644 --- a/src/pipeline.c +++ b/src/pipeline.c @@ -39,6 +39,8 @@ pycbc_Bucket__start_pipeline(pycbc_Bucket *self) PyObject * pycbc_Bucket__end_pipeline(pycbc_Bucket *self) { + pycbc_stack_context_handle context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(NULL, LCBTRACE_OP_RESPONSE_DECODING, + self->tracer, "bucket.end_pipeline"); PyObject *rv; int ii; @@ -54,7 +56,7 @@ pycbc_Bucket__end_pipeline(pycbc_Bucket *self) goto GT_DONE; } - pycbc_oputil_wait_common(self); + PYCBC_TRACE_WRAP(pycbc_oputil_wait_common, NULL, self); pycbc_assert(self->nremaining == 0); diff --git a/src/pycbc.h b/src/pycbc.h index a15479387..fc712ae20 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -13,13 +13,34 @@ * See the License for the specific language governing permissions and * limitations under the License. **/ - #ifndef PYCBC_H_ #define PYCBC_H_ /** * This file contains the base header for the Python Couchbase Client * @author Mark Nunberg */ +#ifdef PYCBC_DEBUG +#define PYCBC_DEBUG_LOG_RAW(...) printf(__VA_ARGS__); +#else +#define PYCBC_DEBUG_LOG_RAW(...) +#endif + +#define PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(FILE,LINE,POSTFIX,...)\ + PYCBC_DEBUG_LOG_RAW("at %s line %d:", FILE, LINE)\ + PYCBC_DEBUG_LOG_RAW(__VA_ARGS__)\ + PYCBC_DEBUG_LOG_RAW(POSTFIX) +#define PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(FILE,LINE,...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(FILE,LINE,"\n", __VA_ARGS__) +#define PYCBC_DEBUG_LOG_WITHOUT_NEWLINE(...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(__FILE__,__LINE__, "", __VA_ARGS__) +#define PYCBC_DEBUG_LOG(...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(__FILE__,__LINE__,__VA_ARGS__) + +#ifdef PYCBC_DEBUG +#define PYCBC_DECREF(X) pycbc_assert(Py_REFCNT(X)>0);PYCBC_DEBUG_LOG("%p has count of %li", X, Py_REFCNT(X)); Py_DECREF(X); +#define PYCBC_XDECREF(X) pycbc_assert(!X || Py_REFCNT(X)>0);PYCBC_DEBUG_LOG("%p has count of %li", X, X?Py_REFCNT(X):0); Py_XDECREF(X) +#else +#define PYCBC_DECREF(X) Py_DECREF(X); +#define PYCBC_XDECREF(X) Py_XDECREF(X); +#endif + #include <Python.h> #include <libcouchbase/couchbase.h> @@ -272,12 +293,35 @@ typedef struct { char replicate_to; } pycbc_dur_params; +void pycbc_dict_add_text_kv(PyObject *dict, const char *key, const char *value); +void pycbc_print_string( PyObject *curkey); +void pycbc_print_repr( PyObject *pobj); +void pycbc_exception_log(const char* file, int line, int clear); +#ifdef PYCBC_DEBUG +#define PYCBC_EXCEPTION_LOG_NOCLEAR pycbc_exception_log(__FILE__,__LINE__,0); +#define PYCBC_EXCEPTION_LOG pycbc_exception_log(__FILE__,__LINE__,1); +#else +#define PYCBC_EXCEPTION_LOG_NOCLEAR +#define PYCBC_EXCEPTION_LOG PyErr_Clear(); +#endif + +struct pycbc_Tracer; + +#ifdef LCB_TRACING +#ifdef PYCBC_TRACING_ENABLE +#define PYCBC_TRACING +#endif +#endif + typedef struct { PyObject_HEAD /** LCB instance */ lcb_t instance; - +#ifdef PYCBC_TRACING + /** Tracer **/ + struct pycbc_Tracer *tracer; +#endif /** Transcoder object */ PyObject *tc; @@ -342,6 +386,140 @@ typedef struct { } pycbc_Bucket; +#ifdef PYCBC_TRACING + +typedef struct pycbc_Tracer { + PyObject_HEAD + lcbtrace_TRACER *tracer; + PyObject* parent; + lcb_t *instance; + + int init_called:1; +} pycbc_Tracer_t; + +static PyTypeObject SpanType = { + PYCBC_POBJ_HEAD_INIT(NULL) + 0 +}; + +typedef struct { + PyObject_HEAD +#ifdef PYCBC_TRACING + lcbtrace_SPAN *span; +#endif +} pycbc_Span_t; + + +typedef struct +{ +#ifdef PYCBC_TRACING + pycbc_Tracer_t* tracer; + lcbtrace_SPAN* span; +#endif +} pycbc_stack_context; + +typedef pycbc_stack_context* pycbc_stack_context_handle; +#else + +typedef void* pycbc_stack_context_handle; + +#endif + +#ifdef PYCBC_TRACING + +int pycbc_is_async_or_pipeline(const pycbc_Bucket *self); + +pycbc_stack_context_handle pycbc_Tracer_span_start(pycbc_Tracer_t *tracer, PyObject *kwargs, const char *operation, + lcb_uint64_t now, pycbc_stack_context_handle context, + lcbtrace_REF_TYPE ref_type, const char* component); +PyObject* pycbc_Context_finish(pycbc_stack_context_handle context ); +void pycbc_Tracer_propagate(pycbc_Tracer_t *tracer); + + +void pycbc_init_traced_result(pycbc_Bucket *self, PyObject* mres_dict, PyObject *curkey, + pycbc_stack_context_handle context); + +#define PYCBC_GET_STACK_CONTEXT(KWARGS,CATEGORY,TRACER, PARENT_CONTEXT) pycbc_Tracer_span_start(TRACER, KWARGS, CATEGORY, 0, PARENT_CONTEXT, LCBTRACE_REF_CHILD_OF ) +#define PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(KWARGS,CATEGORY,TRACER, NAME) pycbc_Tracer_span_start(TRACER, KWARGS, CATEGORY, 0, NULL, LCBTRACE_REF_NONE, NAME ) + +extern PyObject* pycbc_default_key; +#define PYCBC_DEFAULT_TRACING_KEY pycbc_default_key + +pycbc_stack_context_handle pycbc_check_context(pycbc_stack_context_handle CONTEXT, const char* file, int line); + + +#define PYCBC_CHECK_CONTEXT(CONTEXT) pycbc_check_context(CONTEXT,__FILE__,__LINE__) +#define PYCBC_TRACECMD_PURE(CMD,CONTEXT) {\ + if (PYCBC_CHECK_CONTEXT(CONTEXT))\ + { \ + PYCBC_DEBUG_LOG("setting trace span on %.*s\n",\ + (int)(CMD).key.contig.nbytes,(const char*)(CMD).key.contig.bytes);\ + LCB_CMD_SET_TRACESPAN(&(CMD),(CONTEXT)->span);\ + } else {PYCBC_EXCEPTION_LOG_NOCLEAR;}\ +}; + +#define PYCBC_TRACECMD(CMD,CONTEXT,MRES,CURKEY,BUCKET) PYCBC_TRACECMD_PURE(CMD,CONTEXT); \ + pycbc_init_traced_result(BUCKET, pycbc_multiresult_dict(MRES), CURKEY, context); + +#define PYCBC_TRACE_POP_CONTEXT(CONTEXT) pycbc_Context_finish(CONTEXT); + + +#define PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(RV, CATEGORY, NAME, TRACER, STRINGNAME, ...) \ +{\ + int should_trace = 1 || !pycbc_is_async_or_pipeline(self);\ + pycbc_stack_context_handle sub_context = NULL;\ + if (should_trace) { sub_context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, CATEGORY, TRACER, STRINGNAME); };\ + RV = NAME(__VA_ARGS__, sub_context);\ + if ( 0 && should_trace && sub_context && !pycbc_is_async_or_pipeline(self)) {\ + pycbc_Context_finish(sub_context);\ + }\ + PYCBC_EXCEPTION_LOG_NOCLEAR;\ +}; + + + +#define PYCBC_TRACE_WRAP_EXPLICIT_NAMED(NAME,COMPONENTNAME,CATEGORY,KWARGS,...) NAME(__VA_ARGS__, pycbc_Tracer_span_start(self->tracer,KWARGS,CATEGORY,0, context, LCBTRACE_REF_CHILD_OF, COMPONENTNAME)) +#define PYCBC_TRACE_WRAP(NAME,KWARGS,...) PYCBC_TRACE_WRAP_EXPLICIT_NAMED(NAME, #NAME, NAME##_category(), KWARGS, __VA_ARGS__) + +#define PYCBC_TRACE_WRAP_TOPLEVEL(RV, CATEGORY, NAME, TRACER, ...)\ + PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(RV, CATEGORY, NAME, TRACER, #NAME, __VA_ARGS__); + +#else + +#define PYCBC_GET_STACK_CONTEXT(CATEGORY,TRACER, PARENT_CONTEXT) NULL +#define PYCBC_TRACECMD(...) +#define PYCBC_TRACECMD_PURE(...) +#define PYCBC_TRACE_POP_CONTEXT(X) +#define PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(RV, CATEGORY, NAME, TRACER, STRINGNAME, ...) { RV = NAME(__VA_ARGS__, NULL); } +#define PYCBC_TRACE_WRAP_EXPLICIT_NAMED(NAME,COMPONENTNAME,CATEGORY,KWARGS,...) NAME(__VA_ARGS__, NULL) +#define PYCBC_TRACE_WRAP_TOPLEVEL(RV,CATEGORY,NAME,TRACER,...) { RV=NAME(__VA_ARGS__,NULL); } +#define PYCBC_TRACE_WRAP(NAME,KWARGS,...) NAME(__VA_ARGS__, NULL) +#define PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(...) NULL + +#endif + + +#define TRACED_FUNCTION(CATEGORY,QUALIFIERS,RTYPE,NAME,...)\ + const char* NAME##_category(void){ return CATEGORY; }\ + QUALIFIERS RTYPE NAME(__VA_ARGS__, pycbc_stack_context_handle context) + +#define TRACED_FUNCTION_DECL(QUALIFIERS,RTYPE,NAME,...)\ + const char* NAME##_category(void);\ + QUALIFIERS RTYPE NAME(__VA_ARGS__, pycbc_stack_context_handle context); + +#define TRACED_FUNCTION_WRAPPER(name, CATEGORY, CLASS) \ +PyObject *pycbc_##CLASS##_##name##_real(pycbc_##CLASS *self, PyObject *args, PyObject *kwargs,\ + pycbc_stack_context_handle context);\ +PyObject *pycbc_##CLASS##_##name(pycbc_##CLASS *self, \ + PyObject *args, PyObject *kwargs) {\ + PyObject* result;\ + PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(result, CATEGORY, pycbc_##CLASS##_##name##_real, self->tracer, #CLASS "." #name, self, args, kwargs);\ + return result;\ +}\ +PyObject *pycbc_##CLASS##_##name##_real(pycbc_##CLASS *self, PyObject *args, PyObject *kwargs,\ + pycbc_stack_context_handle context) + + /***************** * Result Objects. @@ -353,10 +531,20 @@ typedef struct { * See result.c and opresult.c */ +#ifdef PYCBC_TRACING +#define TRACING_DATA \ + pycbc_stack_context_handle tracing_context;\ + int is_tracing_stub;\ + PyObject* tracing_output; +#else +#define TRACING_DATA +#endif + #define pycbc_Result_HEAD \ PyObject_HEAD \ lcb_error_t rc; \ - PyObject *key; + PyObject *key; \ + TRACING_DATA #define pycbc_OpResult_HEAD \ pycbc_Result_HEAD \ @@ -429,6 +617,10 @@ typedef struct { PyObject *rows; long rows_per_call; char has_parse_error; +#ifdef PYCBC_TRACING + pycbc_Tracer_t* py_tracer; + int own_tracer; +#endif } pycbc_ViewResult; @@ -616,6 +808,11 @@ enum { */ int pycbc_ResultType_ready(PyTypeObject *p, int flags); +/** + * Types used for tracing + */ +#define PYCBC_TRACING_TYPES(X)\ + X(Tracer, "The Tracer Object") \ /** * Extern PyTypeObject declaraions. @@ -638,6 +835,12 @@ extern PyTypeObject pycbc__SDResultType; /* views.c */ extern PyTypeObject pycbc_ViewResultType; +/* ext.c */ +#define PYCBC_EXTERN(X,DOC)\ +extern PyTypeObject pycbc_##X##Type; + +PYCBC_TRACING_TYPES(PYCBC_EXTERN); +#undef PYCBC_EXTERN /** * Result type check macros */ @@ -775,11 +978,28 @@ int pycbc_AsyncResultType_init(PyObject **ptr); int pycbc_IOPSWrapperType_init(PyObject **ptr); int pycbc_ViewResultType_init(PyObject **ptr); +#define PYCBC_TYPE_INIT_DECL(TYPENAME,TYPE_DOC)\ +int \ +pycbc_##TYPENAME##Type_init(PyObject **ptr);\ +extern PyTypeObject pycbc_##TYPENAME##Type; + +PYCBC_TRACING_TYPES(PYCBC_TYPE_INIT_DECL); + +#undef PYCBC_TYPE_INIT_DECL /** * Calls the type's constructor with no arguments: */ -#define PYCBC_TYPE_CTOR(t) PyObject_CallFunction((PyObject*)t, NULL, NULL) +//#define PYCBC_TYPE_CTOR(t,...) PyObject_CallFunction((PyObject*)t, NULL, NULL) +#define PYCBC_TYPE_CTOR_1_args(t) PyObject_CallFunction((PyObject*)t, 0) +#define PYCBC_TYPE_CTOR_2_args(t, args) PyObject_CallFunction((PyObject*)t, "O", args) +#define PYCBC_TYPE_CTOR_3_args(t, args, kwargs) PyObject_CallFunction((PyObject*)t, "OO", args, kwargs) + +#define GET_4TH_ARG(arg1, arg2, arg3, arg4, ...) arg4 +#define PYCBC_TYPE_CTOR_CHOOSER(...) \ + GET_4TH_ARG(__VA_ARGS__, PYCBC_TYPE_CTOR_3_args, \ + PYCBC_TYPE_CTOR_2_args, PYCBC_TYPE_CTOR_1_args, ) +#define PYCBC_TYPE_CTOR(...) PYCBC_TYPE_CTOR_CHOOSER(__VA_ARGS__)(__VA_ARGS__) /** * Allocators for result functions. See callbacks.c:get_common @@ -952,7 +1172,7 @@ PyObject* pycbc_exc_get_categories(PyObject *self, PyObject *arg); struct pycbc_exception_params __pycbc_ep = {0}; \ __pycbc_ep.file = __FILE__; \ __pycbc_ep.line = __LINE__; \ - __pycbc_ep.err = e_err; \ + __pycbc_ep.err = (lcb_error_t)e_err; \ __pycbc_ep.msg = e_msg; \ __pycbc_ep.key = e_key; \ __pycbc_ep.objextra = e_objextra; \ diff --git a/src/result.c b/src/result.c index 7e9677ec5..65ded1ea7 100644 --- a/src/result.c +++ b/src/result.c @@ -111,6 +111,18 @@ pycbc_Result_dealloc(pycbc_Result *self) { Result_dealloc(self); } +static int +pycbc_Result_init(PyObject *self_raw, + PyObject *args, PyObject *kwargs) +{ + pycbc_Result* self = (pycbc_Result*) self_raw; +#ifdef PYCBC_TRACING + self->is_tracing_stub = 0; + self->tracing_context = NULL; +#endif + PYCBC_EXCEPTION_LOG_NOCLEAR; + return 0; +} int pycbc_ResultType_init(PyObject **ptr) @@ -130,6 +142,7 @@ pycbc_ResultType_init(PyObject **ptr) "operations which may required additional fields."); p->tp_new = PyType_GenericNew; + p->tp_init = pycbc_Result_init; p->tp_dealloc = (destructor)Result_dealloc; p->tp_basicsize = sizeof(pycbc_Result); p->tp_members = Result_TABLE_members; diff --git a/src/store.c b/src/store.c index 0ac83e694..69e8bd52b 100644 --- a/src/store.c +++ b/src/store.c @@ -14,7 +14,9 @@ * limitations under the License. **/ + #include "oputil.h" +#include "pycbc.h" struct storecmd_vars { int operation; @@ -104,25 +106,47 @@ handle_item_kv(pycbc_Item *itm, PyObject *options, const struct storecmd_vars *s return 0; } -static int -handle_multi_mutate(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, - PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *itm, - void *arg); +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING,static, int, + handle_multi_mutate, pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, + PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *itm, + void *arg) { + int rv; + const struct storecmd_vars *scv = (const struct storecmd_vars *) arg; + pycbc_pybuffer keybuf = {NULL}; + lcb_CMDSUBDOC cmd = {0}; -static int -handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, - PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *itm, - void *arg) -{ + if (itm) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Item not supported in subdoc mode"); + return -1; + } + + if (pycbc_tc_encode_key(self, curkey, &keybuf) != 0) { + return -1; + } + + cmd.cas = scv->single_cas; + cmd.exptime = scv->ttl; + cmd.cmdflags |= scv->sd_doc_flags; + LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); + rv = PYCBC_TRACE_WRAP(pycbc_sd_handle_speclist, NULL, self, cv->mres, curkey, curvalue, &cmd); + PYCBC_PYBUF_RELEASE(&keybuf); + return rv; +} + + +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, static, int, +handle_single_kv, pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, + PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *itm, + void *arg) { int rv; - const struct storecmd_vars *scv = (struct storecmd_vars *)arg; - struct single_key_context skc = { NULL }; - pycbc_pybuffer keybuf = { NULL }, valbuf = { NULL }; + const struct storecmd_vars *scv = (struct storecmd_vars *) arg; + struct single_key_context skc = {NULL}; + pycbc_pybuffer keybuf = {NULL}, valbuf = {NULL}; lcb_error_t err; - lcb_CMDSTORE cmd = { 0 }; + lcb_CMDSTORE cmd = {0}; if (scv->argopts & PYCBC_ARGOPT_SDMULTI) { - return handle_multi_mutate(self, cv, optype, curkey, curvalue, options, itm, arg); + return PYCBC_TRACE_WRAP(handle_multi_mutate, NULL, self, cv, optype, curkey, curvalue, options, itm, arg); } skc.ttl = scv->ttl; @@ -144,7 +168,7 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, } rv = pycbc_tc_encode_value(self, skc.value, skc.flagsobj, - &valbuf, &cmd.flags); + &valbuf, &cmd.flags); if (rv < 0) { rv = -1; goto GT_DONE; @@ -160,8 +184,10 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); LCB_CMD_SET_VALUE(&cmd, valbuf.buffer, valbuf.length); cmd.cas = skc.cas; - cmd.operation = scv->operation; + cmd.operation = (lcb_storage_t) scv->operation; cmd.exptime = skc.ttl; + PYCBC_TRACECMD(cmd, context, cv->mres, curkey, self); + err = lcb_store3(self->instance, cv->mres, &cmd); if (err == LCB_SUCCESS) { rv = 0; @@ -179,36 +205,7 @@ handle_single_kv(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, } static int -handle_multi_mutate(pycbc_Bucket *self, struct pycbc_common_vars *cv, int optype, - PyObject *curkey, PyObject *curvalue, PyObject *options, pycbc_Item *itm, - void *arg) -{ - int rv; - const struct storecmd_vars *scv = arg; - pycbc_pybuffer keybuf = { NULL }; - lcb_CMDSUBDOC cmd = { 0 }; - - if (itm) { - PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Item not supported in subdoc mode"); - return -1; - } - - if (pycbc_tc_encode_key(self, curkey, &keybuf) != 0) { - return -1; - } - - cmd.cas = scv->single_cas; - cmd.exptime = scv->ttl; - cmd.cmdflags |= scv->sd_doc_flags; - LCB_CMD_SET_KEY(&cmd, keybuf.buffer, keybuf.length); - rv = pycbc_sd_handle_speclist(self, cv->mres, curkey, curvalue, &cmd); - PYCBC_PYBUF_RELEASE(&keybuf); - return rv; -} - -static int -handle_append_flags(pycbc_Bucket *self, PyObject **flagsobj) -{ +handle_append_flags(pycbc_Bucket *self, PyObject **flagsobj) { unsigned long val = 0; if (*flagsobj == NULL || *flagsobj == Py_None) { @@ -221,7 +218,7 @@ handle_append_flags(pycbc_Bucket *self, PyObject **flagsobj) } val = pycbc_IntAsUL(*flagsobj); - if (val == (unsigned long)-1) { + if (val == (unsigned long) -1) { PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ARGUMENTS, 0, "invalid flags", *flagsobj); return -1; } @@ -232,16 +229,18 @@ handle_append_flags(pycbc_Bucket *self, PyObject **flagsobj) return 0; } - PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ARGUMENTS, 0, "Only FMT_BYTES and FMT_UTF8 are supported for append/prepend", *flagsobj); + PYCBC_EXC_WRAP_OBJ(PYCBC_EXC_ARGUMENTS, 0, "Only FMT_BYTES and FMT_UTF8 are supported for append/prepend", + *flagsobj); return -1; } -static PyObject * -set_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, - int operation, int argopts) -{ +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, + static, PyObject *, +set_common, pycbc_Bucket *self, PyObject *args, PyObject *kwargs, + int operation, int argopts) { int rv; + Py_ssize_t ncmds = 0; PyObject *ttl_O = NULL; PyObject *dict = NULL; @@ -328,29 +327,31 @@ set_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, } if (argopts & PYCBC_ARGOPT_MULTI) { - rv = pycbc_oputil_iter_multi(self, seqtype, dict, &cv, 0, handle_single_kv, &scv); + rv = PYCBC_OPUTIL_ITER_MULTI(self, seqtype, dict, &cv, 0, handle_single_kv, &scv, context); } else { - rv = handle_single_kv(self, &cv, 0, key, value, NULL, NULL, &scv); + rv = handle_single_kv(self, &cv, 0, key, value, NULL, NULL, &scv, context); } if (rv < 0) { goto GT_DONE; } - if (-1 == pycbc_common_vars_wait(&cv, self)) { + if (-1 == PYCBC_TRACE_WRAP(pycbc_common_vars_wait, kwargs, &cv, self)) { goto GT_DONE; } -GT_DONE: + GT_DONE: pycbc_common_vars_finalize(&cv, self); return cv.ret; } #define DECLFUNC(name, operation, mode) \ PyObject *pycbc_Bucket_##name(pycbc_Bucket *self, \ - PyObject *args, PyObject *kwargs) { \ - return set_common(self, args, kwargs, operation, mode); \ + PyObject *args, PyObject *kwargs) {\ + PyObject* result;\ + PYCBC_TRACE_WRAP_TOPLEVEL(result, LCBTRACE_OP_REQUEST_ENCODING, set_common, self->tracer, self, args, kwargs, operation, mode);\ + return result;\ } DECLFUNC(upsert_multi, LCB_SET, PYCBC_ARGOPT_MULTI) diff --git a/src/views.c b/src/views.c index 83127ec6c..9db17ad64 100644 --- a/src/views.c +++ b/src/views.c @@ -320,7 +320,7 @@ ViewResult_fetch(pycbc_ViewResult *self, PyObject *args) } if (!self->base.done) { - pycbc_oputil_wait_common(bucket); + pycbc_oputil_wait_common(bucket, PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(NULL, LCBTRACE_OP_RESPONSE_DECODING, self->py_tracer, "viewresult.fetch")); } if (pycbc_multiresult_maybe_raise(mres)) { @@ -334,11 +334,33 @@ ViewResult_fetch(pycbc_ViewResult *self, PyObject *args) pycbc_oputil_conn_unlock(bucket); return ret; } +static int +ViewResult__init__(PyObject *self_raw, + PyObject *args, PyObject *kwargs) +{ + pycbc_ViewResult* self = (pycbc_ViewResult*)(self_raw); +#ifdef PYCBC_TRACING + PYCBC_DEBUG_LOG("in ur view making ur tracer\n"); + self->py_tracer = kwargs?(pycbc_Tracer_t*)PyDict_GetItemString(kwargs, "tracer"):NULL; + self->own_tracer = 0; + if (self->py_tracer) { + PYCBC_DEBUG_LOG("Got parent tracer %p\n", self->py_tracer); + } +#endif + PYCBC_EXCEPTION_LOG_NOCLEAR; + return 0; +} static void ViewResult_dealloc(pycbc_ViewResult *vres) { Py_CLEAR(vres->rows); +#ifdef PYCBC_TRACING + if (vres->own_tracer && vres->py_tracer) { + lcbtrace_destroy(vres->py_tracer->tracer); + PYCBC_DECREF((PyObject*)vres->py_tracer); + } +#endif Py_TYPE(vres)->tp_base->tp_dealloc((PyObject*)vres); } @@ -366,6 +388,7 @@ static struct PyMethodDef ViewResult_TABLE_methods[] = { { NULL } }; + int pycbc_ViewResultType_init(PyObject **ptr) { @@ -378,6 +401,7 @@ pycbc_ViewResultType_init(PyObject **ptr) p->tp_name = "ViewResult"; p->tp_doc = PyDoc_STR("Low level view result object"); p->tp_new = PyType_GenericNew; + p->tp_init = ViewResult__init__; p->tp_dealloc = (destructor)ViewResult_dealloc; p->tp_basicsize = sizeof(pycbc_ViewResult); p->tp_members = ViewResult_TABLE_members; diff --git a/tests.ini.sample b/tests.ini.sample index 03cffb3a2..121346105 100644 --- a/tests.ini.sample +++ b/tests.ini.sample @@ -15,7 +15,7 @@ bucket_password = ; Set this to true if there is a real cluster available enabled = True - +tracing = off [mock] ; Set this to enabled to use the mock enabled = True From e25378918b3afd3df2522cf095fdb9613bfb89c6 Mon Sep 17 00:00:00 2001 From: Daniel Ancuta <whisller@gmail.com> Date: Sat, 3 Mar 2018 23:29:29 +0000 Subject: [PATCH 142/829] PYCBC-472: Evaluate/merge PR: "TypeError: _assign_kwargs() got an unexpected keyword argument 'distance'" As per title: At the moment when you try to use `GeoDistanceQuery` you will get: ```sh Traceback (most recent call last): File "test.py", line 208, in <module> for city in cities: File "test.py", line 103, in get_by_coordinates GeoDistanceQuery('1km', (lon, lat)), File "/my-path/venv/lib/python3.6/site-packages/couchbase/fulltext.py", line 697, in __init__ _assign_kwargs(self, **kwargs) TypeError: _assign_kwargs() got an unexpected keyword argument 'distance' ``` That fixes this problem. Change-Id: Ic1e2a0dedba9bfd53d749dba6507442f6abbaf52 Reviewed-on: http://review.couchbase.org/90858 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> --- couchbase/fulltext.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py index ea85795be..92bae85a9 100644 --- a/couchbase/fulltext.py +++ b/couchbase/fulltext.py @@ -694,7 +694,7 @@ def __init__(self, distance, location, **kwargs): super(GeoDistanceQuery, self).__init__() kwargs['distance'] = distance kwargs['location'] = location - _assign_kwargs(self, **kwargs) + _assign_kwargs(self, kwargs) location = _genprop(_location_conv, 'location', doc='Location') distance = _genprop_str('distance') @@ -706,7 +706,7 @@ def __init__(self, top_left, bottom_right, **kwargs): super(GeoBoundingBoxQuery, self).__init__() kwargs['top_left'] = top_left kwargs['bottom_right'] = bottom_right - _assign_kwargs(self, **kwargs) + _assign_kwargs(self, kwargs) top_left = _genprop( _location_conv, 'top_left', From d4f59f29696dc4dd1871b5c22718c9df42b4ea98 Mon Sep 17 00:00:00 2001 From: Dave Starling <dave@seenit.io> Date: Thu, 12 Apr 2018 15:29:16 +0100 Subject: [PATCH 143/829] PYCBC-477: Update PrefixQuery to allow unicode characters Updated PrefixQuery to use _genprop_str so that unicode characters do not raise an unnecessary conversion exception. Change-Id: I22d7265ee36d35545fbe7bf1c6d5339ea980ba0d Reviewed-on: http://review.couchbase.org/92632 Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> Tested-by: Build Bot <build@couchbase.com> --- couchbase/fulltext.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/couchbase/fulltext.py b/couchbase/fulltext.py index 92bae85a9..794ababe6 100644 --- a/couchbase/fulltext.py +++ b/couchbase/fulltext.py @@ -668,7 +668,7 @@ class PrefixQuery(_SingleQuery): most useful for type-ahead or lookup queries. """ _TERMPROP = 'prefix' - prefix = _genprop(str, 'prefix', doc='The prefix to match') + prefix = _genprop_str('prefix', doc='The prefix to match') @_with_fields('field') From b0b7805c70029c8827f6ca5d4e587e0c21726d58 Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Thu, 26 Apr 2018 22:00:58 +0100 Subject: [PATCH 144/829] PYCBC-474: Add support for N1QL profile parameter Change-Id: I4092c3028b13fc1b42028fe8c9682a27b6824611 Reviewed-on: http://review.couchbase.org/93379 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/n1ql.py | 26 ++++++++++++++++++++++++++ couchbase/tests/cases/n1ql_t.py | 19 ++++++++++++++++++- 2 files changed, 44 insertions(+), 1 deletion(-) diff --git a/couchbase/n1ql.py b/couchbase/n1ql.py index a0d25627b..ccd9a187d 100644 --- a/couchbase/n1ql.py +++ b/couchbase/n1ql.py @@ -66,6 +66,11 @@ def n1ql_errcode(self): this is not guaranteed in future. """ +PROFILE_OFF = 'off' +PROFILE_PHASES = 'phases' +PROFILE_TIMINGS = 'timings' +VALID_PROFILES = [PROFILE_OFF, PROFILE_TIMINGS, PROFILE_PHASES] + class N1QLQuery(object): def __init__(self, query, *args, **kwargs): """ @@ -334,6 +339,27 @@ def readonly(self): def readonly(self, value): self._body['readonly'] = value + @property + def profile(self): + """ + Get the N1QL profile type. + :return: The profile type. + """ + value = self._body.get('profile', PROFILE_OFF) + return value + + @profile.setter + def profile(self, value): + """ + Sets the N1QL profile type. Must be one of: 'off', 'phases', 'timings' + :param value: The profile type to use. + :return: + """ + if value not in VALID_PROFILES: + raise TypeError('Profile option must be one of: ' + ', '.join(VALID_PROFILES)) + + self._body['profile'] = value + def __repr__(self): return ('<{cls} stmt={stmt} at {oid}>'.format( cls=self.__class__.__name__, diff --git a/couchbase/tests/cases/n1ql_t.py b/couchbase/tests/cases/n1ql_t.py index bc5e8cb4d..bba66a660 100644 --- a/couchbase/tests/cases/n1ql_t.py +++ b/couchbase/tests/cases/n1ql_t.py @@ -34,4 +34,21 @@ def test_meta(self): q = self.cb.n1ql_query('SELECT mockrow') self.assertRaises(RuntimeError, getattr, q, 'meta') q.execute() - self.assertIsInstance(q.meta, dict) \ No newline at end of file + self.assertIsInstance(q.meta, dict) + + def test_profile(self): + query = N1QLQuery('SELECT 1') + + # default should be 'off' + self.assertEqual('off', query.profile) + + # test setting each possible value + query.profile = 'phases' + self.assertEqual('phases', query.profile) + query.profile = 'timings' + self.assertEqual('timings', query.profile) + query.profile = 'off' + self.assertEqual('off', query.profile) + + # should raise error for unknown profile + self.assertRaises(TypeError, query.profile, 'unknown') From 7f9322386726ef142a2096c74fa5c9ecc5543aef Mon Sep 17 00:00:00 2001 From: Mike Goldsmith <goldsmith.mike@gmail.com> Date: Thu, 29 Mar 2018 17:09:28 +0100 Subject: [PATCH 145/829] PYCBC-468: Support for Field Level Encryption Motivation ---------- Customers may wish to use encrypt on sensitive fields in JSON documents. Changes ------- - Allow the registration and use of CryptoProviders against a given bucket. - Allow direct pass through of libcouchbase Crypto Providers via CTypesCryptoProvider. - Allow Python Crypto Providers via extension of CryptoProvider. Results ------- Encryption providers in both C and Python can be used to encrypt, sign, verify and decrypt fields in documents. Change-Id: I269a9aa9b30b5cad88b9b2c7b14bcce2ea1a9190 Reviewed-on: http://review.couchbase.org/92029 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- CMakeLists.txt | 297 ++++++++++++++++ couchbase/__init__.py | 2 +- couchbase/bucket.py | 67 ++++ couchbase/crypto.py | 203 +++++++++++ couchbase/tests/cases/crypto_t.py | 188 ++++++++++ setup.py | 3 +- src/bucket.c | 155 ++++++++ src/constants.c | 5 + src/crypto.c | 566 ++++++++++++++++++++++++++++++ src/ext.c | 1 + src/pycbc.h | 57 +++ 11 files changed, 1542 insertions(+), 2 deletions(-) create mode 100644 CMakeLists.txt create mode 100644 couchbase/crypto.py create mode 100644 couchbase/tests/cases/crypto_t.py create mode 100644 src/crypto.c diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 000000000..52505fe2f --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,297 @@ +cmake_minimum_required(VERSION 3.8) +project(couchbase_python_client_2_3_1) +add_definitions("-DPYCBC_TRACING_ENABLE -DPYCBC_DEBUG") +set(LCB_ROOT ../libcouchbase) +set(LCB_ROOT_SRC ${LCB_ROOT}/src) +set(LCB_ROOT_CONTRIB ${LCB_ROOT}/contrib) + +set(PYTHON_INCLUDE_DIR /Users/ellis_breen/root/virtualenvs/3.6/default/include/python3.6m/) +set(PYTHON_INCLUDE ${PYTHON_INCLUDE_DIR}) +set(LCB_IO ${LCB_ROOT}/src/lcbio) +set(OT_ROOT thirdparty/opentracing-cpp) +set(LCB_CJSON ${LCB_ROOT}/contrib/cJSON) +set(TP_CJSON thirdparty/cJSON) +set(LCB_TRACING_DIR ${LCB_ROOT}/src/tracing) +aux_source_directory(${LCB_ROOT}/src LCB_CORE) +aux_source_directory(${LCB_ROOT}/src/http HTTP) +aux_source_directory(${LCB_ROOT}/include/memcached/ MCD_INC) +aux_source_directory(${LCB_ROOT}/include/libcouchbase/ LCB_INC) +aux_source_directory(${LCB_ROOT}/contrib/lcb-jsoncpp LCB_JSON) +aux_source_directory(${LCB_CJSON} LCB_CJSON_SRC ) +aux_source_directory(${LCB_ROOT}/src/ssl LCB_SSL) +aux_source_directory(${LCB_ROOT}/src/mcserver LCB_MCSERVER) +aux_source_directory(${LCB_IO} LCB_IO_A) +aux_source_directory(${LCB_ROOT_SRC}/sasl LCB_SASL) +aux_source_directory(${LCB_ROOT_SRC}/tracing LCB_TRACING_DIR) + +aux_source_directory(LCB_VBUCKET ${LCB_ROOT}/src/vbucket) +aux_source_directory(LCB_SASL ${LCB_ROOT}/contrib/cbsasl) +aux_source_directory(LCB_CLI ${LCB_ROOT}/contrib/cliopts) +aux_source_directory(LCB_ROOT_A ${LCB_ROOT_SRC}) + +include(ExternalProject) +add_definitions(-DLCB_TRACING=TRUE + -DPYCBC_TRACING_ENABLE=TRUE + -DPYCBC_DEBUG=TRUE) + +ExternalProject_Add (cJSON +PREFIX ${OT_ROOT} +GIT_REPOSITORY "https://github.com/DaveGamble/cJSON.git" + +BINARY_DIR ${TP_CJSON} +SOURCE_DIR ${TP_CJSON} +INSTALL_DIR ${TP_CJSON}/build +) + +ExternalProject_Add (opentracing-cpp + PREFIX ${OT_ROOT} + GIT_REPOSITORY "https://github.com/griels/opentracing-cpp.git" + + GIT_TAG "07779476cd362629fabce02a6a952d0fc478b5c1" + BINARY_DIR ${OT_ROOT} + SOURCE_DIR ${OT_ROOT} + INSTALL_DIR ${OT_ROOT}/build + ) + +include(FindPythonLibs) + +include_directories( ${LCB_ROOT_SRC} + ${LCB_ROOT}/include + ${MCD_INC} + ${HTTP} + ${PYTHON_INCLUDE_DIR} + ${LCB_JSON} + ${LCB_SSL} + ${LCB_MCSERVER} + ${LCB_IO} + ${LCB_VBUCKET} + ${LCB_SASL} + ${LCB_CLI} + ${LCB_ROOT_SRC} + ${LCB_SASL} + ${LCB_SASL}/src + ${LCB_ROOT_CONTRIB} + ${LCB_TRACING_DIR} + ${LCB_CJSON} + ${PYTHON_INCLUDE_DIRS} + ) + +ExternalProject_Add(libcouchbase + DOWNLOAD_COMMAND "" + SOURCE_DIR ${LCB_ROOT} + ) +set(CMAKE_CXX_STANDARD 11) +set(EXTRA_SOURCE_DIRS + ${LCB_ROOT}/src + ${LCB_ROOT}/include/memcached + ${LCB_ROOT}/include/libcouchbase + ${LCB_ROOT}/src/http + ${LCB_ROOT}/src/ssl + + ${LCB_MCSERVER} + ${LCB_IO} + + ${LCB_VBUCKET} + ${LCB_SASL} + ${LCB_CLI} + ${LCB_ROOT_SRC} + ${LCB_ROOT_A} + ) + +set(SOURCE + ${SOURCE} + ${LCB_CORE} + ${LCB_INC} + ${MCD_INC} + ${HTTP} + ${LCB_SSL} + ${LCB_ROOT}/src/ssl + + ${LCB_MCSERVER} + ${LCB_IO} + + ${LCB_VBUCKET} + ${LCB_SASL} + ${LCB_CLI} + ${LCB_ROOT_SRC} + ) +link_directories(${LCB_ROOT}/lib) +link_libraries(${LCB_ROOT}/../lib/ + ) +add_executable(couchbase_python_client_2_3_1 + ${OT_ROOT}/src + ${LCB_CORE} + ${LCB_INC} + ${MCD_INC} + ${LCB_JSON} + ${LCB_SSL} + ${LCB_CJSON_SRC} + ${LCB_VBUCKET} + ${LCB_SASL} + ${LCB_CLI} + acouchbase/tests/asyncio_tests.py + acouchbase/tests/fixtures.py + acouchbase/tests/py34only.py + acouchbase/tests/py35only.py + acouchbase/__init__.py + acouchbase/asyncio_iops.py + acouchbase/bucket.py + acouchbase/py34only/iterator.py + couchbase/async/__init__.py + couchbase/async/bucket.py + couchbase/async/events.py + couchbase/async/n1ql.py + couchbase/async/rowsbase.py + couchbase/async/view.py + couchbase/iops/__init__.py + couchbase/iops/base.py + couchbase/iops/select.py + couchbase/tests/admin/__init__.py + couchbase/tests/cases/__init__.py + couchbase/tests/cases/admin_t.py + couchbase/tests/cases/append_t.py + couchbase/tests/cases/arithmetic_t.py + couchbase/tests/cases/badargs_t.py + couchbase/tests/cases/cbftstrings_t.py + couchbase/tests/cases/cluster_t.py + couchbase/tests/cases/connection_t.py + couchbase/tests/cases/connstr_t.py + couchbase/tests/cases/datastructures_t.py + couchbase/tests/cases/delete_t.py + couchbase/tests/cases/design_t.py + couchbase/tests/cases/dupkeys_t.py + couchbase/tests/cases/empty_key_t.py + couchbase/tests/cases/encodings_t.py + couchbase/tests/cases/endure_t.py + couchbase/tests/cases/enh_err_t.py + couchbase/tests/cases/excextra_t.py + couchbase/tests/cases/flush_t.py + couchbase/tests/cases/format_t.py + couchbase/tests/cases/get_t.py + couchbase/tests/cases/diag_t.py + couchbase/tests/cases/iops_t.py + couchbase/tests/cases/itertypes_t.py + couchbase/tests/cases/itmops_t.py + couchbase/tests/cases/ixmgmt_t.py + couchbase/tests/cases/lock_t.py + couchbase/tests/cases/lockmode_t.py + couchbase/tests/cases/misc_t.py + couchbase/tests/cases/mutationtokens_t.py + couchbase/tests/cases/n1ql_t.py + couchbase/tests/cases/n1qlstrings_t.py + couchbase/tests/cases/observe_t.py + couchbase/tests/cases/pipeline_t.py + couchbase/tests/cases/results_t.py + couchbase/tests/cases/rget_t.py + couchbase/tests/cases/set_converters_t.py + couchbase/tests/cases/set_t.py + couchbase/tests/cases/spatial_t.py + couchbase/tests/cases/stats_t.py + couchbase/tests/cases/subdoc_t.py + couchbase/tests/cases/touch_t.py + couchbase/tests/cases/transcoder_t.py + couchbase/tests/cases/verinfo_t.py + couchbase/tests/cases/view_iterator_t.py + couchbase/tests/cases/view_t.py + couchbase/tests/cases/viewstrings_t.py + couchbase/tests/cases/xattr_t.py + couchbase/tests/__init__.py + couchbase/tests/base.py + couchbase/tests/importer.py + couchbase/tests/test_sync.py + couchbase/views/__init__.py + couchbase/views/iterator.py + couchbase/views/params.py + couchbase/__init__.py + couchbase/_bootstrap.py + couchbase/_ixmgmt.py + couchbase/_logutil.py + couchbase/_pyport.py + couchbase/_version.py + couchbase/admin.py + couchbase/auth_domain.py + couchbase/bucket.py + couchbase/bucketmanager.py + couchbase/cbas.py + couchbase/cluster.py + couchbase/connection.py + couchbase/connstr.py + couchbase/exceptions.py + couchbase/experimental.py + couchbase/fulltext.py + couchbase/items.py + couchbase/mockserver.py + couchbase/mutation_state.py + couchbase/n1ql.py + couchbase/priv_constants.py + couchbase/result.py + couchbase/subdocument.py + couchbase/transcoder.py + couchbase/user_constants.py + examples/abench.py + examples/basic.py + examples/bench.py + examples/connection-pool.py + examples/docloader.py + examples/gbench.py + examples/iops_demo.py + examples/item.py + examples/reversed_keys.py + examples/search_keywords.py + examples/twist-sample.py + examples/txbasic.py + examples/txbench.py + examples/txview.py + gcouchbase/tests/__init__.py + gcouchbase/tests/test_api.py + gcouchbase/tests/test_gevent.py + gcouchbase/__init__.py + gcouchbase/bucket.py + gcouchbase/connection.py + gcouchbase/iops_gevent0x.py + gcouchbase/iops_gevent10.py + src/bucket.c + src/callbacks.c + src/cntl.c + src/connevents.c + src/constants.c + src/convert.c + src/crypto.c + src/counter.c + src/ctranscoder.c + src/exceptions.c + src/ext.c + src/fts.c + src/get.c + src/htresult.c + src/http.c + src/iops.c + src/iops.h + src/ixmgmt.c + src/miscops.c + src/mresdict.h + src/multiresult.c + src/n1ql.c + src/observe.c + src/opresult.c + src/oputil.c + src/oputil.h + src/pipeline.c + src/pycbc.h + src/result.c + src/store.c + src/typeutil.c + src/views.c + txcouchbase/tests/__init__.py + txcouchbase/tests/base.py + txcouchbase/tests/test_n1ql.py + txcouchbase/tests/test_ops.py + txcouchbase/tests/test_txconn.py + txcouchbase/tests/test_views.py + txcouchbase/__init__.py + txcouchbase/bucket.py + txcouchbase/connection.py + txcouchbase/iops.py + couchbase_version.py + setup.py) + diff --git a/couchbase/__init__.py b/couchbase/__init__.py index 06d7aee7d..ee333eaae 100644 --- a/couchbase/__init__.py +++ b/couchbase/__init__.py @@ -27,7 +27,7 @@ from couchbase.user_constants import * import couchbase._libcouchbase as _LCB - +from couchbase.crypto import * try: from couchbase._version import __version__ diff --git a/couchbase/bucket.py b/couchbase/bucket.py index e03b1e0d0..20c297ae4 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -2239,3 +2239,70 @@ def get_attribute(self, key, attrname): def set_attribute(self, key, attrname): pass + + def register_crypto_provider(self, name, provider): + """ + Registers the crypto provider used to encrypt and decrypt document fields. + :param name: The name of the provider. + :param provider: The provider implementation. // reference LCB type? + """ + _Base.register_crypto_provider(self, name, provider) + + def unregister_crypto_provider(self, name): + """ + Unregisters the crypto provider used to encrypt and decrypt document fields. + :param name: The name of the provider. + """ + _Base.unregister_crypto_provider(self, name) + + def encrypt_fields(self, document, fieldspec, prefix): + """ + Encrypt a document using the registered encryption providers. + :param document: The document body. + :param fieldspec: A list of field specifications, each of which is + a dictionary as follows: + { + 'alg' : registered algorithm name, + 'kid' : key id to use to encrypt with, + 'name' : field name + } + :param prefix: Prefix for encrypted field names. Default is None. + :return: Encrypted document. + """ + json_encoded = json.dumps(document) + encrypted_string = _Base.encrypt_fields(self, json_encoded, fieldspec, prefix) + if not encrypted_string: + raise couchbase.exceptions.CouchbaseError("Encryption failed") + return json.loads(encrypted_string) + + def decrypt_fields_real(self, document, *args): + json_decoded = json.dumps(document) + decrypted_string = _Base.decrypt_fields(self, json_decoded, *args) + if not decrypted_string: + raise couchbase.exceptions.CouchbaseError("Decryption failed") + return json.loads(decrypted_string) + + if _LCB.PYCBC_CRYPTO_VERSION<1: + def decrypt_fields(self, document, prefix): + """ + Decrypts a document using the registered encryption providers. + :param document: The document body. + :param prefix: Prefix for encrypted field names. Default is None. + :return: + """ + return self.decrypt_fields_real(document, prefix) + else: + def decrypt_fields(self, document, fieldspec, prefix): + """ + Decrypts a document using the registered encryption providers. + :param document: The document body. + :param fieldspec: A list of field specifications, each of which is + a dictionary as follows: + { + 'alg' : registered algorithm name, + 'name' : field name + } + :param prefix: Prefix for encrypted field names. Default is None. + :return: + """ + return self.decrypt_fields_real(document, fieldspec, prefix) diff --git a/couchbase/crypto.py b/couchbase/crypto.py new file mode 100644 index 000000000..8dda4af59 --- /dev/null +++ b/couchbase/crypto.py @@ -0,0 +1,203 @@ +# +# Copyright 2018, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import platform + +from couchbase._libcouchbase import CryptoProvider +import ctypes.util +import ctypes +import logging +import os.path +import abc +import couchbase.exceptions +import couchbase._bootstrap +import couchbase._libcouchbase as _LCB + + +class InMemoryKeyStore(object): + + def __init__(self): + self.keys = {} + + def get_key(self, keyName): + return self.keys.get(keyName) or None + + def set_key(self, keyName, key): + self.keys[keyName] = key + + +class PythonCryptoProvider_V0(CryptoProvider): + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def load_key(self, type, keyid): + """ + Load a decryption/encryption key, as selected by the type + :param type: LCBCRYPTO_KEY_ENCRYPT or LCBCRYPTO_KEY_DECRYPT + :param keyid: Key ID to retrieve + """ + pass + + @abc.abstractmethod + def generate_iv(self): + """ + Return an IV for use with decryption/encryption. + """ + pass + + @abc.abstractmethod + def sign(self, inputs): + """ + Sign the inputs provided. + :param inputs: List of strings to sign against + :param signature: Signature + """ + pass + + @abc.abstractmethod + def verify_signature(self, inputs, signature): + """ + Verify the inputs provided against the signature given. + :param inputs: List of strings to verify signature against + :param signature: Signature + """ + pass + + @abc.abstractmethod + def encrypt(self, input, key, iv): + """ + Encrypt the input string using the key and iv + :param input: input string + :param key: actual encryption key + :param iv: iv for encryption + """ + pass + + @abc.abstractmethod + def decrypt(self, input, key, iv) : + """ + Encrypt the input string using the key and iv + :param input: input string + :param key: actual decryption key + :param iv: iv for decryption + """ + pass + + +class PythonCryptoProvider_V1(CryptoProvider): + __metaclass__ = abc.ABCMeta + + @abc.abstractmethod + def generate_iv(self): + """ + Return an IV for use with decryption/encryption. + """ + pass + + @abc.abstractmethod + def sign(self, inputs): + """ + Sign the inputs provided. + :param inputs: List of strings to sign against + :param signature: Signature + """ + pass + + @abc.abstractmethod + def verify_signature(self, inputs, signature): + """ + Verify the inputs provided against the signature given. + :param inputs: List of strings to verify signature against + :param signature: Signature + """ + pass + + + @abc.abstractmethod + def encrypt(self, input, iv): + """ + Encrypt the input string using the iv + :param input: input string + :param iv: iv for encryption + """ + pass + + @abc.abstractmethod + def decrypt(self, input, iv) : + """ + Decrypt the input string using the iv + :param input: input string + :param iv: iv for decryption + """ + pass + + +if _LCB.PYCBC_CRYPTO_VERSION == 0: + PythonCryptoProvider=PythonCryptoProvider_V0 +else: + PythonCryptoProvider=PythonCryptoProvider_V1 + + +class CTypesCryptoProvider(CryptoProvider): + postfix = {"Linux": "so", "Darwin": "dylib", "Windows": "dll"}[platform.system()] + + def key_loader(self, direction, key_id): + key = self.keystore.get_key(key_id) + logging.error("loading key from {}: {}".format(key_id, key)) + return key + + def __init__(self, providername, providerfunc, initfuncs=[], providerpaths=[], keystore=None, **kwargs): + """ + Initialise CTypesCryptoProvider + :param providername: library name as used by ctypes.util.find_library + :param providerfunc: name of function which returns C Provider + :param initfuncs: list of any initialisation functions required, in order + :param providerpaths: list of functions that take providername as argument and return a path to try + loading the library from + :param keystore: optional keystore + """ + self.keystore = keystore + providerpaths.append(ctypes.util.find_library) + crypto_dll = None + for library in providerpaths: + logging.debug("Trying " + str(library)) + libname = os.path.abspath(library(providername)) + logging.debug("Got (providername)=" + libname) + try: + crypto_dll = ctypes.cdll.LoadLibrary(libname) + break + except Exception as e: + logging.error(str(e)) + raise couchbase.exceptions.NotFoundError("Couldn't load provider shared library") + if crypto_dll: + logging.debug("Crypto provider DLL=[" + repr(crypto_dll)) + try: + for func in initfuncs: + getattr(crypto_dll, func)() + provfunc = getattr(crypto_dll, providerfunc) + logging.debug("Crypto provider Func=" + str(provfunc)) + provfunc.restype = ctypes.c_void_p + provider = provfunc() + logging.debug("Crypto provider=" + format(provider, '02x')) + except Exception as e: + logging.debug("Problem executing Crypto provider: " + str(e)) + raise couchbase.exceptions.NotFoundError("Couldn't initialise crypto provider shared library") + + super(CTypesCryptoProvider, self).__init__(provider=provider) + if self.keystore: + setattr(self, 'load_key', self.key_loader) + else: + raise couchbase.exceptions.NotFoundError("Couldn't find provider shared library") diff --git a/couchbase/tests/cases/crypto_t.py b/couchbase/tests/cases/crypto_t.py new file mode 100644 index 000000000..f9ecb973c --- /dev/null +++ b/couchbase/tests/cases/crypto_t.py @@ -0,0 +1,188 @@ +# +# Copyright 2018, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from unittest import SkipTest + +from couchbase.tests.base import ConnectionTestCase +import codecs +import couchbase.exceptions +from couchbase.crypto import CTypesCryptoProvider, InMemoryKeyStore, PythonCryptoProvider +import os +import logging +import couchbase._libcouchbase as _LCB + + +class AESCryptoProvider(CTypesCryptoProvider): + @staticmethod + def example_provider(x): + return os.path.join(os.getcwd(), "..", "libcouchbase", "example", "crypto", + x + "." + CTypesCryptoProvider.postfix) + + def __init__(self, keystore): + super(AESCryptoProvider, self).__init__("openssl_symmetric_provider", "osp_create", + initfuncs=['osp_initialize'], + providerpaths=[AESCryptoProvider.example_provider], + keystore=keystore) + + +class ROT13PythonCryptoProvider(PythonCryptoProvider): + + def __init__(self, keystore): + super(ROT13PythonCryptoProvider,self).__init__() + self.keystore = keystore + + def load_key(self, type, keyid): + return self.keystore.get_key(keyid) + + def generate_iv(self): + return "wibble" + + def sign(self, inputs): + return "gronk" + + def verify_signature(self, inputs, signature): + return signature == b"gronk" + + def encrypt_real(self, input, iv): + if codecs.decode(input,'utf-8').endswith('\n'): + raise couchbase.exceptions.InternalError("passing back string containing newline") + logging.debug("encrypting with input={} iv={}".format(repr(input),repr(iv))) + encoded = codecs.encode(codecs.decode(input,'utf-8'), 'rot_13') + return encoded + + def decrypt_real(self, input, iv): + encoded = codecs.encode(codecs.decode(input,'utf-8'), 'rot_13') + return encoded + + if _LCB.PYCBC_CRYPTO_VERSION<1: + def encrypt(self, input, key, iv): + return self.encrypt_real(input, iv) + + def decrypt(self, input, key, iv): + return self.decrypt_real(input, iv) + + else: + def encrypt(self, input, iv): + return self.encrypt_real(input, iv) + + def decrypt(self, input, iv): + return self.decrypt_real(input, iv) + + + + +class FieldEncryptionTests(ConnectionTestCase): + + def test_keystore_returns_correct_value(self): + keystore = InMemoryKeyStore() + keystore.set_key('key', 'my-secret') + key = keystore.get_key('key') + self.assertEqual('my-secret', key) + + def test_aes_c_encryption(self): + raise SkipTest("C Crypto module not found, skipping") + # create key store & encryption provider + keystore = InMemoryKeyStore() + keystore.set_key('key', 'my-secret') + try: + provider = AESCryptoProvider(keystore=keystore) + except couchbase.exceptions.NotFoundError: + raise SkipTest("C Crypto module not found, skipping") + + document = {'sensitive': 'secret'} + # register encryption provider with LCB + self.cb.register_crypto_provider('aes256', provider) + + # encrypt document + + document = self.cb.encrypt_fields(document, [{'alg': 'aes256', 'name': 'sensitive', 'kid': 'key'}], "crypto_") + self.assertEqual(document, {'crypto_sensitive': {'alg': 'aes256', 'ciphertext': 'LYOFcKPUcQiFhbyYVShvrg==', + 'iv': 'ZedmvjWy0lIrLn6OmQmNqQ==', 'kid': 'key', + 'sig': 'zUJOrVxGlyNXrOhAM+PAvDQ3frXFpvEyHZuQLw9ym9U='}}) + # write document to cluster + key = self.gen_key('crypto-test') + self.cb.upsert(key, document) + + # # read document (unencrypted) + rv = self.cb.get(key) + decrypted_document = self.cb.decrypt_fields(rv.value, "crypto_") + # verify encrypted field can be read + self.assertEqual(decrypted_document, {'sensitive': 'secret'}) + + # remove encryption provider + self.cb.unregister_crypto_provider('aes256') + + # read document (encrypted) + rv = self.cb.get(key) + if "sensitive" in rv.value.keys(): + self.assertNotEqual(rv.value["sensitive"], "secret") + elif "crypto_sensitive" in rv.value.keys(): + self.assertNotEqual(rv.value["crypto_sensitive"], "secret") + + def test_pure_python_encryption(self): + # create key store & encryption provider + fieldspec = {'alg': 'aes256', 'name': 'sensitive'} + if _LCB.PYCBC_CRYPTO_VERSION<1: + fieldspec['kid'] = 'key' + + keystore = InMemoryKeyStore() + keystore.set_key('key', 'my-secret') + provider = ROT13PythonCryptoProvider(keystore) + document = {'sensitive': 'secret'} + # register encryption provider with LCB + self.cb.register_crypto_provider('aes256', provider) + # encrypt document + document = self.cb.encrypt_fields(document, [fieldspec], "crypto_") + expected = {'ciphertext': 'ImZycGVyZyI=', 'iv': 'd2liYmxl', 'sig': 'Z3Jvbms='} + orig_fields = {'alg': 'aes256', 'kid': 'key'} + expected_orig = {k:orig_fields[k] for k in orig_fields if k in fieldspec} + expected.update(expected_orig) + + self.assertEqual(document, {'crypto_sensitive': expected}) + # write document to cluster + key = self.gen_key('crypto-test') + self.cb.upsert(key, document) + + # # read document (unencrypted) + rv = self.cb.get(key) + decrypt_args = [] + if _LCB.PYCBC_CRYPTO_VERSION>0: + decrypt_args.append([fieldspec]) + decrypt_args.append("crypto_") + decrypted_document = self.cb.decrypt_fields(rv.value, *decrypt_args) + # verify encrypted field can be read + self.assertEqual(decrypted_document, {'sensitive': 'secret'}) + + # remove encryption provider + self.cb.unregister_crypto_provider('aes256') + + # read document (encrypted) + rv = self.cb.get(key) + if "sensitive" in rv.value.keys(): + self.assertNotEqual(rv.value["sensitive"], "secret") + elif "crypto_sensitive" in rv.value.keys(): + self.assertNotEqual(rv.value["crypto_sensitive"], "secret") + + self.cb.register_crypto_provider('aes256', provider) + + emptystring = {'sensitive': ''} + document = self.cb.encrypt_fields(emptystring, [fieldspec], "crypto_") + self.assertEqual(self.cb.decrypt_fields(document, *decrypt_args), emptystring) + + newlineonly = {'sensitive': '\n'} + document = self.cb.encrypt_fields(newlineonly, [fieldspec], "crypto_") + self.assertEqual(self.cb.decrypt_fields(document, *decrypt_args), newlineonly) + diff --git a/setup.py b/setup.py index 6792a4ea0..1bbbde88d 100644 --- a/setup.py +++ b/setup.py @@ -67,7 +67,7 @@ extoptions['libraries'] = ['libcouchbase'] ## Enable these lines for debug builds if os.environ.get('PYCBC_DEBUG'): - extoptions['extra_compile_args'] += ['/Zi'] + extoptions['extra_compile_args'] += ['/Zi','/DEBUG'] extoptions['extra_link_args'] += ['/DEBUG'] extoptions['library_dirs'] = [os.path.join(lcb_root, 'lib')] extoptions['include_dirs'] = [os.path.join(lcb_root, 'include')] @@ -95,6 +95,7 @@ 'http', 'htresult', 'ctranscoder', + 'crypto', 'observe', 'iops', 'connevents', diff --git a/src/bucket.c b/src/bucket.c index 0b9d64b29..00d84a99a 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -103,6 +103,141 @@ Bucket_get_transcoder(pycbc_Bucket *self, void *unused) return Py_None; } +static PyObject* +Bucket_register_crypto_provider(pycbc_Bucket *self, PyObject *args) { + char *name = NULL; + pycbc_CryptoProvider *provider = NULL; + + if (!PyArg_ParseTuple(args, "sO", &name, &provider)) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + + if (!provider || !provider->provider) + { + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, LCB_EINVAL, "Invalid provider"); + return NULL; + } + + lcbcrypto_register(self->instance, name, provider->provider); + + return Py_None; +} + +static PyObject* +Bucket_unregister_crypto_provider(pycbc_Bucket *self, PyObject *args) +{ + char *name = NULL; + + if (!PyArg_ParseTuple(args, "s", &name)) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + lcbcrypto_unregister(self->instance, name); + + return Py_None; +} +#if PY_MAJOR_VERSION<3 +const char* pycbc_cstrn(PyObject* object, Py_ssize_t *length) +{ + const char* buffer = NULL; + PyString_AsStringAndSize(object, &buffer, length); + return buffer; +} +#endif + +const char *pycbc_dict_cstr(PyObject *dp, char *key) { + PyObject *item = PyDict_GetItemString(dp, key); + const char* result = ""; + if (item && PyObject_IsTrue(item)) { + result=PYCBC_CSTR(item); + } + return result; +} + +size_t pycbc_populate_fieldspec(lcbcrypto_FIELDSPEC **fields, PyObject *fieldspec) { + size_t i = 0; + size_t size = (size_t) PyList_Size(fieldspec); + *fields = calloc(size, sizeof(lcbcrypto_FIELDSPEC)); + for (i = 0; i < size ; ++i) { + PyObject *dict = PyList_GetItem(fieldspec, i); + (*fields)[i].alg = pycbc_dict_cstr(dict, "alg"); + (*fields)[i].kid = (PYCBC_CRYPTO_VERSION < 1) ? pycbc_dict_cstr(dict, "kid") : NULL; + (*fields)[i].name = pycbc_dict_cstr(dict, "name");; + } + return size; +} + +static PyObject * +Bucket_encrypt_fields(pycbc_Bucket *self, PyObject *args) +{ + lcbcrypto_CMDENCRYPT cmd = {0}; + PyObject* fieldspec=NULL; + int res =0; + if (!PyArg_ParseTuple(args, "s#Os", &cmd.doc, &cmd.ndoc, &fieldspec, &(cmd.prefix))) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + + ; + if (!PyErr_Occurred()) { + cmd.nfields = pycbc_populate_fieldspec(&cmd.fields, fieldspec); +#if PYCBC_CRYPTO_VERSION > 0 + res = lcbcrypto_encrypt_fields(self->instance, &cmd); +#else + res = lcbcrypto_encrypt_document(self->instance, &cmd); +#endif + + } + if (PyErr_Occurred()) { + return NULL; + } + if (!res) { + return pycbc_SimpleStringN(cmd.out, cmd.nout); + } + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, LCB_GENERIC_TMPERR, "Internal error while encrypting"); + return NULL; +} + + +static PyObject * +Bucket_decrypt_fields(pycbc_Bucket *self, PyObject *args) +{ + lcbcrypto_CMDDECRYPT cmd = { 0 }; + int res =0; + PyObject* fieldspec = NULL; + if (!PyArg_ParseTuple(args, "s#" +#if PYCBC_CRYPTO_VERSION >0 + "O" +#endif + "s", &cmd.doc, &cmd.ndoc, +#if PYCBC_CRYPTO_VERSION > 0 + &fieldspec, +#endif + &cmd.prefix)) { + PYCBC_EXCTHROW_ARGS(); + return NULL; + } + + if (!PyErr_Occurred()) { +#if PYCBC_CRYPTO_VERSION > 0 + cmd.nfields = pycbc_populate_fieldspec(&cmd.fields, fieldspec); + res = lcbcrypto_decrypt_fields(self->instance, &cmd); +#else + res = lcbcrypto_decrypt_document(self->instance, &cmd); +#endif + } + + if (PyErr_Occurred()) { + return NULL; + } + if (!res) { + return pycbc_SimpleStringN(cmd.out, cmd.nout); + } + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, LCB_GENERIC_TMPERR, "Internal error while decrypting"); + return NULL; +} + static PyObject * Bucket_server_nodes(pycbc_Bucket *self, void *unused) { @@ -618,6 +753,26 @@ static PyMethodDef Bucket_TABLE_methods[] = { METH_VARARGS, PyDoc_STR("Add additional user/pasword information") }, + { "register_crypto_provider", + (PyCFunction)Bucket_register_crypto_provider, + METH_VARARGS, + PyDoc_STR("Registers crypto provider used to encrypt/decrypt document fields") + }, + { "unregister_crypto_provider", + (PyCFunction)Bucket_unregister_crypto_provider, + METH_VARARGS, + PyDoc_STR("Unregisters crypto provider used to encrypt/decrypt document fields") + }, + { "encrypt_fields", + (PyCFunction)Bucket_encrypt_fields, + METH_VARARGS, + PyDoc_STR("Encrypts a set of fields using the registered providers") + }, + { "decrypt_fields", + (PyCFunction)Bucket_decrypt_fields, + METH_VARARGS, + PyDoc_STR("Decrypts a set of fields using the registered providers") + }, { NULL, NULL, 0, NULL } }; diff --git a/src/constants.c b/src/constants.c index 2652ed4cc..a78b7bc7b 100644 --- a/src/constants.c +++ b/src/constants.c @@ -211,6 +211,11 @@ do_all_constants(PyObject *module, pycbc_constant_handler handler) ADD_MACRO(LCB_BTYPE_EPHEMERAL); ADD_MACRO(LCB_BTYPE_MEMCACHED); + /* Encryption options */ + ADD_MACRO(LCBCRYPTO_KEY_ENCRYPT); + ADD_MACRO(LCBCRYPTO_KEY_DECRYPT); + LCB_CONSTANT(VERSION); + ADD_MACRO(PYCBC_CRYPTO_VERSION); PyModule_AddObject(module, "COMPRESSION", setup_compression_map(module, handler)); #ifdef LCB_N1XSPEC_F_DEFER diff --git a/src/crypto.c b/src/crypto.c new file mode 100644 index 000000000..a548f96fc --- /dev/null +++ b/src/crypto.c @@ -0,0 +1,566 @@ +/** + * Copyright 2018 Couchbase, Inc. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + **/ + +#include "pycbc.h" +#include <libcouchbase/crypto.h> +#include <string.h> + +static int +CryptoProvider__init(pycbc_CryptoProvider *self, + PyObject *args, + PyObject *kwargs); + +static void +CryptoProvider_dtor(pycbc_CryptoProvider *self); + +static PyTypeObject CryptoProviderType = { + PYCBC_POBJ_HEAD_INIT(NULL) + 0 +}; + +lcb_error_t pycbc_cstrndup(uint8_t **key, size_t *key_len, PyObject *result) { + uint8_t* data = NULL; + lcb_error_t lcb_result = LCB_EINTERNAL; +#if PY_MAJOR_VERSION>=3 + if (PyObject_IsInstance(result, (PyObject*)&PyBytes_Type)){ + lcb_result = (!PyBytes_AsStringAndSize(result, (char**)&data, (Py_ssize_t* )key_len))?LCB_SUCCESS:LCB_EINTERNAL; + } + else +#endif + { + data = (uint8_t*)PYCBC_CSTRN(result, key_len); + lcb_result = data?LCB_SUCCESS:LCB_EINTERNAL; + } + if (!lcb_result && data && *key_len){ + *key = (uint8_t *) calloc(1,*key_len); + memcpy(*key,data,*key_len);} + + return lcb_result; +} + +lcb_error_t pycbc_is_true(uint8_t *key, const size_t key_len, PyObject *result) { + return (result && PyObject_IsTrue(result) && !PyErr_Occurred())? + LCB_SUCCESS: + LCB_EINTERNAL; +} + +#define PYCBC_SIZE(X) (sizeof(X)/sizeof(char)) + +void pycbc_report_method_exception(lcb_error_t errflags, const char* fmt, ...) { + char buffer[1000]; + va_list args; + va_start(args,fmt); + vsnprintf(buffer, PYCBC_SIZE(buffer), fmt,args); + va_end(args); + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, errflags, buffer); +} + +static PyObject* pycbc_retrieve_method(lcbcrypto_PROVIDER* provider, const char* method_name){ + PyObject* method = PyObject_GetAttrString((PyObject *) provider->cookie, method_name); + if (!method || !PyObject_IsTrue(method)) { + pycbc_report_method_exception(LCB_GENERIC_TMPERR, "Method %s does not exist", method_name); + return NULL; + } + return method; +} +#define PYCBC_CRYPTO_GET_METHOD(PROVIDER,METHOD) pycbc_retrieve_method(PROVIDER,#METHOD) + +PyObject* pycbc_python_proxy(PyObject *method, PyObject *args, const char* method_name) { + PyObject* result = NULL; + pycbc_assert(method && PyObject_IsTrue(method)); + if (PyErr_Occurred() || !args) + { + return NULL; + }; + result = PyObject_CallObject(method, args); + if (!result || PyErr_Occurred()) { + PyErr_Print(); + pycbc_report_method_exception(LCB_EINTERNAL, "Problem calling method %s", + method_name); + result =NULL; + }; + + return result; +} +typedef struct pycbc_crypto_buf { + const uint8_t *data; + size_t len; +} pycbc_crypto_buf; + +typedef struct pycbc_crypto_buf_array { + pycbc_crypto_buf* buffers; + size_t len; +} pycbc_crypto_buf_array; + +#define PYCBC_GEN_LIST(TYPE)\ +PyObject *pycbc_gen_list_##TYPE(const TYPE* array, size_t len,\ + PyObject *(*converter)(const TYPE *))\ +{\ + PyObject* result = PyList_New(0);\ + size_t i;\ + for (i=0; i<len; ++i)\ + {\ + TYPE sigv = array[i];\ + PyObject *input = converter(&sigv);\ + PyList_Append(result, input);\ + Py_DECREF(input);\ + }\ + return result;\ +}\ + +PYCBC_GEN_LIST(lcbcrypto_SIGV); +PYCBC_GEN_LIST(uint8_t); + +PyObject *pycbc_convert_uint8_t(const pycbc_crypto_buf buf) { +#if PY_MAJOR_VERSION >= 3 + return PyBytes_FromStringAndSize((const char *) buf.data, buf.len); +#else + return PyString_FromStringAndSize(buf.data, buf.len); +#endif +} + +PyObject *pycbc_convert_lcbcrypto_SIGV(const lcbcrypto_SIGV *sigv) { + const pycbc_crypto_buf buf = {sigv->data, sigv->len}; + return pycbc_convert_uint8_t(buf); +} + +PyObject* pycbc_convert_lcbcrypto_KEYTYPE(const lcbcrypto_KEYTYPE type) +{ + return PyLong_FromLong(type); +} + +PyObject* pycbc_convert_char_p(const char* string) +{ + return pycbc_SimpleStringZ(string); +} + +pycbc_crypto_buf pycbc_gen_buf(const uint8_t *data, size_t len) +{ + pycbc_crypto_buf result; + result.data=data; + result.len = len; + return result; +} + +#define PYCBC_SIZED_ARRAY(TYPE,NAME) ,const TYPE* NAME, size_t NAME##_num +#define PYCBC_SIZED_ARRAY_FWD(TYPE,NAME) ,NAME, NAME##_num + +#define PYCBC_ARG_CSTRN(TYPE,NAME) , TYPE* NAME, size_t NAME##_len +#define PYCBC_ARG_CSTRN_FWD(TYPE,NAME) ,NAME##_cstrn +#define PYCBC_STORE_CSTRN(TYPE,NAME) \ + PyObject *NAME##_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(NAME,NAME##_len)); +#define PYCBC_ARG_CSTRN_FREE(TYPE,NAME) Py_DecRef(NAME##_cstrn); + +#define PYCBC_ARG_CSTRN_TRIM_NEWLINE(TYPE,NAME) , TYPE* NAME, size_t NAME##_len +#define PYCBC_ARG_CSTRN_TRIM_NEWLINE_FWD(TYPE,NAME) ,NAME##_cstrn +#define PYCBC_STORE_CSTRN_TRIM_NEWLINE(TYPE,NAME) \ + PyObject *NAME##_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(NAME,(NAME##_len>0)?(NAME##_len-1):0)); + +#define PYCBC_ARG_PASSTHRU(TYPE,NAME) ,TYPE NAME +#define PYCBC_DUMMY_ARG_ACTOR(TYPE,NAME) +#define PYCBC_ARG_FWD(TYPE,NAME) ,pycbc_convert_##TYPE(NAME) +#define PYCBC_ARG_NAME_AND_TYPE(TYPE,NAME) ,TYPE NAME +#define PYCBC_PARAM_NAME_AND_TYPE(TYPE,NAME) ,TYPE NAME + +#define PYCBC_PARAM_P_NAME_AND_TYPE(TYPE,NAME) ,const TYPE* NAME +#define PYCBC_PARAM_P_CONVERTED(TYPE,NAME) ,NAME##_converted +#define PYCBC_PARAM_P_STORE(TYPE,NAME) \ + PyObject *NAME##_converted = pycbc_convert_##TYPE##_p(NAME); +#define PYCBC_PARAM_P_DEREF(TYPE,NAME) Py_DecRef(NAME##_converted); + +#define PYCBC_STORE_LIST(TYPE,NAME) \ + PyObject *NAME##_list = pycbc_gen_list_##TYPE(NAME,NAME##_num,pycbc_convert_##TYPE); +#define PYCBC_DEREF_LIST(TYPE,NAME) Py_DecRef(NAME##_list); +#define PYCBC_GET_LIST(TYPE,NAME) ,NAME##_list + +#define PARAM_FMT(TYPE,NAME) "O" +#define PARAM_P_FMT(TYPE,NAME) "O" +#define PARAM_L_FMT(TYPE,NAME) "O" +#define PARAM_CSTRN_FMT(TYPE,NAME) "O" +#define PARAM_PASSTHRU_FMT(TYPE,NAME) + +#define PYCBC_LOAD_KEY_TYPES(PARAM, PARAM_P, PARAM_L, PARAM_CSTRN, PARAM_CSTRN_TRIM_NEWLINE, PARAM_PASSTHRU) PARAM(lcbcrypto_KEYTYPE,type)\ + PARAM_P(char,keyid)\ + PARAM_PASSTHRU(uint8_t**, subject) PARAM_PASSTHRU(size_t*, subject_len) +#define PYCBC_GENERATE_IV_TYPES(PARAM, PARAM_P, PARAM_L, PARAM_CSTRN, PARAM_CSTRN_TRIM_NEWLINE, PARAM_PASSTHRU)\ + PARAM_PASSTHRU(uint8_t**, subject) PARAM_PASSTHRU(size_t*, subject_len) +#define PYCBC_SIGN_TYPES(PARAM, PARAM_P, PARAM_L, PARAM_CSTRN, PARAM_CSTRN_TRIM_NEWLINE, PARAM_PASSTHRU) PARAM_L(lcbcrypto_SIGV,inputs)\ + PARAM_PASSTHRU(uint8_t**, subject) PARAM_PASSTHRU(size_t*, subject_len) +#define PYCBC_VER_SIGN_TYPES(PARAM, PARAM_P, PARAM_L, PARAM_CSTRN, PARAM_CSTRN_TRIM_NEWLINE, PARAM_PASSTHRU) PARAM_L(lcbcrypto_SIGV,inputs)\ + PARAM_CSTRN(uint8_t, subject) +#define PYCBC_V0_ENCRYPT_TYPES(PARAM, PARAM_P, PARAM_L, PARAM_CSTRN, PARAM_CSTRN_TRIM_NEWLINE, PARAM_PASSTHRU)\ + PARAM_CSTRN_TRIM_NEWLINE(const uint8_t,input) PARAM_CSTRN(const uint8_t, key) PARAM_CSTRN(const uint8_t,iv)\ + PARAM_PASSTHRU(uint8_t**, subject) PARAM_PASSTHRU(size_t*, subject_len) +#define PYCBC_V0_DECRYPT_TYPES(PARAM, PARAM_P, PARAM_L, PARAM_CSTRN, PARAM_CSTRN_TRIM_NEWLINE, PARAM_PASSTHRU)\ + PARAM_CSTRN(const uint8_t,input) PARAM_CSTRN(const uint8_t, key) PARAM_CSTRN(const uint8_t,iv)\ + PARAM_PASSTHRU(uint8_t**, subject) PARAM_PASSTHRU(size_t*, subject_len) + +#define PYCBC_V1_ENCRYPT_TYPES(PARAM, PARAM_P, PARAM_L, PARAM_CSTRN, PARAM_CSTRN_TRIM_NEWLINE, PARAM_PASSTHRU)\ + PARAM_CSTRN_TRIM_NEWLINE(const uint8_t,input) PARAM_CSTRN(const uint8_t,iv)\ + PARAM_PASSTHRU(uint8_t**, subject) PARAM_PASSTHRU(size_t*, subject_len) +#define PYCBC_V1_DECRYPT_TYPES(PARAM, PARAM_P, PARAM_L, PARAM_CSTRN, PARAM_CSTRN_TRIM_NEWLINE, PARAM_PASSTHRU)\ + PARAM_CSTRN(const uint8_t,input) PARAM_CSTRN(const uint8_t,iv)\ + PARAM_PASSTHRU(uint8_t**, subject) PARAM_PASSTHRU(size_t*, subject_len) + +#define PYCBC_X_COMMON_CRYPTO_METHODS(X)\ +X(generic,generate_iv, pycbc_cstrndup, PYCBC_GENERATE_IV_TYPES)\ +X(generic,sign, pycbc_cstrndup, PYCBC_SIGN_TYPES)\ +X(generic,verify_signature, pycbc_is_true, PYCBC_VER_SIGN_TYPES)\ + +#define PYCBC_X_V0_ONLY_CRYPTO_METHODS(X)\ +X(v0,load_key, pycbc_cstrndup, PYCBC_LOAD_KEY_TYPES)\ +X(v0,encrypt, pycbc_cstrndup, PYCBC_V0_ENCRYPT_TYPES)\ +X(v0,decrypt, pycbc_cstrndup, PYCBC_V0_DECRYPT_TYPES)\ + +#define PYCBC_X_V0_CRYPTO_METHODS(X)\ +PYCBC_X_V0_ONLY_CRYPTO_METHODS(X)\ +PYCBC_X_COMMON_CRYPTO_METHODS(X) + +#define PYCBC_X_V1_ONLY_CRYPTO_METHODS(X)\ +X(v1,encrypt, pycbc_cstrndup, PYCBC_V1_ENCRYPT_TYPES)\ +X(v1,decrypt, pycbc_cstrndup, PYCBC_V1_DECRYPT_TYPES)\ + +#define PYCBC_X_V1_CRYPTO_METHODS(X)\ +PYCBC_X_V1_ONLY_CRYPTO_METHODS(X)\ +PYCBC_X_COMMON_CRYPTO_METHODS(X) + +#define PYCBC_SIG_METHOD(VERSION, METHOD, PROCESSOR, X_ARGS)\ +static lcb_error_t \ +pycbc_crypto_##VERSION##_##METHOD(lcbcrypto_PROVIDER *provider \ + X_ARGS(PYCBC_ARG_NAME_AND_TYPE, PYCBC_PARAM_P_NAME_AND_TYPE, PYCBC_SIZED_ARRAY, PYCBC_ARG_CSTRN,\ + PYCBC_ARG_CSTRN_TRIM_NEWLINE, PYCBC_ARG_PASSTHRU)\ + ) {\ + lcb_error_t lcb_result = LCB_GENERIC_TMPERR;\ + PyObject* method=!PyErr_Occurred()?PYCBC_CRYPTO_GET_METHOD(provider, METHOD):NULL;\ + if (method){\ + X_ARGS(PYCBC_DUMMY_ARG_ACTOR, PYCBC_PARAM_P_STORE, PYCBC_STORE_LIST, PYCBC_STORE_CSTRN,\ + PYCBC_STORE_CSTRN_TRIM_NEWLINE, PYCBC_DUMMY_ARG_ACTOR)\ + const char* PYARGS_FMTSTRING="(" X_ARGS(PARAM_FMT, PARAM_P_FMT, PARAM_L_FMT, PARAM_CSTRN_FMT,\ + PARAM_CSTRN_FMT, PARAM_PASSTHRU_FMT) ")";\ + PyObject* args = Py_BuildValue(PYARGS_FMTSTRING \ + X_ARGS(PYCBC_ARG_FWD, PYCBC_PARAM_P_CONVERTED, PYCBC_GET_LIST, PYCBC_ARG_CSTRN_FWD,\ + PYCBC_ARG_CSTRN_TRIM_NEWLINE_FWD, PYCBC_DUMMY_ARG_ACTOR));\ + PyObject* result = pycbc_python_proxy(method, args, #METHOD);\ + if (result){\ + lcb_result = PROCESSOR(subject, subject_len, result);\ + }\ + Py_DecRef(result);\ + Py_DecRef(args);\ + X_ARGS(PYCBC_DUMMY_ARG_ACTOR, PYCBC_PARAM_P_DEREF, PYCBC_DEREF_LIST, PYCBC_ARG_CSTRN_FREE,\ + PYCBC_ARG_CSTRN_FREE, PYCBC_DUMMY_ARG_ACTOR)\ + }\ + return lcb_result;\ +} +#if PYCBC_CRYPTO_VERSION > 0 +#define PYCBC_CRYPTO_METHODS(X) PYCBC_X_V1_CRYPTO_METHODS(X) +#else +#define PYCBC_CRYPTO_METHODS(X) PYCBC_X_V0_CRYPTO_METHODS(X) +#endif + +#ifdef PYCBC_GEN_METHODS +PYCBC_X_ALL_CRYPTO_VERSIONS(PYCBC_SIG_METHOD); +#else + +static lcb_error_t pycbc_crypto_generic_generate_iv(lcbcrypto_PROVIDER *provider, uint8_t **subject, + size_t *subject_len) { + lcb_error_t lcb_result = LCB_GENERIC_TMPERR; + PyObject *method = !PyErr_Occurred() ? pycbc_retrieve_method(provider, "generate_iv") : ((void *) 0); + if (method) { + const char *PYARGS_FMTSTRING = "(" ")"; + PyObject *args = Py_BuildValue(PYARGS_FMTSTRING); + PyObject *result = pycbc_python_proxy(method, args, "generate_iv"); + if (result) { lcb_result = pycbc_cstrndup(subject, subject_len, result); } + Py_DecRef(result); + Py_DecRef(args); + } + return lcb_result; +} + +static lcb_error_t +pycbc_crypto_generic_sign(lcbcrypto_PROVIDER *provider, const lcbcrypto_SIGV *inputs, size_t inputs_num, + uint8_t **subject, size_t *subject_len) { + lcb_error_t lcb_result = LCB_GENERIC_TMPERR; + PyObject *method = !PyErr_Occurred() ? pycbc_retrieve_method(provider, "sign") : ((void *) 0); + if (method) { + PyObject *inputs_list = pycbc_gen_list_lcbcrypto_SIGV(inputs, inputs_num, pycbc_convert_lcbcrypto_SIGV); + const char *PYARGS_FMTSTRING = "(" "O" ")"; + PyObject *args = Py_BuildValue(PYARGS_FMTSTRING, inputs_list); + PyObject *result = pycbc_python_proxy(method, args, "sign"); + if (result) { lcb_result = pycbc_cstrndup(subject, subject_len, result); } + Py_DecRef(result); + Py_DecRef(args); + Py_DecRef(inputs_list); + } + return lcb_result; +} + +static lcb_error_t +pycbc_crypto_generic_verify_signature(lcbcrypto_PROVIDER *provider, const lcbcrypto_SIGV *inputs, size_t inputs_num, + uint8_t *subject, size_t subject_len) { + lcb_error_t lcb_result = LCB_GENERIC_TMPERR; + PyObject *method = !PyErr_Occurred() ? pycbc_retrieve_method(provider, "verify_signature") : ((void *) 0); + if (method) { + PyObject *inputs_list = pycbc_gen_list_lcbcrypto_SIGV(inputs, inputs_num, pycbc_convert_lcbcrypto_SIGV); + PyObject *subject_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(subject, subject_len)); + const char *PYARGS_FMTSTRING = "(" "O" "O" ")"; + PyObject *args = Py_BuildValue(PYARGS_FMTSTRING, inputs_list, subject_cstrn); + PyObject *result = pycbc_python_proxy(method, args, "verify_signature"); + if (result) { lcb_result = pycbc_is_true(subject, subject_len, result); } + Py_DecRef(result); + Py_DecRef(args); + Py_DecRef(inputs_list); + Py_DecRef(subject_cstrn); + } + return lcb_result; +} + +static lcb_error_t +pycbc_crypto_v0_load_key(lcbcrypto_PROVIDER *provider, lcbcrypto_KEYTYPE type, const char *keyid, uint8_t **subject, + size_t *subject_len) { + lcb_error_t lcb_result = LCB_GENERIC_TMPERR; + PyObject *method = !PyErr_Occurred() ? pycbc_retrieve_method(provider, "load_key") : ((void *) 0); + if (method) { + PyObject *keyid_converted = pycbc_convert_char_p(keyid); + const char *PYARGS_FMTSTRING = "(" "O" "O" ")"; + PyObject *args = Py_BuildValue(PYARGS_FMTSTRING, pycbc_convert_lcbcrypto_KEYTYPE(type), keyid_converted); + PyObject *result = pycbc_python_proxy(method, args, "load_key"); + if (result) { lcb_result = pycbc_cstrndup(subject, subject_len, result); } + Py_DecRef(result); + Py_DecRef(args); + Py_DecRef(keyid_converted); + } + return lcb_result; +} + +static lcb_error_t +pycbc_crypto_v0_encrypt(lcbcrypto_PROVIDER *provider, const uint8_t *input, size_t input_len, const uint8_t *key, + size_t key_len, const uint8_t *iv, size_t iv_len, uint8_t **subject, size_t *subject_len) { + lcb_error_t lcb_result = LCB_GENERIC_TMPERR; + PyObject *method = !PyErr_Occurred() ? pycbc_retrieve_method(provider, "encrypt") : ((void *) 0); + if (method) { + PyObject *input_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(input, (input_len > 0) ? (input_len - 1) : 0)); + PyObject *key_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(key, key_len)); + PyObject *iv_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(iv, iv_len)); + const char *PYARGS_FMTSTRING = "(" "O" "O" "O" ")"; + PyObject *args = Py_BuildValue(PYARGS_FMTSTRING, input_cstrn, key_cstrn, iv_cstrn); + PyObject *result = pycbc_python_proxy(method, args, "encrypt"); + if (result) { lcb_result = pycbc_cstrndup(subject, subject_len, result); } + Py_DecRef(result); + Py_DecRef(args); + Py_DecRef(input_cstrn); + Py_DecRef(key_cstrn); + Py_DecRef(iv_cstrn); + } + return lcb_result; +} + +static lcb_error_t +pycbc_crypto_v0_decrypt(lcbcrypto_PROVIDER *provider, const uint8_t *input, size_t input_len, const uint8_t *key, + size_t key_len, const uint8_t *iv, size_t iv_len, uint8_t **subject, size_t *subject_len) { + lcb_error_t lcb_result = LCB_GENERIC_TMPERR; + PyObject *method = !PyErr_Occurred() ? pycbc_retrieve_method(provider, "decrypt") : ((void *) 0); + if (method) { + PyObject *input_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(input, input_len)); + PyObject *key_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(key, key_len)); + PyObject *iv_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(iv, iv_len)); + const char *PYARGS_FMTSTRING = "(" "O" "O" "O" ")"; + PyObject *args = Py_BuildValue(PYARGS_FMTSTRING, input_cstrn, key_cstrn, iv_cstrn); + PyObject *result = pycbc_python_proxy(method, args, "decrypt"); + if (result) { lcb_result = pycbc_cstrndup(subject, subject_len, result); } + Py_DecRef(result); + Py_DecRef(args); + Py_DecRef(input_cstrn); + Py_DecRef(key_cstrn); + Py_DecRef(iv_cstrn); + } + return lcb_result; +} + +static lcb_error_t +pycbc_crypto_v1_encrypt(lcbcrypto_PROVIDER *provider, const uint8_t *input, size_t input_len, const uint8_t *iv, + size_t iv_len, uint8_t **subject, size_t *subject_len) { + lcb_error_t lcb_result = LCB_GENERIC_TMPERR; + PyObject *method = !PyErr_Occurred() ? pycbc_retrieve_method(provider, "encrypt") : ((void *) 0); + if (method) { + PyObject *input_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(input, (input_len > 0) ? (input_len - 1) : 0)); + PyObject *iv_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(iv, iv_len)); + const char *PYARGS_FMTSTRING = "(" "O" "O" ")"; + PyObject *args = Py_BuildValue(PYARGS_FMTSTRING, input_cstrn, iv_cstrn); + PyObject *result = pycbc_python_proxy(method, args, "encrypt"); + if (result) { lcb_result = pycbc_cstrndup(subject, subject_len, result); } + Py_DecRef(result); + Py_DecRef(args); + Py_DecRef(input_cstrn); + Py_DecRef(iv_cstrn); + } + return lcb_result; +} + +static lcb_error_t +pycbc_crypto_v1_decrypt(lcbcrypto_PROVIDER *provider, const uint8_t *input, size_t input_len, const uint8_t *iv, + size_t iv_len, uint8_t **subject, size_t *subject_len) { + lcb_error_t lcb_result = LCB_GENERIC_TMPERR; + PyObject *method = !PyErr_Occurred() ? pycbc_retrieve_method(provider, "decrypt") : ((void *) 0); + if (method) { + PyObject *input_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(input, input_len)); + PyObject *iv_cstrn = pycbc_convert_uint8_t(pycbc_gen_buf(iv, iv_len)); + const char *PYARGS_FMTSTRING = "(" "O" "O" ")"; + PyObject *args = Py_BuildValue(PYARGS_FMTSTRING, input_cstrn, iv_cstrn); + PyObject *result = pycbc_python_proxy(method, args, "decrypt"); + if (result) { lcb_result = pycbc_cstrndup(subject, subject_len, result); } + Py_DecRef(result); + Py_DecRef(args); + Py_DecRef(input_cstrn); + Py_DecRef(iv_cstrn); + } + return lcb_result; +}; + +#endif + +static int +CryptoProvider___setattr__(PyObject *self, PyObject *attr_name, PyObject *v) { + int result= PyObject_GenericSetAttr((PyObject*)self, attr_name, v); + size_t name_n; + const char* name = PYCBC_CSTRN(attr_name, &name_n); + if (result || PyErr_Occurred() || !v || !attr_name || !PyObject_IsTrue(v) || + !PyObject_IsInstance(v, (PyObject*)&PyMethod_Type)) { + return result; + } +#define PYCBC_INSERT_METHOD(VERSION, METHOD, DUMMY, X_ARGS)\ + if(name_n == PYCBC_SIZE(#METHOD) && !strncmp(name, #METHOD, name_n))\ + {\ + ((pycbc_CryptoProvider*)self)->provider->v.PYCBC_CRYPTO_VVERSION.METHOD = pycbc_crypto_##VERSION##_##METHOD;\ + return result;\ + } + +#ifdef PYCBC_GEN_METHODS + PYCBC_CRYPTO_METHODS(PYCBC_INSERT_METHOD); +#else +#endif + return result; +} + +#ifdef PYCBC_GEN_PYTHON_STUBS +#define PYCBC_PY_PARAM(TYPE,NAME) "," #NAME +#define PYCBC_PY_PARAM_L(TYPE,NAME) "," #NAME +#define PYCBC_PY_PARAM_CSTRN(TYPE,NAME) "," #NAME +#define PYCBC_PY_PASSTHRU(TYPE,NAME) "" + +#define PYCBC_PY_GEN(VERSION,METHOD, PROCESSOR, X_ARGS)\ +printf("\n @abc.abstractmethod\n def %s(self%s):\n pass\n",#METHOD,\ + X_ARGS(PYCBC_PY_PARAM, PYCBC_PY_PARAM, PYCBC_PY_PARAM_L,\ + PYCBC_PY_PARAM_CSTRN, PYCBC_PY_PARAM_CSTRN, PYCBC_PY_PASSTHRU)); + +void pycbc_generate_stubs() +{ + printf("\nimport abc\nclass PythonCryptoProvider(CryptoProvider):\n __metaclass__ = abc.ABCMeta\n"); + PYCBC_CRYPTO_METHODS(PYCBC_PY_GEN); +} +#endif + +static PyMethodDef CryptoProvider_TABLE_methods[] = { + { NULL, NULL, 0, NULL } + +}; + +void pycbc_crypto_provider_destructor(lcbcrypto_PROVIDER *provider) { + free(provider); +} + +static int +CryptoProvider__init(pycbc_CryptoProvider *self, + PyObject *args, PyObject *kwargs) +{ + // create instance here + PyObject* provider = kwargs?PyDict_GetItemString(kwargs,"provider"):NULL; + self->provider = NULL; + if (provider) { + self->provider = PyLong_AsVoidPtr(provider); + if (PyErr_Occurred()) + { + PYCBC_EXCTHROW_ARGS(); + return -1; + } + } else{ + self->provider = calloc(1, sizeof(lcbcrypto_PROVIDER)); + self->provider->destructor=pycbc_crypto_provider_destructor; + self->provider->version = PYCBC_CRYPTO_VERSION; + } + { + PyObject *method = NULL; +#define PYCBC_POPULATE_STRUCT(VERSION, METHOD, DUMMY, X_ARGS)\ + method = PyObject_HasAttrString((PyObject*)self, #METHOD)?\ + PyObject_GetAttrString((PyObject*)self, #METHOD):\ + NULL;\ + if (method) {\ + self->provider->v.PYCBC_CRYPTO_VVERSION.METHOD = pycbc_crypto_##VERSION##_##METHOD;\ + Py_DecRef(method);\ + }\ + else if (!self->provider->v.PYCBC_CRYPTO_VVERSION.METHOD)\ + {\ + pycbc_report_method_exception(LCB_EINVAL, "Missing method %s", #METHOD);\ + } + + PYCBC_CRYPTO_METHODS(PYCBC_POPULATE_STRUCT); +#undef PYCBC_POPULATE_STRUCT + } + if (PyErr_Occurred() || !self->provider) + { + return -1; + } +#ifdef PYCBC_GEN_PYTHON_STUBS + pycbc_generate_stubs(); +#endif + self->provider->cookie = self; + lcbcrypto_ref(self->provider); + return 0; +} + +static void +CryptoProvider_dtor(pycbc_CryptoProvider *self) +{ + if (self->provider) { + lcbcrypto_unref(self->provider); + self->provider = NULL; + } + + Py_TYPE(self)->tp_free((PyObject*)self); +} + +int +pycbc_CryptoProviderType_init(PyObject **ptr) +{ + PyTypeObject *p = &CryptoProviderType; + *ptr = (PyObject*)p; + + if (p->tp_name) { + return 0; + } + + p->tp_name = "CryptoProvider"; + p->tp_new = PyType_GenericNew; + p->tp_init = (initproc)CryptoProvider__init; + p->tp_dealloc = (destructor)CryptoProvider_dtor; + + p->tp_flags = Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE; + p->tp_doc = PyDoc_STR("The encryption provider to encrypt/decrypt document fields"); + p->tp_basicsize = sizeof(pycbc_CryptoProvider); + p->tp_setattro = CryptoProvider___setattr__; + p->tp_methods = CryptoProvider_TABLE_methods; + + return PyType_Ready(p); +} \ No newline at end of file diff --git a/src/ext.c b/src/ext.c index 211d6d7d8..0adb12484 100644 --- a/src/ext.c +++ b/src/ext.c @@ -325,6 +325,7 @@ init_libcouchbase(void) X(HttpResult, pycbc_HttpResultType_init) \ X(ViewResult, pycbc_ViewResultType_init) \ X(Transcoder, pycbc_TranscoderType_init) \ + X(CryptoProvider, pycbc_CryptoProviderType_init) \ X(ObserveInfo, pycbc_ObserveInfoType_init) \ X(Item, pycbc_ItemType_init) \ X(Event, pycbc_EventType_init) \ diff --git a/src/pycbc.h b/src/pycbc.h index fc712ae20..4e01277d4 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -49,6 +49,29 @@ #include <libcouchbase/n1ql.h> #include <libcouchbase/cbft.h> #include <libcouchbase/ixmgmt.h> + +#undef PYCBC_DEBUG +#ifdef PYCBC_DEBUG +#define PYCBC_DEBUG_LOG_RAW(...) printf(__VA_ARGS__); +#else +#define PYCBC_DEBUG_LOG_RAW(...) +#endif + +// TODO: fix in libcouchbase +#ifdef _WIN32 +//#if _MSC_VER >= 1600 +// #include <cstdint> +//#else + typedef __int8 int8_t; + typedef __int16 int16_t; +//#endif +#include "libcouchbase/sysdefs.h" +#define uint8_t lcb_uint8_t +#define uint16_t lcb_uint16_t +#define uint64_t lcb_uint64_t +#endif + +#include <libcouchbase/crypto.h> #if LCB_VERSION < 0x020601 #error "Couchbase Python SDK requires libcouchbase 2.6.1 or greater" #endif @@ -108,6 +131,8 @@ typedef int pycbc_strlen_t; #define pycbc_SimpleStringZ(c) PyUnicode_FromString(c) #define pycbc_SimpleStringN(c, n) PyUnicode_FromStringAndSize(c, n) +#define PYCBC_CSTR(X) PyUnicode_AsUTF8(X) +#define PYCBC_CSTRN(X, n) PyUnicode_AsUTF8AndSize(X, (Py_ssize_t*)n) #else @@ -125,6 +150,11 @@ typedef int pycbc_strlen_t; #define pycbc_SimpleStringZ(c) PyString_FromString(c) #define pycbc_SimpleStringN(c, n) PyString_FromStringAndSize(c, n) +const char* pycbc_cstrn(PyObject* object, Py_ssize_t *length); + +#define PYCBC_CSTR(X) PyString_AsString(X) +#define PYCBC_CSTRN(X, n) pycbc_cstrn((X), (n)) + unsigned PY_LONG_LONG pycbc_IntAsULL(PyObject *o); PY_LONG_LONG pycbc_IntAsLL(PyObject *o); long pycbc_IntAsL(PyObject *o); @@ -969,6 +999,7 @@ int pycbc_OperationResultType_init(PyObject **ptr); int pycbc_SDResultType_init(PyObject **ptr); int pycbc_HttpResultType_init(PyObject **ptr); int pycbc_TranscoderType_init(PyObject **ptr); +int pycbc_CryptoProviderType_init(PyObject **ptr); int pycbc_ObserveInfoType_init(PyObject **ptr); int pycbc_ItemType_init(PyObject **ptr); int pycbc_EventType_init(PyObject **ptr); @@ -1354,6 +1385,32 @@ PyObject *pycbc_Bucket__ping(pycbc_Bucket *self, PyObject *pycbc_Bucket__diagnostics(pycbc_Bucket *self, PyObject *args, PyObject *kwargs); + +/** + * Encryption Provider + */ +typedef struct { + PyObject_HEAD + lcbcrypto_PROVIDER* provider; +} pycbc_CryptoProvider; + +#ifndef PYCBC_CRYPTO_VERSION +#if LCB_VERSION > 0x020807 +#define PYCBC_CRYPTO_VERSION 1 +#else +#define PYCBC_CRYPTO_VERSION 0 +#endif +#endif + + +#if PYCBC_CRYPTO_VERSION==1 +#define PYCBC_CRYPTO_VVERSION v1 +#define PYCBC_CRYPTO_METHODS(X) PYCBC_X_V1_CRYPTO_METHODS(X) +#else +#define PYCBC_CRYPTO_VVERSION v0 +#define PYCBC_CRYPTO_METHODS(X) PYCBC_X_V0_CRYPTO_METHODS(X) +#endif + /** * Flag to check if logging is enabled for the library via Python's logging */ From c6e25f42ecb5d280988e103389275e6ab2487e6e Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Wed, 16 May 2018 19:24:21 +0100 Subject: [PATCH 146/829] PYCBC-462: Response Time Observability Support/Threshold Tracer Motivation ---------- A user may wish to profile durations of operations and to view any slower/orphaned operations. Also to be able to integrate with external tracing systems. Changes ------- - Provide access to parameters for the Threshold Logging Tracer in libcouchbase - duration/queue size, etc - Implement support for Python-based OpenTracing providers - Allow chaining both of these, if desired - Cover HTTP-based operations such as N1QL/Views/FTS etc Results ------- - KV/N1QL/FTS/View etc operations all recorded and timed - All can be logged to either or both of the LCB Threshold Tracer and a parent Python OpenTracing Tracer. Change-Id: Ibcc6ccd79c9b3dd77558dd845fdb823e50ab5ae4 Reviewed-on: http://review.couchbase.org/93290 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- acouchbase/tests/py34only.py | 4 +- acouchbase/tests/py35only.py | 4 +- couchbase/_bootstrap.py | 3 + couchbase/_logutil.py | 4 +- couchbase/bucket.py | 13 + couchbase/tests/base.py | 156 ++++- couchbase/tests/cases/get_t.py | 11 +- couchbase/tests/cases/misc_t.py | 75 ++- couchbase/tests/cases/tracing_t.py | 72 ++ couchbase/tests/cases/view_iterator_t.py | 8 +- dev_requirements.txt | 8 +- src/bucket.c | 83 ++- src/callbacks.c | 99 ++- src/constants.c | 56 +- src/convert.c | 7 +- src/ext.c | 816 +++++++++++++++-------- src/fts.c | 21 +- src/get.c | 111 +-- src/http.c | 12 +- src/miscops.c | 28 +- src/multiresult.c | 8 +- src/n1ql.c | 63 +- src/oputil.c | 16 +- src/pipeline.c | 11 +- src/pycbc.h | 337 +++++++--- src/result.c | 21 +- src/views.c | 84 ++- 27 files changed, 1436 insertions(+), 695 deletions(-) create mode 100644 couchbase/tests/cases/tracing_t.py diff --git a/acouchbase/tests/py34only.py b/acouchbase/tests/py34only.py index fc688672c..c7907104b 100644 --- a/acouchbase/tests/py34only.py +++ b/acouchbase/tests/py34only.py @@ -9,9 +9,9 @@ class CouchbaseBeerTest(AioTestCase): - def make_connection(self): + def setUp(self): try: - return super().make_connection(bucket='beer-sample') + return super(CouchbaseBeerTest,self).setUp(bucket='beer-sample') except CouchbaseError: raise SkipTest("Need 'beer-sample' bucket for this") diff --git a/acouchbase/tests/py35only.py b/acouchbase/tests/py35only.py index c61835770..88d5d1e63 100644 --- a/acouchbase/tests/py35only.py +++ b/acouchbase/tests/py35only.py @@ -2,9 +2,9 @@ class CouchbasePy35Test(AioTestCase): - def make_connection(self): + def setUp(self): try: - return super().make_connection(bucket='beer-sample') + super(CouchbasePy35Test,self).setUp(bucket='beer-sample') except CouchbaseError: raise SkipTest("Need 'beer-sample' bucket for this") diff --git a/couchbase/_bootstrap.py b/couchbase/_bootstrap.py index 88edcfe0d..1fad8fea2 100644 --- a/couchbase/_bootstrap.py +++ b/couchbase/_bootstrap.py @@ -70,6 +70,9 @@ def _result__repr__(self): if flags & C.PYCBC_RESFLD_URL and hasattr(self, "url"): details.append("url={0}".format(self.url)) + if hasattr(self, "tracing_context"): + details.append("tracing_context={0}".format(self.tracing_context)) + if hasattr(self, '_pycbc_repr_extra'): details += self._pycbc_repr_extra() diff --git a/couchbase/_logutil.py b/couchbase/_logutil.py index 6fae72fd5..451bf6b88 100644 --- a/couchbase/_logutil.py +++ b/couchbase/_logutil.py @@ -26,7 +26,7 @@ ) -def _pylog_log_handler(**kwargs): +def pylog_log_handler(**kwargs): pylevel = LEVEL_MAP[kwargs.pop('level')] logger_name = 'couchbase' + '.' + kwargs['subsys'] msg = '[{id}] {message} (L:{c_src[1]})'.format(**kwargs) @@ -37,7 +37,7 @@ def configure(val): from couchbase.bucket import Bucket import couchbase._libcouchbase if val: - couchbase._libcouchbase.lcb_logging(_pylog_log_handler) + couchbase._libcouchbase.lcb_logging(pylog_log_handler) initmsg = 'Initializing Couchbase logging. lcb_version={0}'.format( Bucket.lcb_version()) logging.getLogger('couchbase').info(initmsg) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 20c297ae4..9825a955a 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -123,6 +123,7 @@ def newfn(self, key, *args, **kwargs): return real_decorator + class Bucket(_Base): def __init__(self, *args, **kwargs): """Connect to a bucket. @@ -1771,6 +1772,18 @@ def redaction(self): def redaction(self, val): return self._cntl(_LCB.LCB_CNTL_LOG_REDACTION, value=val, value_type='int') + def __getattribute__(self, name): + if name in _LCB.TRACING.keys(): + return self._cntl(**_LCB.TRACING[name]) + else: + return super(Bucket, self).__getattribute__(name) + + def __setattr__(self, name, value): + if name in _LCB.TRACING.keys(): + return self._cntl(value=value, **_LCB.TRACING[name]) + else: + super(Bucket, self).__setattr__(name, value) + def _cntl(self, *args, **kwargs): """Low-level interface to the underlying C library's settings. via ``lcb_cntl()``. diff --git a/couchbase/tests/base.py b/couchbase/tests/base.py index bb90249fd..0cdf9a82d 100644 --- a/couchbase/tests/base.py +++ b/couchbase/tests/base.py @@ -22,15 +22,18 @@ from testfixtures import LogCapture from testresources import ResourcedTestCase as ResourcedTestCaseReal, TestResourceManager + try: import unittest2 as unittest except ImportError: import unittest import logging import gc - import os -if os.environ.get("PYCBC_TRACE_GC"): +import time +from basictracer import BasicTracer, SpanRecorder +import couchbase +if os.environ.get("PYCBC_TRACE_GC") in ['FULL', 'STATS_LEAK_ONLY']: gc.set_debug(gc.DEBUG_STATS | gc.DEBUG_LEAK) @@ -74,6 +77,8 @@ def assertLogs(self, *args, **kwargs): from couchbase.result import ( MultiResult, ValueResult, OperationResult, ObserveInfo, Result) from couchbase._pyport import basestring +from couchbase._version import __version__ as cb_version +PYCBC_CB_VERSION = 'PYCBC/' + cb_version CONFIG_FILE = 'tests.ini' # in cwd @@ -89,6 +94,7 @@ def __init__(self): self.ipv6 = "disabled" self.protocol = "http" self.enable_tracing = "off" + self.tracingparms = {} @staticmethod def filter_opts(options): @@ -115,7 +121,9 @@ def make_connargs(self, **overrides): conn_options = '&'.join((key + "=" + value) for key, value in final_options.items()) connstr += ("?" + conn_options) if conn_options else "" - + if 'init_tracer' in overrides.keys(): + overrides['tracer']=overrides.pop("init_tracer")(PYCBC_CB_VERSION + , **self.tracingparms) ret = { 'password': self.bucket_password, 'connection_string': connstr @@ -153,6 +161,7 @@ def load(self): info.keypath = config.get('realserver', 'keypath', fallback=None) info.protocol = config.get('realserver', 'protocol', fallback="http") info.enable_tracing = config.get('realserver', 'tracing', fallback="off") + info.tracingparms['port'] = config.get('realserver', 'tracing_port', fallback=None) logging.info("info is "+str(info.__dict__)) if config.getboolean('realserver', 'enabled'): self.realserver_info = info @@ -171,6 +180,7 @@ def load(self): else: self.mock_enabled = False + class MockResourceManager(TestResourceManager): def __init__(self, config): super(MockResourceManager, self).__init__() @@ -211,7 +221,7 @@ def make(self, *args, **kw): info.admin_username = "Administrator" info.admin_password = "password" info.mock = mock - info.enable_tracing = "on" + info.enable_tracing = "true" self._info = info return info @@ -264,6 +274,7 @@ def should_check_refcount(self): cls_ObserveInfo = ObserveInfo cls_Result = Result + GLOBAL_CONFIG = ConnectionConfiguration() @@ -409,7 +420,7 @@ def assertRegex(self, *args, **kwargs): return super(CouchbaseTestCase,self).assertRegexpMatches(*args,**kwargs) -class ConnectionTestCase(CouchbaseTestCase): +class ConnectionTestCaseBase(CouchbaseTestCase): def checkCbRefcount(self): if not self.should_check_refcount: return @@ -417,7 +428,32 @@ def checkCbRefcount(self): import gc if platform.python_implementation() == 'PyPy': return + if os.environ.get("PYCBC_TRACE_GC") in ['FULL','GRAPH_ONLY']: + import objgraph + graphdir=os.path.join(os.getcwd(),"ref_graphs") + try: + os.makedirs(graphdir) + except: + pass + for attrib_name in ["cb.tracer.parent", "cb"]: + try: + logging.info("evaluating "+attrib_name) + attrib = eval("self." + attrib_name) + options = dict(refcounts=True, max_depth=3, too_many=10, shortnames=False) + objgraph.show_refs(attrib, + filename=os.path.join(graphdir, '{}_{}_refs.dot'.format(self._testMethodName, + attrib_name)), + **options) + objgraph.show_backrefs(attrib, + filename=os.path.join(graphdir, + '{}_{}_backrefs.dot'.format(self._testMethodName, + attrib_name)), + **options) + logging.info("got referrents {}".format(repr(gc.get_referents(attrib)))) + logging.info("got referrers {}".format(repr(gc.get_referrers(attrib)))) + except: + pass gc.collect() for x in range(10): oldrc = sys.getrefcount(self.cb) @@ -429,11 +465,11 @@ def checkCbRefcount(self): #self.assertEqual(oldrc, 2) def setUp(self, **kwargs): - super(ConnectionTestCase, self).setUp() + super(ConnectionTestCaseBase, self).setUp() self.cb = self.make_connection(**kwargs) def tearDown(self): - super(ConnectionTestCase, self).tearDown() + super(ConnectionTestCaseBase, self).tearDown() if hasattr(self, '_implDtorHook'): self._implDtorHook() else: @@ -443,51 +479,107 @@ def tearDown(self): del self.cb -import time - -import logging -from basictracer import BasicTracer, SpanRecorder -import couchbase - - class LogRecorder(SpanRecorder): - def record_span(self, span): logging.info("recording span: "+str(span.__dict__)) -class TracedCase(ConnectionTestCase): +def basic_tracer(): + return BasicTracer(LogRecorder()) + +try: + from opentracing_pyzipkin.tracer import Tracer + import requests + + def http_transport(encoded_span): + # The collector expects a thrift-encoded list of spans. + import logging + requests.post( + 'http://localhost:9411/api/v1/spans', + data=encoded_span, + headers={'Content-Type': 'application/x-thrift'} + ) + + def jaeger_tracer(service, port = 9414, **kwargs ): + port = 9411 + tracer= Tracer(PYCBC_CB_VERSION, 100, http_transport, port ) + logging.error(tracer) + return tracer + +except Exception as e: + + logging.error(e) + + def jaeger_tracer(service, port = None): + logging.error("No Jaeger import available") + return basic_tracer() + + +class TracedCase(ConnectionTestCaseBase): _tracer = None - @property - def tracer(self): + + def init_tracer(self, service, **kwargs): if not TracedCase._tracer: - TracedCase._tracer = BasicTracer(recorder=LogRecorder()) + if self.using_jaeger: + TracedCase._tracer = jaeger_tracer(service,**kwargs) + self.using_jaeger = True + if not TracedCase._tracer: + TracedCase._tracer = basic_tracer() + self.using_jaeger = False return TracedCase._tracer - def setUp(self): - log_level = logging.INFO - logging.getLogger('').handlers = [] - logging.basicConfig(format='%(asctime)s %(message)s', level=log_level) + @property + def tracer(self): + return TracedCase._tracer - super(TracedCase, self).setUp(tracer=self.tracer) - couchbase.enable_logging() + def setUp(self, trace_all = True, flushcount = 0, *args, **kwargs): + self.using_jaeger =(os.environ.get("PYCBC_USE_JAEGER") == "TRUE") + self.flushdict = {k: v for k, v in zip(map(str, range(1, 100)), map(str, range(1, 100)))} + self.trace_all = os.environ.get("PYCBC_TRACE_ALL") or trace_all + self.flushcount = flushcount + if self.using_jaeger and self.flushcount>5: + raise SkipTest("too slow when using jaeger") + if self.trace_all: + couchbase.enable_logging() + super(TracedCase, self).setUp(enable_tracing = "true", init_tracer = self.init_tracer, **kwargs) + if self.trace_all: + self.cb.TRACING_ORPHANED_QUEUE_FLUSH_INTERVAL = 0.0001 + self.cb.TRACING_ORPHANED_QUEUE_SIZE =10 + self.cb.TRACING_THRESHOLD_QUEUE_FLUSH_INTERVAL = 0.00001 + self.cb.TRACING_THRESHOLD_QUEUE_SIZE = 10 + self.cb.TRACING_THRESHOLD_KV = 0.00001 + self.cb.TRACING_THRESHOLD_N1QL= 0.00001 + self.cb.TRACING_THRESHOLD_VIEW =0.00001 + self.cb.TRACING_THRESHOLD_FTS =0.00001 + self.cb.TRACING_THRESHOLD_ANALYTICS =0.00001 + + def flush_tracer(self): + for entry in range(1, self.flushcount): + self.cb.upsert_multi(self.flushdict) def tearDown(self): + if self.trace_all and not self.using_jaeger: + self.flush_tracer() super(TracedCase,self).tearDown() - + couchbase.disable_logging() if self.tracer and getattr(self.tracer,"close", None): - time.sleep(2) # yield to IOLoop to flush the spans - https://github.com/jaegertracing/jaeger-client-python/issues/50 try: + time.sleep(2) # yield to IOLoop to flush the spans - https://github.com/jaegertracing/jaeger-client-python/issues/50 self.tracer.close() # flush any buffered spans except: pass - couchbase.disable_logging() + + +if os.environ.get("PYCBC_TRACE_ALL"): + ConnectionTestCase = TracedCase +else: + ConnectionTestCase = ConnectionTestCaseBase class RealServerTestCase(ConnectionTestCase): - def setUp(self): - super(RealServerTestCase, self).setUp() + def setUp(self, **kwargs): + super(RealServerTestCase, self).setUp(**kwargs) if not self._realserver_info: raise SkipTest("Need real server") @@ -499,8 +591,8 @@ def cluster_info(self): # Class which sets up all the necessary Mock stuff class MockTestCase(ConnectionTestCase): - def setUp(self): - super(MockTestCase, self).setUp() + def setUp(self, **kwargs): + super(MockTestCase, self).setUp(**kwargs) self.skipUnlessMock() self.mockclient = MockControlClient(self.mock.rest_port) diff --git a/couchbase/tests/cases/get_t.py b/couchbase/tests/cases/get_t.py index e15f8f26d..21519e675 100644 --- a/couchbase/tests/cases/get_t.py +++ b/couchbase/tests/cases/get_t.py @@ -15,21 +15,19 @@ # limitations under the License. # -import pickle from time import sleep from nose.plugins.attrib import attr -from couchbase import FMT_JSON, FMT_PICKLE, FMT_UTF8, FMT_BYTES +from couchbase import FMT_JSON, FMT_UTF8 from couchbase.exceptions import ( CouchbaseError, ValueFormatError, NotFoundError) -from couchbase.result import MultiResult, Result -from couchbase.tests.base import ConnectionTestCase, SkipTest -from couchbase.tests.base import TracedCase +from couchbase.tests.base import ConnectionTestCase -class GetTest(TracedCase): + +class GetTest(ConnectionTestCase): def test_trivial_get(self): key = self.gen_key('trivial_get') self.cb.upsert(key, 'value1') @@ -41,7 +39,6 @@ def test_trivial_get(self): self.assertEqual(len(rvs), 1) self.assertEqual(rvs[key].value, 'value1') - def test_get_missing_key(self): rv = self.cb.get('key_missing_1', quiet=True) self.assertIsNone(rv.value) diff --git a/couchbase/tests/cases/misc_t.py b/couchbase/tests/cases/misc_t.py index 434b51d99..3eef74ba2 100644 --- a/couchbase/tests/cases/misc_t.py +++ b/couchbase/tests/cases/misc_t.py @@ -27,7 +27,7 @@ unittest2 = None -from couchbase.tests.base import ConnectionTestCase +from couchbase.tests.base import ConnectionTestCaseBase from couchbase.user_constants import FMT_JSON, FMT_AUTO, FMT_JSON, FMT_PICKLE from couchbase.exceptions import ClientTemporaryFailError from couchbase.exceptions import CouchbaseError @@ -38,7 +38,7 @@ import logging -class MiscTest(ConnectionTestCase): +class MiscTest(ConnectionTestCaseBase): def test_server_nodes(self): nodes = self.cb.server_nodes @@ -175,40 +175,43 @@ def test_redaction(self): re.match(r'.*LCB_LOG_(SD|MD|UD)_[OC]TAG.*', k)) enable_logging() - contains_no_tags = r'^(.(?!<' + all_tags + r'))*$' - contains_tags = r'^.*(' + all_tags + r').*$' - expected = {0: {logging.DEBUG: {'text': 'off', 'pattern': contains_no_tags}}, - 1: {logging.DEBUG: {'text': 'on', 'pattern': contains_tags}}} - - for num, entry in reversed(list(expected.items())): - for level, val in entry.items(): - - optype='connstr' - with self.assertLogs(level=level, recursive_check=True) as cm: - curbc = self.make_connection(log_redaction=val['text']) - self.assertEqual(num != 0, curbc.redaction != 0) - result_str=''.join(cm.output) - logging.info( - 'checking {pattern} matches {optype} addition {text} result:{result_str}'.format(optype=optype, - result_str=result_str, - **val)) - self.assertRegex(result_str, val['pattern']) - - opposite = 1 - num - opposite_val = expected[opposite][level] - - optype='cntl' - with self.assertLogs(level=level) as cm: - curbc.redaction = opposite - curbc.upsert(key='test', value='value') - self.assertEqual(opposite != 0, curbc.redaction != 0) - - result_str=''.join(cm.output) - logging.info( - 'checking {pattern} matches {optype} addition {text} result:{result_str}'.format(optype=optype, - result_str=result_str, - **val)) - self.assertRegex(''.join(cm.output), opposite_val['pattern']) + try: + contains_no_tags = r'^(.(?!<' + all_tags + r'))*$' + contains_tags = r'^.*(' + all_tags + r').*$' + expected = {0: {logging.DEBUG: {'text': 'off', 'pattern': contains_no_tags}}, + 1: {logging.DEBUG: {'text': 'on', 'pattern': contains_tags}}} + + for num, entry in reversed(list(expected.items())): + for level, val in entry.items(): + + optype='connstr' + with self.assertLogs(level=level, recursive_check=True) as cm: + curbc = self.make_connection(log_redaction=val['text']) + self.assertEqual(num != 0, curbc.redaction != 0) + result_str=''.join(cm.output) + logging.info( + 'checking {pattern} matches {optype} addition {text} result:{result_str}'.format(optype=optype, + result_str=result_str, + **val)) + self.assertRegex(result_str, val['pattern']) + + opposite = 1 - num + opposite_val = expected[opposite][level] + + optype='cntl' + with self.assertLogs(level=level) as cm: + curbc.redaction = opposite + curbc.upsert(key='test', value='value') + self.assertEqual(opposite != 0, curbc.redaction != 0) + + result_str=''.join(cm.output) + logging.info( + 'checking {pattern} matches {optype} addition {text} result:{result_str}'.format(optype=optype, + result_str=result_str, + **val)) + self.assertRegex(''.join(cm.output), opposite_val['pattern']) + finally: + couchbase.disable_logging() def test_compat_timeout(self): cb = self.make_connection(timeout=7.5) diff --git a/couchbase/tests/cases/tracing_t.py b/couchbase/tests/cases/tracing_t.py new file mode 100644 index 000000000..3b04cd142 --- /dev/null +++ b/couchbase/tests/cases/tracing_t.py @@ -0,0 +1,72 @@ +# +# Copyright 2013, Couchbase, Inc. +# All Rights Reserved +# +# Licensed under the Apache License, Version 2.0 (the "License") +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +from time import sleep +from unittest import SkipTest + +from nose.plugins.attrib import attr + +from couchbase import FMT_JSON, FMT_UTF8 + +from couchbase.exceptions import ( + CouchbaseError, ValueFormatError, NotFoundError) + +from couchbase.tests.base import TracedCase, ConnectionTestCase +import logging +import couchbase._libcouchbase +import couchbase._logutil + +class TracingTest(TracedCase): + + def setUp(self, *args, **kwargs): + super(TracingTest,self).setUp(trace_all=True, flushcount=400) + + class BogusHandler: + import couchbase.exceptions + couchbase.exceptions.TimeoutError + def __init__(self): + self.records=[] + + def handler(self,**kwargs): + couchbase._logutil.pylog_log_handler(**kwargs) + self.records.append(kwargs) + + def test_threshold_multi_get(self): + handler = TracingTest.BogusHandler() + couchbase._libcouchbase.lcb_logging(handler.handler) + + kv = self.gen_kv_dict(amount=3, prefix='get_multi') + for i in range(0,50): + rvs = self.cb.upsert_multi(kv) + self.assertTrue(rvs.all_ok) + + k_subset = list(kv.keys())[:2] + + rvs1 = self.cb.get_multi(k_subset) + self.assertEqual(len(rvs1), 2) + self.assertEqual(rvs1[k_subset[0]].value, kv[k_subset[0]]) + self.assertEqual(rvs1[k_subset[1]].value, kv[k_subset[1]]) + + rv2 = self.cb.get_multi(kv.keys()) + self.assertEqual(rv2.keys(), kv.keys()) + self.flush_tracer() + + logging.error("finished test") + self.assertRegex(str(handler.records),r'.*Operations over threshold:.*') + +if __name__ == '__main__': + unittest.main() diff --git a/couchbase/tests/cases/view_iterator_t.py b/couchbase/tests/cases/view_iterator_t.py index 93a13128d..d6b8c8fa1 100644 --- a/couchbase/tests/cases/view_iterator_t.py +++ b/couchbase/tests/cases/view_iterator_t.py @@ -163,15 +163,11 @@ def __iter__(self): class ViewIteratorTest(ViewTestCase): def setUp(self): - super(ViewIteratorTest, self).setUp() - self.skipIfMock() - - def make_connection(self): try: - return super(ViewIteratorTest, - self).make_connection(bucket='beer-sample') + super(ViewIteratorTest, self).setUp(bucket='beer-sample') except CouchbaseError: raise SkipTest("Need 'beer-sample' bucket for this") + self.skipIfMock() def test_simple_query(self): ret = self.cb.query("beer", "brewery_beers", limit=3) diff --git a/dev_requirements.txt b/dev_requirements.txt index 8e51873e7..82dca86d0 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,3 +1,4 @@ +requests nose==1.3.0 pbr==1.8.1 numpydoc==0.4 @@ -6,4 +7,9 @@ testresources>=0.2.7 jsonschema==2.6.0 configparser==3.5.0 testfixtures==5.4.0 -basictracer==2.2.0 \ No newline at end of file +basictracer==2.2.0 +jaeger-client==3.9.0 +docker==3.3.0 +py-zipkin==0.11.0 +zipkin==0.1.0 +opentracing-pyzipkin==0.0.1 \ No newline at end of file diff --git a/src/bucket.c b/src/bucket.c index 00d84a99a..2eba389fa 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -298,6 +298,14 @@ Bucket_connected(pycbc_Bucket *self, void *unused) return ret; } +#ifdef PYCBC_TRACING +static PyObject *Bucket_tracer(pycbc_Bucket *self, void *unused) +{ + PyObject *result = self->tracer ? (PyObject *)self->tracer : Py_None; + PYCBC_INCREF(result); + return result; +} +#endif static PyObject * Bucket__instance_pointer(pycbc_Bucket *self, void *unused) { @@ -362,7 +370,10 @@ Bucket__close(pycbc_Bucket *self) } self->flags |= PYCBC_CONN_F_CLOSED; - +#ifdef PYCBC_TRACING + Py_XDECREF(self->tracer); + self->tracer = NULL; +#endif lcb_destroy(self->instance); if (self->iopswrap) { @@ -517,7 +528,13 @@ static PyGetSetDef Bucket_TABLE_getset[] = { "Note that this will still return true even if\n" "it is subsequently closed via :meth:`_close`\n") }, - +#ifdef PYCBC_TRACING + {"tracer", + (getter) Bucket_tracer, + NULL, + PyDoc_STR("Tracer used by bucket, if any.\n") + }, +#endif { "_instance_pointer", (getter)Bucket__instance_pointer, NULL, @@ -777,6 +794,11 @@ static PyMethodDef Bucket_TABLE_methods[] = { { NULL, NULL, 0, NULL } }; +PyObject *pycbc_value_or_none(PyObject *tracer) +{ + return tracer ? tracer : Py_None; +} + static int Bucket__init__(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) @@ -791,7 +813,9 @@ Bucket__init__(pycbc_Bucket *self, PyObject *tc = NULL; PyObject *tracer = NULL; struct lcb_create_st create_opts = { 0 }; - +#ifdef PYCBC_TRACING + lcbtrace_TRACER *threshold_tracer = NULL; +#endif /** * This xmacro enumerates the constructor keywords, targets, and types. @@ -848,17 +872,8 @@ Bucket__init__(pycbc_Bucket *self, self->unlock_gil = 0; } #ifdef PYCBC_TRACING - if (tracer){ - PyObject *tracer_args = PyTuple_New(2); - PyTuple_SetItem(tracer_args, 0, tracer); - PyTuple_SetItem(tracer_args, 1, (PyObject*) self); - self->tracer = (pycbc_Tracer_t *) PyObject_CallFunction((PyObject *) &pycbc_TracerType, "O", tracer_args); - if (PyErr_Occurred()) { - PYCBC_EXCEPTION_LOG_NOCLEAR; - PYCBC_XDECREF(self->tracer); - self->tracer = NULL; - } - } + PYCBC_DEBUG_LOG("threshold tracer %p", threshold_tracer); + #endif create_opts.version = 3; create_opts.v.v3.type = conntype; @@ -903,8 +918,30 @@ Bucket__init__(pycbc_Bucket *self, err = lcb_create(&self->instance, &create_opts); #ifdef PYCBC_TRACING - if (self->tracer) { - lcb_set_tracer(self->instance, self->tracer->tracer); + threshold_tracer = lcb_get_tracer(self->instance); + + if (tracer || threshold_tracer) { + PyObject *tracer_args = PyTuple_New(2); + PyObject *threshold_tracer_capsule = + threshold_tracer + ? PyCapsule_New( + threshold_tracer, "threshold_tracer", NULL) + : NULL; + PyTuple_SetItem(tracer_args, 0, pycbc_value_or_none(tracer)); + PyTuple_SetItem( + tracer_args, 1, pycbc_value_or_none(threshold_tracer_capsule)); + self->tracer = + (pycbc_Tracer_t *)PyObject_Call((PyObject *)&pycbc_TracerType, + tracer_args, + pycbc_DummyKeywords); + if (PyErr_Occurred()) { + PYCBC_EXCEPTION_LOG_NOCLEAR; + self->tracer = NULL; + } else { + PYCBC_XINCREF(self->tracer); + lcb_set_tracer(self->instance, self->tracer->tracer); + } + PYCBC_DECREF(tracer_args); } #endif if (err != LCB_SUCCESS) { @@ -941,12 +978,16 @@ Bucket__init__(pycbc_Bucket *self, return 0; } - static PyObject* Bucket__connect(pycbc_Bucket *self, PyObject* args, PyObject* kwargs) { - pycbc_stack_context_handle context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, - self->tracer, "bucket.connect"); +#ifdef PYCBC_TRACING + pycbc_stack_context_handle context = + PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, + LCBTRACE_OP_REQUEST_ENCODING, + self->tracer, + "bucket.connect"); +#endif lcb_error_t err; if (self->flags & PYCBC_CONN_F_CONNECTED) { @@ -962,7 +1003,7 @@ Bucket__connect(pycbc_Bucket *self, PyObject* args, PyObject* kwargs) return NULL; } - PYCBC_TRACE_WRAP(pycbc_oputil_wait_common, NULL, self); + PYCBC_TRACE_WRAP_VOID(pycbc_oputil_wait_common, NULL, self); if ((self->flags & PYCBC_CONN_F_ASYNC) == 0) { err = lcb_get_bootstrap_status(self->instance); if (err != LCB_SUCCESS) { @@ -984,6 +1025,7 @@ Bucket__connect(pycbc_Bucket *self, PyObject* args, PyObject* kwargs) static void Bucket_dtor(pycbc_Bucket *self) { + PYCBC_DEBUG_LOG("destroying %p", self); if (self->flags & PYCBC_CONN_F_CLOSED) { lcb_destroy(self->instance); self->instance = NULL; @@ -1007,6 +1049,7 @@ Bucket_dtor(pycbc_Bucket *self) } #ifdef PYCBC_TRACING PYCBC_XDECREF((PyObject*)self->tracer); + self->tracer = NULL; #endif #ifdef WITH_THREAD diff --git a/src/callbacks.c b/src/callbacks.c index c9dabf62c..aeee88583 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -28,7 +28,6 @@ static PyObject * mk_sd_tuple(const lcb_SDENTRY *ent); */ static void mk_sd_error(pycbc__SDResult *res, pycbc_MultiResult *mres, lcb_error_t rc, size_t ix); - static void cb_thr_end(pycbc_Bucket *self) { @@ -39,6 +38,9 @@ cb_thr_end(pycbc_Bucket *self) static void cb_thr_begin(pycbc_Bucket *self) { +#ifdef PYCBC_TRACING + pycbc_Tracer_propagate(self->tracer); +#endif if (Py_REFCNT(self) > 1) { Py_DECREF(self); PYCBC_CONN_THR_BEGIN(self); @@ -95,9 +97,6 @@ static void operation_completed3(pycbc_Bucket *self, { pycbc_assert(self->nremaining); --self->nremaining; -#ifdef PYCBC_TRACING - pycbc_Tracer_propagate(self->tracer); -#endif if (mres) { mres->err_info = err_info; Py_XINCREF(err_info); @@ -162,13 +161,16 @@ static pycbc_enhanced_err_info *pycbc_enhanced_err_info_store( static void operation_completed_with_err_info(pycbc_Bucket *self, pycbc_MultiResult *mres, int cbtype, - const lcb_RESPBASE *resp) + const lcb_RESPBASE *resp, + pycbc_Result *res) { pycbc_enhanced_err_info *err_info = pycbc_enhanced_err_info_store(resp, cbtype); + PYCBC_CONTEXT_DEREF(PYCBC_RESULT_EXTRACT_CONTEXT(res), 0); operation_completed3(self, mres, err_info); Py_XDECREF(err_info); } + /** * Call this function for each callback. Note that even if this function * returns nonzero, CB_THR_BEGIN() must still be called, and the `conn` @@ -191,7 +193,8 @@ get_common_objects(const lcb_RESPBASE *resp, pycbc_Bucket **conn, PyObject *mrdict; int rv; #ifdef PYCBC_TRACING - pycbc_stack_context_handle stack_context_handle = NULL; + pycbc_stack_context_handle parent_context = NULL; + pycbc_stack_context_handle decoding_context = NULL; #endif pycbc_assert(pycbc_multiresult_check(resp->cookie)); *mres = (pycbc_MultiResult*)resp->cookie; @@ -207,34 +210,15 @@ get_common_objects(const lcb_RESPBASE *resp, pycbc_Bucket **conn, } mrdict = pycbc_multiresult_dict(*mres); - *res = (pycbc_Result*)PyDict_GetItem(mrdict, hkey); -#ifdef PYCBC_TRACING - pycbc_print_repr(mrdict); - - PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("&res %p: coming back from callback on key [%.*s] or PyString: [",res, (int)resp->nkey,(const char*)resp->key); - pycbc_stack_context_handle handle = NULL; - PYCBC_DEBUG_LOG_RAW("]\n"); - if( *res ) { - handle = (*res)->tracing_context; - PYCBC_DEBUG_LOG("res %p",*res); - - if (PYCBC_CHECK_CONTEXT(handle)) { - stack_context_handle = pycbc_Tracer_span_start(handle->tracer, NULL, - LCBTRACE_OP_RESPONSE_DECODING, 0, - handle, LCBTRACE_REF_CHILD_OF, "get_common_objects"); - PYCBC_DEBUG_LOG("res %p: starting new context on key %.*s\n", *res, (int) resp->nkey, - (const char *) resp->key); - } - if ((*res)->is_tracing_stub) { - PyDict_DelItem(mrdict, hkey); - - *res = NULL; - - } - - } +#ifdef PYCBC_TRACING + parent_context = PYCBC_MULTIRESULT_EXTRACT_CONTEXT(*mres, hkey, res); + decoding_context = + pycbc_Result_start_context(parent_context, + hkey, + "get_common_objects", + LCBTRACE_OP_RESPONSE_DECODING); #endif if (*res) { int exists_ok = (restype & RESTYPE_EXISTS_OK) || @@ -286,15 +270,12 @@ get_common_objects(const lcb_RESPBASE *resp, pycbc_Bucket **conn, abort(); } PyDict_SetItem(mrdict, hkey, (PyObject*)*res); -#ifdef PYCBC_TRACING - handle = stack_context_handle; - (*res)->is_tracing_stub = 0; -#endif (*res)->key = hkey; PYCBC_DECREF(*res); } - PYCBC_TRACE_POP_CONTEXT(stack_context_handle); + PYCBC_CONTEXT_DEREF(decoding_context, 1); + if (resp->rc) { (*res)->rc = resp->rc; } @@ -352,7 +333,8 @@ dur_chain2(pycbc_Bucket *conn, maybe_push_operr(mres, (pycbc_Result*)res, resp->rc, is_delete ? 1 : 0); if ((mres->mropts & PYCBC_MRES_F_DURABILITY) == 0 || resp->rc != LCB_SUCCESS) { - operation_completed_with_err_info(conn, mres, cbtype, resp); + operation_completed_with_err_info( + conn, mres, cbtype, resp, (pycbc_Result *)res); CB_THR_BEGIN(conn); return; } @@ -396,7 +378,8 @@ dur_chain2(pycbc_Bucket *conn, if (err != LCB_SUCCESS) { res->rc = err; maybe_push_operr(mres, (pycbc_Result*)res, err, 0); - operation_completed_with_err_info(conn, mres, cbtype, resp); + operation_completed_with_err_info( + conn, mres, cbtype, resp, (pycbc_Result *)res); } CB_THR_BEGIN(conn); @@ -420,7 +403,8 @@ durability_chain_common(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) } if (get_common_objects(resp, &conn, (pycbc_Result**)&res, restype, &mres) != 0) { - operation_completed_with_err_info(conn, mres, cbtype, resp); + operation_completed_with_err_info( + conn, mres, cbtype, resp, (pycbc_Result *)res); CB_THR_BEGIN(conn); return; } @@ -476,9 +460,9 @@ value_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) const lcb_RESPCOUNTER *cresp = (const lcb_RESPCOUNTER *)resp; res->value = pycbc_IntFromULL(cresp->value); } - GT_DONE: - operation_completed_with_err_info(conn, mres, cbtype, resp); + operation_completed_with_err_info( + conn, mres, cbtype, resp, (pycbc_Result *)res); CB_THR_BEGIN(conn); (void)instance; } @@ -571,7 +555,8 @@ subdoc_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *rb) } GT_ERROR: - operation_completed_with_err_info(conn, mres, cbtype, rb); + operation_completed_with_err_info( + conn, mres, cbtype, rb, (pycbc_Result *)res); CB_THR_BEGIN(conn); (void)instance; } @@ -599,7 +584,8 @@ keyop_simple_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp) res->cas = resp->cas; } - operation_completed_with_err_info(conn, mres, cbtype, resp); + operation_completed_with_err_info( + conn, mres, cbtype, resp, (pycbc_Result *)res); CB_THR_BEGIN(conn); (void)instance; @@ -613,6 +599,7 @@ stats_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) PyObject *skey, *knodes; PyObject *mrdict; pycbc_Bucket *parent; + pycbc_Result *res = NULL; const lcb_RESPSTATS *resp = (const lcb_RESPSTATS *)resp_base; int do_return = 0; @@ -623,7 +610,7 @@ stats_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) if (resp->rc != LCB_SUCCESS) { do_return = 1; if (mres->errop == NULL) { - pycbc_Result *res = (pycbc_Result*)pycbc_result_new(parent); + res = (pycbc_Result *)pycbc_result_new(parent); res->rc = resp->rc; res->key = Py_None; Py_INCREF(res->key); maybe_push_operr(mres, res, resp->rc, 0); @@ -632,7 +619,7 @@ stats_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) if (resp->rflags & LCB_RESP_F_FINAL) { /* Note this can happen in both success and error cases! */ do_return = 1; - operation_completed_with_err_info(parent, mres, cbtype, resp_base); + operation_completed_with_err_info(parent, mres, cbtype, resp_base, res); } if (do_return) { CB_THR_BEGIN(parent); @@ -640,6 +627,10 @@ stats_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) } skey = pycbc_SimpleStringN(resp->key, resp->nkey); + + mrdict = pycbc_multiresult_dict(mres); + knodes = PyDict_GetItem(mrdict, skey); + value = pycbc_SimpleStringN(resp->value, resp->nvalue); { PyObject *intval = pycbc_maybe_convert_to_int(value); @@ -652,15 +643,12 @@ stats_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) } } - mrdict = pycbc_multiresult_dict(mres); - knodes = PyDict_GetItem(mrdict, skey); if (!knodes) { knodes = PyDict_New(); PyDict_SetItem(mrdict, skey, knodes); } PyDict_SetItemString(knodes, resp->server, value); - Py_DECREF(skey); Py_DECREF(value); @@ -676,14 +664,13 @@ observe_callback(lcb_t instance, int cbtype, const lcb_RESPBASE *resp_base) int rv; pycbc_ObserveInfo *oi; pycbc_Bucket *conn; - pycbc_ValueResult *vres; + pycbc_ValueResult *vres = NULL; pycbc_MultiResult *mres; const lcb_RESPOBSERVE *oresp = (const lcb_RESPOBSERVE *)resp_base; - if (resp_base->rflags & LCB_RESP_F_FINAL) { mres = (pycbc_MultiResult*)resp_base->cookie; operation_completed_with_err_info( - mres->parent, mres, cbtype, resp_base); + mres->parent, mres, cbtype, resp_base, (pycbc_Result *)vres); return; } @@ -848,7 +835,8 @@ static void ping_callback(lcb_t instance, } if (resp->rflags & LCB_RESP_F_FINAL) { /* Note this can happen in both success and error cases!*/ - operation_completed_with_err_info(parent, mres, cbtype, resp_base); + operation_completed_with_err_info( + parent, mres, cbtype, resp_base, NULL); } CB_THR_BEGIN(parent); } @@ -863,11 +851,12 @@ static void diag_callback(lcb_t instance, pycbc_MultiResult *mres = (pycbc_MultiResult *)resp->cookie; PyObject *resultdict = pycbc_multiresult_dict(mres); + pycbc_Result *res = NULL; parent = mres->parent; CB_THR_END(parent); if (resp->rc != LCB_SUCCESS) { if (mres->errop == NULL) { - pycbc_Result *res = (pycbc_Result *)pycbc_result_new(parent); + res = (pycbc_Result *)pycbc_result_new(parent); res->rc = resp->rc; res->key = Py_None; Py_INCREF(res->key); @@ -880,7 +869,7 @@ static void diag_callback(lcb_t instance, } if (resp->rflags & LCB_RESP_F_FINAL) { /* Note this can happen in both success and error cases!*/ - operation_completed_with_err_info(parent, mres, cbtype, resp_base); + operation_completed_with_err_info(parent, mres, cbtype, resp_base, res); } CB_THR_BEGIN(parent); diff --git a/src/constants.c b/src/constants.c index a78b7bc7b..f01818525 100644 --- a/src/constants.c +++ b/src/constants.c @@ -85,9 +85,9 @@ typedef void (*pycbc_constant_handler)(PyObject *, const char *, long long int); -static PyObject *setup_compression_map(PyObject *module, - pycbc_constant_handler handler); - +static void setup_compression_map(PyObject *module, + pycbc_constant_handler handler); +static void setup_tracing_map(PyObject *module, pycbc_constant_handler handler); static void do_all_constants(PyObject *module, pycbc_constant_handler handler) { @@ -210,21 +210,58 @@ do_all_constants(PyObject *module, pycbc_constant_handler handler) ADD_MACRO(LCB_BTYPE_COUCHBASE); ADD_MACRO(LCB_BTYPE_EPHEMERAL); ADD_MACRO(LCB_BTYPE_MEMCACHED); - /* Encryption options */ ADD_MACRO(LCBCRYPTO_KEY_ENCRYPT); ADD_MACRO(LCBCRYPTO_KEY_DECRYPT); LCB_CONSTANT(VERSION); ADD_MACRO(PYCBC_CRYPTO_VERSION); - PyModule_AddObject(module, "COMPRESSION", setup_compression_map(module, handler)); - + setup_tracing_map(module, handler); + setup_compression_map(module, handler); #ifdef LCB_N1XSPEC_F_DEFER ADD_MACRO(LCB_N1XSPEC_F_DEFER); #endif } -static PyObject *setup_compression_map(PyObject *module, - pycbc_constant_handler handler) +static void setup_tracing_map(PyObject *module, + pycbc_constant_handler handler) { +#define LCB_CNTL_CONSTANT(postfix, ...) ADD_CONSTANT(#postfix, LCB_CNTL_##postfix) +#define LCB_FOR_EACH_THRESHOLD_PARAM(X, DIV)\ + X(TRACING_ORPHANED_QUEUE_FLUSH_INTERVAL, convert_timevalue) DIV\ + X(TRACING_ORPHANED_QUEUE_SIZE, convert_u32) DIV\ + X(TRACING_THRESHOLD_QUEUE_FLUSH_INTERVAL, convert_timevalue) DIV\ + X(TRACING_THRESHOLD_QUEUE_SIZE, convert_u32) DIV\ + X(TRACING_THRESHOLD_KV, convert_timevalue) DIV\ + X(TRACING_THRESHOLD_N1QL, convert_timevalue) DIV\ + X(TRACING_THRESHOLD_VIEW, convert_timevalue) DIV\ + X(TRACING_THRESHOLD_FTS, convert_timevalue) DIV\ + X(TRACING_THRESHOLD_ANALYTICS, convert_timevalue) DIV \ + X(ENABLE_TRACING, convert_intbool) + PyObject* convert_timevalue = pycbc_SimpleStringZ("timeout"); + PyObject* convert_u32 = pycbc_SimpleStringZ("uint32_t"); + PyObject* convert_intbool = pycbc_SimpleStringZ("int"); + PyObject *result = PyDict_New(); + LCB_FOR_EACH_THRESHOLD_PARAM(LCB_CNTL_CONSTANT, ;); + LCB_CNTL_TRACING_THRESHOLD_KV; +#define X(NAME, VALUE) \ + { \ + PyObject *attrdict = PyDict_New(); \ + PyObject *val = PyLong_FromLong(LCB_CNTL_##NAME); \ + PyDict_SetItemString(attrdict, "op", val); \ + PyDict_SetItemString(attrdict, "value_type", VALUE); \ + PyDict_SetItemString(result, #NAME, attrdict); \ + Py_DecRef(val); \ + Py_DecRef(attrdict); \ + } + LCB_FOR_EACH_THRESHOLD_PARAM(X, ;); +#undef X + Py_DecRef(convert_timevalue); + Py_DecRef(convert_u32); + PyModule_AddObject(module, "TRACING", result); +#undef LCB_FOR_EACH_THRESHOLD_PARAM +} + +static void setup_compression_map(PyObject *module, + pycbc_constant_handler handler) { /* Compression options */ #define LCB_FOR_EACH_COMPRESS_TYPE(X, DIV)\ @@ -246,7 +283,8 @@ static PyObject *setup_compression_map(PyObject *module, #define X(NAME, VALUE) PyDict_SetItemString(result,#NAME,PyLong_FromLong(VALUE)) PP_FOR_EACH(X, ;); #undef X - return result; +#undef PP_FOR_EACH + PyModule_AddObject(module, "COMPRESSION", result); } diff --git a/src/convert.c b/src/convert.c index fed362df7..5c7333af8 100644 --- a/src/convert.c +++ b/src/convert.c @@ -265,6 +265,7 @@ enum { DECODE_KEY, DECODE_VALUE }; + static int do_call_tc(pycbc_Bucket *conn, PyObject *obj, PyObject *flags, PyObject **result, int mode) @@ -306,7 +307,8 @@ do_call_tc(pycbc_Bucket *conn, PyObject *obj, PyObject *flags, conn->tc); goto GT_DONE; } - *result = PyObject_Call(meth, args, NULL); + PYCBC_EXCEPTION_LOG_NOCLEAR; + PYCBC_STASH_EXCEPTION(*result = PyObject_Call(meth, args, NULL)); if (*result) { ret = 0; } else { @@ -388,7 +390,8 @@ pycbc_tc_decode_key(pycbc_Bucket *conn, const void *key, size_t nkey, } else { bobj = PyBytes_FromStringAndSize(key, nkey); if (bobj) { - rv = do_call_tc(conn, bobj, NULL, pobj, DECODE_KEY); + PYCBC_STASH_EXCEPTION( + rv = do_call_tc(conn, bobj, NULL, pobj, DECODE_KEY)); Py_XDECREF(bobj); } else { diff --git a/src/ext.c b/src/ext.c index 0adb12484..9d5e492ba 100644 --- a/src/ext.c +++ b/src/ext.c @@ -30,7 +30,6 @@ static void log_handler(struct lcb_logprocs_st *procs, unsigned int iid, struct lcb_logprocs_st pycbc_lcb_logprocs = { 0 }; - static PyObject * _libcouchbase_init_helpers(PyObject *self, PyObject *args, PyObject *kwargs) { @@ -429,7 +428,6 @@ static void log_handler(struct lcb_logprocs_st *procs, va_end(ap); PyDict_SetItemString(kwargs, "message", tmp); Py_DECREF(tmp); - tmp = pycbc_IntFromL(iid); PyDict_SetItemString(kwargs, "id", tmp); Py_DECREF(tmp); @@ -442,7 +440,9 @@ static void log_handler(struct lcb_logprocs_st *procs, tmp = pycbc_SimpleStringZ(subsys); PyDict_SetItemString(kwargs, "subsys", tmp); Py_DECREF(tmp); - PyObject_Call(pycbc_log_handler, pycbc_DummyTuple, kwargs); + PYCBC_STASH_EXCEPTION( + PyObject_Call(pycbc_log_handler, pycbc_DummyTuple, kwargs)); + Py_DECREF(kwargs); PyGILState_Release(gil_prev); } @@ -458,113 +458,219 @@ static void log_handler(struct lcb_logprocs_st *procs, #endif #include "oputil.h" - - - -void pycbc_print_string( PyObject *curkey) { #if PYTHON_ABI_VERSION >= 3 - { - const char *keyname = PyUnicode_AsUTF8(curkey); - PYCBC_DEBUG_LOG_RAW("%s", keyname);//(int)length, keyname); - PYCBC_EXCEPTION_LOG_NOCLEAR; - } +#define PYCBC_CSTR(X) PyUnicode_AsUTF8(X) #else - { - PYCBC_DEBUG_LOG("%s",PyString_AsString(curkey)); - }; +#define PYCBC_CSTR(X) PyString_AsString(X) #endif -} +void pycbc_fetch_error(PyObject *err[3]) +{ + PyErr_Restore(err[0], err[1], err[2]); +} -void pycbc_print_repr( PyObject *pobj) { - PyObject* x,*y,*z; - PyErr_Fetch(&x,&y,&z); - { - PyObject *curkey = PyObject_Repr(pobj); - PYCBC_EXCEPTION_LOG; - PyErr_Restore(x, y, z); - if (!curkey) { - PYCBC_DEBUG_LOG("got null repr from %p", pobj); - return; - } - pycbc_print_string(curkey); - PYCBC_XDECREF(curkey); - } +void pycbc_store_error(PyObject *err[3]) +{ + PyErr_Fetch(&err[0], &err[1], &err[2]); } +#ifdef PYCBC_DEBUG void pycbc_exception_log(const char* file, int line, int clear) { if (PyErr_Occurred()) { PyObject* type, *value, *traceback; - PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(file, line, "***** EXCEPTION:[", ""); PyErr_Fetch(&type,&value,&traceback); - pycbc_print_repr(type); - PYCBC_DEBUG_LOG_RAW(","); - pycbc_print_repr(value); - PYCBC_DEBUG_LOG_RAW(","); - if (0){ - /* See if we can get a full traceback */ - PyObject* module_name = PyString_FromString("traceback"); - PyObject* pyth_module = PyImport_Import(module_name); - PYCBC_DECREF(module_name); - - if (pyth_module) { - PyObject* pyth_func = PyObject_GetAttrString(pyth_module, "format_exception"); - if (pyth_func && PyCallable_Check(pyth_func)) { - PyObject *pyth_val; - - pyth_val = PyObject_CallFunctionObjArgs(pyth_func, type, value, traceback, NULL); - pycbc_print_string(pyth_val); - PYCBC_DECREF(pyth_val); - } - } - } - PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(file,line, "]: END OF EXCEPTION *****"); + PYCBC_DEBUG_PYFORMAT("***** EXCEPTION:[%R], [%R] *****", type, value); if (clear) { - PYCBC_XDECREF(type); - PYCBC_XDECREF(value); - PYCBC_XDECREF(traceback); + Py_XDECREF(type); + Py_XDECREF(value); + Py_XDECREF(traceback); } else { PyErr_Restore(type,value,traceback); - } } } + +void pycbc_log_pyformat(const char *file, int line, const char *format, ...) +{ + va_list v1; + PyObject *type = NULL, *value = NULL, *traceback = NULL; + PyObject *formatted; + PyErr_Fetch(&type, &value, &traceback); + va_start(v1, format); + formatted = PyUnicode_FromFormatV(format, v1); + va_end(v1); + if (!formatted || PyErr_Occurred()) { + PYCBC_EXCEPTION_LOG + } else { + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE( + file, line, "%s", PYCBC_CSTR(formatted)); + } + Py_XDECREF(formatted); + PyErr_Print(); + if (type || value || traceback) { + PyErr_Restore(type, value, traceback); + } +} + +#endif + #ifdef PYCBC_TRACING +pycbc_stack_context_handle pycbc_Context_deref_debug( + pycbc_stack_context_handle context, + int should_be_final, + const char *file, + int line) +{ + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE( + file, + line, + "dereffing %p, %s", + context, + should_be_final ? "should be final" : "not necessarily final"); + return pycbc_Context_deref(context, should_be_final); +} -pycbc_stack_context_handle -pycbc_Context_init(pycbc_Tracer_t *py_tracer, const char *operation, lcb_uint64_t now, lcbtrace_REF *ref, const char* component) { - pycbc_stack_context_handle context = malloc(sizeof(pycbc_stack_context)); - pycbc_assert(py_tracer); - context->tracer = py_tracer; - context->span = lcbtrace_span_start(py_tracer->tracer, operation, now, ref); - lcbtrace_span_add_tag_str(context->span, LCBTRACE_TAG_COMPONENT, component); - PYCBC_DEBUG_LOG("Created context %p with span %p: component: %s, operation %s",context, context->span, component, operation); - return context; +static void pycbc_Context_deallocate_chiidren( + pycbc_stack_context_handle context, int should_clean_subnodes) +{ + pycbc_context_children *current_node = context->children; + PYCBC_DEBUG_LOG("freeing children of %p, should already all be free!", + context); + while (current_node) { + pycbc_context_children *next_node = current_node->next; + PYCBC_DEBUG_LOG( + "context %p: freeing node %p", context, current_node->value); + if (should_clean_subnodes) { + PYCBC_CONTEXT_DEREF(current_node->value, 1); + } + + free(current_node); + current_node = next_node; + } + context->children = NULL; } -PyObject* pycbc_Context_finish(pycbc_stack_context_handle context ) +pycbc_stack_context_handle pycbc_Context_deref( + pycbc_stack_context_handle context, int should_be_final) { + pycbc_stack_context_handle parent = NULL; if (PYCBC_CHECK_CONTEXT(context)) { - PYCBC_DEBUG_LOG("closing span %p",context->span); - lcbtrace_span_finish(context->span, 0); - (context)->span = NULL; + parent = context->parent; + --context->ref_count; + PYCBC_DEBUG_LOG( + "context %p: %d dereffed", context, (int)context->ref_count); + + if (context->ref_count == 0) { + pycbc_Context_deallocate_chiidren(context, 0); + PYCBC_DEBUG_LOG("closing span %p", context->span); + lcbtrace_span_finish(context->span, 0); + free(context); + PYCBC_CONTEXT_DEREF(parent, 0); + } else { + if (should_be_final && context->children) { + PYCBC_DEBUG_LOG( + "*** %p Should have lost all children by now ***", + context); + pycbc_Context_deallocate_chiidren(context, 0); + } + } }; - return Py_None; + return parent; +} + +int pycbc_wrap_and_pop(pycbc_stack_context_handle *context, int result) +{ + if (context && PYCBC_CHECK_CONTEXT(*context)) { + *context = PYCBC_CONTEXT_DEREF(*context, 0); + } + return result; +} + +static void pycbc_Context_ref(pycbc_stack_context_handle ref_context, + pycbc_stack_context_handle child) +{ + pycbc_context_children *child_node = + calloc(1, sizeof(pycbc_context_children)); + child_node->value = child; + child_node->next = ref_context->children; + ++ref_context->ref_count; + ref_context->children = child_node; + PYCBC_DEBUG_LOG( + "context %p: %d reffed", ref_context, (int)ref_context->ref_count); } -pycbc_stack_context_handle pycbc_check_context(pycbc_stack_context_handle CONTEXT, const char* file, int line) + +pycbc_stack_context_handle pycbc_Context_init( + pycbc_Tracer_t *py_tracer, + const char *operation, + lcb_uint64_t now, + pycbc_stack_context_handle ref_context, + lcbtrace_REF_TYPE ref_type, + const char *component) { + pycbc_stack_context_handle context = calloc(1, sizeof(pycbc_stack_context)); + + lcbtrace_REF ref; + ref.type = ref_context ? ref_type : LCBTRACE_REF_NONE; + ref.span = ref_context ? ref_context->span : NULL; + pycbc_assert(py_tracer); + context->tracer = py_tracer; + context->span = lcbtrace_span_start( + py_tracer->tracer, operation, now, ref_context ? &ref : NULL); + context->ref_count = 1; + context->parent = NULL; + if (ref_context) { + switch (ref_type) { + case LCBTRACE_REF_CHILD_OF: + context->parent = ref_context; + pycbc_Context_ref(ref_context, context); + break; + case LCBTRACE_REF_FOLLOWS_FROM: + if (ref_context->parent) { + context->parent = ref_context->parent; + pycbc_Context_ref(ref_context->parent, context); + } + break; + case LCBTRACE_REF_NONE: + case LCBTRACE_REF__MAX: + default: + break; + } + } + lcbtrace_span_add_tag_str(context->span, LCBTRACE_TAG_COMPONENT, component); + PYCBC_DEBUG_LOG( + "Created context %p with span %p: component: %s, operation %s, " + "ref_context %p", + context, + context->span, + component, + operation, + context->parent); + return context; +} + +pycbc_stack_context_handle pycbc_Context_check( + pycbc_stack_context_handle CONTEXT, const char *file, int line) +{ + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE( + file, line, "checking context %p", CONTEXT); if (!(CONTEXT)){ PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(file,line,"warning: got null context"); } else if (!(CONTEXT)->tracer){\ PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(file,line,"warning: got null tracer"); } else if (!(CONTEXT)->span){ PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(file,line,"warning: got null span"); + } else if (!(CONTEXT)->tracer->tracer) { + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE( + file, line, "warning: got null lcb_tracer"); + } else if (!(CONTEXT->tracer->tracer->destructor)) { + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE( + file, line, "warning: got null destructor"); } else { return CONTEXT; @@ -573,108 +679,141 @@ pycbc_stack_context_handle pycbc_check_context(pycbc_stack_context_handle CONTEX } pycbc_stack_context_handle -pycbc_Tracer_span_start(pycbc_Tracer_t *py_tracer, PyObject *kwargs, const char *operation, lcb_uint64_t now, +pycbc_Tracer_start_span(pycbc_Tracer_t *py_tracer, PyObject *kwargs, const char *operation, lcb_uint64_t now, pycbc_stack_context_handle context, lcbtrace_REF_TYPE ref_type, const char* component) { PyObject *tracer = kwargs?PyDict_GetItemString(kwargs, "tracer"):NULL; if (!(py_tracer || (tracer && PyArg_ParseTuple(tracer, "O!", &pycbc_TracerType, &py_tracer) && py_tracer))) { PYCBC_EXCEPTION_LOG; - printf("Warning - got NULL tracer\n"); return NULL; } - if (context ) - { - lcbtrace_REF ref; - ref.type = ref_type; - ref.span = context->span; - return pycbc_Context_init(py_tracer, operation, now, &ref, component); + return pycbc_Context_init(py_tracer, operation, now, context, ref_type, component); + +} + +pycbc_stack_context_handle pycbc_Result_start_context( + pycbc_stack_context *parent_context, + PyObject *hkey, + char *component, + char *operation) +{ + pycbc_stack_context_handle stack_context_handle = NULL; + if (PYCBC_CHECK_CONTEXT(parent_context)) { + pycbc_Tracer_t *py_tracer = (parent_context)->tracer; + if (py_tracer) { + stack_context_handle = pycbc_Context_init(py_tracer, + operation, + 0, + parent_context, + LCBTRACE_REF_CHILD_OF, + component); + } + PYCBC_DEBUG_PYFORMAT("starting new context on key:[%S]", hkey); + } + return stack_context_handle; +} + +void pycbc_Result_propagate_context(pycbc_Result *res, pycbc_stack_context_handle parent_context) { + if (PYCBC_CHECK_CONTEXT(parent_context)) { + ++(parent_context->ref_count); + res->tracing_context = parent_context; } else { - return pycbc_Context_init(py_tracer, operation, now, NULL, component); + res->tracing_context = NULL; } + + res->is_tracing_stub = 0; } -void pycbc_init_traced_result(pycbc_Bucket *self, PyObject* mres_dict, PyObject *curkey, - pycbc_stack_context_handle context) { +pycbc_stack_context_handle pycbc_MultiResult_extract_context( + pycbc_MultiResult *self, PyObject *hkey, pycbc_Result **res) +{ + PyObject *mrdict = pycbc_multiresult_dict(self); + pycbc_stack_context_handle parent_context = NULL; + if (*res) { + PYCBC_DEBUG_PYFORMAT( + "[%R]\n" + "&res %p: coming back from callback on key: [%R]", + mrdict, + res, + hkey); + PYCBC_DEBUG_LOG("res %p", *res); + + parent_context = PYCBC_CHECK_CONTEXT((*res)->tracing_context); + + if ((*res)->is_tracing_stub) { + PyDict_DelItem(mrdict, hkey); + *res = NULL; + } + } + return parent_context; +}; +pycbc_stack_context_handle pycbc_Result_extract_context(const pycbc_Result *res) +{ + return res ? (res->tracing_context) : NULL; +} +void pycbc_MultiResult_init_context(pycbc_MultiResult *self, PyObject *curkey, + pycbc_stack_context_handle context, pycbc_Bucket *bucket) { + PyObject* mres_dict = pycbc_multiresult_dict(self); pycbc_pybuffer keybuf={0}; pycbc_Result *item; - pycbc_tc_encode_key(self, curkey, &keybuf); + if (!context) { + return; + } + pycbc_tc_encode_key(bucket, curkey, &keybuf); PYCBC_EXCEPTION_LOG_NOCLEAR; - pycbc_tc_decode_key(self, keybuf.buffer, keybuf.length, &curkey); + pycbc_tc_decode_key(bucket, keybuf.buffer, keybuf.length, &curkey); item=(pycbc_Result*)PyDict_GetItem(mres_dict, curkey); if (!item) { - item = (pycbc_Result*) pycbc_valresult_new(self); + PYCBC_DEBUG_PYFORMAT("Prior to insertion:[%R]", mres_dict); + PYCBC_EXCEPTION_LOG_NOCLEAR; + item = (pycbc_Result *)pycbc_valresult_new(bucket); PyDict_SetItem(mres_dict, curkey, (PyObject*)item); item->is_tracing_stub = 1; } PYCBC_EXCEPTION_LOG_NOCLEAR; + ++context->ref_count; item->tracing_context = context; - PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("\nres %p: binding context %p to [", item, context); - pycbc_print_repr(curkey); - PYCBC_DEBUG_LOG_RAW("]\n"); - PYCBC_EXCEPTION_LOG_NOCLEAR; - PYCBC_DEBUG_LOG("\nPrior to insertion:["); - pycbc_print_repr(mres_dict); - PYCBC_DEBUG_LOG_RAW("]\n"); + PYCBC_DEBUG_PYFORMAT( + "res %p: binding context %p to [%R]", item, context, curkey); PYCBC_EXCEPTION_LOG_NOCLEAR; - PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("After insertion:["); - pycbc_print_repr(mres_dict); - PYCBC_DEBUG_LOG("]\n"); + PYCBC_DEBUG_PYFORMAT("After insertion:[%R]", mres_dict); } int pycbc_is_async_or_pipeline(const pycbc_Bucket *self) { return self->flags & PYCBC_CONN_F_ASYNC || self->pipeline_queue; } -void pycbc_tracer_destructor(lcbtrace_TRACER *tracer) -{ - if (tracer) { - if (tracer->cookie) { - free(tracer->cookie); - tracer->cookie = NULL; - } - free(tracer); - } -} - #define LOGARGS(instance, lvl) instance->settings, "bootstrap", LCB_LOG_##lvl, __FILE__, __LINE__ #endif -void pycbc_set_dict_kv_object(PyObject *dict, PyObject *key, const char* value_str) { - PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("adding ["); - pycbc_print_repr(key); - PYCBC_DEBUG_LOG_RAW("], value %s to [", value_str); - pycbc_print_repr(dict); - PYCBC_DEBUG_LOG_RAW("]\n"); - { - PyObject *value = pycbc_SimpleStringZ(value_str); - PyDict_SetItem(dict, key, value); - PYCBC_DECREF(value); - } +void pycbc_set_dict_kv_object(PyObject *dict, + PyObject *key, + const char *value_str) +{ + PyObject *value = pycbc_SimpleStringZ(value_str); + PYCBC_DEBUG_PYFORMAT("adding [%R], value %S to [%R]", key, value, dict); + PyDict_SetItem(dict, key, value); + PYCBC_DECREF(value); } void pycbc_set_kv_ull(PyObject *dict, PyObject *keystr, lcb_uint64_t parenti_id) { - PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("adding ["); - pycbc_print_repr(keystr); - PYCBC_DEBUG_LOG_RAW("], value %" PRId64 " to [", parenti_id); - pycbc_print_repr(dict); - PYCBC_DEBUG_LOG_RAW("]\n"); - { - PyObject *pULL = PyLong_FromUnsignedLongLong(parenti_id); - PyDict_SetItem(dict, keystr, pULL); - PYCBC_DECREF(pULL); - } + PyObject *pULL = PyLong_FromUnsignedLongLong(parenti_id); + PYCBC_DEBUG_PYFORMAT("adding [%R], value %S to [%R]", keystr, pULL, dict); + PyDict_SetItem(dict, keystr, pULL); + PYCBC_DECREF(pULL); } #ifdef PYCBC_TRACING -#define PYCBC_X_SPAN_ARGS(TEXT,ULL,TAGS)\ - TEXT(operation_name) \ - ULL(child_of) \ - ULL(start_time) \ +#define PYCBC_X_SPAN_ARGS(TEXT, ULL, TAGS, ID) \ + TEXT(operation_name) \ + ID(child_of) \ + ID(id) \ + ULL(start_time) \ TAGS(tags) #define PYCBC_X_FINISH_ARGS(TEXT,ULL)\ @@ -691,7 +830,7 @@ void pycbc_set_kv_ull(PyObject *dict, PyObject *keystr, lcb_uint64_t parenti_id) #undef X #define X(NAME) PyObject* pycbc_##NAME; -PYCBC_X_SPAN_ARGS(X,X,X) +PYCBC_X_SPAN_ARGS(X, X, X, X) PYCBC_X_LITERALTAGNAMES(X,X) PYCBC_X_FINISH_ARGS(X,X) #undef X @@ -703,7 +842,7 @@ void pycbc_Tracer_init_constants() pycbc_default_key = pycbc_SimpleStringZ("__PYCBC_DEFAULT_KEY"); #define X(NAME) pycbc_##NAME=pycbc_SimpleStringZ(#NAME); - PYCBC_X_SPAN_ARGS(X,X,X) + PYCBC_X_SPAN_ARGS(X, X, X, X) PYCBC_X_FINISH_ARGS(X,X) #undef X #define X(NAME) pycbc_##NAME=pycbc_SimpleStringZ(LCBTRACE_TAG_##NAME); @@ -721,7 +860,10 @@ typedef struct pycbc_tracer_tags { #define PYCBC_TAG_STRUCT(NAME) pycbc_tracer_tags_t* NAME; typedef struct pycbc_tracer_span_args { - PYCBC_X_SPAN_ARGS(PYCBC_TAG_TEXT,PYCBC_TAG_ULL,PYCBC_TAG_STRUCT) + PYCBC_X_SPAN_ARGS(PYCBC_TAG_TEXT, + PYCBC_TAG_ULL, + PYCBC_TAG_STRUCT, + PYCBC_TAG_ULL) } pycbc_tracer_span_args_t; typedef struct pycbc_tracer_finish_args @@ -747,11 +889,13 @@ PyObject* pycbc_set_tags_from_payload(pycbc_tracer_tags_t *args) { } #define TAGS(NAME) if(args->NAME) {PyDict_SetItem(dict, pycbc_##NAME, pycbc_set_tags_from_payload((args->NAME))); } - +#define PYCBC_CCBC_TO_OT_ID(NAME) PyObject* pycbc_set_args_from_payload(pycbc_tracer_span_args_t *args) { PyObject* dict = PyDict_New(); - PYCBC_X_SPAN_ARGS(PYCBC_TEXT_TO_DICT,PYCBC_ULL_TO_DICT,TAGS) + + PYCBC_X_SPAN_ARGS( + PYCBC_TEXT_TO_DICT, PYCBC_ULL_TO_DICT, TAGS, PYCBC_CCBC_TO_OT_ID); return dict; } @@ -775,7 +919,8 @@ void pycbc_span_tags_args_dealloc(pycbc_tracer_tags_t* args) { #define PYCBC_TAGS_FREE(NAME) if(args->NAME) { pycbc_span_tags_args_dealloc(args->NAME); } void pycbc_span_args_dealloc(pycbc_tracer_span_args_t *args) { - PYCBC_X_SPAN_ARGS(PYCBC_TEXT_FREE,PYCBC_ULL_FREE, PYCBC_TAGS_FREE) + PYCBC_X_SPAN_ARGS( + PYCBC_TEXT_FREE, PYCBC_ULL_FREE, PYCBC_TAGS_FREE, PYCBC_ULL_FREE) free(args); } @@ -785,6 +930,8 @@ void pycbc_span_finish_args_dealloc(struct pycbc_tracer_finish_args *args) { } #undef PYCBC_TEXT_FREE #undef PYCBC_ULL_FREE +#undef PYCBC_X_FINISH_ARGS +#undef PYCBC_X_SPAN_ARGS struct pycbc_tracer_payload; struct pycbc_tracer_span_args; @@ -799,20 +946,18 @@ typedef struct pycbc_tracer_payload { typedef struct pycbc_tracer_state { pycbc_tracer_payload *root; pycbc_tracer_payload *last; - pycbc_Bucket* bucket; PyObject* parent; PyObject *start_span_method; + lcbtrace_TRACER *child; + PyObject *id_map; } pycbc_tracer_state; void pycbc_init_span_args(pycbc_tracer_payload* payload) { - payload->span_start_args=malloc(sizeof(pycbc_tracer_span_args_t)); - memset((payload->span_start_args),0, sizeof(pycbc_tracer_span_args_t)); - payload->span_start_args->tags=malloc(sizeof(pycbc_tracer_tags_t)); - memset((payload->span_start_args->tags),0, sizeof(pycbc_tracer_tags_t)); - payload->span_finish_args=malloc(sizeof(pycbc_tracer_finish_args_t)); - memset((payload->span_finish_args),0, sizeof(pycbc_tracer_finish_args_t)); + payload->span_start_args = calloc(1, sizeof(pycbc_tracer_span_args_t)); + payload->span_start_args->tags = calloc(1, sizeof(pycbc_tracer_tags_t)); + payload->span_finish_args = calloc(1, sizeof(pycbc_tracer_finish_args_t)); } void pycbc_payload_dealloc(pycbc_tracer_payload *pPayload) { @@ -820,142 +965,187 @@ void pycbc_payload_dealloc(pycbc_tracer_payload *pPayload) { pycbc_span_finish_args_dealloc(pPayload->span_finish_args); } +void pycbc_Tracer_enqueue_payload(pycbc_tracer_state *state, + pycbc_tracer_payload *payload) +{ + if (state->last) { + state->last->next = payload; + } + state->last = payload; + if (state->root == NULL) { + state->root = payload; + } +} +pycbc_tracer_payload *pycbc_persist_span(lcbtrace_SPAN *span); + +void pycbc_Tracer_span_finish(const struct pycbc_tracer_payload *payload, + const pycbc_tracer_state *state, + PyObject *fresh_span); + void pycbc_span_report(lcbtrace_TRACER *tracer, lcbtrace_SPAN *span) { pycbc_tracer_state *state = NULL; + pycbc_tracer_payload *payload; if (tracer == NULL) { return; } + state = tracer->cookie; if (state == NULL) { return; } - { + if (state->child) { + state->child->v.v0.report(state->child, span); + } + if (!state->parent) { + return; + } + payload = pycbc_persist_span(span); + pycbc_Tracer_enqueue_payload(state, payload); +#undef PYCBC_TRACING_BUFSZ +} + +pycbc_tracer_payload *pycbc_persist_span(lcbtrace_SPAN *span) +{ #define PYCBC_TRACING_BUFSZ 1000 - size_t nbuf = PYCBC_TRACING_BUFSZ; - char bufarray[PYCBC_TRACING_BUFSZ]={0}; - char* buf = &bufarray[0]; - lcbtrace_SPAN *parent; - lcb_uint64_t start; - pycbc_tracer_payload *payload = calloc(1, sizeof(pycbc_tracer_payload)); - pycbc_init_span_args(payload); + size_t nbuf = PYCBC_TRACING_BUFSZ; + char bufarray[PYCBC_TRACING_BUFSZ] = {0}; + char *buf = &bufarray[0]; + lcbtrace_SPAN *parent; + lcb_uint64_t start; + pycbc_tracer_payload *payload = calloc(1, sizeof(pycbc_tracer_payload)); + pycbc_init_span_args(payload); + { + pycbc_tracer_span_args_t *span_args = payload->span_start_args; + pycbc_tracer_tags_t *tags_p = span_args->tags; + pycbc_tracer_finish_args_t *span_finish_args = + payload->span_finish_args; + PYCBC_DEBUG_LOG("got span %p", span); + + PYCBC_TRACING_ADD_TEXT(span_args, operation_name, lcbtrace_span_get_operation(span)); + parent = lcbtrace_span_get_parent(span); + if (parent) { + lcb_uint64_t parenti_id = lcbtrace_span_get_trace_id(parent); + PYCBC_TRACING_ADD_U64(span_args, child_of, parenti_id); + } + PYCBC_TRACING_ADD_U64(span_args, id, lcbtrace_span_get_span_id(span)); + start = lcbtrace_span_get_start_ts(span); + PYCBC_TRACING_ADD_U64(span_finish_args, finish_time, lcbtrace_span_get_finish_ts(span)); + PYCBC_TRACING_ADD_U64(span_args, start_time, start); { - pycbc_tracer_span_args_t* span_args = payload->span_start_args; - pycbc_tracer_tags_t* tags_p = span_args->tags; - pycbc_tracer_finish_args_t* span_finish_args = payload->span_finish_args; - PYCBC_DEBUG_LOG("got span %p\n", span); - - buf = calloc(nbuf, sizeof(char)); - PYCBC_TRACING_ADD_TEXT(span_args, operation_name, lcbtrace_span_get_operation(span)); - parent = lcbtrace_span_get_parent(span); - if (parent) { - lcb_uint64_t parenti_id = lcbtrace_span_get_trace_id(parent); - snprintf(buf, nbuf, "%" PRIx64, parenti_id); - } - start = lcbtrace_span_get_start_ts(span); - PYCBC_TRACING_ADD_U64(span_finish_args, finish_time, lcbtrace_span_get_finish_ts(span)); - PYCBC_TRACING_ADD_U64(span_args, start_time, start); - { - nbuf = PYCBC_TRACING_BUFSZ; - if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_DB_TYPE, &buf, &nbuf) == LCB_SUCCESS) { - buf[nbuf] = '\0'; - PYCBC_TRACING_ADD_TEXT(tags_p, DB_TYPE, buf); - } - } - - { - lcb_uint64_t latency, operation_id; - if (lcbtrace_span_get_tag_uint64(span, LCBTRACE_TAG_PEER_LATENCY, &latency) == LCB_SUCCESS) { - PYCBC_TRACING_ADD_U64(tags_p, PEER_LATENCY, latency); - } - if (lcbtrace_span_get_tag_uint64(span, LCBTRACE_TAG_OPERATION_ID, &operation_id) == LCB_SUCCESS) { - PYCBC_TRACING_ADD_U64(tags_p, OPERATION_ID, operation_id); - } - nbuf = PYCBC_TRACING_BUFSZ; - if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_COMPONENT, &buf, &nbuf) == LCB_SUCCESS) { - buf[nbuf] = '\0'; - PYCBC_TRACING_ADD_TEXT(tags_p, COMPONENT, buf); - } - nbuf = PYCBC_TRACING_BUFSZ; - if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_PEER_ADDRESS, &buf, &nbuf) == LCB_SUCCESS) { - buf[nbuf] = '\0'; - PYCBC_TRACING_ADD_TEXT(tags_p, PEER_ADDRESS, buf); - } - nbuf = PYCBC_TRACING_BUFSZ; - if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_LOCAL_ADDRESS, &buf, &nbuf) == LCB_SUCCESS) { - buf[nbuf] = '\0'; - PYCBC_TRACING_ADD_TEXT(tags_p, LOCAL_ADDRESS, buf); - } - nbuf = PYCBC_TRACING_BUFSZ; - if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_DB_INSTANCE, &buf, &nbuf) == LCB_SUCCESS) { - buf[nbuf] = '\0'; - PYCBC_TRACING_ADD_TEXT(tags_p, DB_INSTANCE, buf); - } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_DB_TYPE, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, DB_TYPE, buf); } } - if (state->last) { - state->last->next = payload; - } - state->last = payload; - if (state->root == NULL) { - state->root = payload; + { + lcb_uint64_t latency, operation_id; + if (lcbtrace_span_get_tag_uint64(span, LCBTRACE_TAG_PEER_LATENCY, &latency) == LCB_SUCCESS) { + PYCBC_TRACING_ADD_U64(tags_p, PEER_LATENCY, latency); + } + if (lcbtrace_span_get_tag_uint64(span, LCBTRACE_TAG_OPERATION_ID, &operation_id) == LCB_SUCCESS) { + PYCBC_TRACING_ADD_U64(tags_p, OPERATION_ID, operation_id); + } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_COMPONENT, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, COMPONENT, buf); + } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_PEER_ADDRESS, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, PEER_ADDRESS, buf); + } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_LOCAL_ADDRESS, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, LOCAL_ADDRESS, buf); + } + nbuf = PYCBC_TRACING_BUFSZ; + if (lcbtrace_span_get_tag_str(span, LCBTRACE_TAG_DB_INSTANCE, &buf, &nbuf) == LCB_SUCCESS) { + buf[nbuf] = '\0'; + PYCBC_TRACING_ADD_TEXT(tags_p, DB_INSTANCE, buf); + } } } -#undef PYCBC_TRACING_BUFSZ + return payload; } - - -void pycbc_Tracer_propagate_span(pycbc_Tracer_t *tracer, struct pycbc_tracer_payload *payload) { +pycbc_tracer_payload *pycbc_Tracer_propagate_span( + pycbc_Tracer_t *tracer, struct pycbc_tracer_payload *payload) +{ pycbc_tracer_state *state = (pycbc_tracer_state *) tracer->tracer->cookie; + PyObject *ptype = NULL, *pvalue = NULL, *ptraceback = NULL; + PyErr_Fetch(&ptype, &pvalue, &ptraceback); pycbc_assert(state->parent); if (state->start_span_method && PyObject_IsTrue(state->start_span_method)) { PyObject* start_span_args = pycbc_set_args_from_payload(payload->span_start_args); - PyObject *fresh_span; - - PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("calling start method:["); - pycbc_print_repr(state->start_span_method); - PYCBC_DEBUG_LOG_RAW("]"); - PYCBC_DEBUG_LOG("\nabout to call: %p[\n", state->start_span_method); - PYCBC_DEBUG_LOG("] on %p=[", state->parent); - pycbc_print_repr(state->parent); - PYCBC_DEBUG_LOG_WITHOUT_NEWLINE("full args to start_span:["); - pycbc_print_repr(start_span_args); - PYCBC_DEBUG_LOG_RAW("\n"); - - fresh_span= PyObject_Call(state->start_span_method, pycbc_DummyTuple, - start_span_args); - if(fresh_span) - { - PyObject *finish_method = PyObject_GetAttrString(fresh_span, "finish"); - PYCBC_DEBUG_LOG("Got span'["); - pycbc_print_repr(fresh_span); - PYCBC_DEBUG_LOG("]\n"); - pycbc_assert(finish_method); - PYCBC_DEBUG_LOG("Got finish method'["); - pycbc_print_repr(finish_method); - PYCBC_DEBUG_LOG("]\n"); - if (finish_method) { - PyObject* span_finish_args = pycbc_set_finish_args_from_payload(payload->span_finish_args); - PYCBC_DEBUG_LOG("calling finish method with;["); - pycbc_print_repr(span_finish_args); - PYCBC_DEBUG_LOG("]\n"); - PyObject_Call(finish_method, pycbc_DummyTuple, span_finish_args); - PYCBC_XDECREF(span_finish_args); + if (payload->span_start_args->child_of) { + PyObject *key = PyLong_FromUnsignedLongLong( + *payload->span_start_args->child_of); + PyObject *parent_span = PyDict_GetItem(state->id_map, key); + Py_DecRef(key); + if (parent_span) { + PyDict_SetItem(start_span_args, pycbc_child_of, parent_span); + } else { + // PYCBC_DEBUG_LOG("requeueing %p", payload); + // return payload; } - PYCBC_DECREF(finish_method); - PYCBC_DECREF(fresh_span); - } else{ - PYCBC_DEBUG_LOG("Yielded no span!\n"); } + PYCBC_DEBUG_PYFORMAT("calling start method: %R ( %R )", + state->start_span_method, + start_span_args); + + { + PyObject *fresh_span = PyObject_Call(state->start_span_method, + pycbc_DummyTuple, + start_span_args); + if (fresh_span) { + pycbc_Tracer_span_finish(payload, state, fresh_span); + + } else { + PYCBC_DEBUG_LOG("Yielded no span!"); + } + } + PYCBC_EXCEPTION_LOG; PYCBC_DECREF(start_span_args); - PYCBC_EXCEPTION_LOG_NOCLEAR; } + if (ptype || pvalue || ptraceback) { + PyErr_Restore(ptype, pvalue, ptraceback); + } + return NULL; +} + +void pycbc_Tracer_span_finish(const struct pycbc_tracer_payload *payload, + const pycbc_tracer_state *state, + PyObject *fresh_span) +{ + PyObject *key = PyLong_FromUnsignedLongLong(*payload->span_start_args->id); + PyObject *finish_method; + PyDict_SetItem(state->id_map, key, fresh_span); + Py_DecRef(key); + finish_method = PyObject_GetAttrString(fresh_span, "finish"); + PYCBC_DEBUG_PYFORMAT("Got span'[%R]", fresh_span); + pycbc_assert(finish_method); + PYCBC_DEBUG_PYFORMAT("Got finish method'[%R]", finish_method); + if (finish_method) { + PyObject *span_finish_args = + pycbc_set_finish_args_from_payload(payload->span_finish_args); + PYCBC_DEBUG_PYFORMAT("calling finish method with;[%R]", + span_finish_args); + PyObject_Call(finish_method, pycbc_DummyTuple, span_finish_args); + PYCBC_EXCEPTION_LOG; + PYCBC_XDECREF(span_finish_args); + } + + PYCBC_XDECREF(finish_method); + PYCBC_DECREF(fresh_span); } void pycbc_tracer_flush(pycbc_Tracer_t *tracer) @@ -974,27 +1164,41 @@ void pycbc_tracer_flush(pycbc_Tracer_t *tracer) } { pycbc_tracer_payload *ptr = state->root; - PYCBC_DEBUG_LOG("flushing\n"); + PYCBC_DEBUG_LOG("flushing"); while (ptr) { pycbc_tracer_payload *tmp = ptr; - if (tracer->parent) { + ptr = ptr->next; + if (state->parent) { pycbc_Tracer_propagate_span(tracer, tmp); } - ptr = ptr->next; pycbc_payload_dealloc(tmp); } } state->root = state->last = NULL; } - -void pycbc_Tracer_propagate( pycbc_Tracer_t *tracer) { +void pycbc_Tracer_propagate(pycbc_Tracer_t *tracer) +{ pycbc_tracer_flush(tracer); } +void pycbc_tracer_destructor(lcbtrace_TRACER *tracer) +{ + if (tracer) { + pycbc_tracer_state *state = tracer->cookie; + if (state) { + // Py_XDECREF(state->parent); + Py_XDECREF(state->id_map); + Py_XDECREF(state->start_span_method); + free(state); + tracer->cookie = NULL; + } + free(tracer); + } +} - -lcbtrace_TRACER *pycbc_tracer_new(PyObject *parent, pycbc_Bucket *bucket) +lcbtrace_TRACER *pycbc_tracer_new(PyObject *parent, + lcbtrace_TRACER *child_tracer) { lcbtrace_TRACER *tracer = calloc(1, sizeof(lcbtrace_TRACER)); pycbc_tracer_state *pycbc_tracer = calloc(1, sizeof(pycbc_tracer_state)); @@ -1005,18 +1209,18 @@ lcbtrace_TRACER *pycbc_tracer_new(PyObject *parent, pycbc_Bucket *bucket) pycbc_tracer->root = NULL; pycbc_tracer->last = NULL; tracer->cookie = pycbc_tracer; - pycbc_tracer->bucket = bucket; + pycbc_tracer->id_map = PyDict_New(); + pycbc_tracer->child = child_tracer; + if (parent) { - PYCBC_DEBUG_LOG("\ninitialising tracer start_span method from:["); - pycbc_print_repr(parent); - PYCBC_DEBUG_LOG("]\n"); + PYCBC_DEBUG_PYFORMAT("initialising tracer start_span method from:[%R]", + parent); pycbc_tracer->start_span_method = PyObject_GetAttrString(parent, "start_span"); - if (pycbc_tracer->start_span_method) - { - PYCBC_DEBUG_LOG("got start_span method:["); - pycbc_print_repr(pycbc_tracer->start_span_method); - PYCBC_DEBUG_LOG("]\n"); + if (!PyErr_Occurred() && pycbc_tracer->start_span_method) { + PYCBC_DEBUG_PYFORMAT("got start_span method:[%R]", + pycbc_tracer->start_span_method); pycbc_tracer->parent = parent; + PYCBC_INCREF(parent); } else { @@ -1028,7 +1232,63 @@ lcbtrace_TRACER *pycbc_tracer_new(PyObject *parent, pycbc_Bucket *bucket) return tracer; } +void pycbc_Tracer_set_child(pycbc_Tracer_t *py_Tracer, + lcbtrace_TRACER *child_tracer) +{ + ((pycbc_tracer_state *)(py_Tracer->tracer->cookie))->child = child_tracer; +} + +static PyObject *Tracer_parent(pycbc_Tracer_t *self, void *unused) +{ + pycbc_tracer_state *tracer_state = + (pycbc_tracer_state *)self->tracer->cookie; + pycbc_assert(tracer_state); + { + PyObject *result = + tracer_state->parent ? tracer_state->parent : Py_None; + PYCBC_INCREF(result); + return result; + } +} + +PyObject *pycbc_null_or_value(PyObject *tracer) +{ + return (tracer && PyObject_IsTrue(tracer)) ? tracer : NULL; +} + +static int Tracer__init__(pycbc_Tracer_t *self, + PyObject *args, + PyObject *kwargs) +{ + int rv = 0; + PyObject *tracer = PyTuple_GetItem(args, 0); + PyObject *threshold_tracer_capsule = PyTuple_GetItem(args, 1); + PyObject *parent = pycbc_null_or_value(tracer); + lcbtrace_TRACER *child_tracer = + (lcbtrace_TRACER *)(threshold_tracer_capsule + ? PyCapsule_GetPointer( + threshold_tracer_capsule, + "threshold_tracer") + : NULL); + self->tracer = + parent ? pycbc_tracer_new(parent, child_tracer) : child_tracer; + + PYCBC_EXCEPTION_LOG_NOCLEAR; + return rv; +} + +static void Tracer_dtor(pycbc_Tracer_t *self) +{ + pycbc_tracer_flush(self); + Py_TYPE(self)->tp_free((PyObject *)self); +} + static PyGetSetDef pycbc_Tracer_TABLE_getset[] = { + { "parent", + (getter)Tracer_parent, + NULL, + PyDoc_STR("Optional parent tracer to propagate spans to.\n") + }, { NULL } }; @@ -1047,27 +1307,6 @@ static PyMethodDef pycbc_Tracer_TABLE_methods[] = { { NULL, NULL, 0, NULL } }; - -static int -Tracer__init__(pycbc_Tracer_t *self, - PyObject *args, PyObject* kwargs) -{ - int rv = 0; - PyObject* tracer =PyTuple_GetItem(args, 0); - pycbc_Bucket* bucket= (pycbc_Bucket*) PyTuple_GetItem(args,1); - self->parent=(tracer && PyObject_IsTrue(tracer))?tracer:NULL; - self->tracer= pycbc_tracer_new(self->parent, bucket); - PYCBC_EXCEPTION_LOG_NOCLEAR; - - return rv; -} - -static void -Tracer_dtor(pycbc_Tracer_t *self) -{ - Py_TYPE(self)->tp_free((PyObject*)self); -} - PyTypeObject pycbc_TracerType = { PYCBC_POBJ_HEAD_INIT(NULL) 0 @@ -1089,5 +1328,6 @@ int pycbc_TracerType_init(PyObject **ptr) { p->tp_getset = pycbc_Tracer_TABLE_getset; pycbc_Tracer_init_constants(); return PyType_Ready(p); -}; +} + #endif \ No newline at end of file diff --git a/src/fts.c b/src/fts.c index 481b7e6f2..767819df1 100644 --- a/src/fts.c +++ b/src/fts.c @@ -14,7 +14,6 @@ fts_row_callback(lcb_t instance, int ign, const lcb_RESPFTS *resp) PYCBC_CONN_THR_END(bucket); vres = (pycbc_ViewResult *)PyDict_GetItem((PyObject*)mres, Py_None); - if (resp->htresp) { hdrs = resp->htresp->headers; htcode = resp->htresp->htstatus; @@ -48,7 +47,8 @@ pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) lcb_CMDFTS cmd = { 0 }; pycbc_pybuffer buf = { 0 }; PyObject *params_o = NULL; - + pycbc_stack_context_handle context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL( + kwargs, LCBTRACE_OP_REQUEST_ENCODING, self->tracer, "fts_query"); static char *kwlist[] = { "params", NULL }; rv = PyArg_ParseTupleAndKeywords(args, kwargs, "O", kwlist, ¶ms_o); @@ -69,18 +69,7 @@ pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) } mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); - { -#ifdef PYCBC_TRACING - PyObject* view_kwargs = PyDict_New(); - PyDict_SetItemString(view_kwargs, "tracer", (PyObject*)self->tracer); -#else - PyObject* view_kwargs = pycbc_DummyKeywords; -#endif - vres = (pycbc_ViewResult *) PYCBC_TYPE_CTOR(&pycbc_ViewResultType, pycbc_DummyTuple, view_kwargs); -#ifdef PYCBC_TRACING - PYCBC_DECREF(view_kwargs); -#endif - } + vres = pycbc_propagate_view_result(context); pycbc_httpresult_init(&vres->base, mres); vres->rows = PyList_New(0); vres->base.format = PYCBC_FMT_JSON; @@ -90,7 +79,9 @@ pycbc_Bucket__fts_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) cmd.query = buf.buffer; cmd.nquery = buf.length; cmd.handle = &vres->base.u.fts; - rc = lcb_fts_query(self->instance, mres, &cmd); + + PYCBC_TRACECMD_SCOPED( + rc, fts, query, self->instance, *cmd.handle, context, mres, &cmd); PYCBC_PYBUF_RELEASE(&buf); if (rc != LCB_SUCCESS) { diff --git a/src/get.c b/src/get.c index c7dca97c5..bee004998 100644 --- a/src/get.c +++ b/src/get.c @@ -101,51 +101,58 @@ TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, static, int, } } u_cmd.base.exptime = ttl; - PYCBC_TRACE_POP_CONTEXT(context); + context = + context ? pycbc_Context_init(context->tracer, + LCBTRACE_OP_RESPONSE_DECODING, + 0, + context, + LCBTRACE_REF_FOLLOWS_FROM, + "") + : NULL; switch (optype) { - case PYCBC_CMD_GAT: - if (!ttl) { - PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "GAT must have positive TTL"); - rv = -1; - goto GT_DONE; - } - goto GT_GET; + case PYCBC_CMD_GAT: + if (!ttl) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "GAT must have positive TTL"); + rv = -1; + goto GT_DONE; + } + goto GT_GET; - case PYCBC_CMD_LOCK: - if (!ttl) { - PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Lock must have an expiry"); - rv = -1; - goto GT_DONE; - } - lock = 1; - goto GT_GET; + case PYCBC_CMD_LOCK: + if (!ttl) { + PYCBC_EXC_WRAP(PYCBC_EXC_ARGUMENTS, 0, "Lock must have an expiry"); + rv = -1; + goto GT_DONE; + } + lock = 1; + goto GT_GET; - case PYCBC_CMD_GET: + case PYCBC_CMD_GET: GT_GET: - u_cmd.get.lock = lock; - PYCBC_TRACECMD(u_cmd.get, context, cv->mres, curkey, self); - err = lcb_get3(self->instance, cv->mres, &u_cmd.get); - break; - - case PYCBC_CMD_TOUCH: - u_cmd.touch.exptime = ttl; - PYCBC_TRACECMD(u_cmd.touch, context, cv->mres, curkey, self); - err = lcb_touch3(self->instance, cv->mres, &u_cmd.touch); - break; - - case PYCBC_CMD_GETREPLICA: - case PYCBC_CMD_GETREPLICA_INDEX: - case PYCBC_CMD_GETREPLICA_ALL: - u_cmd.rget.strategy = gv->u.replica.strategy; - u_cmd.rget.index = gv->u.replica.index; - PYCBC_TRACECMD(u_cmd.rget,context, cv->mres, curkey, self); - err = lcb_rget3(self->instance, cv->mres, &u_cmd.rget); - break; - default: - err = LCB_ERROR; - abort(); - break; + u_cmd.get.lock = lock; + PYCBC_TRACECMD(u_cmd.get, context, cv->mres, curkey, self); + err = lcb_get3(self->instance, cv->mres, &u_cmd.get); + break; + + case PYCBC_CMD_TOUCH: + u_cmd.touch.exptime = ttl; + PYCBC_TRACECMD(u_cmd.touch, context, cv->mres, curkey, self); + err = lcb_touch3(self->instance, cv->mres, &u_cmd.touch); + break; + + case PYCBC_CMD_GETREPLICA: + case PYCBC_CMD_GETREPLICA_INDEX: + case PYCBC_CMD_GETREPLICA_ALL: + u_cmd.rget.strategy = gv->u.replica.strategy; + u_cmd.rget.index = gv->u.replica.index; + PYCBC_TRACECMD(u_cmd.rget, context, cv->mres, curkey, self); + err = lcb_rget3(self->instance, cv->mres, &u_cmd.rget); + break; + default: + err = LCB_ERROR; + abort(); + break; } if (err != LCB_SUCCESS) { @@ -155,7 +162,7 @@ TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, static, int, } else { rv = 0; } - + GT_DONE: PYCBC_PYBUF_RELEASE(&keybuf); return rv; @@ -377,15 +384,29 @@ sdlookup_common(pycbc_Bucket *self, PyObject *args, PyObject *kwargs, int argopt PyObject * pycbc_Bucket_lookup_in(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { - return sdlookup_common(self, args, kwargs, PYCBC_ARGOPT_SINGLE, - PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, self->tracer, "bucket.lookup_in")); + return sdlookup_common( + self, + args, + kwargs, + PYCBC_ARGOPT_SINGLE, + PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, + LCBTRACE_OP_REQUEST_ENCODING, + self->tracer, + "bucket.lookup_in")); } PyObject * pycbc_Bucket_lookup_in_multi(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { - return sdlookup_common(self, args, kwargs, PYCBC_ARGOPT_MULTI, - PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, self->tracer, "bucket.lookup_in_multi")); + return sdlookup_common( + self, + args, + kwargs, + PYCBC_ARGOPT_MULTI, + PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, + LCBTRACE_OP_REQUEST_ENCODING, + self->tracer, + "bucket.lookup_in_multi")); } #define DECLFUNC(name, operation, mode) \ diff --git a/src/http.c b/src/http.c index 29a5b436d..835217190 100644 --- a/src/http.c +++ b/src/http.c @@ -160,6 +160,7 @@ pycbc_httpresult_complete(pycbc_HttpResult *htres, pycbc_MultiResult *mres, pycbc_asyncresult_invoke(ares, NULL); /* We don't handle the GIL in async mode */ } + PYCBC_TRACE_POP_CONTEXT(htres->tracing_context); } static void @@ -193,8 +194,13 @@ pycbc_http_callbacks_init(lcb_t instance) PyObject * pycbc_Bucket__http_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { - pycbc_stack_context_handle context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, LCBTRACE_OP_REQUEST_ENCODING, - self->tracer, "bucket.http_request"); +#ifdef PYCBC_TRACING + pycbc_stack_context_handle context = + PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, + LCBTRACE_OP_REQUEST_ENCODING, + self->tracer, + "bucket.http_request"); +#endif int rv; int method; int reqtype; @@ -264,7 +270,7 @@ pycbc_Bucket__http_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) } if (!(self->flags & PYCBC_CONN_F_ASYNC)) { - PYCBC_TRACE_WRAP(pycbc_oputil_wait_common, kwargs, self); + PYCBC_TRACE_WRAP_VOID(pycbc_oputil_wait_common, kwargs, self); /* RC=1 (decref on done) */ if (pycbc_multiresult_maybe_raise(mres)) { goto GT_DONE; diff --git a/src/miscops.c b/src/miscops.c index 3514f63a9..2ae2f4eb9 100644 --- a/src/miscops.c +++ b/src/miscops.c @@ -273,13 +273,22 @@ TRACED_FUNCTION_WRAPPER(endure_multi, LCBTRACE_OP_REQUEST_ENCODING, Bucket) } -#define DECLFUNC(name, operation, mode) \ - PyObject *pycbc_Bucket_##name(pycbc_Bucket *self, \ - PyObject *args, PyObject *kwargs) { \ - PyObject* result;\ - PYCBC_TRACE_WRAP_TOPLEVEL(result,#operation,keyop_common, self->tracer, self, args, kwargs, operation, mode); \ - return result;\ -} +#define DECLFUNC(name, operation, mode) \ + PyObject *pycbc_Bucket_##name( \ + pycbc_Bucket *self, PyObject *args, PyObject *kwargs) \ + { \ + PyObject *result; \ + PYCBC_TRACE_WRAP_TOPLEVEL(result, \ + "Bucket." #name, \ + keyop_common, \ + self->tracer, \ + self, \ + args, \ + kwargs, \ + operation, \ + mode); \ + return result; \ + } DECLFUNC(remove, PYCBC_CMD_DELETE, PYCBC_ARGOPT_SINGLE) DECLFUNC(unlock, PYCBC_CMD_UNLOCK, PYCBC_ARGOPT_SINGLE) @@ -340,13 +349,11 @@ TRACED_FUNCTION_WRAPPER(_stats,LCBTRACE_OP_REQUEST_ENCODING,Bucket) if (is_keystats && PyObject_IsTrue(is_keystats)) { cmd.cmdflags |= LCB_CMDSTATS_F_KV; } - PYCBC_TRACECMD(cmd, context, cv.mres, PYCBC_DEFAULT_TRACING_KEY, self); err = lcb_stats3(self->instance, cv.mres, &cmd); Py_XDECREF(newkey); } } else { - PYCBC_TRACECMD(cmd, context, cv.mres, PYCBC_DEFAULT_TRACING_KEY, self); err = lcb_stats3(self->instance, cv.mres, &cmd); } @@ -381,7 +388,7 @@ TRACED_FUNCTION_WRAPPER(_ping,LCBTRACE_OP_REQUEST_ENCODING,Bucket) if (rv < 0) { return NULL; } - PYCBC_TRACECMD(cmd, context, cv.mres, PYCBC_DEFAULT_TRACING_KEY, self); + lcb_sched_enter(self->instance); err = lcb_ping3(self->instance, cv.mres, &cmd); @@ -415,7 +422,6 @@ TRACED_FUNCTION_WRAPPER(_diagnostics,LCBTRACE_OP_REQUEST_ENCODING,Bucket) return NULL; } - PYCBC_TRACECMD(cmd, context, cv.mres, PYCBC_DEFAULT_TRACING_KEY, self); lcb_sched_enter(self->instance); PYCBC_CONN_THR_BEGIN(self); err = lcb_diag(self->instance, cv.mres, &cmd); diff --git a/src/multiresult.c b/src/multiresult.c index c81e35ac2..a0c0d068c 100644 --- a/src/multiresult.c +++ b/src/multiresult.c @@ -357,12 +357,8 @@ pycbc_multiresult_get_result(pycbc_MultiResult *self) Py_INCREF(res); return res; } - PYCBC_DEBUG_LOG("\n hit multiresult_get_result:["); - pycbc_print_repr((PyObject*)self); - PYCBC_DEBUG_LOG("]\n"); - PYCBC_DEBUG_LOG("\ngot results:["); - pycbc_print_repr(mres); - PYCBC_DEBUG_LOG("]\n"); + PYCBC_DEBUG_PYFORMAT("hit multiresult_get_result:[%R]", self); + PYCBC_DEBUG_PYFORMAT("got results:[%R]", mres); rv = PyDict_Next(mres, &dictpos, &key, &value); if (!rv) { PYCBC_EXC_WRAP(PYCBC_EXC_INTERNAL, 0, "No objects in MultiResult"); diff --git a/src/n1ql.c b/src/n1ql.c index f93132968..5b47f1da8 100644 --- a/src/n1ql.c +++ b/src/n1ql.c @@ -37,15 +37,22 @@ n1ql_row_callback(lcb_t instance, int ign, const lcb_RESPN1QL *resp) } } -static PyObject * -query_common(pycbc_Bucket *self, const char *params, unsigned nparams, - const char *host, int is_prepared, int is_xbucket) { +TRACED_FUNCTION(LCBTRACE_OP_REQUEST_ENCODING, + static, + PyObject *, + query_common, + pycbc_Bucket *self, + const char *params, + unsigned nparams, + const char *host, + int is_prepared, + int is_xbucket) +{ PyObject *ret = NULL; pycbc_MultiResult *mres; pycbc_ViewResult *vres; lcb_error_t rc; lcb_CMDN1QL cmd = { 0 }; - if (-1 == pycbc_oputil_conn_lock(self)) { return NULL; } @@ -56,23 +63,7 @@ query_common(pycbc_Bucket *self, const char *params, unsigned nparams, } mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); - { - PyObject* kwargs = pycbc_DummyKeywords; -#ifdef PYCBC_TRACING - if (self->tracer) { - kwargs= PyDict_New(); - PyDict_SetItemString(kwargs, "tracer", (PyObject*)self->tracer); - } -#endif - vres = (pycbc_ViewResult *) PyObject_CallFunction((PyObject*)&pycbc_ViewResultType, "OO", Py_None, kwargs); - if (!vres) - { - PYCBC_DEBUG_LOG("null vres"); - } - - PYCBC_EXCEPTION_LOG_NOCLEAR; - PYCBC_DEBUG_LOG("got vres: %p", vres); - } + vres = pycbc_propagate_view_result(context); pycbc_httpresult_init(&vres->base, mres); vres->rows = PyList_New(0); vres->base.format = PYCBC_FMT_JSON; @@ -97,7 +88,8 @@ query_common(pycbc_Bucket *self, const char *params, unsigned nparams, cmd.host = host; } - rc = lcb_n1ql_query(self->instance, mres, &cmd); + PYCBC_TRACECMD_SCOPED( + rc, n1ql, query, self->instance, *cmd.handle, context, mres, &cmd); if (rc != LCB_SUCCESS) { PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, rc, "Couldn't schedule n1ql query"); @@ -113,13 +105,13 @@ query_common(pycbc_Bucket *self, const char *params, unsigned nparams, return ret; } - PyObject * pycbc_Bucket__n1ql_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) { const char *params = NULL; pycbc_strlen_t nparams = 0; int prepared = 0, cross_bucket = 0; + PyObject *result = NULL; static char *kwlist[] = { "params", "prepare", "cross_bucket", NULL }; if (!PyArg_ParseTupleAndKeywords( args, kwargs, "s#|ii", kwlist, ¶ms, @@ -128,7 +120,17 @@ pycbc_Bucket__n1ql_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) PYCBC_EXCTHROW_ARGS(); return NULL; } - return query_common(self, params, nparams, NULL, prepared, cross_bucket); + PYCBC_TRACE_WRAP_TOPLEVEL(result, + LCBTRACE_OP_REQUEST_ENCODING, + query_common, + self->tracer, + self, + params, + nparams, + NULL, + prepared, + cross_bucket); + return result; } PyObject * @@ -138,10 +140,21 @@ pycbc_Bucket__cbas_query(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) const char *params = NULL; pycbc_strlen_t nparams = 0; static char *kwlist[] = { "params", "host", NULL }; + PyObject *result = NULL; if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#s", kwlist, ¶ms, &nparams, &host)) { PYCBC_EXCTHROW_ARGS(); return NULL; } - return query_common(self, params, nparams, host, 0, 0); + PYCBC_TRACE_WRAP_TOPLEVEL(result, + LCBTRACE_OP_REQUEST_ENCODING, + query_common, + self->tracer, + self, + params, + nparams, + host, + 0, + 0); + return result; } diff --git a/src/oputil.c b/src/oputil.c index 6e3e1965b..0b88c994e 100644 --- a/src/oputil.c +++ b/src/oputil.c @@ -62,7 +62,7 @@ pycbc_common_vars_wait, struct pycbc_common_vars *cv, pycbc_Bucket *self) Py_INCREF(Py_None); return 0; } - PYCBC_TRACE_WRAP(pycbc_oputil_wait_common, NULL, self); + PYCBC_TRACE_WRAP_VOID(pycbc_oputil_wait_common, NULL, self); if (!pycbc_assert(self->nremaining == 0)) { fprintf(stderr, "Remaining count != 0. Adjusting"); @@ -80,7 +80,6 @@ pycbc_common_vars_wait, struct pycbc_common_vars *cv, pycbc_Bucket *self) if (cv->ret == NULL) { return -1; } - return 0; } @@ -397,7 +396,18 @@ pycbc_oputil_iter_multi(pycbc_Bucket *self, } #ifdef PYCBC_TRACING - rv = PYCBC_TRACE_WRAP_EXPLICIT_NAMED((handler).cb, (handler).name, (handler).category, NULL, self, cv, optype, arg_k, v, options, itm, arg); + rv = PYCBC_TRACE_WRAP_EXPLICIT_NAMED((handler).cb, + (handler).name, + (handler).category, + NULL, + self, + cv, + optype, + arg_k, + v, + options, + itm, + arg); #else rv = PYCBC_TRACE_WRAP_EXPLICIT_NAMED(handler, "", "", NULL, self, cv, optype, arg_k, v, options, itm, arg); #endif diff --git a/src/pipeline.c b/src/pipeline.c index 0cce2c129..76a38a347 100644 --- a/src/pipeline.c +++ b/src/pipeline.c @@ -39,8 +39,13 @@ pycbc_Bucket__start_pipeline(pycbc_Bucket *self) PyObject * pycbc_Bucket__end_pipeline(pycbc_Bucket *self) { - pycbc_stack_context_handle context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(NULL, LCBTRACE_OP_RESPONSE_DECODING, - self->tracer, "bucket.end_pipeline"); +#ifdef PYCBC_TRACING + pycbc_stack_context_handle context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL( + NULL, + LCBTRACE_OP_RESPONSE_DECODING, + self->tracer, + "bucket.end_pipeline"); +#endif PyObject *rv; int ii; @@ -56,7 +61,7 @@ pycbc_Bucket__end_pipeline(pycbc_Bucket *self) goto GT_DONE; } - PYCBC_TRACE_WRAP(pycbc_oputil_wait_common, NULL, self); + PYCBC_TRACE_WRAP_VOID(pycbc_oputil_wait_common, NULL, self); pycbc_assert(self->nremaining == 0); diff --git a/src/pycbc.h b/src/pycbc.h index 4e01277d4..42f0dfd80 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -19,10 +19,20 @@ * This file contains the base header for the Python Couchbase Client * @author Mark Nunberg */ + #ifdef PYCBC_DEBUG #define PYCBC_DEBUG_LOG_RAW(...) printf(__VA_ARGS__); +void pycbc_log_pyformat(const char *file, int line, const char *format, ...); +void pycbc_exception_log(const char *file, int line, int clear); +#define PYCBC_DEBUG_PYFORMAT(FORMAT, ...) \ + pycbc_log_pyformat(__FILE__, __LINE__, FORMAT, __VA_ARGS__, NULL) +#define PYCBC_EXCEPTION_LOG_NOCLEAR pycbc_exception_log(__FILE__, __LINE__, 0); +#define PYCBC_EXCEPTION_LOG pycbc_exception_log(__FILE__, __LINE__, 1); #else #define PYCBC_DEBUG_LOG_RAW(...) +#define PYCBC_DEBUG_PYFORMAT(...) +#define PYCBC_EXCEPTION_LOG_NOCLEAR +#define PYCBC_EXCEPTION_LOG PyErr_Clear(); #endif #define PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(FILE,LINE,POSTFIX,...)\ @@ -30,17 +40,37 @@ PYCBC_DEBUG_LOG_RAW(__VA_ARGS__)\ PYCBC_DEBUG_LOG_RAW(POSTFIX) #define PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(FILE,LINE,...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(FILE,LINE,"\n", __VA_ARGS__) -#define PYCBC_DEBUG_LOG_WITHOUT_NEWLINE(...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(__FILE__,__LINE__, "", __VA_ARGS__) #define PYCBC_DEBUG_LOG(...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(__FILE__,__LINE__,__VA_ARGS__) #ifdef PYCBC_DEBUG -#define PYCBC_DECREF(X) pycbc_assert(Py_REFCNT(X)>0);PYCBC_DEBUG_LOG("%p has count of %li", X, Py_REFCNT(X)); Py_DECREF(X); -#define PYCBC_XDECREF(X) pycbc_assert(!X || Py_REFCNT(X)>0);PYCBC_DEBUG_LOG("%p has count of %li", X, X?Py_REFCNT(X):0); Py_XDECREF(X) +#define LOG_REFOP(Y, OP) \ + { \ + pycbc_assert((Y) && Py_REFCNT(Y) > 0); \ + PYCBC_DEBUG_PYFORMAT("%p has count of %ld: *** %R ***: OP: %s", \ + Y, \ + (long)(((PyObject *)(Y)) ? Py_REFCNT(Y) : 0), \ + ((PyObject *)(Y) ? (PyObject *)(Y) : Py_None), \ + #OP); \ + Py_##OP((PyObject *)Y); \ + } +#define LOG_REFOPX(Y, OP) \ + { \ + pycbc_assert(!(Y) || Py_REFCNT(Y) > 0); \ + PYCBC_DEBUG_PYFORMAT("%p has count of %ld: *** %R ***: OP: %s", \ + Y, \ + (long)(((PyObject *)(Y)) ? Py_REFCNT(Y) : 0), \ + ((PyObject *)(Y) ? (PyObject *)(Y) : Py_None), \ + #OP); \ + Py_##X##OP((PyObject *)(Y)); \ + } #else -#define PYCBC_DECREF(X) Py_DECREF(X); -#define PYCBC_XDECREF(X) Py_XDECREF(X); +#define LOG_REFOP(Y, OP) Py_##OP(Y) +#define LOG_REFOPX(Y, OP) Py_X##OP(Y) #endif - +#define PYCBC_DECREF(X) LOG_REFOP(X, DECREF) +#define PYCBC_XDECREF(X) LOG_REFOPX(X, DECREF) +#define PYCBC_INCREF(X) LOG_REFOP(X, INCREF) +#define PYCBC_XINCREF(X) LOG_REFOPX(X, INCREF) #include <Python.h> #include <libcouchbase/couchbase.h> @@ -50,13 +80,6 @@ #include <libcouchbase/cbft.h> #include <libcouchbase/ixmgmt.h> -#undef PYCBC_DEBUG -#ifdef PYCBC_DEBUG -#define PYCBC_DEBUG_LOG_RAW(...) printf(__VA_ARGS__); -#else -#define PYCBC_DEBUG_LOG_RAW(...) -#endif - // TODO: fix in libcouchbase #ifdef _WIN32 //#if _MSC_VER >= 1600 @@ -81,6 +104,21 @@ #define PYCBC_REFCNT_ASSERT pycbc_assert +void pycbc_fetch_error(PyObject *err[3]); +void pycbc_store_error(PyObject *err[3]); + +#define PYCBC_STASH_EXCEPTION(OP) \ + { \ + PyObject *pycbc_err[3] = {0}; \ + pycbc_store_error(pycbc_err); \ + { \ + OP; \ + } \ + if (pycbc_err[0] || pycbc_err[1] || pycbc_err[2]) { \ + pycbc_fetch_error(pycbc_err); \ + } \ + }; + /** * See http://docs.python.org/2/c-api/arg.html for an explanation of this * definition. @@ -324,19 +362,13 @@ typedef struct { } pycbc_dur_params; void pycbc_dict_add_text_kv(PyObject *dict, const char *key, const char *value); -void pycbc_print_string( PyObject *curkey); -void pycbc_print_repr( PyObject *pobj); -void pycbc_exception_log(const char* file, int line, int clear); -#ifdef PYCBC_DEBUG -#define PYCBC_EXCEPTION_LOG_NOCLEAR pycbc_exception_log(__FILE__,__LINE__,0); -#define PYCBC_EXCEPTION_LOG pycbc_exception_log(__FILE__,__LINE__,1); -#else -#define PYCBC_EXCEPTION_LOG_NOCLEAR -#define PYCBC_EXCEPTION_LOG PyErr_Clear(); -#endif struct pycbc_Tracer; +#ifndef PYCBC_TRACING_ENABLE +#define PYCBC_TRACING_ENABLE +#endif + #ifdef LCB_TRACING #ifdef PYCBC_TRACING_ENABLE #define PYCBC_TRACING @@ -421,10 +453,6 @@ typedef struct { typedef struct pycbc_Tracer { PyObject_HEAD lcbtrace_TRACER *tracer; - PyObject* parent; - lcb_t *instance; - - int init_called:1; } pycbc_Tracer_t; static PyTypeObject SpanType = { @@ -439,16 +467,22 @@ typedef struct { #endif } pycbc_Span_t; +typedef struct pycbc_context_children { + struct pycbc_stack_context *value; + struct pycbc_context_children *next; +} pycbc_context_children; -typedef struct -{ +typedef struct pycbc_stack_context { #ifdef PYCBC_TRACING pycbc_Tracer_t* tracer; lcbtrace_SPAN* span; + struct pycbc_stack_context *parent; + size_t ref_count; + pycbc_context_children *children; #endif } pycbc_stack_context; -typedef pycbc_stack_context* pycbc_stack_context_handle; +typedef struct pycbc_stack_context *pycbc_stack_context_handle; #else typedef void* pycbc_stack_context_handle; @@ -456,75 +490,160 @@ typedef void* pycbc_stack_context_handle; #endif #ifdef PYCBC_TRACING - int pycbc_is_async_or_pipeline(const pycbc_Bucket *self); +typedef struct pycbc_Result pycbc_Result; +typedef struct pycbc_MultiResult_st pycbc_MultiResult; + +pycbc_stack_context_handle pycbc_Result_start_context( + pycbc_stack_context *parent_context, + PyObject *hkey, + char *component, + char *operation); +void pycbc_Result_propagate_context(pycbc_Result *res, + pycbc_stack_context_handle parent_context); +void pycbc_MultiResult_init_context(pycbc_MultiResult *self, + PyObject *curkey, + pycbc_stack_context_handle context, + pycbc_Bucket *bucket); +pycbc_stack_context_handle pycbc_MultiResult_extract_context( + pycbc_MultiResult *self, PyObject *hkey, pycbc_Result **res); +#define PYCBC_MULTIRESULT_EXTRACT_CONTEXT(MRES, KEY, RES) \ + pycbc_MultiResult_extract_context(MRES, KEY, RES) +pycbc_stack_context_handle pycbc_Result_extract_context( + const pycbc_Result *res); +#define PYCBC_RESULT_EXTRACT_CONTEXT(RESULT) \ + pycbc_Result_extract_context(RESULT) + +pycbc_stack_context_handle pycbc_Context_init( + pycbc_Tracer_t *py_tracer, + const char *operation, + lcb_uint64_t now, + pycbc_stack_context_handle ref_context, + lcbtrace_REF_TYPE ref_type, + const char *component); +pycbc_stack_context_handle pycbc_Context_check( + pycbc_stack_context_handle CONTEXT, const char *file, int line); +#define PYCBC_CHECK_CONTEXT(CONTEXT) \ + pycbc_Context_check(CONTEXT, __FILE__, __LINE__) +pycbc_stack_context_handle pycbc_Context_deref( + pycbc_stack_context_handle context, int should_be_final); +pycbc_stack_context_handle pycbc_Context_deref_debug( + pycbc_stack_context_handle context, + int should_be_final, + const char *file, + int line); +#define PYCBC_CONTEXT_DEREF(CONTEXT, SHOULD_BE_FINAL) \ + pycbc_Context_deref_debug(CONTEXT, SHOULD_BE_FINAL, __FILE__, __LINE__); + +pycbc_stack_context_handle pycbc_Tracer_start_span( + pycbc_Tracer_t *py_tracer, + PyObject *kwargs, + const char *operation, + lcb_uint64_t now, + pycbc_stack_context_handle context, + lcbtrace_REF_TYPE ref_type, + const char *component); -pycbc_stack_context_handle pycbc_Tracer_span_start(pycbc_Tracer_t *tracer, PyObject *kwargs, const char *operation, - lcb_uint64_t now, pycbc_stack_context_handle context, - lcbtrace_REF_TYPE ref_type, const char* component); -PyObject* pycbc_Context_finish(pycbc_stack_context_handle context ); void pycbc_Tracer_propagate(pycbc_Tracer_t *tracer); - - -void pycbc_init_traced_result(pycbc_Bucket *self, PyObject* mres_dict, PyObject *curkey, - pycbc_stack_context_handle context); - -#define PYCBC_GET_STACK_CONTEXT(KWARGS,CATEGORY,TRACER, PARENT_CONTEXT) pycbc_Tracer_span_start(TRACER, KWARGS, CATEGORY, 0, PARENT_CONTEXT, LCBTRACE_REF_CHILD_OF ) -#define PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(KWARGS,CATEGORY,TRACER, NAME) pycbc_Tracer_span_start(TRACER, KWARGS, CATEGORY, 0, NULL, LCBTRACE_REF_NONE, NAME ) - -extern PyObject* pycbc_default_key; -#define PYCBC_DEFAULT_TRACING_KEY pycbc_default_key - -pycbc_stack_context_handle pycbc_check_context(pycbc_stack_context_handle CONTEXT, const char* file, int line); - - -#define PYCBC_CHECK_CONTEXT(CONTEXT) pycbc_check_context(CONTEXT,__FILE__,__LINE__) -#define PYCBC_TRACECMD_PURE(CMD,CONTEXT) {\ - if (PYCBC_CHECK_CONTEXT(CONTEXT))\ - { \ - PYCBC_DEBUG_LOG("setting trace span on %.*s\n",\ - (int)(CMD).key.contig.nbytes,(const char*)(CMD).key.contig.bytes);\ - LCB_CMD_SET_TRACESPAN(&(CMD),(CONTEXT)->span);\ - } else {PYCBC_EXCEPTION_LOG_NOCLEAR;}\ -}; - -#define PYCBC_TRACECMD(CMD,CONTEXT,MRES,CURKEY,BUCKET) PYCBC_TRACECMD_PURE(CMD,CONTEXT); \ - pycbc_init_traced_result(BUCKET, pycbc_multiresult_dict(MRES), CURKEY, context); - -#define PYCBC_TRACE_POP_CONTEXT(CONTEXT) pycbc_Context_finish(CONTEXT); - - -#define PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(RV, CATEGORY, NAME, TRACER, STRINGNAME, ...) \ -{\ - int should_trace = 1 || !pycbc_is_async_or_pipeline(self);\ - pycbc_stack_context_handle sub_context = NULL;\ - if (should_trace) { sub_context = PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(kwargs, CATEGORY, TRACER, STRINGNAME); };\ - RV = NAME(__VA_ARGS__, sub_context);\ - if ( 0 && should_trace && sub_context && !pycbc_is_async_or_pipeline(self)) {\ - pycbc_Context_finish(sub_context);\ - }\ - PYCBC_EXCEPTION_LOG_NOCLEAR;\ -}; - - - -#define PYCBC_TRACE_WRAP_EXPLICIT_NAMED(NAME,COMPONENTNAME,CATEGORY,KWARGS,...) NAME(__VA_ARGS__, pycbc_Tracer_span_start(self->tracer,KWARGS,CATEGORY,0, context, LCBTRACE_REF_CHILD_OF, COMPONENTNAME)) -#define PYCBC_TRACE_WRAP(NAME,KWARGS,...) PYCBC_TRACE_WRAP_EXPLICIT_NAMED(NAME, #NAME, NAME##_category(), KWARGS, __VA_ARGS__) +void pycbc_Tracer_set_child(pycbc_Tracer_t *pTracer, lcbtrace_TRACER *pTRACER); + +#define PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(KWARGS, CATEGORY, TRACER, NAME) \ + pycbc_Tracer_start_span( \ + TRACER, KWARGS, CATEGORY, 0, NULL, LCBTRACE_REF_NONE, NAME) + +extern PyObject *pycbc_default_key; + +#define PYCBC_TRACECMD_PURE(CMD, CONTEXT) \ + { \ + if (PYCBC_CHECK_CONTEXT(CONTEXT)) { \ + PYCBC_DEBUG_LOG("setting trace span on %.*s\n", \ + (int)(CMD).key.contig.nbytes, \ + (const char *)(CMD).key.contig.bytes); \ + ++(CONTEXT)->ref_count; \ + LCB_CMD_SET_TRACESPAN(&(CMD), (CONTEXT)->span); \ + } else { \ + PYCBC_EXCEPTION_LOG_NOCLEAR; \ + } \ + }; + +#define PYCBC_TRACECMD(CMD, CONTEXT, MRES, CURKEY, BUCKET) \ + PYCBC_TRACECMD_PURE(CMD, CONTEXT); \ + pycbc_MultiResult_init_context(MRES, CURKEY, CONTEXT, BUCKET); + +#define PYCBC_TRACE_POP_CONTEXT(context) PYCBC_CONTEXT_DEREF(context, 1); +#define PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME( \ + RV, CATEGORY, NAME, TRACER, STRINGNAME, ...) \ + { \ + pycbc_stack_context_handle sub_context = \ + PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL( \ + kwargs, CATEGORY, TRACER, STRINGNAME); \ + RV = NAME(__VA_ARGS__, sub_context); \ + PYCBC_CONTEXT_DEREF(sub_context, !pycbc_is_async_or_pipeline(self)); \ + PYCBC_EXCEPTION_LOG_NOCLEAR; \ + }; + +int pycbc_wrap_and_pop(pycbc_stack_context_handle *context, int result); + +#define PYCBC_TRACE_WRAP_EXPLICIT_NAMED( \ + NAME, COMPONENTNAME, CATEGORY, KWARGS, ...) \ + pycbc_wrap_and_pop( \ + &context, \ + NAME(__VA_ARGS__, \ + context = pycbc_Tracer_start_span(self->tracer, \ + KWARGS, \ + CATEGORY, \ + 0, \ + context, \ + LCBTRACE_REF_CHILD_OF, \ + COMPONENTNAME))) +#define PYCBC_TRACE_WRAP(NAME, KWARGS, ...) \ + PYCBC_TRACE_WRAP_EXPLICIT_NAMED( \ + NAME, #NAME, NAME##_category(), KWARGS, __VA_ARGS__) + +#define PYCBC_TRACE_WRAP_EXPLICIT_NAMED_VOID( \ + NAME, COMPONENTNAME, CATEGORY, KWARGS, ...) \ + { \ + NAME(__VA_ARGS__, \ + context = pycbc_Tracer_start_span(self->tracer, \ + KWARGS, \ + CATEGORY, \ + 0, \ + context, \ + LCBTRACE_REF_CHILD_OF, \ + COMPONENTNAME)); \ + pycbc_wrap_and_pop(&context, 0); \ + } +#define PYCBC_TRACE_WRAP_VOID(NAME, KWARGS, ...) \ + PYCBC_TRACE_WRAP_EXPLICIT_NAMED_VOID( \ + NAME, #NAME, NAME##_category(), KWARGS, __VA_ARGS__) #define PYCBC_TRACE_WRAP_TOPLEVEL(RV, CATEGORY, NAME, TRACER, ...)\ PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(RV, CATEGORY, NAME, TRACER, #NAME, __VA_ARGS__); +#define PYCBC_TRACECMD_SCOPED( \ + RV, SCOPE, COMMAND, INSTANCE, HANDLE, CONTEXT, ...) \ + if (PYCBC_CHECK_CONTEXT(CONTEXT)) { \ + lcb_##SCOPE##_set_parent_span(INSTANCE, HANDLE, CONTEXT->span); \ + } \ + RV = lcb_##SCOPE##_##COMMAND(INSTANCE, __VA_ARGS__) #else +#define PYCBC_CONTEXT_DEREF(X, Y) #define PYCBC_GET_STACK_CONTEXT(CATEGORY,TRACER, PARENT_CONTEXT) NULL +#define PYCBC_RESULT_EXTRACT_CONTEXT(RESULT) NULL +#define PYCBC_MULTIRESULT_EXTRACT_CONTEXT(MRES, KEY, RES) NULL #define PYCBC_TRACECMD(...) #define PYCBC_TRACECMD_PURE(...) -#define PYCBC_TRACE_POP_CONTEXT(X) +#define PYCBC_TRACE_POP_CONTEXT(X) NULL +#define PYCBC_FINISH_IF_COMPLETE(SELF, CONTEXT) #define PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(RV, CATEGORY, NAME, TRACER, STRINGNAME, ...) { RV = NAME(__VA_ARGS__, NULL); } #define PYCBC_TRACE_WRAP_EXPLICIT_NAMED(NAME,COMPONENTNAME,CATEGORY,KWARGS,...) NAME(__VA_ARGS__, NULL) #define PYCBC_TRACE_WRAP_TOPLEVEL(RV,CATEGORY,NAME,TRACER,...) { RV=NAME(__VA_ARGS__,NULL); } #define PYCBC_TRACE_WRAP(NAME,KWARGS,...) NAME(__VA_ARGS__, NULL) #define PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(...) NULL +#define PYCBC_TRACECMD_SCOPED( \ + RV, SCOPE, COMMAND, INSTANCE, HANDLE, CONTEXT, ...) \ + RV = lcb_##SCOPE##_##COMMAND(INSTANCE, __VA_ARGS__) #endif @@ -537,19 +656,31 @@ pycbc_stack_context_handle pycbc_check_context(pycbc_stack_context_handle CONTEX const char* NAME##_category(void);\ QUALIFIERS RTYPE NAME(__VA_ARGS__, pycbc_stack_context_handle context); -#define TRACED_FUNCTION_WRAPPER(name, CATEGORY, CLASS) \ -PyObject *pycbc_##CLASS##_##name##_real(pycbc_##CLASS *self, PyObject *args, PyObject *kwargs,\ - pycbc_stack_context_handle context);\ -PyObject *pycbc_##CLASS##_##name(pycbc_##CLASS *self, \ - PyObject *args, PyObject *kwargs) {\ - PyObject* result;\ - PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(result, CATEGORY, pycbc_##CLASS##_##name##_real, self->tracer, #CLASS "." #name, self, args, kwargs);\ - return result;\ -}\ -PyObject *pycbc_##CLASS##_##name##_real(pycbc_##CLASS *self, PyObject *args, PyObject *kwargs,\ - pycbc_stack_context_handle context) - - +#define TRACED_FUNCTION_WRAPPER(name, CATEGORY, CLASS) \ + PyObject *pycbc_##CLASS##_##name##_real( \ + pycbc_##CLASS *self, \ + PyObject *args, \ + PyObject *kwargs, \ + pycbc_stack_context_handle context); \ + PyObject *pycbc_##CLASS##_##name( \ + pycbc_##CLASS *self, PyObject *args, PyObject *kwargs) \ + { \ + PyObject *result; \ + PYCBC_TRACE_WRAP_TOPLEVEL_WITHNAME(result, \ + CATEGORY, \ + pycbc_##CLASS##_##name##_real, \ + self->tracer, \ + #CLASS "." #name, \ + self, \ + args, \ + kwargs); \ + return result; \ + } \ + PyObject *pycbc_##CLASS##_##name##_real( \ + pycbc_##CLASS *self, \ + PyObject *args, \ + PyObject *kwargs, \ + pycbc_stack_context_handle context) /***************** * Result Objects. @@ -562,10 +693,9 @@ PyObject *pycbc_##CLASS##_##name##_real(pycbc_##CLASS *self, PyObject *args, PyO */ #ifdef PYCBC_TRACING -#define TRACING_DATA \ - pycbc_stack_context_handle tracing_context;\ - int is_tracing_stub;\ - PyObject* tracing_output; +#define TRACING_DATA \ + pycbc_stack_context_handle tracing_context; \ + char is_tracing_stub; #else #define TRACING_DATA #endif @@ -581,7 +711,7 @@ PyObject *pycbc_##CLASS##_##name##_real(pycbc_##CLASS *self, PyObject *args, PyO lcb_uint64_t cas; \ PyObject *mutinfo; -typedef struct { +typedef struct pycbc_Result { pycbc_Result_HEAD } pycbc_Result; @@ -648,8 +778,7 @@ typedef struct { long rows_per_call; char has_parse_error; #ifdef PYCBC_TRACING - pycbc_Tracer_t* py_tracer; - int own_tracer; + PyObject *context_capsule; #endif } pycbc_ViewResult; @@ -1020,7 +1149,7 @@ PYCBC_TRACING_TYPES(PYCBC_TYPE_INIT_DECL); /** * Calls the type's constructor with no arguments: */ -//#define PYCBC_TYPE_CTOR(t,...) PyObject_CallFunction((PyObject*)t, NULL, NULL) + #define PYCBC_TYPE_CTOR_1_args(t) PyObject_CallFunction((PyObject*)t, 0) #define PYCBC_TYPE_CTOR_2_args(t, args) PyObject_CallFunction((PyObject*)t, "O", args) #define PYCBC_TYPE_CTOR_3_args(t, args, kwargs) PyObject_CallFunction((PyObject*)t, "OO", args, kwargs) @@ -1056,6 +1185,8 @@ pycbc_ObserveInfo * pycbc_observeinfo_new(pycbc_Bucket *parent); */ int pycbc_httpresult_ok(pycbc_HttpResult *self); +pycbc_ViewResult *pycbc_propagate_view_result( + pycbc_stack_context_handle context); /** * Append data to the HTTP result diff --git a/src/result.c b/src/result.c index 65ded1ea7..c19deb953 100644 --- a/src/result.c +++ b/src/result.c @@ -65,10 +65,22 @@ static struct PyMemberDef Result_TABLE_members[] = { READONLY, PyDoc_STR("Key for the operation") }, - +#ifdef PYCBC_TRACING + { "is_tracing_stub", T_BOOL, offsetof(pycbc_Result, is_tracing_stub), + READONLY, + PyDoc_STR("Whether this is a tracing stub") + }, +#endif { NULL } }; +#ifdef PYCBC_TRACING +PyObject *Result_tracing_context(pycbc_Result *self, void *closure) +{ + return PyLong_FromVoidPtr((void *)self->tracing_context); +} +#endif + static struct PyGetSetDef Result_TABLE_getset[] = { { "success", (getter)Result_success, @@ -87,6 +99,11 @@ static struct PyGetSetDef Result_TABLE_getset[] = { { "cas", (getter)Result_int0, NULL, NULL }, +#ifdef PYCBC_TRACING + { "tracing_context", (getter)Result_tracing_context, + NULL, NULL + }, +#endif { NULL } }; @@ -115,8 +132,8 @@ static int pycbc_Result_init(PyObject *self_raw, PyObject *args, PyObject *kwargs) { - pycbc_Result* self = (pycbc_Result*) self_raw; #ifdef PYCBC_TRACING + pycbc_Result* self = (pycbc_Result*)self_raw; self->is_tracing_stub = 0; self->tracing_context = NULL; #endif diff --git a/src/views.c b/src/views.c index 9db17ad64..333abc233 100644 --- a/src/views.c +++ b/src/views.c @@ -218,8 +218,18 @@ get_viewpath_str(pycbc_Bucket *self, viewpath_st *vp, PyObject *options) return 0; } -PyObject * -pycbc_Bucket__view_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) +void *pycbc_viewresult_get_context(const pycbc_ViewResult *self) +{ +#ifdef PYCBC_TRACING + return self->context_capsule ? PyCapsule_GetPointer(self->context_capsule, + "tracing_context") + : NULL; +#else + return NULL; +#endif +} + +TRACED_FUNCTION_WRAPPER(_view_request, LCBTRACE_OP_REQUEST_ENCODING, Bucket) { int rv; PyObject *ret = NULL; @@ -252,7 +262,8 @@ pycbc_Bucket__view_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) } mres = (pycbc_MultiResult *)pycbc_multiresult_new(self); - vres = (pycbc_ViewResult *)PYCBC_TYPE_CTOR(&pycbc_ViewResultType); + vres = pycbc_propagate_view_result( + context); vres->base.htype = PYCBC_HTTP_HVIEW; pycbc_httpresult_init(&vres->base, mres); @@ -276,9 +287,14 @@ pycbc_Bucket__view_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) vres->rows = PyList_New(0); vres->base.format = PYCBC_FMT_JSON; - - rc = lcb_view_query(self->instance, mres, &vcmd); - + PYCBC_TRACECMD_SCOPED(rc, + view, + query, + self->instance, + *vcmd.handle, + context, + mres, + &vcmd); if (rc != LCB_SUCCESS) { PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, rc, "Couldn't schedule view"); goto GT_DONE; @@ -286,7 +302,6 @@ pycbc_Bucket__view_request(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) ret = (PyObject*)mres; mres = NULL; /* Avoid GT_DONE decref */ - GT_DONE: Py_XDECREF(mres); Py_XDECREF(vp.bk); @@ -320,7 +335,10 @@ ViewResult_fetch(pycbc_ViewResult *self, PyObject *args) } if (!self->base.done) { - pycbc_oputil_wait_common(bucket, PYCBC_TRACE_GET_STACK_CONTEXT_TOPLEVEL(NULL, LCBTRACE_OP_RESPONSE_DECODING, self->py_tracer, "viewresult.fetch")); + pycbc_oputil_wait_common( + bucket, + pycbc_viewresult_get_context( + self)); } if (pycbc_multiresult_maybe_raise(mres)) { @@ -334,17 +352,45 @@ ViewResult_fetch(pycbc_ViewResult *self, PyObject *args) pycbc_oputil_conn_unlock(bucket); return ret; } + +pycbc_ViewResult *pycbc_propagate_view_result( + pycbc_stack_context_handle context) +{ + pycbc_ViewResult *vres; + PyObject *kwargs = pycbc_DummyKeywords; +#ifdef PYCBC_TRACING + if (PYCBC_CHECK_CONTEXT(context)) { + kwargs = PyDict_New(); + ++context->ref_count; + PyDict_SetItemString(kwargs, + "context", + PyCapsule_New(context, "tracing_context", NULL)); + } +#endif + vres = (pycbc_ViewResult *)PyObject_CallFunction( + (PyObject *)&pycbc_ViewResultType, "OO", Py_None, kwargs); + if (!vres) { + PYCBC_DEBUG_LOG("null vres"); + } + + PYCBC_EXCEPTION_LOG_NOCLEAR; + PYCBC_DEBUG_LOG("got vres: %p", vres); + return vres; +} + static int ViewResult__init__(PyObject *self_raw, PyObject *args, PyObject *kwargs) { - pycbc_ViewResult* self = (pycbc_ViewResult*)(self_raw); #ifdef PYCBC_TRACING - PYCBC_DEBUG_LOG("in ur view making ur tracer\n"); - self->py_tracer = kwargs?(pycbc_Tracer_t*)PyDict_GetItemString(kwargs, "tracer"):NULL; - self->own_tracer = 0; - if (self->py_tracer) { - PYCBC_DEBUG_LOG("Got parent tracer %p\n", self->py_tracer); + pycbc_ViewResult *self = (pycbc_ViewResult *)self_raw; + PyObject *context_capsule = + kwargs ? PyDict_GetItemString(kwargs, "context") : NULL; + self->context_capsule = context_capsule; + if (self->context_capsule) { + PYCBC_DEBUG_LOG( + "Got parent context %p\n", + PyCapsule_GetPointer(self->context_capsule, "tracing_context")); } #endif PYCBC_EXCEPTION_LOG_NOCLEAR; @@ -356,10 +402,14 @@ ViewResult_dealloc(pycbc_ViewResult *vres) { Py_CLEAR(vres->rows); #ifdef PYCBC_TRACING - if (vres->own_tracer && vres->py_tracer) { - lcbtrace_destroy(vres->py_tracer->tracer); - PYCBC_DECREF((PyObject*)vres->py_tracer); + if (vres->context_capsule) { + pycbc_stack_context_handle handle = + (pycbc_stack_context_handle)PyCapsule_GetPointer( + vres->context_capsule, "tracing_context"); + --(handle->ref_count); } + + Py_XDECREF(vres->context_capsule); #endif Py_TYPE(vres)->tp_base->tp_dealloc((PyObject*)vres); } From ff730476d0be24009944191248f92adbddc60c77 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Thu, 17 May 2018 15:38:56 +0100 Subject: [PATCH 147/829] PYCBC-486: Document Threshold Logger settings Change-Id: I559f280b71559b7f69fdd954806e778a6050d486 Reviewed-on: http://review.couchbase.org/94358 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- couchbase/bucket.py | 173 +++++++++++++++++++++++++++++++++++++++++--- setup.py | 8 +- src/bucket.c | 1 + src/constants.c | 69 +++++++++++++++--- src/ext.c | 14 +++- src/pycbc.h | 14 +++- 6 files changed, 245 insertions(+), 34 deletions(-) diff --git a/couchbase/bucket.py b/couchbase/bucket.py index 9825a955a..73ac08096 100644 --- a/couchbase/bucket.py +++ b/couchbase/bucket.py @@ -125,6 +125,7 @@ def newfn(self, key, *args, **kwargs): class Bucket(_Base): + def __init__(self, *args, **kwargs): """Connect to a bucket. @@ -1772,17 +1773,169 @@ def redaction(self): def redaction(self, val): return self._cntl(_LCB.LCB_CNTL_LOG_REDACTION, value=val, value_type='int') - def __getattribute__(self, name): - if name in _LCB.TRACING.keys(): - return self._cntl(**_LCB.TRACING[name]) - else: - return super(Bucket, self).__getattribute__(name) + @property + def tracing_orphaned_queue_flush_interval(self): + """ + The tracing orphaned queue flush interval, in fractions of a second. - def __setattr__(self, name, value): - if name in _LCB.TRACING.keys(): - return self._cntl(value=value, **_LCB.TRACING[name]) - else: - super(Bucket, self).__setattr__(name, value) + :: + # Set tracing orphaned queue flush interval to 0.5 seconds + cb.tracing_orphaned_queue_flush_interval = 0.5 + + """ + + return self._cntl(op=_LCB.TRACING_ORPHANED_QUEUE_FLUSH_INTERVAL, value_type="timeout") + + @tracing_orphaned_queue_flush_interval.setter + def tracing_orphaned_queue_flush_interval(self, val): + return self._cntl(op=_LCB.TRACING_ORPHANED_QUEUE_FLUSH_INTERVAL, value=val, value_type="timeout") + + @property + def tracing_orphaned_queue_size(self): + """ + The tracing orphaned queue size. + + :: + # Set tracing orphaned queue size to 100 entries + cb.tracing_orphaned_queue_size = 100 + + """ + + return self._cntl(op=_LCB.TRACING_ORPHANED_QUEUE_SIZE, value_type="uint32_t") + + @tracing_orphaned_queue_size.setter + def tracing_orphaned_queue_size(self, val): + return self._cntl(op=_LCB.TRACING_ORPHANED_QUEUE_SIZE, value=val, value_type="uint32_t") + + @property + def tracing_threshold_queue_flush_interval(self): + """ + The tracing threshold queue flush interval, in fractions of a second. + + :: + # Set tracing threshold queue flush interval to 0.5 seconds + cb.tracing_threshold_queue_flush_interval = 0.5 + + """ + + return self._cntl(op=_LCB.TRACING_THRESHOLD_QUEUE_FLUSH_INTERVAL, value_type="timeout") + + @tracing_threshold_queue_flush_interval.setter + def tracing_threshold_queue_flush_interval(self, val): + return self._cntl(op=_LCB.TRACING_THRESHOLD_QUEUE_FLUSH_INTERVAL, value=val, value_type="timeout") + + @property + def tracing_threshold_queue_size(self): + """ + The tracing threshold queue size. + + :: + # Set tracing threshold queue size to 100 entries + cb.tracing_threshold_queue_size = 100 + + """ + + return self._cntl(op=_LCB.TRACING_THRESHOLD_QUEUE_SIZE, value_type="uint32_t") + + @tracing_threshold_queue_size.setter + def tracing_threshold_queue_size(self, val): + return self._cntl(op=_LCB.TRACING_THRESHOLD_QUEUE_SIZE, value=val, value_type="uint32_t") + + @property + def tracing_threshold_kv(self): + """ + The tracing threshold for KV, in fractions of a second. + + :: + # Set tracing threshold for KV to 0.5 seconds + cb.tracing_threshold_kv = 0.5 + + """ + + return self._cntl(op=_LCB.TRACING_THRESHOLD_KV, value_type="timeout") + + @tracing_threshold_kv.setter + def tracing_threshold_kv(self, val): + return self._cntl(op=_LCB.TRACING_THRESHOLD_KV, value=val, value_type="timeout") + + @property + def tracing_threshold_n1ql(self): + """ + The tracing threshold for N1QL, in fractions of a second. + + :: + # Set tracing threshold for N1QL to 0.5 seconds + cb.tracing_threshold_n1ql = 0.5 + + """ + + return self._cntl(op=_LCB.TRACING_THRESHOLD_N1QL, value_type="timeout") + + @tracing_threshold_n1ql.setter + def tracing_threshold_n1ql(self, val): + return self._cntl(op=_LCB.TRACING_THRESHOLD_N1QL, value=val, value_type="timeout") + + @property + def tracing_threshold_view(self): + """ + The tracing threshold for View, in fractions of a second. + + :: + # Set tracing threshold for View to 0.5 seconds + cb.tracing_threshold_view = 0.5 + + """ + + return self._cntl(op=_LCB.TRACING_THRESHOLD_VIEW, value_type="timeout") + + @tracing_threshold_view.setter + def tracing_threshold_view(self, val): + return self._cntl(op=_LCB.TRACING_THRESHOLD_VIEW, value=val, value_type="timeout") + + @property + def tracing_threshold_fts(self): + """ + The tracing threshold for FTS, in fractions of a second. + + :: + # Set tracing threshold for FTS to 0.5 seconds + cb.tracing_threshold_fts = 0.5 + + """ + + return self._cntl(op=_LCB.TRACING_THRESHOLD_FTS, value_type="timeout") + + @tracing_threshold_fts.setter + def tracing_threshold_fts(self, val): + return self._cntl(op=_LCB.TRACING_THRESHOLD_FTS, value=val, value_type="timeout") + + @property + def tracing_threshold_analytics(self): + """ + The tracing threshold for analytics, in fractions of a second. + + :: + # Set tracing threshold for analytics to 0.5 seconds + cb.tracing_threshold_analytics = 0.5 + + """ + + return self._cntl(op=_LCB.TRACING_THRESHOLD_ANALYTICS, value_type="timeout") + + @tracing_threshold_analytics.setter + def tracing_threshold_analytics(self, val): + return self._cntl(op=_LCB.TRACING_THRESHOLD_ANALYTICS, value=val, value_type="timeout") + + # Backward compatibility aliases, to be removed by next major revision + TRACING_ORPHANED_QUEUE_FLUSH_INTERVAL = tracing_orphaned_queue_flush_interval + TRACING_ORPHANED_QUEUE_SIZE = tracing_orphaned_queue_size + TRACING_THRESHOLD_QUEUE_FLUSH_INTERVAL = tracing_threshold_queue_flush_interval + TRACING_THRESHOLD_QUEUE_SIZE = tracing_threshold_queue_size + TRACING_THRESHOLD_KV = tracing_threshold_kv + TRACING_THRESHOLD_N1QL = tracing_threshold_n1ql + TRACING_THRESHOLD_VIEW = tracing_threshold_view + TRACING_THRESHOLD_FTS = tracing_threshold_fts + TRACING_THRESHOLD_ANALYTICS = tracing_threshold_analytics def _cntl(self, *args, **kwargs): """Low-level interface to the underlying C library's settings. via diff --git a/setup.py b/setup.py index 1bbbde88d..1f65734a6 100644 --- a/setup.py +++ b/setup.py @@ -30,11 +30,9 @@ extoptions['extra_compile_args'] = [] extoptions['extra_link_args'] = [] -if os.environ.get('PYCBC_TRACING_ENABLE'): - extoptions['extra_compile_args'] += ['-DPYCBC_TRACING_ENABLE'] -if os.environ.get('PYCBC_DEBUG'): - extoptions['extra_compile_args'] += ['-DPYCBC_DEBUG'] - +comp_flags = ["PYCBC_TRACING_ENABLE","PYCBC_DEBUG","PYCBC_CRYPTO_VERSION"] +extoptions['extra_compile_args'] += ["-D{}={}".format(flag,os.environ.get(flag)) + for flag in comp_flags if flag in os.environ.keys()] if sys.platform != 'win32': extoptions['libraries'] = ['couchbase'] if os.environ.get('PYCBC_DEBUG'): diff --git a/src/bucket.c b/src/bucket.c index 2eba389fa..d3eaf9c3c 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -224,6 +224,7 @@ Bucket_decrypt_fields(pycbc_Bucket *self, PyObject *args) cmd.nfields = pycbc_populate_fieldspec(&cmd.fields, fieldspec); res = lcbcrypto_decrypt_fields(self->instance, &cmd); #else + (void)fieldspec; res = lcbcrypto_decrypt_document(self->instance, &cmd); #endif } diff --git a/src/constants.c b/src/constants.c index f01818525..22389e569 100644 --- a/src/constants.c +++ b/src/constants.c @@ -234,28 +234,75 @@ static void setup_tracing_map(PyObject *module, X(TRACING_THRESHOLD_N1QL, convert_timevalue) DIV\ X(TRACING_THRESHOLD_VIEW, convert_timevalue) DIV\ X(TRACING_THRESHOLD_FTS, convert_timevalue) DIV\ - X(TRACING_THRESHOLD_ANALYTICS, convert_timevalue) DIV \ - X(ENABLE_TRACING, convert_intbool) + X(TRACING_THRESHOLD_ANALYTICS, convert_timevalue) + PyObject* convert_timevalue = pycbc_SimpleStringZ("timeout"); +#define convert_timevalue_desc "The %S, in fractions of a second." +#define convert_timevalue_desc_val "0.5" +#define convert_timevalue_desc_val_units " seconds" PyObject* convert_u32 = pycbc_SimpleStringZ("uint32_t"); +#define convert_u32_desc "The %S." +#define convert_u32_desc_val "100" +#define convert_u32_desc_val_units " entries" PyObject* convert_intbool = pycbc_SimpleStringZ("int"); +#define convert_intbool_desc "Whether %S is set. " +#define convert_intbool_desc_val "True" +#define convert_intbool_desc_val_units "" PyObject *result = PyDict_New(); LCB_FOR_EACH_THRESHOLD_PARAM(LCB_CNTL_CONSTANT, ;); LCB_CNTL_TRACING_THRESHOLD_KV; -#define X(NAME, VALUE) \ - { \ - PyObject *attrdict = PyDict_New(); \ - PyObject *val = PyLong_FromLong(LCB_CNTL_##NAME); \ - PyDict_SetItemString(attrdict, "op", val); \ - PyDict_SetItemString(attrdict, "value_type", VALUE); \ - PyDict_SetItemString(result, #NAME, attrdict); \ - Py_DecRef(val); \ - Py_DecRef(attrdict); \ +#define X(NAME, TYPE) \ + { \ + PyObject *attrdict = PyDict_New(); \ + PyObject *val = PyLong_FromLong(LCB_CNTL_##NAME); \ + PyDict_SetItemString(attrdict, "op", val); \ + PyDict_SetItemString(attrdict, "value_type", TYPE); \ + PyDict_SetItemString(result, #NAME, attrdict); \ + Py_DecRef(val); \ + Py_DecRef(attrdict); \ } LCB_FOR_EACH_THRESHOLD_PARAM(X, ;); +#undef X + +#define X(NAME,TYPE)\ + {\ + PyObject* as_string = pycbc_SimpleStringZ(#NAME);\ + PyObject* as_lower_case = PyObject_CallMethod(as_string, "lower","");\ + PyObject* as_words = PyObject_CallMethod(as_lower_case, "replace", "ss", "_", " ");\ + pycbc_replace_str(&as_words, "analytics", "for analytics");\ + pycbc_replace_str(&as_words, "n1ql", "for N1QL");\ + pycbc_replace_str(&as_words, "kv", "for KV");\ + pycbc_replace_str(&as_words, "fts", "for FTS");\ + pycbc_replace_str(&as_words, "view", "for View");\ + pycbc_print_pyformat(\ + "@property\n"\ + "def %S(self):\n"\ + " \"\"\"\n"\ + " " TYPE##_desc "\n"\ + "\n"\ + " ::\n"\ + " # Set %S to " TYPE##_desc_val TYPE##_desc_val_units "\n"\ + " cb.%S=" TYPE##_desc_val "\n" \ + "\n"\ + " \"\"\"\n"\ + " \n"\ + " return self._cntl(op=_LCB." #NAME ", value_type=\"%S\")\n\n", as_lower_case, as_words, as_words, as_lower_case, TYPE );\ + pycbc_print_pyformat(\ + "@%S.setter\n"\ + "def %S(self, val):\n"\ + " return self._cntl(op=_LCB." #NAME ", value=val, value_type=\"%S\")\n\n", as_lower_case, as_lower_case, TYPE);\ + Py_DecRef(as_words);\ + Py_DecRef(as_lower_case);\ + Py_DecRef(as_string);\ + } + +#ifdef PYCBC_GEN_PYTHON +LCB_FOR_EACH_THRESHOLD_PARAM(X, ; ); +#endif #undef X Py_DecRef(convert_timevalue); Py_DecRef(convert_u32); + Py_DecRef(convert_intbool); PyModule_AddObject(module, "TRACING", result); #undef LCB_FOR_EACH_THRESHOLD_PARAM } diff --git a/src/ext.c b/src/ext.c index 9d5e492ba..772a9a30c 100644 --- a/src/ext.c +++ b/src/ext.c @@ -495,8 +495,9 @@ void pycbc_exception_log(const char* file, int line, int clear) } } } +#endif -void pycbc_log_pyformat(const char *file, int line, const char *format, ...) +void pycbc_print_pyformat(const char *format, ...) { va_list v1; PyObject *type = NULL, *value = NULL, *traceback = NULL; @@ -508,8 +509,7 @@ void pycbc_log_pyformat(const char *file, int line, const char *format, ...) if (!formatted || PyErr_Occurred()) { PYCBC_EXCEPTION_LOG } else { - PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE( - file, line, "%s", PYCBC_CSTR(formatted)); + fprintf(stderr, "%s", PYCBC_CSTR(formatted)); } Py_XDECREF(formatted); PyErr_Print(); @@ -518,7 +518,13 @@ void pycbc_log_pyformat(const char *file, int line, const char *format, ...) } } -#endif +PyObject* pycbc_replace_str(PyObject** string, const char* pat, const char* replace) +{ + PyObject* result = PyObject_CallMethod(*string, "replace", "ss", pat, replace); + Py_DecRef(*string); + *string = result; + return result; +} #ifdef PYCBC_TRACING pycbc_stack_context_handle pycbc_Context_deref_debug( diff --git a/src/pycbc.h b/src/pycbc.h index 42f0dfd80..0aa54f115 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -20,12 +20,15 @@ * @author Mark Nunberg */ + #ifdef PYCBC_DEBUG -#define PYCBC_DEBUG_LOG_RAW(...) printf(__VA_ARGS__); -void pycbc_log_pyformat(const char *file, int line, const char *format, ...); +#define PYCBC_DEBUG_LOG_RAW(...) fprintf(stderr,__VA_ARGS__); +void pycbc_print_pyformat(const char *format, ...); void pycbc_exception_log(const char *file, int line, int clear); #define PYCBC_DEBUG_PYFORMAT(FORMAT, ...) \ - pycbc_log_pyformat(__FILE__, __LINE__, FORMAT, __VA_ARGS__, NULL) + PYCBC_DEBUG_LOG_PREFIX(__FILE__, __LINE__)\ + pycbc_print_pyformat(FORMAT, __VA_ARGS__, NULL);\ + fprintf(stderr, "\n"); #define PYCBC_EXCEPTION_LOG_NOCLEAR pycbc_exception_log(__FILE__, __LINE__, 0); #define PYCBC_EXCEPTION_LOG pycbc_exception_log(__FILE__, __LINE__, 1); #else @@ -35,8 +38,9 @@ void pycbc_exception_log(const char *file, int line, int clear); #define PYCBC_EXCEPTION_LOG PyErr_Clear(); #endif +#define PYCBC_DEBUG_LOG_PREFIX(FILE,LINE) PYCBC_DEBUG_LOG_RAW("at %s line %d:", FILE, LINE) #define PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(FILE,LINE,POSTFIX,...)\ - PYCBC_DEBUG_LOG_RAW("at %s line %d:", FILE, LINE)\ + PYCBC_DEBUG_LOG_PREFIX(FILE,LINE)\ PYCBC_DEBUG_LOG_RAW(__VA_ARGS__)\ PYCBC_DEBUG_LOG_RAW(POSTFIX) #define PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(FILE,LINE,...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(FILE,LINE,"\n", __VA_ARGS__) @@ -201,6 +205,8 @@ unsigned long pycbc_IntAsUL(PyObject *o); #endif +PyObject* pycbc_replace_str(PyObject** string, const char* pat, const char* replace); + /** * Fetches a valid TTL from the object * @param obj an object to be parsed as the TTL From f79655387ea9c891174784edc413c2f9fafbc52e Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Tue, 22 May 2018 15:53:35 +0000 Subject: [PATCH 148/829] PYCBC-490: Use C to interpolate logging input Motivation ---------- On some older Python 2.7 systems on Linux, the PyUnicode_FromFormatV function was mishandling some printf interpolation arguments, in this case "%.*s", a string with a given length. PyUnicode_FromFormatV should actually just dump the remainder of the format string but in some cases it segfaults instead when it encounters something it doesn't understand. In addition, we want to be able to log output from these more unusual interpolation strings, so it seems better to use a C function to write to a buffer before sending it to CPython for logging. Changes ------- Added a fixed size buffer (currently 1000 characters) to log input from, and write into it using vsnprintf. Then convert into a Python string and pass it into the Python logging function. Results ------- Input is correctly interpolated and any Python segfaults are prevented. Change-Id: I9504b03728e3e9c706cf80d1bdc55d162f5f0c4a Reviewed-on: http://review.couchbase.org/94518 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Ellis Breen <ellis.breen@couchbase.com> --- src/ext.c | 39 ++++++++++++++++++++++++++++++++++----- 1 file changed, 34 insertions(+), 5 deletions(-) diff --git a/src/ext.c b/src/ext.c index 772a9a30c..87ff24da2 100644 --- a/src/ext.c +++ b/src/ext.c @@ -414,8 +414,12 @@ static void log_handler(struct lcb_logprocs_st *procs, PyGILState_STATE gil_prev; PyObject *tmp; PyObject *kwargs; +#define PYCBC_LOG_BUFSZ 1000 + char stackbuf[PYCBC_LOG_BUFSZ] = {0}; + char* heapbuf = NULL; + char* tempbuf = stackbuf; va_list vacp; - + int length = 0; if (!pycbc_log_handler) { return; } @@ -424,10 +428,35 @@ static void log_handler(struct lcb_logprocs_st *procs, kwargs = PyDict_New(); va_copy(vacp, ap); - tmp = PyUnicode_FromFormatV(fmt, vacp); + length = vsnprintf(stackbuf, PYCBC_LOG_BUFSZ, fmt, vacp); + if (length > PYCBC_LOG_BUFSZ) + { + heapbuf = calloc(length, sizeof(char)); + va_copy(vacp, ap); + length = vsnprintf(heapbuf, length, fmt, vacp); + tempbuf = heapbuf; + } + if (length >= 0) + { + tmp = pycbc_SimpleStringN( + tempbuf, length); + } + else + { + va_copy(vacp, ap); + PYCBC_DEBUG_LOG("Got negative result %d from FMT %s", length, fmt); + PYCBC_DEBUG_LOG("should be:"); + PYCBC_DEBUG_LOG(fmt, vacp); + } va_end(ap); - - PyDict_SetItemString(kwargs, "message", tmp); Py_DECREF(tmp); +#undef PYCBC_LOG_BUFSZ + if (heapbuf) // in case we ever-hit a noncompliant C implementation + { + free(heapbuf); + } + if (length<0 || !tmp || PyErr_Occurred()) goto FAIL; + PyDict_SetItemString(kwargs, "message", tmp); + Py_DECREF(tmp); tmp = pycbc_IntFromL(iid); PyDict_SetItemString(kwargs, "id", tmp); Py_DECREF(tmp); @@ -442,7 +471,7 @@ static void log_handler(struct lcb_logprocs_st *procs, PYCBC_STASH_EXCEPTION( PyObject_Call(pycbc_log_handler, pycbc_DummyTuple, kwargs)); - + FAIL: Py_DECREF(kwargs); PyGILState_Release(gil_prev); } From f06e5756d7b235885bfd53229ced8e805fe4f266 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Wed, 23 May 2018 19:06:43 +0000 Subject: [PATCH 149/829] PYCBC-491: Fix crash when only Threshold Tracer is enabled Change-Id: I27393b00977e325fcaede747e3cb68f1fb864a05 Reviewed-on: http://review.couchbase.org/94619 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- jenkins/go.sh | 21 +++++ setup.py | 9 ++- src/bucket.c | 110 +++++++++++++++------------ src/callbacks.c | 5 +- src/connevents.c | 3 +- src/crypto.c | 12 ++- src/ext.c | 194 +++++++++++++++++++++++++++++++++++++---------- src/pycbc.h | 41 ++++++---- 8 files changed, 283 insertions(+), 112 deletions(-) create mode 100644 jenkins/go.sh diff --git a/jenkins/go.sh b/jenkins/go.sh new file mode 100644 index 000000000..73f2cba7d --- /dev/null +++ b/jenkins/go.sh @@ -0,0 +1,21 @@ +export VENVDIR=~/virtualenvs/${PYVERSION}_LCB_${LCB_VERSION} +rm -Rf $VENVDIR +virtualenv -p `which python$PYVERSION` $VENVDIR +source $VENVDIR/bin/activate + +rm -Rf build +python setup.py build_ext --inplace --library-dir $LCB_LIB --include-dir $LCB_INC +python setup.py install +pip install -r dev_requirements.txt +cp tests.ini.sample tests.ini +sed -i -e '0,/enabled/{s/enabled = True/enabled = False/}' tests.ini +if [ -z $PYCBC_DEBUG_SYMBOLS ] +then + nosetests --with-xunit -v +else +set TMPCMDS=${PYVERSION}_${LCB_VERSION}_cmds + echo "run `which nosetests` -v --with-xunit">$TMPCMDS + echo "bt" >>$TMPCMDS + echo "quit" >>$TMPCMDS +gdb -batch -x $TMPCMDS `which python` +fi diff --git a/setup.py b/setup.py index 1f65734a6..8fd68894e 100644 --- a/setup.py +++ b/setup.py @@ -31,11 +31,12 @@ extoptions['extra_link_args'] = [] comp_flags = ["PYCBC_TRACING_ENABLE","PYCBC_DEBUG","PYCBC_CRYPTO_VERSION"] +debug_symbols = len(set(os.environ.keys()) & set(["PYCBC_DEBUG","PYCBC_DEBUG_SYMBOLS"])) >0 extoptions['extra_compile_args'] += ["-D{}={}".format(flag,os.environ.get(flag)) for flag in comp_flags if flag in os.environ.keys()] if sys.platform != 'win32': extoptions['libraries'] = ['couchbase'] - if os.environ.get('PYCBC_DEBUG'): + if debug_symbols: extoptions['extra_compile_args'] += ['-O0', '-g3'] extoptions['extra_link_args'] += ['-O0', '-g3'] if sys.platform == 'darwin': @@ -64,9 +65,9 @@ extoptions['libraries'] = ['libcouchbase'] ## Enable these lines for debug builds - if os.environ.get('PYCBC_DEBUG'): - extoptions['extra_compile_args'] += ['/Zi','/DEBUG'] - extoptions['extra_link_args'] += ['/DEBUG'] + if debug_symbols: + extoptions['extra_compile_args'] += ['/Zi','/DEBUG','/O0'] + extoptions['extra_link_args'] += ['/DEBUG','-debug'] extoptions['library_dirs'] = [os.path.join(lcb_root, 'lib')] extoptions['include_dirs'] = [os.path.join(lcb_root, 'include')] extoptions['define_macros'] = [('_CRT_SECURE_NO_WARNINGS', 1)] diff --git a/src/bucket.c b/src/bucket.c index d3eaf9c3c..9e06a82ba 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -1,3 +1,4 @@ + /** * Copyright 2013 Couchbase, Inc. * @@ -140,7 +141,7 @@ Bucket_unregister_crypto_provider(pycbc_Bucket *self, PyObject *args) #if PY_MAJOR_VERSION<3 const char* pycbc_cstrn(PyObject* object, Py_ssize_t *length) { - const char* buffer = NULL; + char *buffer = NULL; PyString_AsStringAndSize(object, &buffer, length); return buffer; } @@ -800,6 +801,8 @@ PyObject *pycbc_value_or_none(PyObject *tracer) return tracer ? tracer : Py_None; } +void pycbc_Bucket_init_tracer(pycbc_Bucket *self); + static int Bucket__init__(pycbc_Bucket *self, PyObject *args, PyObject *kwargs) @@ -812,31 +815,27 @@ Bucket__init__(pycbc_Bucket *self, PyObject *iops_O = NULL; PyObject *dfl_fmt = NULL; PyObject *tc = NULL; - PyObject *tracer = NULL; struct lcb_create_st create_opts = { 0 }; -#ifdef PYCBC_TRACING - lcbtrace_TRACER *threshold_tracer = NULL; -#endif /** * This xmacro enumerates the constructor keywords, targets, and types. * This was converted into an xmacro to ease the process of adding or * removing various parameters. */ -#define XCTOR_ARGS(X) \ +#define XCTOR_ARGS(X) \ X("connection_string", &create_opts.v.v3.connstr, "z") \ - X("connstr", &create_opts.v.v3.connstr, "z") \ - X("username", &create_opts.v.v3.username, "z") \ - X("password", &create_opts.v.v3.passwd, "z") \ - X("quiet", &self->quiet, "I") \ - X("unlock_gil", &unlock_gil_O, "O") \ - X("transcoder", &tc, "O") \ - X("default_format", &dfl_fmt, "O") \ - X("lockmode", &self->lockmode, "i") \ - X("_flags", &self->flags, "I") \ - X("_conntype", &conntype, "i") \ - X("_iops", &iops_O, "O") \ - X("tracer", &tracer, "O") + X("connstr", &create_opts.v.v3.connstr, "z") \ + X("username", &create_opts.v.v3.username, "z") \ + X("password", &create_opts.v.v3.passwd, "z") \ + X("quiet", &self->quiet, "I") \ + X("unlock_gil", &unlock_gil_O, "O") \ + X("transcoder", &tc, "O") \ + X("default_format", &dfl_fmt, "O") \ + X("lockmode", &self->lockmode, "i") \ + X("_flags", &self->flags, "I") \ + X("_conntype", &conntype, "i") \ + X("_iops", &iops_O, "O") \ + X("tracer", &self->parent_tracer, "O") static char *kwlist[] = { #define X(s, target, type) s, @@ -872,10 +871,6 @@ Bucket__init__(pycbc_Bucket *self, if (unlock_gil_O && PyObject_IsTrue(unlock_gil_O) == 0) { self->unlock_gil = 0; } -#ifdef PYCBC_TRACING - PYCBC_DEBUG_LOG("threshold tracer %p", threshold_tracer); - -#endif create_opts.version = 3; create_opts.v.v3.type = conntype; @@ -918,33 +913,7 @@ Bucket__init__(pycbc_Bucket *self, #endif err = lcb_create(&self->instance, &create_opts); -#ifdef PYCBC_TRACING - threshold_tracer = lcb_get_tracer(self->instance); - if (tracer || threshold_tracer) { - PyObject *tracer_args = PyTuple_New(2); - PyObject *threshold_tracer_capsule = - threshold_tracer - ? PyCapsule_New( - threshold_tracer, "threshold_tracer", NULL) - : NULL; - PyTuple_SetItem(tracer_args, 0, pycbc_value_or_none(tracer)); - PyTuple_SetItem( - tracer_args, 1, pycbc_value_or_none(threshold_tracer_capsule)); - self->tracer = - (pycbc_Tracer_t *)PyObject_Call((PyObject *)&pycbc_TracerType, - tracer_args, - pycbc_DummyKeywords); - if (PyErr_Occurred()) { - PYCBC_EXCEPTION_LOG_NOCLEAR; - self->tracer = NULL; - } else { - PYCBC_XINCREF(self->tracer); - lcb_set_tracer(self->instance, self->tracer->tracer); - } - PYCBC_DECREF(tracer_args); - } -#endif if (err != LCB_SUCCESS) { self->instance = NULL; PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, err, @@ -976,9 +945,51 @@ Bucket__init__(pycbc_Bucket *self, } self->btype = pycbc_IntFromL(LCB_BTYPE_UNSPEC); +#ifdef PYCBC_TRACING + + pycbc_Bucket_init_tracer(self); +#endif return 0; } +PyObject *pycbc_value_or_none_incref(PyObject *maybe_value) +{ + PyObject *result = pycbc_value_or_none(maybe_value); + PYCBC_INCREF(result); + return result; +} + +void pycbc_Bucket_init_tracer(pycbc_Bucket *self) +{ + lcbtrace_TRACER *threshold_tracer = lcb_get_tracer(self->instance); + + if (self->parent_tracer || threshold_tracer) { + PyObject *tracer_args = PyTuple_New(2); + PyObject *threshold_tracer_capsule = + threshold_tracer + ? PyCapsule_New( + threshold_tracer, "threshold_tracer", NULL) + : NULL; + PyTuple_SetItem(tracer_args, + 0, + pycbc_value_or_none_incref(self->parent_tracer)); + PyTuple_SetItem(tracer_args, + 1, + pycbc_value_or_none_incref(threshold_tracer_capsule)); + self->tracer = + (pycbc_Tracer_t *)PyObject_Call((PyObject *)&pycbc_TracerType, + tracer_args, + pycbc_DummyKeywords); + if (PyErr_Occurred()) { + PYCBC_EXCEPTION_LOG_NOCLEAR; + self->tracer = NULL; + } else { + PYCBC_XINCREF(self->tracer); + } + PYCBC_DECREF(tracer_args); + } +} + static PyObject* Bucket__connect(pycbc_Bucket *self, PyObject* args, PyObject* kwargs) { @@ -1020,6 +1031,9 @@ Bucket__connect(pycbc_Bucket *self, PyObject* args, PyObject* kwargs) } self->btype = pycbc_IntFromL(btype); } + if (self->tracer) { + lcb_set_tracer(self->instance, self->tracer->tracer); + } Py_RETURN_NONE; } diff --git a/src/callbacks.c b/src/callbacks.c index aeee88583..08e073360 100644 --- a/src/callbacks.c +++ b/src/callbacks.c @@ -39,7 +39,10 @@ static void cb_thr_begin(pycbc_Bucket *self) { #ifdef PYCBC_TRACING - pycbc_Tracer_propagate(self->tracer); + if (self && self->tracer) { + PYCBC_DEBUG_LOG("propagating tracer from %p, %p", self, self->tracer); + pycbc_Tracer_propagate(self->tracer); + } #endif if (Py_REFCNT(self) > 1) { Py_DECREF(self); diff --git a/src/connevents.c b/src/connevents.c index b8ce8a0c1..594ef1b63 100644 --- a/src/connevents.c +++ b/src/connevents.c @@ -17,8 +17,9 @@ /** * This file contains connection events */ -#include "pycbc.h" +#include <assert.h> #include "iops.h" +#include "pycbc.h" void pycbc_invoke_connected_event(pycbc_Bucket *conn, lcb_error_t err) diff --git a/src/crypto.c b/src/crypto.c index a548f96fc..1bbec8a72 100644 --- a/src/crypto.c +++ b/src/crypto.c @@ -237,6 +237,11 @@ X(v1,decrypt, pycbc_cstrndup, PYCBC_V1_DECRYPT_TYPES)\ PYCBC_X_V1_ONLY_CRYPTO_METHODS(X)\ PYCBC_X_COMMON_CRYPTO_METHODS(X) +#define PYCBC_X_ALL_CRYPTO_FUNCTIONS(X) \ + PYCBC_X_V0_ONLY_CRYPTO_METHODS(X) \ + PYCBC_X_V1_ONLY_CRYPTO_METHODS(X) \ + PYCBC_X_COMMON_CRYPTO_METHODS(X) + #define PYCBC_SIG_METHOD(VERSION, METHOD, PROCESSOR, X_ARGS)\ static lcb_error_t \ pycbc_crypto_##VERSION##_##METHOD(lcbcrypto_PROVIDER *provider \ @@ -271,7 +276,7 @@ pycbc_crypto_##VERSION##_##METHOD(lcbcrypto_PROVIDER *provider \ #endif #ifdef PYCBC_GEN_METHODS -PYCBC_X_ALL_CRYPTO_VERSIONS(PYCBC_SIG_METHOD); +PYCBC_X_ALL_CRYPTO_FUNCTIONS(PYCBC_SIG_METHOD); #else static lcb_error_t pycbc_crypto_generic_generate_iv(lcbcrypto_PROVIDER *provider, uint8_t **subject, @@ -561,6 +566,9 @@ pycbc_CryptoProviderType_init(PyObject **ptr) p->tp_basicsize = sizeof(pycbc_CryptoProvider); p->tp_setattro = CryptoProvider___setattr__; p->tp_methods = CryptoProvider_TABLE_methods; - +#define PYCBC_DUMMY_METHOD_USE(VERSION, METHOD, PROCESSOR, X_ARGS) \ + (void)pycbc_crypto_##VERSION##_##METHOD; + PYCBC_X_ALL_CRYPTO_FUNCTIONS(PYCBC_DUMMY_METHOD_USE) +#undef PYCBC_DUMMY_METHOD_USE return PyType_Ready(p); } \ No newline at end of file diff --git a/src/ext.c b/src/ext.c index 87ff24da2..33da33abc 100644 --- a/src/ext.c +++ b/src/ext.c @@ -412,12 +412,12 @@ static void log_handler(struct lcb_logprocs_st *procs, va_list ap) { PyGILState_STATE gil_prev; - PyObject *tmp; + PyObject *tmp = NULL; PyObject *kwargs; #define PYCBC_LOG_BUFSZ 1000 char stackbuf[PYCBC_LOG_BUFSZ] = {0}; - char* heapbuf = NULL; - char* tempbuf = stackbuf; + char *heapbuf = NULL; + char *tempbuf = stackbuf; va_list vacp; int length = 0; if (!pycbc_log_handler) { @@ -429,20 +429,15 @@ static void log_handler(struct lcb_logprocs_st *procs, kwargs = PyDict_New(); va_copy(vacp, ap); length = vsnprintf(stackbuf, PYCBC_LOG_BUFSZ, fmt, vacp); - if (length > PYCBC_LOG_BUFSZ) - { - heapbuf = calloc(length, sizeof(char)); + if (length > PYCBC_LOG_BUFSZ) { + heapbuf = PYCBC_CALLOC_TYPED(length, char); va_copy(vacp, ap); length = vsnprintf(heapbuf, length, fmt, vacp); tempbuf = heapbuf; } - if (length >= 0) - { - tmp = pycbc_SimpleStringN( - tempbuf, length); - } - else - { + if (length >= 0) { + tmp = pycbc_SimpleStringN(tempbuf, length); + } else { va_copy(vacp, ap); PYCBC_DEBUG_LOG("Got negative result %d from FMT %s", length, fmt); PYCBC_DEBUG_LOG("should be:"); @@ -452,9 +447,10 @@ static void log_handler(struct lcb_logprocs_st *procs, #undef PYCBC_LOG_BUFSZ if (heapbuf) // in case we ever-hit a noncompliant C implementation { - free(heapbuf); + PYCBC_FREE(heapbuf); } - if (length<0 || !tmp || PyErr_Occurred()) goto FAIL; + if (length < 0 || !tmp || PyErr_Occurred()) + goto FAIL; PyDict_SetItemString(kwargs, "message", tmp); Py_DECREF(tmp); tmp = pycbc_IntFromL(iid); @@ -471,7 +467,7 @@ static void log_handler(struct lcb_logprocs_st *procs, PYCBC_STASH_EXCEPTION( PyObject_Call(pycbc_log_handler, pycbc_DummyTuple, kwargs)); - FAIL: +FAIL: Py_DECREF(kwargs); PyGILState_Release(gil_prev); } @@ -504,7 +500,31 @@ void pycbc_store_error(PyObject *err[3]) } #ifdef PYCBC_DEBUG - +void *malloc_and_log(const char *file, int line, size_t size) +{ + void *result = malloc(size); + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE( + file, line, "malloced %llu at %p", size, result); + return result; +} +void *calloc_and_log(const char *file, + int line, + size_t quant, + size_t size, + const char *type_name) +{ + void *result = calloc(size, quant); + PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE( + file, + line, + "calloced (%llu * [ %s == %llu ])= %llu at %p", + quant, + type_name, + size, + quant * size, + result); + return result; +} void pycbc_exception_log(const char* file, int line, int clear) { @@ -585,7 +605,7 @@ static void pycbc_Context_deallocate_chiidren( PYCBC_CONTEXT_DEREF(current_node->value, 1); } - free(current_node); + PYCBC_FREE(current_node); current_node = next_node; } context->children = NULL; @@ -605,7 +625,7 @@ pycbc_stack_context_handle pycbc_Context_deref( pycbc_Context_deallocate_chiidren(context, 0); PYCBC_DEBUG_LOG("closing span %p", context->span); lcbtrace_span_finish(context->span, 0); - free(context); + PYCBC_FREE(context); PYCBC_CONTEXT_DEREF(parent, 0); } else { if (should_be_final && context->children) { @@ -631,7 +651,7 @@ static void pycbc_Context_ref(pycbc_stack_context_handle ref_context, pycbc_stack_context_handle child) { pycbc_context_children *child_node = - calloc(1, sizeof(pycbc_context_children)); + PYCBC_CALLOC_TYPED(1, pycbc_context_children); child_node->value = child; child_node->next = ref_context->children; ++ref_context->ref_count; @@ -648,7 +668,8 @@ pycbc_stack_context_handle pycbc_Context_init( lcbtrace_REF_TYPE ref_type, const char *component) { - pycbc_stack_context_handle context = calloc(1, sizeof(pycbc_stack_context)); + pycbc_stack_context_handle context = + PYCBC_CALLOC_TYPED(1, pycbc_stack_context); lcbtrace_REF ref; ref.type = ref_context ? ref_type : LCBTRACE_REF_NONE; @@ -872,7 +893,7 @@ PYCBC_X_FINISH_ARGS(X,X) PyObject* pycbc_default_key; -void pycbc_Tracer_init_constants() +void pycbc_Tracer_init_constants(void) { pycbc_default_key = pycbc_SimpleStringZ("__PYCBC_DEFAULT_KEY"); @@ -911,8 +932,9 @@ typedef struct pycbc_tracer_finish_args #undef PYCBC_TAG_STRUCT #define PYCBC_TRACING_ADD_TEXT(DICT, KEY, VALUE) (DICT)->KEY=strdup((VALUE)); -#define PYCBC_TRACING_ADD_U64(DICT, KEY, VALUE) (DICT)->KEY=malloc(sizeof(lcb_uint64_t));\ - *((DICT)->KEY)=VALUE; +#define PYCBC_TRACING_ADD_U64(DICT, KEY, VALUE) \ + (DICT)->KEY = PYCBC_MALLOC(sizeof(lcb_uint64_t)); \ + *((DICT)->KEY) = VALUE; #define PYCBC_TEXT_TO_DICT(NAME) if (args->NAME) { pycbc_set_dict_kv_object(dict, pycbc_##NAME, (args->NAME)); } #define PYCBC_ULL_TO_DICT(NAME) if(args->NAME) { pycbc_set_kv_ull(dict, pycbc_##NAME, *(args->NAME)); } @@ -945,23 +967,103 @@ PyObject* pycbc_set_finish_args_from_payload(pycbc_tracer_finish_args_t *args) { #undef PYCBC_ULL_TO_DICT #undef PYCBC_TEXT_TO_DICT -#define PYCBC_TEXT_FREE(NAME) free((void*)args->NAME); -#define PYCBC_ULL_FREE(NAME) free((void*)args->NAME); +#define PYCBC_TEXT_FREE(NAME) \ + if (args->NAME) { \ + PYCBC_FREE((void *)args->NAME); \ + args->NAME = NULL; \ + } +#define PYCBC_ULL_FREE(NAME) \ + if (args->NAME) { \ + PYCBC_FREE((void *)args->NAME); \ + args->NAME = NULL; \ + } void pycbc_span_tags_args_dealloc(pycbc_tracer_tags_t* args) { + PYCBC_DEBUG_LOG("deallocing span tags args %p", args); + +#ifdef PYCBC_GEN_ARGS PYCBC_X_LITERALTAGNAMES(PYCBC_TEXT_FREE, PYCBC_ULL_FREE) - free(args); +#else + if (args->DB_TYPE) { + PYCBC_FREE((void *)args->DB_TYPE); + args->DB_TYPE = ((void *)0); + } + if (args->PEER_LATENCY) { + PYCBC_FREE((void *)args->PEER_LATENCY); + args->PEER_LATENCY = ((void *)0); + } + if (args->OPERATION_ID) { + PYCBC_FREE((void *)args->OPERATION_ID); + args->OPERATION_ID = ((void *)0); + } + if (args->COMPONENT) { + PYCBC_FREE((void *)args->COMPONENT); + args->COMPONENT = ((void *)0); + } + if (args->PEER_ADDRESS) { + PYCBC_FREE((void *)args->PEER_ADDRESS); + args->PEER_ADDRESS = ((void *)0); + } + if (args->LOCAL_ADDRESS) { + PYCBC_FREE((void *)args->LOCAL_ADDRESS); + args->LOCAL_ADDRESS = ((void *)0); + } + if (args->DB_INSTANCE) { + PYCBC_FREE((void *)args->DB_INSTANCE); + args->DB_INSTANCE = ((void *)0); + } +#endif + PYCBC_FREE(args); } -#define PYCBC_TAGS_FREE(NAME) if(args->NAME) { pycbc_span_tags_args_dealloc(args->NAME); } +#define PYCBC_TAGS_FREE(NAME) \ + if (args->NAME) { \ + pycbc_span_tags_args_dealloc(args->NAME); \ + args->NAME = NULL; \ + } void pycbc_span_args_dealloc(pycbc_tracer_span_args_t *args) { + PYCBC_DEBUG_LOG("deallocing span args %p", args); + +#ifdef PYCBC_GEN_ARGS + PYCBC_X_SPAN_ARGS( PYCBC_TEXT_FREE, PYCBC_ULL_FREE, PYCBC_TAGS_FREE, PYCBC_ULL_FREE) - free(args); +#else + if (args->operation_name) { + PYCBC_FREE((void *)args->operation_name); + args->operation_name = ((void *)0); + } + if (args->child_of) { + PYCBC_FREE((void *)args->child_of); + args->child_of = ((void *)0); + } + if (args->id) { + PYCBC_FREE((void *)args->id); + args->id = ((void *)0); + } + if (args->start_time) { + PYCBC_FREE((void *)args->start_time); + args->start_time = ((void *)0); + } + if (args->tags) { + pycbc_span_tags_args_dealloc(args->tags); + args->tags = NULL; + } +#endif + PYCBC_FREE(args); } void pycbc_span_finish_args_dealloc(struct pycbc_tracer_finish_args *args) { + PYCBC_DEBUG_LOG("deallocing finish args %p", args); + +#ifdef PYCBC_GEN_ARGS PYCBC_X_FINISH_ARGS(PYCBC_TEXT_FREE, PYCBC_ULL_FREE); - free(args); +#else + if (args->finish_time) { + PYCBC_FREE((void *)args->finish_time); + args->finish_time = ((void *)0); + }; +#endif + PYCBC_FREE(args); } #undef PYCBC_TEXT_FREE #undef PYCBC_ULL_FREE @@ -990,12 +1092,14 @@ typedef struct pycbc_tracer_state { void pycbc_init_span_args(pycbc_tracer_payload* payload) { - payload->span_start_args = calloc(1, sizeof(pycbc_tracer_span_args_t)); - payload->span_start_args->tags = calloc(1, sizeof(pycbc_tracer_tags_t)); - payload->span_finish_args = calloc(1, sizeof(pycbc_tracer_finish_args_t)); + payload->span_start_args = PYCBC_CALLOC_TYPED(1, pycbc_tracer_span_args_t); + payload->span_start_args->tags = PYCBC_CALLOC_TYPED(1, pycbc_tracer_tags_t); + payload->span_finish_args = + PYCBC_CALLOC_TYPED(1, pycbc_tracer_finish_args_t); } void pycbc_payload_dealloc(pycbc_tracer_payload *pPayload) { + PYCBC_DEBUG_LOG("deallocing Payload %p", pPayload); pycbc_span_args_dealloc(pPayload->span_start_args); pycbc_span_finish_args_dealloc(pPayload->span_finish_args); } @@ -1050,7 +1154,8 @@ pycbc_tracer_payload *pycbc_persist_span(lcbtrace_SPAN *span) char *buf = &bufarray[0]; lcbtrace_SPAN *parent; lcb_uint64_t start; - pycbc_tracer_payload *payload = calloc(1, sizeof(pycbc_tracer_payload)); + pycbc_tracer_payload *payload = PYCBC_CALLOC_TYPED(1, pycbc_tracer_payload); + PYCBC_DEBUG_LOG("alloced payload %p", payload); pycbc_init_span_args(payload); { pycbc_tracer_span_args_t *span_args = payload->span_start_args; @@ -1190,6 +1295,9 @@ void pycbc_tracer_flush(pycbc_Tracer_t *tracer) if (tracer == NULL) { return; } + if (tracer->tracer == NULL) { + return; + } state = tracer->tracer->cookie; if (state == NULL) { return; @@ -1214,6 +1322,7 @@ void pycbc_tracer_flush(pycbc_Tracer_t *tracer) void pycbc_Tracer_propagate(pycbc_Tracer_t *tracer) { + PYCBC_DEBUG_LOG("flushing pycbc_Tracer_t %p", tracer); pycbc_tracer_flush(tracer); } @@ -1221,22 +1330,25 @@ void pycbc_tracer_destructor(lcbtrace_TRACER *tracer) { if (tracer) { pycbc_tracer_state *state = tracer->cookie; + PYCBC_DEBUG_LOG("freeing lcbtrace_TRACER %p", tracer); if (state) { // Py_XDECREF(state->parent); Py_XDECREF(state->id_map); Py_XDECREF(state->start_span_method); - free(state); + PYCBC_FREE(state); tracer->cookie = NULL; } - free(tracer); + PYCBC_FREE(tracer); } } lcbtrace_TRACER *pycbc_tracer_new(PyObject *parent, lcbtrace_TRACER *child_tracer) { - lcbtrace_TRACER *tracer = calloc(1, sizeof(lcbtrace_TRACER)); - pycbc_tracer_state *pycbc_tracer = calloc(1, sizeof(pycbc_tracer_state)); + lcbtrace_TRACER *tracer = PYCBC_CALLOC_TYPED(1, lcbtrace_TRACER); + pycbc_tracer_state *pycbc_tracer = + PYCBC_CALLOC_TYPED(1, pycbc_tracer_state); + tracer->destructor = pycbc_tracer_destructor; tracer->flags = 0; tracer->version = 0; @@ -1305,8 +1417,7 @@ static int Tracer__init__(pycbc_Tracer_t *self, threshold_tracer_capsule, "threshold_tracer") : NULL); - self->tracer = - parent ? pycbc_tracer_new(parent, child_tracer) : child_tracer; + self->tracer = pycbc_tracer_new(parent, child_tracer); PYCBC_EXCEPTION_LOG_NOCLEAR; return rv; @@ -1314,6 +1425,7 @@ static int Tracer__init__(pycbc_Tracer_t *self, static void Tracer_dtor(pycbc_Tracer_t *self) { + PYCBC_DEBUG_LOG("Destroying pycbc_Tracer_t"); pycbc_tracer_flush(self); Py_TYPE(self)->tp_free((PyObject *)self); } @@ -1365,4 +1477,4 @@ int pycbc_TracerType_init(PyObject **ptr) { return PyType_Ready(p); } -#endif \ No newline at end of file +#endif diff --git a/src/pycbc.h b/src/pycbc.h index 0aa54f115..ddc25e6fd 100644 --- a/src/pycbc.h +++ b/src/pycbc.h @@ -46,7 +46,15 @@ void pycbc_exception_log(const char *file, int line, int clear); #define PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(FILE,LINE,...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_POSTFIX(FILE,LINE,"\n", __VA_ARGS__) #define PYCBC_DEBUG_LOG(...) PYCBC_DEBUG_LOG_WITH_FILE_AND_LINE_NEWLINE(__FILE__,__LINE__,__VA_ARGS__) +#define PYCBC_FREE(X) \ + PYCBC_DEBUG_LOG("freeing %p", X); \ + free(X); + #ifdef PYCBC_DEBUG +#define PYCBC_MALLOC(X) malloc_and_log(__FILE__, __LINE__, X) +#define PYCBC_CALLOC(X, Y) calloc_and_log(__FILE__, __LINE__, X, Y, "unknown") +#define PYCBC_CALLOC_TYPED(X, Y) \ + calloc_and_log(__FILE__, __LINE__, X, sizeof(Y), #Y) #define LOG_REFOP(Y, OP) \ { \ pycbc_assert((Y) && Py_REFCNT(Y) > 0); \ @@ -68,6 +76,9 @@ void pycbc_exception_log(const char *file, int line, int clear); Py_##X##OP((PyObject *)(Y)); \ } #else +#define PYCBC_MALLOC(X) malloc(X) +#define PYCBC_CALLOC(X, Y) calloc(X, Y) +#define PYCBC_CALLOC_TYPED(X, Y) calloc(X, sizeof(Y)) #define LOG_REFOP(Y, OP) Py_##OP(Y) #define LOG_REFOPX(Y, OP) Py_X##OP(Y) #endif @@ -110,6 +121,12 @@ void pycbc_exception_log(const char *file, int line, int clear); void pycbc_fetch_error(PyObject *err[3]); void pycbc_store_error(PyObject *err[3]); +void *malloc_and_log(const char *file, int line, size_t size); +void *calloc_and_log(const char *file, + int line, + size_t quant, + size_t size, + const char *type_name); #define PYCBC_STASH_EXCEPTION(OP) \ { \ @@ -195,14 +212,13 @@ typedef int pycbc_strlen_t; const char* pycbc_cstrn(PyObject* object, Py_ssize_t *length); #define PYCBC_CSTR(X) PyString_AsString(X) -#define PYCBC_CSTRN(X, n) pycbc_cstrn((X), (n)) +#define PYCBC_CSTRN(X, n) pycbc_cstrn((X), (Py_ssize_t *)(n)) unsigned PY_LONG_LONG pycbc_IntAsULL(PyObject *o); PY_LONG_LONG pycbc_IntAsLL(PyObject *o); long pycbc_IntAsL(PyObject *o); unsigned long pycbc_IntAsUL(PyObject *o); - #endif PyObject* pycbc_replace_str(PyObject** string, const char* pat, const char* replace); @@ -389,6 +405,7 @@ typedef struct { #ifdef PYCBC_TRACING /** Tracer **/ struct pycbc_Tracer *tracer; + PyObject *parent_tracer; #endif /** Transcoder object */ PyObject *tc; @@ -461,11 +478,6 @@ typedef struct pycbc_Tracer { lcbtrace_TRACER *tracer; } pycbc_Tracer_t; -static PyTypeObject SpanType = { - PYCBC_POBJ_HEAD_INIT(NULL) - 0 -}; - typedef struct { PyObject_HEAD #ifdef PYCBC_TRACING @@ -497,7 +509,7 @@ typedef void* pycbc_stack_context_handle; #ifdef PYCBC_TRACING int pycbc_is_async_or_pipeline(const pycbc_Bucket *self); -typedef struct pycbc_Result pycbc_Result; +typedef struct pycbc_Result pycbc_Result_t; typedef struct pycbc_MultiResult_st pycbc_MultiResult; pycbc_stack_context_handle pycbc_Result_start_context( @@ -505,18 +517,18 @@ pycbc_stack_context_handle pycbc_Result_start_context( PyObject *hkey, char *component, char *operation); -void pycbc_Result_propagate_context(pycbc_Result *res, +void pycbc_Result_propagate_context(pycbc_Result_t *res, pycbc_stack_context_handle parent_context); void pycbc_MultiResult_init_context(pycbc_MultiResult *self, PyObject *curkey, pycbc_stack_context_handle context, pycbc_Bucket *bucket); pycbc_stack_context_handle pycbc_MultiResult_extract_context( - pycbc_MultiResult *self, PyObject *hkey, pycbc_Result **res); + pycbc_MultiResult *self, PyObject *hkey, pycbc_Result_t **res); #define PYCBC_MULTIRESULT_EXTRACT_CONTEXT(MRES, KEY, RES) \ pycbc_MultiResult_extract_context(MRES, KEY, RES) -pycbc_stack_context_handle pycbc_Result_extract_context( - const pycbc_Result *res); +pycbc_stack_context_handle pycbc_Result_t_extract_context( + const pycbc_Result_t *res); #define PYCBC_RESULT_EXTRACT_CONTEXT(RESULT) \ pycbc_Result_extract_context(RESULT) @@ -827,7 +839,7 @@ typedef PyObject pycbc_enhanced_err_info; * * See multiresult.c */ -typedef struct pycbc_MultiResult_st { +struct pycbc_MultiResult_st { PYCBC_MULTIRESULT_BASE; /** parent Connection object */ @@ -851,8 +863,7 @@ typedef struct pycbc_MultiResult_st { int mropts; pycbc_enhanced_err_info *err_info; - -} pycbc_MultiResult; +}; typedef struct { pycbc_MultiResult base; From f779a26499405edb56c016c1489da9452a99cf21 Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Fri, 25 May 2018 11:12:39 +0100 Subject: [PATCH 150/829] PYCBC-492: Update Travis configuration to reflect supported platforms Change-Id: I9db25c482b5ec77dc4ef97d3602648efb39b9a20 Reviewed-on: http://review.couchbase.org/94767 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Mike Goldsmith <goldsmith.mike@gmail.com> --- .travis.yml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8b3e22f76..9dcf455c6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,11 +3,10 @@ sudo: false python: - "2.7" - - "2.6" - - "3.2" - "3.3" - "3.4" - "3.5" + - "3.6" addons: apt: @@ -20,7 +19,6 @@ addons: install: - pip -q install testresources==0.2.7 - pip -q install -r dev_requirements.txt - - pip -q install -r requirements.txt - pip -q install gevent || echo "Couldn't install gevent" - pip -q install twisted || echo "Couldn't install Twisted" - git describe --tags --long --always From 1dec833f6ae96d435e1b863645af94df8e85ac1b Mon Sep 17 00:00:00 2001 From: Ellis Breen <ellis.breen@couchbase.com> Date: Thu, 31 May 2018 17:50:15 +0100 Subject: [PATCH 151/829] PYCBC-494: support get_key_id method in Crypto V1 API Change-Id: I4e3d0e1f3b87b36a4ad95947fce14a6d9e49b6d0 Reviewed-on: http://review.couchbase.org/94971 Tested-by: Build Bot <build@couchbase.com> Reviewed-by: Sergey Avseyev <sergey.avseyev@gmail.com> --- CMakeLists.txt | 27 ++- couchbase/tests/cases/crypto_t.py | 7 +- couchbase/tests/cases/tracing_t.py | 1 + jenkins/go.sh | 32 ++- src/bucket.c | 67 ++++-- src/crypto.c | 323 ++++++++++++++++++++--------- src/ext.c | 57 ++++- src/pycbc.h | 25 +-- 8 files changed, 381 insertions(+), 158 deletions(-) diff --git a/CMakeLists.txt b/CMakeLists.txt index 52505fe2f..8af509c32 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -12,6 +12,8 @@ set(OT_ROOT thirdparty/opentracing-cpp) set(LCB_CJSON ${LCB_ROOT}/contrib/cJSON) set(TP_CJSON thirdparty/cJSON) set(LCB_TRACING_DIR ${LCB_ROOT}/src/tracing) +set(LCB_MCSERVER_DIR ${LCB_ROOT}/src/mcserver) +set(LCB_BUCKETCONFIG_DIR ${LCB_ROOT}/src/bucketconfig) aux_source_directory(${LCB_ROOT}/src LCB_CORE) aux_source_directory(${LCB_ROOT}/src/http HTTP) aux_source_directory(${LCB_ROOT}/include/memcached/ MCD_INC) @@ -19,11 +21,11 @@ aux_source_directory(${LCB_ROOT}/include/libcouchbase/ LCB_INC) aux_source_directory(${LCB_ROOT}/contrib/lcb-jsoncpp LCB_JSON) aux_source_directory(${LCB_CJSON} LCB_CJSON_SRC ) aux_source_directory(${LCB_ROOT}/src/ssl LCB_SSL) -aux_source_directory(${LCB_ROOT}/src/mcserver LCB_MCSERVER) +aux_source_directory(${LCB_MCSERVER_DIR} LCB_MCSERVER) aux_source_directory(${LCB_IO} LCB_IO_A) aux_source_directory(${LCB_ROOT_SRC}/sasl LCB_SASL) aux_source_directory(${LCB_ROOT_SRC}/tracing LCB_TRACING_DIR) - +aux_source_directory(${LCB_BUCKETCONFIG_DIR} BUCKETCONFIG) aux_source_directory(LCB_VBUCKET ${LCB_ROOT}/src/vbucket) aux_source_directory(LCB_SASL ${LCB_ROOT}/contrib/cbsasl) aux_source_directory(LCB_CLI ${LCB_ROOT}/contrib/cliopts) @@ -32,7 +34,7 @@ aux_source_directory(LCB_ROOT_A ${LCB_ROOT_SRC}) include(ExternalProject) add_definitions(-DLCB_TRACING=TRUE -DPYCBC_TRACING_ENABLE=TRUE - -DPYCBC_DEBUG=TRUE) + ) ExternalProject_Add (cJSON PREFIX ${OT_ROOT} @@ -63,6 +65,7 @@ include_directories( ${LCB_ROOT_SRC} ${LCB_JSON} ${LCB_SSL} ${LCB_MCSERVER} + ${LCB_MCSERVER_DIR} ${LCB_IO} ${LCB_VBUCKET} ${LCB_SASL} @@ -87,8 +90,9 @@ set(EXTRA_SOURCE_DIRS ${LCB_ROOT}/include/libcouchbase ${LCB_ROOT}/src/http ${LCB_ROOT}/src/ssl + ${LCB_MCSERVER_DIR} - ${LCB_MCSERVER} + ${LCB_ROOT}/src/mcserver ${LCB_IO} ${LCB_VBUCKET} @@ -96,6 +100,9 @@ set(EXTRA_SOURCE_DIRS ${LCB_CLI} ${LCB_ROOT_SRC} ${LCB_ROOT_A} + + ${LCB_BUCKETCONFIG} + ${LCB_BUCKETCONFIG_DIR} ) set(SOURCE @@ -106,19 +113,26 @@ set(SOURCE ${HTTP} ${LCB_SSL} ${LCB_ROOT}/src/ssl - + ${LCB_TRACING_DIR} + ${LCB_JSON} ${LCB_MCSERVER} ${LCB_IO} + ${LCB_MCSERVER_DIR} ${LCB_VBUCKET} ${LCB_SASL} ${LCB_CLI} ${LCB_ROOT_SRC} + ${LCB_BUCKETCONFIG} + ${LCB_BUCKETCONFIG_DIR} ) link_directories(${LCB_ROOT}/lib) link_libraries(${LCB_ROOT}/../lib/ ) add_executable(couchbase_python_client_2_3_1 + ${LCB_BUCKETCONFIG} + ${LCB_BUCKETCONFIG_DIR} + ${LCB_TRACING_DIR} ${OT_ROOT}/src ${LCB_CORE} ${LCB_INC} @@ -129,6 +143,9 @@ add_executable(couchbase_python_client_2_3_1 ${LCB_VBUCKET} ${LCB_SASL} ${LCB_CLI} + ${LCB_MCSERVER} + ${LCB_MCSERVER_DIR} + ${LCB_JSON} acouchbase/tests/asyncio_tests.py acouchbase/tests/fixtures.py acouchbase/tests/py34only.py diff --git a/couchbase/tests/cases/crypto_t.py b/couchbase/tests/cases/crypto_t.py index f9ecb973c..fa135f5df 100644 --- a/couchbase/tests/cases/crypto_t.py +++ b/couchbase/tests/cases/crypto_t.py @@ -81,6 +81,9 @@ def encrypt(self, input, iv): def decrypt(self, input, iv): return self.decrypt_real(input, iv) + def get_key_id(self): + return 'key' + @@ -146,8 +149,8 @@ def test_pure_python_encryption(self): self.cb.register_crypto_provider('aes256', provider) # encrypt document document = self.cb.encrypt_fields(document, [fieldspec], "crypto_") - expected = {'ciphertext': 'ImZycGVyZyI=', 'iv': 'd2liYmxl', 'sig': 'Z3Jvbms='} - orig_fields = {'alg': 'aes256', 'kid': 'key'} + expected = {'ciphertext': 'ImZycGVyZyI=', 'iv': 'd2liYmxl', 'sig': 'Z3Jvbms=', 'kid': 'key'} + orig_fields = {'alg': 'aes256'} expected_orig = {k:orig_fields[k] for k in orig_fields if k in fieldspec} expected.update(expected_orig) diff --git a/couchbase/tests/cases/tracing_t.py b/couchbase/tests/cases/tracing_t.py index 3b04cd142..449a74e0f 100644 --- a/couchbase/tests/cases/tracing_t.py +++ b/couchbase/tests/cases/tracing_t.py @@ -46,6 +46,7 @@ def handler(self,**kwargs): self.records.append(kwargs) def test_threshold_multi_get(self): + raise SkipTest("temporarily disabling - fix pending") handler = TracingTest.BogusHandler() couchbase._libcouchbase.lcb_logging(handler.handler) diff --git a/jenkins/go.sh b/jenkins/go.sh index 73f2cba7d..4129072e1 100644 --- a/jenkins/go.sh +++ b/jenkins/go.sh @@ -1,21 +1,37 @@ +#!/usr/bin/env bash +echo "LCB_INC ${LCB_INC}" +echo "LCB_LIB ${LCB_LIB}" + export VENVDIR=~/virtualenvs/${PYVERSION}_LCB_${LCB_VERSION} rm -Rf $VENVDIR virtualenv -p `which python$PYVERSION` $VENVDIR source $VENVDIR/bin/activate +export LD_LIBRARY_PATH=$LCB_LIB:$LD_LIBRARY_PATH + + rm -Rf build -python setup.py build_ext --inplace --library-dir $LCB_LIB --include-dir $LCB_INC +export COMMAND="python setup.py build_ext --inplace --library-dirs ${LCB_LIB} --include-dirs ${LCB_INC}" +echo $COMMAND +eval $COMMAND python setup.py install pip install -r dev_requirements.txt + +export TMPCMDS="${PYVERSION}_${LCB_VERSION}_cmds" +echo "trying to write to: [" +echo "${TMPCMDS}" +echo "]" +echo "run `which nosetests` -v --with-xunit" > "${TMPCMDS}" +echo "bt" >>"${TMPCMDS}" +echo "quit" >>"${TMPCMDS}" + cp tests.ini.sample tests.ini sed -i -e '0,/enabled/{s/enabled = True/enabled = False/}' tests.ini -if [ -z $PYCBC_DEBUG_SYMBOLS ] -then +if [ -n $PYCBC_VALGRIND ]; then + valgrind --track-origins=yes --leak-check=full --show-reachable=yes `which python` `which nosetests` -v "${PYCBC_VALGRIND}" > build/valgrind.txt +fi +if [ -z $PYCBC_DEBUG_SYMBOLS ] && [ -z $PYCBC_DEBUG ]; then nosetests --with-xunit -v else -set TMPCMDS=${PYVERSION}_${LCB_VERSION}_cmds - echo "run `which nosetests` -v --with-xunit">$TMPCMDS - echo "bt" >>$TMPCMDS - echo "quit" >>$TMPCMDS -gdb -batch -x $TMPCMDS `which python` +gdb -batch -x "${TMPCMDS}" `which python` fi diff --git a/src/bucket.c b/src/bucket.c index 9e06a82ba..2cbb2aeea 100644 --- a/src/bucket.c +++ b/src/bucket.c @@ -138,14 +138,6 @@ Bucket_unregister_crypto_provider(pycbc_Bucket *self, PyObject *args) return Py_None; } -#if PY_MAJOR_VERSION<3 -const char* pycbc_cstrn(PyObject* object, Py_ssize_t *length) -{ - char *buffer = NULL; - PyString_AsStringAndSize(object, &buffer, length); - return buffer; -} -#endif const char *pycbc_dict_cstr(PyObject *dp, char *key) { PyObject *item = PyDict_GetItemString(dp, key); @@ -156,15 +148,31 @@ const char *pycbc_dict_cstr(PyObject *dp, char *key) { return result; } -size_t pycbc_populate_fieldspec(lcbcrypto_FIELDSPEC **fields, PyObject *fieldspec) { +size_t pycbc_populate_fieldspec(lcbcrypto_FIELDSPEC **fields, + PyObject *fieldspec) +{ size_t i = 0; - size_t size = (size_t) PyList_Size(fieldspec); + size_t size = (size_t)PyList_Size(fieldspec); *fields = calloc(size, sizeof(lcbcrypto_FIELDSPEC)); - for (i = 0; i < size ; ++i) { + for (i = 0; i < size; ++i) { PyObject *dict = PyList_GetItem(fieldspec, i); + const char *kid_value = pycbc_dict_cstr(dict, "kid"); (*fields)[i].alg = pycbc_dict_cstr(dict, "alg"); - (*fields)[i].kid = (PYCBC_CRYPTO_VERSION < 1) ? pycbc_dict_cstr(dict, "kid") : NULL; - (*fields)[i].name = pycbc_dict_cstr(dict, "name");; +#if PYCBC_CRYPTO_VERSION < 1 + (*fields)[i].kid = kid_value; +#else + if (kid_value && strlen(kid_value) > 0) { + PYCBC_FREE(*fields); + *fields = NULL; + size = 0; + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, + LCB_EINVAL, + "Fieldspec should not include Key ID - this should " + "be provided by get_key_id instead") + break; + } +#endif + (*fields)[i].name = pycbc_dict_cstr(dict, "name"); } return size; } @@ -175,14 +183,16 @@ Bucket_encrypt_fields(pycbc_Bucket *self, PyObject *args) lcbcrypto_CMDENCRYPT cmd = {0}; PyObject* fieldspec=NULL; int res =0; + PyObject *result = NULL; if (!PyArg_ParseTuple(args, "s#Os", &cmd.doc, &cmd.ndoc, &fieldspec, &(cmd.prefix))) { PYCBC_EXCTHROW_ARGS(); return NULL; } - ; if (!PyErr_Occurred()) { cmd.nfields = pycbc_populate_fieldspec(&cmd.fields, fieldspec); + } + if (!PyErr_Occurred()) { #if PYCBC_CRYPTO_VERSION > 0 res = lcbcrypto_encrypt_fields(self->instance, &cmd); #else @@ -191,13 +201,19 @@ Bucket_encrypt_fields(pycbc_Bucket *self, PyObject *args) } if (PyErr_Occurred()) { - return NULL; + goto FINISH; } if (!res) { - return pycbc_SimpleStringN(cmd.out, cmd.nout); + result = pycbc_SimpleStringN(cmd.out, cmd.nout); + } + if (!result && !PyErr_Occurred()) { + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, + LCB_GENERIC_TMPERR, + "Internal error while encrypting"); } - PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, LCB_GENERIC_TMPERR, "Internal error while encrypting"); - return NULL; +FINISH: + PYCBC_FREE(cmd.out); + return result; } @@ -207,6 +223,7 @@ Bucket_decrypt_fields(pycbc_Bucket *self, PyObject *args) lcbcrypto_CMDDECRYPT cmd = { 0 }; int res =0; PyObject* fieldspec = NULL; + PyObject *result = NULL; if (!PyArg_ParseTuple(args, "s#" #if PYCBC_CRYPTO_VERSION >0 "O" @@ -231,13 +248,19 @@ Bucket_decrypt_fields(pycbc_Bucket *self, PyObject *args) } if (PyErr_Occurred()) { - return NULL; + goto FINISH; } if (!res) { - return pycbc_SimpleStringN(cmd.out, cmd.nout); + result = pycbc_SimpleStringN(cmd.out, cmd.nout); + } + if (!result && !PyErr_Occurred()) { + PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, + LCB_GENERIC_TMPERR, + "Internal error while decrypting"); } - PYCBC_EXC_WRAP(PYCBC_EXC_LCBERR, LCB_GENERIC_TMPERR, "Internal error while decrypting"); - return NULL; +FINISH: + PYCBC_FREE(cmd.out); + return result; } static PyObject * diff --git a/src/crypto.c b/src/crypto.c index 1bbec8a72..fa9d68ca8 100644 --- a/src/crypto.c +++ b/src/crypto.c @@ -31,26 +31,44 @@ static PyTypeObject CryptoProviderType = { 0 }; -lcb_error_t pycbc_cstrndup(uint8_t **key, size_t *key_len, PyObject *result) { - uint8_t* data = NULL; +lcb_error_t pycbc_cstrndup(const char **key, + size_t *key_len, + PyObject *result) +{ + const char *data = NULL; lcb_error_t lcb_result = LCB_EINTERNAL; -#if PY_MAJOR_VERSION>=3 - if (PyObject_IsInstance(result, (PyObject*)&PyBytes_Type)){ - lcb_result = (!PyBytes_AsStringAndSize(result, (char**)&data, (Py_ssize_t* )key_len))?LCB_SUCCESS:LCB_EINTERNAL; - } - else -#endif - { - data = (uint8_t*)PYCBC_CSTRN(result, key_len); - lcb_result = data?LCB_SUCCESS:LCB_EINTERNAL; + + data = PYCBC_CSTRN(result, key_len);; + if (data) { + lcb_result = LCB_SUCCESS; + PYCBC_DEBUG_LOG("Got string from %p: %.*s", result, (int)key_len, data); + *key = calloc(1, *key_len); + memcpy((void *)*key, (void *)data, *key_len); + PYCBC_DEBUG_LOG("Copied string from %p: %.*s", + result, + (int)key_len, + (char *)*key); + } else { + PYCBC_DEBUG_PYFORMAT( + "Problems extracting key from %p: %S", result, result); } - if (!lcb_result && data && *key_len){ - *key = (uint8_t *) calloc(1,*key_len); - memcpy(*key,data,*key_len);} return lcb_result; } +const char *pycbc_cstrdup_or_