diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 9099347df..6ed3c52c4 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -24,6 +24,12 @@ 8080, 9000 ], + "mounts": [ + "type=bind,source=${env:SSH_AUTH_SOCK},target=/ssh-agent" + ], + "containerEnv": { + "SSH_AUTH_SOCK": "/ssh-agent" + }, // Uncomment the next line if you want start specific services in your Docker Compose config. // "runServices": [], // Uncomment the next line if you want to keep your containers running after VS Code shuts down. diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 71d74e46f..5c22aaf14 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -1,4 +1,3 @@ -version: '2.4' services: # Update this to the name of the service you want to work with in your docker-compose.yml file app: @@ -7,13 +6,13 @@ services: # docker-compose.yml file (the first in the devcontainer.json "dockerComposeFile" # array). The sample below assumes your primary file is in the root of your project. container_name: datajoint-python-devcontainer - image: datajoint/datajoint-python-devcontainer:${PY_VER:-3.11}-${DISTRO:-buster} + image: datajoint/datajoint-python-devcontainer:${PY_VER:-3.11}-${DISTRO:-bookworm} build: context: . dockerfile: .devcontainer/Dockerfile args: - PY_VER=${PY_VER:-3.11} - - DISTRO=${DISTRO:-buster} + - DISTRO=${DISTRO:-bookworm} volumes: # Update this to wherever you want VS Code to mount the folder of your project diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b8992481a..4a58e0483 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,18 +21,18 @@ repos: hooks: - id: codespell - repo: https://github.com/pycqa/isort - rev: 5.12.0 # Use the latest stable version + rev: 6.0.1 # Use the latest stable version hooks: - id: isort args: - --profile=black # Optional, makes isort compatible with Black - repo: https://github.com/psf/black - rev: 24.2.0 # matching versions in pyproject.toml and github actions + rev: 25.1.0 # matching versions in pyproject.toml and github actions hooks: - id: black args: ["--check", "-v", "datajoint", "tests", "--diff"] # --required-version is conflicting with pre-commit - repo: https://github.com/PyCQA/flake8 - rev: 7.1.2 + rev: 7.3.0 hooks: # syntax tests - id: flake8 diff --git a/CHANGELOG.md b/CHANGELOG.md index 1a7b86032..4bf094509 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ ## Release notes +**Note:** This file is no longer updated. See the GitHub change log page for the +latest release notes: . 
+ ### 0.14.3 -- Sep 23, 2024 - Added - `dj.Top` restriction - PR [#1024](https://github.com/datajoint/datajoint-python/issues/1024)) PR [#1084](https://github.com/datajoint/datajoint-python/pull/1084) - Fixed - Added encapsulating double quotes to comply with [DOT language](https://graphviz.org/doc/info/lang.html) - PR [#1177](https://github.com/datajoint/datajoint-python/pull/1177) diff --git a/Dockerfile b/Dockerfile index dce8a6438..0d727f6b4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ ARG IMAGE=mambaorg/micromamba:1.5-bookworm-slim FROM ${IMAGE} ARG CONDA_BIN=micromamba -ARG PY_VER=3.9 +ARG PY_VER=3.11 ARG HOST_UID=1000 RUN ${CONDA_BIN} install --no-pin -qq -y -n base -c conda-forge \ diff --git a/README.md b/README.md index eecee41a0..e582c8ec5 100644 --- a/README.md +++ b/README.md @@ -30,8 +30,8 @@ Since Release - - commit since last release + + commit since last release diff --git a/datajoint/attribute_adapter.py b/datajoint/attribute_adapter.py index e062f4c57..2a8e59a51 100644 --- a/datajoint/attribute_adapter.py +++ b/datajoint/attribute_adapter.py @@ -1,7 +1,6 @@ import re from .errors import DataJointError, _support_adapted_types -from .plugin import type_plugins class AttributeAdapter: @@ -44,11 +43,7 @@ def get_adapter(context, adapter_name): raise DataJointError("Support for Adapted Attribute types is disabled.") adapter_name = adapter_name.lstrip("<").rstrip(">") try: - adapter = ( - context[adapter_name] - if adapter_name in context - else type_plugins[adapter_name]["object"].load() - ) + adapter = context[adapter_name] except KeyError: raise DataJointError( "Attribute adapter '{adapter_name}' is not defined.".format( diff --git a/datajoint/connection.py b/datajoint/connection.py index 6e21b5fef..21b1c97a4 100644 --- a/datajoint/connection.py +++ b/datajoint/connection.py @@ -16,7 +16,6 @@ from .blob import pack, unpack from .dependencies import Dependencies from .hash import uuid_from_buffer -from .plugin import connection_plugins from .settings import config from .version import __version__ @@ -27,34 +26,6 @@ cache_key = "query_cache" # the key to lookup the query_cache folder in dj.config -def get_host_hook(host_input): - if "://" in host_input: - plugin_name = host_input.split("://")[0] - try: - return connection_plugins[plugin_name]["object"].load().get_host(host_input) - except KeyError: - raise errors.DataJointError( - "Connection plugin '{}' not found.".format(plugin_name) - ) - else: - return host_input - - -def connect_host_hook(connection_obj): - if "://" in connection_obj.conn_info["host_input"]: - plugin_name = connection_obj.conn_info["host_input"].split("://")[0] - try: - connection_plugins[plugin_name]["object"].load().connect_host( - connection_obj - ) - except KeyError: - raise errors.DataJointError( - "Connection plugin '{}' not found.".format(plugin_name) - ) - else: - connection_obj.connect() - - def translate_query_error(client_error, query): """ Take client error and original query and return the corresponding DataJoint exception. 
@@ -177,7 +148,6 @@ class Connection: """ def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None): - host_input, host = (host, get_host_hook(host)) if ":" in host: # the port in the hostname overrides the port argument host, port = host.split(":") @@ -190,11 +160,10 @@ def __init__(self, host, user, password, port=None, init_fun=None, use_tls=None) use_tls if isinstance(use_tls, dict) else {"ssl": {}} ) self.conn_info["ssl_input"] = use_tls - self.conn_info["host_input"] = host_input self.init_fun = init_fun self._conn = None self._query_cache = None - connect_host_hook(self) + self.connect() if self.is_connected: logger.info( "DataJoint {version} connected to {user}@{host}:{port}".format( @@ -232,7 +201,7 @@ def connect(self): **{ k: v for k, v in self.conn_info.items() - if k not in ["ssl_input", "host_input"] + if k not in ["ssl_input"] }, ) except client.err.InternalError: @@ -245,7 +214,7 @@ def connect(self): k: v for k, v in self.conn_info.items() if not ( - k in ["ssl_input", "host_input"] + k == "ssl_input" or k == "ssl" and self.conn_info["ssl_input"] is None ) @@ -352,7 +321,7 @@ def query( if not reconnect: raise logger.warning("Reconnecting to MySQL server.") - connect_host_hook(self) + self.connect() if self._in_transaction: self.cancel_transaction() raise errors.LostConnectionError( diff --git a/datajoint/errors.py b/datajoint/errors.py index 427e8d1ad..03555bf13 100644 --- a/datajoint/errors.py +++ b/datajoint/errors.py @@ -5,32 +5,12 @@ import os -# --- Unverified Plugin Check --- -class PluginWarning(Exception): - pass - - # --- Top Level --- class DataJointError(Exception): """ Base class for errors specific to DataJoint internal operation. """ - def __init__(self, *args): - from .plugin import connection_plugins, type_plugins - - self.__cause__ = ( - PluginWarning("Unverified DataJoint plugin detected.") - if any( - [ - any([not plugins[k]["verified"] for k in plugins]) - for plugins in [connection_plugins, type_plugins] - if plugins - ] - ) - else None - ) - def suggest(self, *args): """ regenerate the exception with additional arguments diff --git a/datajoint/plugin.py b/datajoint/plugin.py deleted file mode 100644 index 8cb668092..000000000 --- a/datajoint/plugin.py +++ /dev/null @@ -1,46 +0,0 @@ -import logging -from pathlib import Path - -import pkg_resources -from cryptography.exceptions import InvalidSignature -from otumat import hash_pkg, verify - -from .settings import config - -logger = logging.getLogger(__name__.split(".")[0]) - - -def _update_error_stack(plugin_name): - try: - base_name = "datajoint" - base_meta = pkg_resources.get_distribution(base_name) - plugin_meta = pkg_resources.get_distribution(plugin_name) - - data = hash_pkg(pkgpath=str(Path(plugin_meta.module_path, plugin_name))) - signature = plugin_meta.get_metadata(f"{plugin_name}.sig") - pubkey_path = str(Path(base_meta.egg_info, f"{base_name}.pub")) - verify(pubkey_path=pubkey_path, data=data, signature=signature) - logger.info(f"DataJoint verified plugin `{plugin_name}` detected.") - return True - except (FileNotFoundError, InvalidSignature): - logger.warning(f"Unverified plugin `{plugin_name}` detected.") - return False - - -def _import_plugins(category): - return { - entry_point.name: dict( - object=entry_point, - verified=_update_error_stack(entry_point.module_name.split(".")[0]), - ) - for entry_point in pkg_resources.iter_entry_points( - "datajoint_plugins.{}".format(category) - ) - if "plugin" not in config - or category not in config["plugin"] - or 
entry_point.module_name.split(".")[0] in config["plugin"][category] - } - - -connection_plugins = _import_plugins("connection") -type_plugins = _import_plugins("datatype") diff --git a/datajoint/version.py b/datajoint/version.py index 3f48dc939..5fb608cef 100644 --- a/datajoint/version.py +++ b/datajoint/version.py @@ -1,6 +1,6 @@ # version bump auto managed by Github Actions: # label_prs.yaml(prep), release.yaml(bump), post_release.yaml(edit) # manually set this version will be eventually overwritten by the above actions -__version__ = "0.14.4" +__version__ = "0.14.6" assert len(__version__) <= 10 # The log table limits version to the 10 characters diff --git a/docker-compose.yaml b/docker-compose.yaml index d09d06d49..40b211756 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -40,7 +40,7 @@ services: context: . dockerfile: Dockerfile args: - PY_VER: ${PY_VER:-3.8} + PY_VER: ${PY_VER:-3.9} HOST_UID: ${HOST_UID:-1000} depends_on: db: diff --git a/docs/src/compute/make.md b/docs/src/compute/make.md index c67711079..1b5569b65 100644 --- a/docs/src/compute/make.md +++ b/docs/src/compute/make.md @@ -23,3 +23,193 @@ The `make` call of a master table first inserts the master entity and then inserts the matching part entities in the part tables. None of the entities become visible to other processes until the entire `make` call completes, at which point they all become visible. + +### Three-Part Make Pattern for Long Computations + +For long-running computations, DataJoint provides an advanced pattern called the +**three-part make** that separates the `make` method into three distinct phases. +This pattern is essential for maintaining database performance and data integrity +during expensive computations. + +#### The Problem: Long Transactions + +Traditional `make` methods perform all operations within a single database transaction: + +```python +def make(self, key): + # All within one transaction + data = (ParentTable & key).fetch1() # Fetch + result = expensive_computation(data) # Compute (could take hours) + self.insert1(dict(key, result=result)) # Insert +``` + +This approach has significant limitations: +- **Database locks**: Long transactions hold locks on tables, blocking other operations +- **Connection timeouts**: Database connections may time out during long computations +- **Memory pressure**: All fetched data must remain in memory throughout the computation +- **Failure recovery**: If computation fails, the entire transaction is rolled back + +#### The Solution: Three-Part Make Pattern + +The three-part make pattern splits the `make` method into three distinct phases, +allowing the expensive computation to occur outside of database transactions: + +```python +def make_fetch(self, key): + """Phase 1: Fetch all required data from parent tables""" + fetched_data = ((ParentTable1 & key).fetch1(), (ParentTable2 & key).fetch1()) + return fetched_data # must be a sequence, e.g., tuple or list + +def make_compute(self, key, *fetched_data): + """Phase 2: Perform expensive computation (outside transaction)""" + computed_result = expensive_computation(*fetched_data) + return computed_result # must be a sequence, e.g., tuple or list + +def make_insert(self, key, *computed_result): + """Phase 3: Insert results into the current table""" + self.insert1(dict(key, result=computed_result)) +``` + +#### Execution Flow + +To achieve data integrity without long transactions, the three-part make pattern follows this execution sequence: + +```python +# Step 1: Fetch data outside
transaction +fetched_data1 = self.make_fetch(key) +computed_result = self.make_compute(key, *fetched_data1) + +# Step 2: Begin transaction and verify data consistency +begin transaction: + fetched_data2 = self.make_fetch(key) + if fetched_data1 != fetched_data2: # deep comparison + cancel transaction # Data changed during computation + else: + self.make_insert(key, *computed_result) + commit_transaction +``` + +#### Key Benefits + +1. **Reduced Database Lock Time**: Only the fetch and insert operations occur within transactions, minimizing lock duration +2. **Connection Efficiency**: Database connections are only used briefly for data transfer +3. **Memory Management**: Fetched data can be processed and released during computation +4. **Fault Tolerance**: Computation failures don't affect database state +5. **Scalability**: Multiple computations can run concurrently without database contention + +#### Referential Integrity Protection + +The pattern includes a critical safety mechanism: **referential integrity verification**. +Before inserting results, the system: + +1. Re-fetches the source data within the transaction +2. Compares it with the originally fetched data using deep hashing +3. Only proceeds with insertion if the data hasn't changed + +This prevents the "phantom read" problem where source data changes during long computations, +ensuring that results remain consistent with their inputs. + +#### Implementation Details + +The pattern is implemented using Python generators in the `AutoPopulate` class: + +```python +def make(self, key): + # Step 1: Fetch data from parent tables + fetched_data = self.make_fetch(key) + computed_result = yield fetched_data + + # Step 2: Compute if not provided + if computed_result is None: + computed_result = self.make_compute(key, *fetched_data) + yield computed_result + + # Step 3: Insert the computed result + self.make_insert(key, *computed_result) + yield +``` +Therefore, it is possible to override the `make` method to implement the three-part make pattern by using the `yield` statement to return the fetched data and computed result as above. 
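+
+To make the control flow concrete, here is a simplified sketch of how a caller could drive such a generator-style `make`. It mirrors the execution flow shown earlier but is not the exact `AutoPopulate` internals; the transaction calls and the plain `!=` comparison stand in for whatever deep-comparison mechanism is actually used.
+
+```python
+def drive_make(table, key):
+    # Phases 1 and 2: fetch and compute outside any transaction
+    gen = table.make(key)
+    fetched_data = next(gen)           # runs make_fetch
+    computed_result = gen.send(None)   # nothing sent back, so make_compute runs
+
+    # Phase 3: re-fetch, verify, and insert inside a short transaction
+    table.connection.start_transaction()
+    gen = table.make(key)              # fresh generator for the transactional pass
+    if next(gen) != fetched_data:      # re-runs make_fetch; deep comparison assumed
+        table.connection.cancel_transaction()  # source data changed; do not insert
+    else:
+        gen.send(computed_result)      # skips make_compute, runs make_insert
+        table.connection.commit_transaction()
+```
+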
+ +#### Use Cases + +This pattern is particularly valuable for: + +- **Machine learning model training**: Hours-long training sessions +- **Image processing pipelines**: Large-scale image analysis +- **Statistical computations**: Complex statistical analyses +- **Data transformations**: ETL processes with heavy computation +- **Simulation runs**: Time-consuming simulations + +#### Example: Long-Running Image Analysis + +Here's an example of how to implement the three-part make pattern for a +long-running image analysis task: + +```python +@schema +class ImageAnalysis(dj.Computed): + definition = """ + # Complex image analysis results + -> Image + --- + analysis_result : longblob + processing_time : float + """ + + def make_fetch(self, key): + """Fetch the image data needed for analysis""" + image_data = (Image & key).fetch1('image') + params = (Params & key).fetch1('params') + return (image_data, params) # pack fetched_data + + def make_compute(self, key, image_data, params): + """Perform expensive image analysis outside transaction""" + import time + start_time = time.time() + + # Expensive computation that could take hours + result = complex_image_analysis(image_data, params) + processing_time = time.time() - start_time + return result, processing_time + + def make_insert(self, key, analysis_result, processing_time): + """Insert the analysis results""" + self.insert1(dict(key, + analysis_result=analysis_result, + processing_time=processing_time)) +``` + +The exact same effect may be achieved by overriding the `make` method as a generator function using the `yield` statement to return the fetched data and computed result as above: + +```python +@schema +class ImageAnalysis(dj.Computed): + definition = """ + # Complex image analysis results + -> Image + --- + analysis_result : longblob + processing_time : float + """ + + def make(self, key): + image_data = (Image & key).fetch1('image') + params = (Params & key).fetch1('params') + computed_result = yield (image_data, params) # pack fetched_data + + if computed_result is None: + # Expensive computation that could take hours + import time + start_time = time.time() + result = complex_image_analysis(image_data, params) + processing_time = time.time() - start_time + computed_result = result, processing_time # pack + yield computed_result + + result, processing_time = computed_result # unpack + self.insert1(dict(key, + analysis_result=result, + processing_time=processing_time)) + yield # yield control back to the caller +``` +We expect that most users will prefer the three-part implementation over the generator function implementation due to the latter's conceptual complexity. \ No newline at end of file diff --git a/docs/src/compute/populate.md b/docs/src/compute/populate.md index 76fc62aee..45c863f17 100644 --- a/docs/src/compute/populate.md +++ b/docs/src/compute/populate.md @@ -62,8 +62,198 @@ The `make` callback does three things: 2. Computes and adds any missing attributes to the fields already in `key`. 3. Inserts the entire entity into `self`. -`make` may populate multiple entities in one call when `key` does not specify the -entire primary key of the populated table. +A single `make` call may populate multiple entities when `key` does not specify the +entire primary key of the populated table, i.e., when the table definition adds new attributes to the primary key. +This design is uncommon and not recommended. +The standard practice for autopopulated tables is to have their primary key composed of +foreign keys pointing to parent tables.
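+
+As a concrete illustration of this standard pattern, the following sketch shows an autopopulated table whose primary key consists solely of a foreign key to its parent. The `Image` table and the `my_filter` function are hypothetical placeholders.
+
+```python
+@schema
+class FilteredImage(dj.Computed):
+    definition = """
+    # Filtered copy of each entry in Image
+    -> Image
+    ---
+    filtered_image : longblob
+    """
+
+    def make(self, key):
+        # `key` identifies exactly one entry of the parent table Image
+        image = (Image & key).fetch1('image')              # fetch
+        filtered = my_filter(image)                        # compute
+        self.insert1(dict(key, filtered_image=filtered))   # insert
+```
+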
+ +### Three-Part Make Pattern for Long Computations + +For long-running computations, DataJoint provides an advanced pattern called the +**three-part make** that separates the `make` method into three distinct phases. +This pattern is essential for maintaining database performance and data integrity +during expensive computations. + +#### The Problem: Long Transactions + +Traditional `make` methods perform all operations within a single database transaction: + +```python +def make(self, key): + # All within one transaction + data = (ParentTable & key).fetch1() # Fetch + result = expensive_computation(data) # Compute (could take hours) + self.insert1(dict(key, result=result)) # Insert +``` + +This approach has significant limitations: +- **Database locks**: Long transactions hold locks on tables, blocking other operations +- **Connection timeouts**: Database connections may time out during long computations +- **Memory pressure**: All fetched data must remain in memory throughout the computation +- **Failure recovery**: If computation fails, the entire transaction is rolled back + +#### The Solution: Three-Part Make Pattern + +The three-part make pattern splits the `make` method into three distinct phases, +allowing the expensive computation to occur outside of database transactions: + +```python +def make_fetch(self, key): + """Phase 1: Fetch all required data from parent tables""" + fetched_data = ((ParentTable & key).fetch1(),) + return fetched_data # must be a sequence, e.g., tuple or list + +def make_compute(self, key, *fetched_data): + """Phase 2: Perform expensive computation (outside transaction)""" + computed_result = expensive_computation(*fetched_data) + return computed_result # must be a sequence, e.g., tuple or list + +def make_insert(self, key, *computed_result): + """Phase 3: Insert results into the current table""" + self.insert1(dict(key, result=computed_result)) +``` + +#### Execution Flow + +To achieve data integrity without long transactions, the three-part make pattern follows this execution sequence: + +```python +# Step 1: Fetch data outside transaction +fetched_data1 = self.make_fetch(key) +computed_result = self.make_compute(key, *fetched_data1) + +# Step 2: Begin transaction and verify data consistency +begin transaction: + fetched_data2 = self.make_fetch(key) + if fetched_data1 != fetched_data2: # deep comparison + cancel transaction # Data changed during computation + else: + self.make_insert(key, *computed_result) + commit_transaction +``` + +#### Key Benefits + +1. **Reduced Database Lock Time**: Only the fetch and insert operations occur within transactions, minimizing lock duration +2. **Connection Efficiency**: Database connections are only used briefly for data transfer +3. **Memory Management**: Fetched data can be processed and released during computation +4. **Fault Tolerance**: Computation failures don't affect database state +5. **Scalability**: Multiple computations can run concurrently without database contention + +#### Referential Integrity Protection + +The pattern includes a critical safety mechanism: **referential integrity verification**. +Before inserting results, the system: + +1. Re-fetches the source data within the transaction +2. Compares it with the originally fetched data using deep hashing +3. Only proceeds with insertion if the data hasn't changed + +This prevents the "phantom read" problem where source data changes during long computations, +ensuring that results remain consistent with their inputs.
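+
+The snippet below sketches one way such a consistency check can be expressed: reduce the fetched data to a digest before the computation and compare it with a digest of the re-fetched data inside the transaction. The hashing scheme shown here is illustrative; the comparison DataJoint performs internally may differ.
+
+```python
+import hashlib
+import pickle
+
+def digest(fetched_data):
+    """Reduce a sequence of fetched values to a stable digest for comparison."""
+    return hashlib.sha256(pickle.dumps(fetched_data)).hexdigest()
+
+checksum = digest(fetched_data1)                # taken before the long computation
+# ... expensive computation runs outside the transaction ...
+unchanged = digest(fetched_data2) == checksum   # re-fetched inside the transaction
+# insert only if `unchanged`; otherwise cancel the transaction
+```
+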
+ +#### Implementation Details + +The pattern is implemented using Python generators in the `AutoPopulate` class: + +```python +def make(self, key): + # Step 1: Fetch data from parent tables + fetched_data = self.make_fetch(key) + computed_result = yield fetched_data + + # Step 2: Compute if not provided + if computed_result is None: + computed_result = self.make_compute(key, *fetched_data) + yield computed_result + + # Step 3: Insert the computed result + self.make_insert(key, *computed_result) + yield +``` +Therefore, it is possible to override the `make` method to implement the three-part make pattern by using the `yield` statement to return the fetched data and computed result as above. + +#### Use Cases + +This pattern is particularly valuable for: + +- **Machine learning model training**: Hours-long training sessions +- **Image processing pipelines**: Large-scale image analysis +- **Statistical computations**: Complex statistical analyses +- **Data transformations**: ETL processes with heavy computation +- **Simulation runs**: Time-consuming simulations + +#### Example: Long-Running Image Analysis + +Here's an example of how to implement the three-part make pattern for a +long-running image analysis task: + +```python +@schema +class ImageAnalysis(dj.Computed): + definition = """ + # Complex image analysis results + -> Image + --- + analysis_result : longblob + processing_time : float + """ + + def make_fetch(self, key): + """Fetch the image data needed for analysis""" + return (Image & key).fetch1('image'), # pack fetched_data into a 1-tuple + + def make_compute(self, key, image_data): + """Perform expensive image analysis outside transaction""" + import time + start_time = time.time() + + # Expensive computation that could take hours + result = complex_image_analysis(image_data) + processing_time = time.time() - start_time + return result, processing_time + + def make_insert(self, key, analysis_result, processing_time): + """Insert the analysis results""" + self.insert1(dict(key, + analysis_result=analysis_result, + processing_time=processing_time)) +``` + +The exact same effect may be achieved by overriding the `make` method as a generator function using the `yield` statement to return the fetched data and computed result as above: + +```python +@schema +class ImageAnalysis(dj.Computed): + definition = """ + # Complex image analysis results + -> Image + --- + analysis_result : longblob + processing_time : float + """ + + def make(self, key): + image_data = (Image & key).fetch1('image') + computed_result = yield (image_data, ) # pack fetched_data + + if computed_result is None: + # Expensive computation that could take hours + import time + start_time = time.time() + result = complex_image_analysis(image_data) + processing_time = time.time() - start_time + computed_result = result, processing_time # pack + yield computed_result + + result, processing_time = computed_result # unpack + self.insert1(dict(key, + analysis_result=result, + processing_time=processing_time)) + yield # yield control back to the caller +``` +We expect that most users will prefer the three-part implementation over the generator function implementation due to the latter's conceptual complexity.
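+
+From the caller's perspective, nothing changes between the two forms: the table is still filled by calling `populate`, which invokes the fetch, compute, and insert phases for each missing key. A typical call might look like the following; the keyword arguments are just examples of commonly used options.
+
+```python
+# Compute all missing entries; job reservation lets several workers run in parallel.
+ImageAnalysis.populate(reserve_jobs=True, display_progress=True)
+```
+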
## Populate diff --git a/pyproject.toml b/pyproject.toml index 075bb92b7..b98361fe8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,15 +18,14 @@ dependencies = [ "pydot", "minio>=7.0.0", "matplotlib", - "otumat", "faker", - "cryptography", "urllib3", "setuptools", ] requires-python = ">=3.9,<4.0" authors = [ {name = "Dimitri Yatsenko", email = "dimitri@datajoint.com"}, + {name = "Thinh Nguyen", email = "thinh@datajoint.com"}, {name = "Raphael Guzman"}, {name = "Edgar Walker"}, {name = "DataJoint Contributors", email = "support@datajoint.com"}, diff --git a/tests/test_plugin.py b/tests/test_plugin.py deleted file mode 100644 index 7fd9aff22..000000000 --- a/tests/test_plugin.py +++ /dev/null @@ -1,62 +0,0 @@ -from os import path - -import pkg_resources -import pytest - -import datajoint.errors as djerr -import datajoint.plugin as p - - -@pytest.mark.skip(reason="marked for deprecation") -def test_check_pubkey(): - base_name = "datajoint" - base_meta = pkg_resources.get_distribution(base_name) - pubkey_meta = base_meta.get_metadata("{}.pub".format(base_name)) - - with open( - path.join(path.abspath(path.dirname(__file__)), "..", "datajoint.pub"), "r" - ) as f: - assert f.read() == pubkey_meta - - -def test_normal_djerror(): - try: - raise djerr.DataJointError - except djerr.DataJointError as e: - assert e.__cause__ is None - - -def test_verified_djerror(category="connection"): - try: - curr_plugins = getattr(p, "{}_plugins".format(category)) - setattr( - p, - "{}_plugins".format(category), - dict(test_plugin_id=dict(verified=True, object="example")), - ) - raise djerr.DataJointError - except djerr.DataJointError as e: - setattr(p, "{}_plugins".format(category), curr_plugins) - assert e.__cause__ is None - - -def test_verified_djerror_type(): - test_verified_djerror(category="type") - - -def test_unverified_djerror(category="connection"): - try: - curr_plugins = getattr(p, "{}_plugins".format(category)) - setattr( - p, - "{}_plugins".format(category), - dict(test_plugin_id=dict(verified=False, object="example")), - ) - raise djerr.DataJointError("hello") - except djerr.DataJointError as e: - setattr(p, "{}_plugins".format(category), curr_plugins) - assert isinstance(e.__cause__, djerr.PluginWarning) - - -def test_unverified_djerror_type(): - test_unverified_djerror(category="type")