Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions docs/source/index.rst
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ Documentation for LabThings-FastAPI
blobs.rst
concurrency.rst
using_things.rst
updates_and_features.rst
see_also.rst
examples.rst
wot_core_concepts.rst
Expand Down
3 changes: 2 additions & 1 deletion docs/source/properties.rst
Original file line number Diff line number Diff line change
Expand Up @@ -165,7 +165,8 @@ Note that the constraints for functional properties are set by assigning a dicti

.. note::

Property values are not validated when they are set directly, only via HTTP. This behaviour may change in the future.
Currently, property values are not validated when they are set directly in Python, only via HTTP.
Setting ``lt.FEATURE_FLAGS.validate_properties_on_set = True`` enables validation when they are set in Python. This may become default behaviour in the future.

HTTP interface
--------------
Expand Down
28 changes: 28 additions & 0 deletions docs/source/updates_and_features.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
.. _optional_features:

Optional Features and Updates
=============================

LabThings allows some features to be turned on and off globally, using the `.FEATURE_FLAGS` object.
This was introduced to smooth the upgrade process for downstream projects: when a new version of LabThings is released, they need not adopt all of the new features at once.

Typically, your application will set the feature flags once, just after importing LabThings. For example, to validate properties when they are written to in Python, we would do:

.. code-block:: python

import labthings_fastapi as lt


lt.FEATURE_FLAGS.validate_properties_on_set = True

When new features are intended to become non-optional, the usual procedure will be:

* Introduce the feature in a release, but disable it by default. It may be activated by setting a flag to `True`\ .
* At some point (either the release that introduces it, or a future release) a `DeprecationWarning` will be raised by relevant code if the feature has not been enabled.
* A subsequent release will enable the feature by default, but it may still be disabled by setting the flag to `False`\ . This will raise a `DeprecationWarning`\ .
* Another release will remove the feature flag and the feature will be permanently enabled.

Introducing a feature that's disabled by default, and adding `DeprecationWarning`\ s, are not "breaking changes" as they require no change to downstream code.
Enabling a feature by default, or removing the ability to disable a feature, would constitute a "breaking change".
While our major version is zero, the intention is that patch releases (e.g. ``0.1.0`` to ``0.1.1``) should not make breaking changes, but minor releases (e.g. ``0.1.0`` to ``0.2.0``) may do so.
After ``v1.0.0`` LabThings should follow the Semantic Versioning convention and make breaking changes only when the major version changes.
2 changes: 2 additions & 0 deletions src/labthings_fastapi/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
from .properties import property, setting, DataProperty, DataSetting
from .actions import action
from .endpoints import endpoint
from .feature_flags import FEATURE_FLAGS
from . import outputs
from .outputs import blob
from .server import ThingServer, cli
Expand Down Expand Up @@ -52,6 +53,7 @@
"action",
"thing_slot",
"endpoint",
"FEATURE_FLAGS",
"outputs",
"blob",
"ThingServer",
Expand Down
62 changes: 62 additions & 0 deletions src/labthings_fastapi/feature_flags.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
"""Control of optional features in LabThings.

This module provides a way to globally enable or disable certain features of LabThings.
When a new, non-backwards-compatible feature is added, it will usually be disabled by
default. This module provides an object `.FEATURE_FLAGS` that allows control over
optional features.

The default values of `.FEATURE_FLAGS` may change with new LabThings releases. The usual
sequence of events would be that a new feature is added (disabled by default), then the
feature flag is enabled by default in a subsequent release. If the intention is that the
feature will become non-optional, disabling the feature will raise a
`DeprecationWarning` for at least one release cycle before it is removed.
"""

from collections.abc import Iterator
from contextlib import contextmanager
from dataclasses import dataclass
from typing import Any


@dataclass(slots=True)
class LabThingsFeatureFlags:
"""Control over optional features of LabThings."""

validate_properties_on_set: bool = False
"""Whether validation logic runs when properties are set in Python."""

@contextmanager
def set_temporarily(
self,
**kwargs: Any,
) -> Iterator[None]:
r"""Temporarily set feature flags in a context manager.

This function may be used in a `with:` block to set feature flags and
then reset them afterwards. This is primarily useful for testing.

:param \**kwargs: the feature flags to set for the duration of the ``with:``
block. The argument names must match attributes of this object.

.. code-block: python

with FEATURE_FLAGS.set_temporarily(validate_properties_on_set=True):
my_thing.positive_int = -10 # Raises an exception

"""
values_before = {k: getattr(self, k) for k in kwargs.keys()}
try:
for k, v in kwargs.items():
setattr(self, k, v)
yield
finally:
for k, v in values_before.items():
setattr(self, k, v)


FEATURE_FLAGS = LabThingsFeatureFlags()
r"""This module-level object allows features of LabThings to be controlled.

See the documentation for the class `.LabThingsFeatureFlags` for details of the
flags and what they do. More information is available in :ref:`optional_features`\ .
"""
15 changes: 14 additions & 1 deletion src/labthings_fastapi/properties.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ class attribute. Documentation is in strings immediately following the
from fastapi import Body, FastAPI
from pydantic import BaseModel, ConfigDict, RootModel, ValidationError, create_model

from .feature_flags import FEATURE_FLAGS
from .thing_description import type_to_dataschema
from .thing_description._model import (
DataSchema,
Expand Down Expand Up @@ -608,7 +609,11 @@ def __set__(
:param value: the new value for the property.
:param emit_changed_event: whether to emit a changed event.
"""
obj.__dict__[self.name] = value
property_info = self.descriptor_info(obj)
if FEATURE_FLAGS.validate_properties_on_set:
obj.__dict__[self.name] = property_info.validate(value)
else:
obj.__dict__[self.name] = value
if emit_changed_event:
self.emit_changed_event(obj, value)

Expand Down Expand Up @@ -811,13 +816,21 @@ def instance_get(self, obj: Owner) -> Value:
def __set__(self, obj: Owner, value: Value) -> None:
    """Set the value of the property.

    If property validation is enabled by `.FEATURE_FLAGS.validate_properties_on_set`
    this will validate the value against the property's model, and an error
    will be raised if the value is not valid.

    :param obj: the `.Thing` on which the attribute is accessed.
    :param value: the value of the property.

    :raises ReadOnlyPropertyError: if the property cannot be set.
    """
    if self.fset is None:
        raise ReadOnlyPropertyError(f"Property {self.name} of {obj} has no setter.")
    if FEATURE_FLAGS.validate_properties_on_set:
        # Only look up descriptor info when it is actually needed: validation
        # is its sole use here, so skip the lookup when the flag is disabled.
        # (Assumes descriptor_info has no required side effects — its result
        # was already discarded on the flag-off path. TODO confirm.)
        value = self.descriptor_info(obj).validate(value)
    self.fset(obj, value)


Expand Down
34 changes: 34 additions & 0 deletions tests/test_feature_flags.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
"""Test the feature flags mechanism.

Specific feature flags should be tested by the test code for the relevant feature. This
test module checks that `set_temporarily` works as expected.
"""

import pytest

import labthings_fastapi as lt


@pytest.mark.parametrize("value", [True, False])
def test_set_temporarily(value):
    """Check flags set via the context manager are restored on exit.

    A nested ``set_temporarily`` verifies that an inner block restores the
    outer block's value, and that the original value is restored at the end.
    """
    original = lt.FEATURE_FLAGS.validate_properties_on_set

    with lt.FEATURE_FLAGS.set_temporarily(validate_properties_on_set=value):
        assert lt.FEATURE_FLAGS.validate_properties_on_set is value

        # Nested use: the inner block flips the flag and must restore it
        # to the *outer* block's value, not the module default.
        with lt.FEATURE_FLAGS.set_temporarily(validate_properties_on_set=not value):
            assert lt.FEATURE_FLAGS.validate_properties_on_set is (not value)

        assert lt.FEATURE_FLAGS.validate_properties_on_set is value

    assert lt.FEATURE_FLAGS.validate_properties_on_set is original


def test_set_bad_setting():
    """Check that unknown flag names raise AttributeError.

    The flags object is a ``slots=True`` dataclass, so both the context
    manager and direct assignment should reject names that are not flags.
    """
    # Via the context manager: the bad name is rejected before anything is set.
    with pytest.raises(AttributeError):
        with lt.FEATURE_FLAGS.set_temporarily(bogus_name=True):
            pass

    # Via direct attribute assignment.
    with pytest.raises(AttributeError):
        lt.FEATURE_FLAGS.bogus_name = True
35 changes: 33 additions & 2 deletions tests/test_properties.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@
ServerNotRunningError,
UnsupportedConstraintError,
)
from labthings_fastapi.feature_flags import FEATURE_FLAGS
from labthings_fastapi.properties import BaseProperty, PropertyInfo
from labthings_fastapi.testing import create_thing_without_server
from .temp_client import poll_task
Expand Down Expand Up @@ -378,21 +379,51 @@ def test_setting_without_event_loop():


@pytest.mark.parametrize("prop_info", CONSTRAINED_PROPS)
def test_constrained_properties(prop_info):
"""Test that constraints on property values generate correct models."""
def test_constrained_properties(prop_info, mocker):
    """Test that constraints on property values generate correct models.

    This also tests the `validate` method and checks validation happens
    on assignment to the property in Python. Further checks over http
    are made in later tests.
    """
    prop = prop_info.prop
    assert prop.value_type is prop_info.value_type
    m = prop.model
    # Property models are pydantic RootModels wrapping the value type.
    assert issubclass(m, RootModel)
    # A mocked Thing lets us exercise the descriptor protocol without a server.
    mock_thing = mocker.Mock(spec=PropertyTestThing)
    mock_thing._thing_server_interface = mocker.Mock()
    descriptorinfo = prop.descriptor_info(mock_thing)
    assert isinstance(descriptorinfo, PropertyInfo)
    # Every declared constraint should appear in the model's field metadata.
    for ann in prop_info.constraints:
        assert any(meta == ann for meta in m.model_fields["root"].metadata)
    for valid in prop_info.valid_values:
        # Check the model can be created
        instance = m(root=valid)
        # Check the value passes through the model
        validated = instance.model_dump()
        assert validated == valid or validated is valid  # `is` for NaN
        # Check the descriptorinfo object also validates
        # (this is what we get from thing.properties["name"])
        validated = descriptorinfo.validate(valid)
        assert validated == valid or validated is valid  # `is` for NaN
        # Check that assignment works
        prop.__set__(mock_thing, valid)
        validated = prop.__get__(mock_thing)
        assert validated == valid or validated is valid  # `is` for NaN

    for invalid in prop_info.invalid_values:
        # Invalid values must be rejected by the model directly...
        with pytest.raises(ValidationError):
            _ = m(root=invalid)
        # ...and by the descriptor-info `validate` method.
        with pytest.raises(ValidationError):
            descriptorinfo.validate(invalid)
        # We check that validation on __set__ only applies if it's enabled.
        with FEATURE_FLAGS.set_temporarily(validate_properties_on_set=True):
            with pytest.raises(ValidationError):
                prop.__set__(mock_thing, invalid)
        with FEATURE_FLAGS.set_temporarily(validate_properties_on_set=False):
            # With the flag off, the invalid value is stored unchanged.
            prop.__set__(mock_thing, invalid)
            not_validated = prop.__get__(mock_thing)
            assert not_validated == invalid or not_validated is invalid


def convert_inf_nan(value):
Expand Down
23 changes: 21 additions & 2 deletions tests/test_property.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
import pydantic
import pytest
from labthings_fastapi import properties
from labthings_fastapi.feature_flags import FEATURE_FLAGS
from labthings_fastapi.properties import (
BaseProperty,
DataProperty,
Expand Down Expand Up @@ -365,8 +366,9 @@ class BadIntModel(pydantic.BaseModel):
# Check that a broken `_model` raises the right error
# See above for where we manually set badprop._model to something that's
# not a rootmodel.
example.badprop = 3
assert example.badprop == 3
with FEATURE_FLAGS.set_temporarily(validate_properties_on_set=True):
with pytest.raises(TypeError):
example.badprop = 3 # Validation will fail here because of the bad model.
with pytest.raises(TypeError):
_ = example.properties["badprop"].model_instance
with pytest.raises(TypeError):
Expand All @@ -391,12 +393,24 @@ class BadIntModel(pydantic.BaseModel):
# Check validation for a model
modelprop = example.properties["modelprop"]
assert modelprop.validate(MyModel(a=3, b="four")) == MyModel(a=3, b="four")
# Check that a valid model passes through unchanged: this should indicate that
# we're not unnecessarily re-validating already-valid models.
m = MyModel(a=3, b="four")
assert modelprop.validate(m) is m
assert modelprop.validate({"a": 5, "b": "six"}) == MyModel(a=5, b="six")
for invalid in [{"c": 5}, (4, "f"), None]:
with pytest.raises(pydantic.ValidationError):
modelprop.validate(invalid)
# Check that an invalid model doesn't get re-validated. This is intended behaviour:
# it is another test that we're not unnecessarily re-validating a model that
# should already have been validated when it was created.
# Creating models with `model_construct` intentionally allows invalid models:
# if this is used downstream, the downstream code should accept responsibility!
bad_m = MyModel.model_construct(a="not an int", b=6)
assert modelprop.validate(bad_m) is bad_m
with pytest.raises(pydantic.ValidationError):
# Check that passing the same data in as a dict fails validation.
modelprop.validate(bad_m.model_dump())

# Check again for an odd rootmodel
rootmodelprop = example.properties["rootmodelprop"]
Expand All @@ -409,6 +423,11 @@ class BadIntModel(pydantic.BaseModel):
for invalid in ["seven", {"root": None}, 14.5, pydantic.RootModel[int](root=0)]:
with pytest.raises(pydantic.ValidationError):
modelprop.validate(invalid)
    # Try constructing a model with an invalid root value, skipping validation
invalid_model = rootmodelprop.model.model_construct(invalid)
# The RootModel gets re-validated, in contrast to the BaseModel above.
with pytest.raises(pydantic.ValidationError):
assert modelprop.validate(invalid_model) == invalid


def test_readonly_metadata():
Expand Down
Loading