
Commit 5db7b0a

Ran black formatting
evalott100 committed Sep 28, 2023
1 parent e4edddc commit 5db7b0a
Showing 12 changed files with 173 additions and 119 deletions.
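The hunks below are mechanical style changes: outer string quotes switched from single to double (with the quotes inside f-string expressions flipped to match), f prefixes dropped from literals that contain no placeholders, trailing commas added to multi-line call arguments, long description= strings wrapped in parentheses, and class bodies consisting only of ... collapsed onto one line. As a minimal, hypothetical illustration of the same pattern (the function and keys below are invented, not taken from the repository), such a pass rewrites the first snippet into the second:

# Hypothetical "before" style, mirroring the removed (-) lines in the hunks below.
def check_uid_before(doc):
    if 'uid' not in doc:
        raise ValueError(
            "Document has no uid: "
            f'seq_num was {doc["seq_num"]}'
        )


# The same function in the "after" style, mirroring the added (+) lines:
# double quotes outside, single quotes inside the f-string expression, and
# the implicitly concatenated message joined where it fits on one line.
def check_uid_after(doc):
    if "uid" not in doc:
        raise ValueError("Document has no uid: " f"seq_num was {doc['seq_num']}")


# Both behave identically; only the source style differs.
if __name__ == "__main__":
    for check in (check_uid_before, check_uid_after):
        try:
            check({"seq_num": 3})
        except ValueError as err:
            print(err)
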
64 changes: 28 additions & 36 deletions event_model/__init__.py
@@ -327,8 +327,8 @@ def __call__(
  else:
  raise EventModelValueError(
  "SingleRunDocumentRouter associated with start document "
- f'{self._start_doc["uid"]} '
- f'received a second start document with uid {doc["uid"]}'
+ f"{self._start_doc['uid']} "
+ f"received a second start document with uid {doc['uid']}"
  )
  elif name == "descriptor":
  assert isinstance(self._start_doc, dict)
@@ -337,9 +337,9 @@
  else:
  raise EventModelValueError(
  "SingleRunDocumentRouter associated with start document "
- f'{self._start_doc["uid"]} '
- f'received a descriptor {doc["uid"]} associated with '
- f'start document {doc["run_start"]}'
+ f"{self._start_doc['uid']} "
+ f"received a descriptor {doc['uid']} associated with "
+ f"start document {doc['run_start']}"
  )
  # Defer to superclass for dispatch/processing.
  return super().__call__(name, doc, validate=validate)
@@ -380,7 +380,7 @@ def get_descriptor(self, doc: dict) -> EventDescriptor:
  elif doc["descriptor"] not in self._descriptors:
  raise EventModelValueError(
  "SingleRunDocumentRouter has not processed a descriptor with "
- f'uid {doc["descriptor"]}'
+ f"uid {doc['descriptor']}"
  )

  return self._descriptors[doc["descriptor"]]
@@ -510,7 +510,7 @@ def register_coercion(name: str, func: Callable, overwrite: bool = False) -> None:
  raise EventModelValueError(
  f"The coercion function {func} could not be registered for the "
  f"name {name} because {_coercion_registry[name]} is already "
- f"registered. Use overwrite=True to force it."
+ "registered. Use overwrite=True to force it."
  )
  _coercion_registry[name] = func

@@ -893,7 +893,7 @@ def register_handler(
  return
  raise DuplicateHandler(
  f"There is already a handler registered for the spec {spec!r}. "
- f"Use overwrite=True to deregister the original.\n"
+ "Use overwrite=True to deregister the original.\n"
  f"Original: {original}\n"
  f"New: {handler}"
  )
@@ -1018,8 +1018,8 @@ def get_handler(self, resource: Resource) -> Any:
  raise UndefinedAssetSpecification(
  f"Resource document with uid {resource['uid']} "
  f"refers to spec {resource['spec']!r} which is "
- f"not defined in the Filler's "
- f"handler registry."
+ "not defined in the Filler's "
+ "handler registry."
  ) from err
  # Apply root_map.
  resource_path = resource["resource_path"]
@@ -1028,19 +1028,17 @@ def get_handler(self, resource: Resource) -> Any:
  if root:
  resource_path = os.path.join(root, resource_path)
  msg = (
- f"Error instantiating handler "
+ "Error instantiating handler "
  f"class {handler_class} "
  f"with Resource document {resource}. "
  )
  if root != original_root:
  msg += (
- f"Its 'root' field was "
+ "Its 'root' field was "
  f"mapped from {original_root} to {root} by root_map."
  )
  else:
- msg += (
- f"Its 'root' field {original_root} was " f"*not* modified by root_map."
- )
+ msg += f"Its 'root' field {original_root} was *not* modified by root_map."
  error_to_raise = EventModelError(msg)
  handler = _attempt_with_retries(
  func=handler_class,
@@ -1141,7 +1139,7 @@ def fill_event(
  self._current_state.datum = datum_doc
  handler = self._get_handler_maybe_cached(resource)
  error_to_raise = DataNotAccessible(
- f"Filler was unable to load the data referenced by "
+ "Filler was unable to load the data referenced by "
  f"the Datum document {datum_doc} and the Resource "
  f"document {resource}."
  )
@@ -1221,8 +1219,7 @@ def __call__(
  return super().__call__(name, doc, validate)


- class EventModelError(Exception):
- ...
+ class EventModelError(Exception): ...


  def _attempt_with_retries(
@@ -1527,8 +1524,7 @@ def start(self, start_doc: RunStart) -> None:
  if uid in self._start_to_start_doc:
  if self._start_to_start_doc[uid] == start_doc:
  raise ValueError(
- "RunRouter received the same 'start' document twice:\n"
- "{start_doc!r}"
+ "RunRouter received the same 'start' document twice:\n{start_doc!r}"
  )
  else:
  raise ValueError(
@@ -1716,24 +1712,19 @@ def stop(self, doc: RunStop) -> None:
  # useful for higher-level libraries and for debugging.


- class EventModelKeyError(EventModelError, KeyError):
- ...
+ class EventModelKeyError(EventModelError, KeyError): ...


- class EventModelValueError(EventModelError, ValueError):
- ...
+ class EventModelValueError(EventModelError, ValueError): ...


- class EventModelRuntimeError(EventModelError, RuntimeError):
- ...
+ class EventModelRuntimeError(EventModelError, RuntimeError): ...


- class EventModelTypeError(EventModelError, TypeError):
- ...
+ class EventModelTypeError(EventModelError, TypeError): ...


- class EventModelValidationError(EventModelError):
- ...
+ class EventModelValidationError(EventModelError): ...


  class UnfilledData(EventModelError):
@@ -2151,8 +2142,9 @@ def __call__(
  ) -> RunStop:
  if self.poison_pill:
  raise EventModelError(
- "Already composed a RunStop document for run "
- "{!r}.".format(self.start["uid"])
+ "Already composed a RunStop document for run {!r}.".format(
+ self.start["uid"]
+ )
  )
  self.poison_pill.append(object())
  if uid is None:
@@ -2222,12 +2214,12 @@ def __call__(
  timestamps_length = length_of_value(
  timestamps,
  "Cannot compose event_page: event_page contains `timestamps` "
- "list values of different lengths"
+ "list values of different lengths",
  )
  data_length = length_of_value(
  data,
  "Cannot compose event_page: event_page contains `data` "
- "lists of different lengths"
+ "lists of different lengths",
  )
  assert timestamps_length == data_length, (
  "Cannot compose event_page: the lists in `timestamps` are of a different "
@@ -3052,9 +3044,9 @@ def verify_filled(event_page: dict) -> None:
  if not all(filled):
  unfilled_data.append(field)
  raise UnfilledData(
- f"Unfilled data found in fields "
+ "Unfilled data found in fields "
  f"{unfilled_data!r}. Use "
- f"`event_model.Filler`."
+ "`event_model.Filler`."
  )


11 changes: 7 additions & 4 deletions event_model/documents/datum.py
@@ -14,15 +14,18 @@ class Datum(TypedDict):
  datum_id: Annotated[
  str,
  Field(
- description="Globally unique identifier for this Datum (akin to 'uid' "
- "for other Document types), typically formatted as '<resource>/<integer>'"
+ description=(
+ "Globally unique identifier for this Datum (akin to 'uid' for other"
+ " Document types), typically formatted as '<resource>/<integer>'"
+ )
  ),
  ]
  datum_kwargs: Annotated[
  Dict[str, Any],
  Field(
- description="Arguments to pass to the Handler to "
- "retrieve one quanta of data",
+ description=(
+ "Arguments to pass to the Handler to retrieve one quanta of data"
+ ),
  ),
  ]
  resource: Annotated[
12 changes: 8 additions & 4 deletions event_model/documents/datum_page.py
@@ -15,15 +15,19 @@ class DatumPage(TypedDict):
  List[str],
  AsRef("Dataframe"),
  Field(
- description="Array unique identifiers for each Datum (akin to 'uid' for "
- "other Document types), typically formatted as '<resource>/<integer>'"
+ description=(
+ "Array unique identifiers for each Datum (akin to 'uid' for "
+ "other Document types), typically formatted as '<resource>/<integer>'"
+ )
  ),
  ]
  datum_kwargs: Annotated[
  Dict[str, List[Any]],
  Field(
- description="Array of arguments to pass to the Handler to "
- "retrieve one quanta of data"
+ description=(
+ "Array of arguments to pass to the Handler to "
+ "retrieve one quanta of data"
+ )
  ),
  ]
  resource: Annotated[
20 changes: 13 additions & 7 deletions event_model/documents/event.py
@@ -13,17 +13,21 @@ class PartialEvent(TypedDict):
  Annotated[
  Dict[str, Union[bool, str]],
  Field(
- description="Mapping each of the keys of externally-stored data to the "
- "boolean False, indicating that the data has not been loaded, or to "
- "foreign keys (moved here from 'data' when the data was loaded)"
+ description=(
+ "Mapping each of the keys of externally-stored data to the boolean"
+ " False, indicating that the data has not been loaded, or to"
+ " foreign keys (moved here from 'data' when the data was loaded)"
+ )
  ),
  ]
  ]
  time: Annotated[
  float,
  Field(
- description="The event time. This maybe different than the timestamps on "
- "each of the data entries.",
+ description=(
+ "The event time. This maybe different than the timestamps on "
+ "each of the data entries."
+ ),
  ),
  ]
  timestamps: Annotated[
@@ -42,8 +46,10 @@ class Event(PartialEvent):
  seq_num: Annotated[
  int,
  Field(
- description="Sequence number to identify the location of this Event in the "
- "Event stream",
+ description=(
+ "Sequence number to identify the location of this Event in the "
+ "Event stream"
+ ),
  ),
  ]
  uid: Annotated[str, Field(description="Globally unique identifier for this Event")]
40 changes: 26 additions & 14 deletions event_model/documents/event_descriptor.py
@@ -14,8 +14,10 @@ class DataKey(TypedDict):
  Annotated[
  List[str],
  Field(
- description="The names for dimensions of the data. Null or empty list "
- "if scalar data",
+ description=(
+ "The names for dimensions of the data. Null or empty list "
+ "if scalar data"
+ ),
  ),
  ]
  ]
@@ -27,8 +29,9 @@ class DataKey(TypedDict):
  Annotated[
  str,
  Field(
- description="Where the data is stored if it is stored external "
- "to the events",
+ description=(
+ "Where the data is stored if it is stored external to the events"
+ ),
  regex=r"^[A-Z]+:?",
  ),
  ]
@@ -43,8 +46,9 @@ class DataKey(TypedDict):
  Annotated[
  int,
  Field(
- description="Number of digits after decimal place if "
- "a floating point number"
+ description=(
+ "Number of digits after decimal place if a floating point number"
+ )
  ),
  ]
  ]
@@ -79,8 +83,10 @@ class Configuration(TypedDict):
  Annotated[
  Dict[str, DataKey],
  Field(
- description="This describes the data stored alongside it in this "
- "configuration object."
+ description=(
+ "This describes the data stored alongside it in this "
+ "configuration object."
+ )
  ),
  ]
  ]
@@ -120,8 +126,10 @@ class EventDescriptor(TypedDict):
  Annotated[
  Dict[str, Configuration],
  Field(
- description="Readings of configurational fields necessary for "
- "interpreting data in the Events.",
+ description=(
+ "Readings of configurational fields necessary for "
+ "interpreting data in the Events."
+ ),
  ),
  ]
  ]
@@ -137,17 +145,21 @@ class EventDescriptor(TypedDict):
  Annotated[
  str,
  Field(
- description="A human-friendly name for this data stream, such as "
- "'primary' or 'baseline'.",
+ description=(
+ "A human-friendly name for this data stream, such as "
+ "'primary' or 'baseline'."
+ ),
  ),
  ]
  ]
  object_keys: NotRequired[
  Annotated[
  Dict[str, Any],
  Field(
- description="Maps a Device/Signal name to the names of the entries "
- "it produces in data_keys.",
+ description=(
+ "Maps a Device/Signal name to the names of the entries "
+ "it produces in data_keys."
+ ),
  ),
  ]
  ]
(The remaining seven changed files were not rendered on this page.)
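For spot-checking how black rewrites a particular snippet while reviewing a formatting-only commit like this one, the black package also exposes a small Python API. The sketch below is an illustration under assumptions rather than anything taken from the commit: it presumes black is installed, uses its default mode, and formats an invented snippet.

import black

# An invented snippet in the pre-formatting style: single-quoted strings and
# a call whose arguments are split across lines without a trailing comma.
source = (
    "def register(name, func):\n"
    "    raise ValueError(\n"
    "        'already registered: '\n"
    "        '{}'.format(name)\n"
    "    )\n"
)

# format_str applies black's normalizations in memory; the black CLI applies
# the same rules to files on disk.
formatted = black.format_str(source, mode=black.FileMode())
print(formatted)

Running the command-line tool over the package (for example, black . from the repository root) is the more usual way to produce a commit like this one.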
