WARNING: THIS SITE IS A MIRROR OF GITHUB.COM / IT CANNOT LOGIN OR REGISTER ACCOUNTS / THE CONTENTS ARE PROVIDED AS-IS / THIS SITE ASSUMES NO RESPONSIBILITY FOR ANY DISPLAYED CONTENT OR LINKS / IF YOU FOUND SOMETHING MAY NOT GOOD FOR EVERYONE, CONTACT ADMIN AT ilovescratch@foxmail.com
Skip to content

Commit e107d95

Browse files
committed
Add pipeline overlay on/off toggle
Signed-off-by: Max Xiang <[email protected]>
1 parent 48ce89d commit e107d95

File tree

6 files changed

+14
-2
lines changed

6 files changed

+14
-2
lines changed

application/backend/src/alembic/versions/7a213a27d666_initial_schema.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -136,6 +136,7 @@ def upgrade() -> None:
136136
sa.Column("sink_id", sa.Text(), nullable=True),
137137
sa.Column("model_id", sa.Text(), nullable=True),
138138
sa.Column("inference_device", sa.String(length=64), nullable=True),
139+
sa.Column("overlay", sa.Boolean(), nullable=True),
139140
sa.Column("is_running", sa.Boolean(), nullable=False),
140141
sa.Column("is_active", sa.Boolean(), nullable=False),
141142
sa.Column("data_collection_policies", sa.JSON(), nullable=False),

application/backend/src/db/schema.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -90,6 +90,7 @@ class PipelineDB(Base):
9090
sink_id: Mapped[str | None] = mapped_column(Text, ForeignKey("sinks.id", ondelete="RESTRICT"))
9191
model_id: Mapped[str | None] = mapped_column(Text, ForeignKey("models.id", ondelete="RESTRICT"))
9292
inference_device: Mapped[str | None] = mapped_column(String(64), nullable=True)
93+
overlay: Mapped[bool | None] = mapped_column(Boolean, default=True)
9394
is_running: Mapped[bool] = mapped_column(Boolean, default=False)
9495
is_active: Mapped[bool] = mapped_column(Boolean, default=False)
9596
data_collection_policies: Mapped[list] = mapped_column(JSON, nullable=False, default=list)

application/backend/src/pydantic_models/pipeline.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@ class Pipeline(BaseModel):
4848
) # ID of the model, used for DB mapping, not exposed in API
4949
status: PipelineStatus = PipelineStatus.IDLE # Current status of the pipeline
5050
inference_device: str | None = Field(default=None)
51+
overlay: bool | None = Field(default=None)
5152

5253
# TODO: can be confused with status.is_running / is_active, consider refactoring
5354
is_running: bool | None = Field(default=None, exclude=True) # If set will overwrite status

application/backend/src/repositories/mappers/pipeline_mapper.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,7 @@ def from_schema(pipeline_db: PipelineDB) -> Pipeline:
2525
source_id=UUID(pipeline_db.source_id) if pipeline_db.source_id else None,
2626
status=PipelineStatus.from_bool(pipeline_db.is_running, pipeline_db.is_active),
2727
inference_device=pipeline_db.inference_device.upper() if pipeline_db.inference_device else None,
28+
overlay=pipeline_db.overlay,
2829
)
2930

3031
@staticmethod
@@ -38,4 +39,5 @@ def to_schema(pipeline: Pipeline) -> PipelineDB:
3839
is_running=pipeline.status.is_running,
3940
is_active=pipeline.status.is_active,
4041
inference_device=pipeline.inference_device.upper() if pipeline.inference_device else None,
42+
overlay=pipeline.overlay,
4143
)

application/backend/src/services/pipeline_service.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,7 +74,7 @@ async def update_pipeline(self, project_id: UUID, partial_config: dict) -> Pipel
7474
await self._notify_pipeline_changed()
7575
# Intentionally call activate_model on status change regardless of whether a model exists.
7676
self._model_service.activate_model()
77-
if updated.inference_device != pipeline.inference_device:
77+
if updated.inference_device != pipeline.inference_device or updated.overlay != pipeline.overlay:
7878
# reload model on device change
7979
self._model_service.activate_model()
8080
return updated

application/backend/src/workers/inference.py

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,6 +12,7 @@
1212

1313
if TYPE_CHECKING:
1414
import multiprocessing as mp
15+
from collections.abc import Callable
1516
from multiprocessing.synchronize import Event as EventClass
1617
from multiprocessing.synchronize import Lock
1718

@@ -57,6 +58,7 @@ def __init__(
5758
self._cached_models: dict[Any, object] = {}
5859
self._model_check_interval: float = 5.0 # seconds between model refresh checks
5960
self._is_passthrough_mode: bool = False
61+
self._overlay = True
6062

6163
def setup(self) -> None:
6264
super().setup()
@@ -71,6 +73,7 @@ async def _get_active_model(self) -> LoadedModel | None:
7173
self._is_passthrough_mode = pipeline is None or (
7274
pipeline.status.is_active and not pipeline.status.is_running
7375
)
76+
self._overlay = pipeline.overlay if pipeline and pipeline.overlay is not None else self._overlay
7477
logger.info(f"Passthrough mode {'activated' if self._is_passthrough_mode else 'disabled'}.")
7578
if pipeline is None or pipeline.model is None:
7679
return None
@@ -224,7 +227,11 @@ async def _process_frame_with_model(self, stream_data: StreamData) -> None:
224227
raise RuntimeError("Inference failed or model became unavailable")
225228

226229
# Build visualization
227-
vis_frame: np.ndarray = Visualizer.overlay_predictions(frame, prediction_response)
230+
overlays: list[Callable] = []
231+
if self._overlay:
232+
overlays.append(Visualizer.overlay_anomaly_heatmap)
233+
overlays.append(Visualizer.draw_prediction_label)
234+
vis_frame: np.ndarray = Visualizer.overlay_predictions(frame, prediction_response, *overlays)
228235

229236
# Package inference data
230237
stream_data.inference_data = InferenceData(

0 commit comments

Comments (0)