# test_app.py
"""Integration tests for the Evidently UI REST API.

Each test drives the HTTP layer through a litestar ``TestClient`` (the
``test_client`` fixture) and cross-checks the result against the backing
``ProjectManager`` (the ``project_manager`` fixture).  The docstring of each
test names the endpoint it exercises.
"""

import datetime
import json
import os
import time
from copy import deepcopy
from typing import List

import pytest
from litestar.testing import TestClient

import evidently
from evidently._pydantic_compat import parse_obj_as
from evidently.legacy.base_metric import InputData
from evidently.legacy.base_metric import Metric
from evidently.legacy.base_metric import MetricResult
from evidently.legacy.core import new_id
from evidently.legacy.model.widget import BaseWidgetInfo
from evidently.legacy.options.base import Options
from evidently.legacy.renderers.base_renderer import MetricRenderer
from evidently.legacy.renderers.base_renderer import default_renderer
from evidently.legacy.renderers.html_widgets import CounterData
from evidently.legacy.renderers.html_widgets import WidgetSize
from evidently.legacy.renderers.html_widgets import counter
from evidently.legacy.suite.base_suite import ContextPayload
from evidently.legacy.suite.base_suite import Snapshot
from evidently.legacy.ui.dashboards import CounterAgg
from evidently.legacy.ui.dashboards import DashboardPanelCounter
from evidently.legacy.ui.dashboards import ReportFilter
from evidently.legacy.ui.dashboards.base import DashboardPanel
from evidently.legacy.ui.managers.projects import ProjectManager
from evidently.legacy.ui.storage.local import FSSpecBlobStorage
from evidently.legacy.ui.type_aliases import ZERO_UUID
from evidently.legacy.utils import NumpyEncoder
from tests.ui.conftest import HEADERS
from tests.ui.conftest import _dumps


@pytest.mark.asyncio
async def test_list_projects(test_client: TestClient, project_manager: ProjectManager, mock_project):
    """get /api/projects: empty list at first, then lists the added project."""
    r = test_client.get("/api/projects")
    r.raise_for_status()
    assert r.json() == []

    await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)

    r = test_client.get("/api/projects")
    r.raise_for_status()
    data = r.json()
    assert len(data) == 1
    assert data[0]["name"] == mock_project.name


@pytest.mark.asyncio
async def test_add_project(test_client: TestClient, project_manager: ProjectManager, mock_project):
    """post /api/projects: project created via API is visible to the manager."""
    mock_project.team_id = None
    org_id = new_id()
    mock_project.org_id = org_id
    r = test_client.post(f"/api/projects?org_id={org_id}", content=_dumps(mock_project), headers=HEADERS)
    r.raise_for_status()

    data = await project_manager.list_projects(ZERO_UUID, None, None)
    assert len(data) == 1
    assert data[0].name == mock_project.name


@pytest.mark.asyncio
async def test_get_project_info(test_client: TestClient, project_manager: ProjectManager, mock_project):
    """get /api/projects/{project_id}/info: response matches the stored project."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)

    r = test_client.get(f"/api/projects/{project.id}/info")
    r.raise_for_status()

    data = r.json()

    # Compare serialized forms so both sides use the same JSON encoding rules.
    assert json.dumps(data) == _dumps(project)


@pytest.mark.asyncio
async def test_update_project_info(test_client: TestClient, project_manager: ProjectManager, mock_project):
    """post /api/projects/{project_id}/info: update is persisted by the manager."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)

    project2 = deepcopy(project)
    project2.name = "mock2"
    r = test_client.post(f"/api/projects/{project.id}/info", content=_dumps(project2), headers=HEADERS)
    r.raise_for_status()

    assert (await project_manager.get_project(ZERO_UUID, project.id)).name == "mock2"


@pytest.mark.asyncio
async def test_projects_search(test_client: TestClient, project_manager: ProjectManager, mock_project):
    """get /api/projects/search/{project_name}: exact-name search hits and misses."""

    r = test_client.get(f"/api/projects/search/{mock_project.name}")
    r.raise_for_status()
    assert r.json() == []

    await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)

    r = test_client.get(f"/api/projects/search/{mock_project.name}")
    r.raise_for_status()
    data = r.json()
    assert len(data) == 1
    assert data[0]["name"] == mock_project.name

    # A non-matching name must return an empty result, not an error.
    r = test_client.get(f"/api/projects/search/{mock_project.name}_2")
    r.raise_for_status()
    assert r.json() == []


@pytest.mark.asyncio
async def test_delete_project(test_client: TestClient, project_manager: ProjectManager, mock_project):
    """delete /api/projects/{project_id}: project disappears from the manager."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)
    assert len(await project_manager.list_projects(ZERO_UUID, None, None)) == 1
    r = test_client.delete(f"/api/projects/{project.id}")
    r.raise_for_status()
    assert len(await project_manager.list_projects(ZERO_UUID, None, None)) == 0


class MockMetricResult(MetricResult):
    """Minimal MetricResult holding a single float, used to build test snapshots."""

    class Config:
        # NOTE(review): presumably relaxes pydantic field-alias enforcement for
        # this test-only model — confirm against the MetricResult base Config.
        alias_required = False

    value: float

    @classmethod
    def create(cls, value: float):
        """Alternate constructor: wrap *value* in a MockMetricResult."""
        return MockMetricResult(value=value)


class MockMetric(Metric[MockMetricResult]):
    """Metric stub whose calculate() always yields value=1, regardless of input."""

    class Config:
        alias_required = False

    def calculate(self, data: InputData) -> MockMetricResult:
        return MockMetricResult.create(1)


@default_renderer(wrap_type=MockMetric)
class MockMetricRenderer(MetricRenderer):
    """Renderer registered for MockMetric: one full-size counter widget with one
    additional graph ("title2"/"text2") attached."""

    def render_html(self, obj) -> List[BaseWidgetInfo]:
        widget = counter(counters=[CounterData("title", "text")], size=WidgetSize.FULL)
        widget.additionalGraphs = [counter(counters=[CounterData("title2", "text2")], size=WidgetSize.FULL)]
        return [widget]


@pytest.fixture
def mock_snapshot():
    """Snapshot containing a single MockMetric together with its precomputed result.

    ``metrics_ids`` is left empty here; tests that need the snapshot to count as
    a report set ``metrics_ids = [0]`` themselves.
    """
    return Snapshot(
        id=new_id(),
        name="mock",
        timestamp=datetime.datetime.now(),
        metadata={},
        tags=[],
        suite=ContextPayload(
            metrics=[MockMetric()],
            metric_results=[MockMetricResult.create(1)],
            tests=[],
            test_results=[],
            options=Options(),
        ),
        metrics_ids=[],
        test_ids=[],
        options=Options(),
    )


@pytest.mark.asyncio
async def test_add_snapshot(test_client: TestClient, project_manager: ProjectManager, mock_project, mock_snapshot):
    """post /api/projects/{project_id}/snapshots: snapshot uploaded via API is stored."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)

    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 0
    r = test_client.post(f"/api/projects/{project.id}/snapshots", content=_dumps(mock_snapshot), headers=HEADERS)
    r.raise_for_status()

    snapshots = await project_manager.list_snapshots(ZERO_UUID, project.id)
    assert len(snapshots) == 1
    assert snapshots[0].id == mock_snapshot.id


@pytest.mark.asyncio
async def test_delete_snapshot(test_client: TestClient, project_manager: ProjectManager, mock_project, mock_snapshot):
    """delete /api/projects/{project_id}/{snapshot_id}: snapshot is removed."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)
    await project_manager.add_snapshot(ZERO_UUID, project.id, mock_snapshot)
    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 1
    time.sleep(0.1)  # try to avoid WinError 32 error (file used by another process)
    r = test_client.delete(f"/api/projects/{project.id}/{mock_snapshot.id}")
    r.raise_for_status()

    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 0


@pytest.mark.asyncio
async def test_get_project_reports(
    test_client: TestClient, project_manager: ProjectManager, mock_project, mock_snapshot
):
    """get /api/projects/{project_id}/reports: snapshot with metrics_ids shows up as a report."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)
    # Non-empty metrics_ids marks the snapshot as a report (vs. a test suite).
    mock_snapshot.metrics_ids = [0]
    await project_manager.add_snapshot(ZERO_UUID, project.id, mock_snapshot)
    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 1

    r = test_client.get(f"/api/projects/{project.id}/reports")
    r.raise_for_status()
    data = r.json()
    assert len(data) == 1
    assert data[0]["id"] == str(mock_snapshot.id)


@pytest.mark.asyncio
async def test_get_project_test_suites(
    test_client: TestClient, project_manager: ProjectManager, mock_project, mock_snapshot
):
    """get /api/projects/{project_id}/test_suites: snapshot without metrics_ids is a test suite."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)
    await project_manager.add_snapshot(ZERO_UUID, project.id, mock_snapshot)
    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 1

    r = test_client.get(f"/api/projects/{project.id}/test_suites")
    r.raise_for_status()
    data = r.json()
    assert len(data) == 1
    assert data[0]["id"] == str(mock_snapshot.id)


@pytest.mark.asyncio
async def test_get_snapshot_data(test_client: TestClient, project_manager: ProjectManager, mock_project, mock_snapshot):
    """get /api/projects/{project_id}/{snapshot_id}/data: full widget payload from MockMetricRenderer."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)
    mock_snapshot.metrics_ids = [0]
    await project_manager.add_snapshot(ZERO_UUID, project.id, mock_snapshot)
    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 1

    r = test_client.get(f"/api/projects/{project.id}/{mock_snapshot.id}/data")
    r.raise_for_status()
    data = r.json()
    fp = MockMetric().get_fingerprint()
    # Expected payload mirrors MockMetricRenderer.render_html: a main counter
    # widget ("MockMetric-0") carrying one additional graph ("MockMetric-1").
    assert data == {
        "name": "Report",
        "widgets": [
            {
                "additionalGraphs": [
                    {
                        "additionalGraphs": [],
                        "alertStats": None,
                        "alerts": [],
                        "alertsPosition": None,
                        "details": "",
                        "id": "MockMetric-1",
                        "insights": [],
                        "pageSize": 5,
                        "params": {"counters": [{"label": "title2", "value": "text2"}]},
                        "size": 2,
                        "tabs": [],
                        "title": "",
                        "type": "counter",
                        "widgets": [],
                        "source_fingerprint": None,
                        "linked_metrics": None,
                    }
                ],
                "alertStats": None,
                "alerts": [],
                "alertsPosition": None,
                "details": "",
                "id": "MockMetric-0",
                "insights": [],
                "pageSize": 5,
                "params": {"counters": [{"label": "title", "value": "text"}]},
                "size": 2,
                "tabs": [],
                "title": "",
                "type": "counter",
                "widgets": [],
                "source_fingerprint": fp,
                "linked_metrics": [fp],
            }
        ],
    }


@pytest.mark.asyncio
async def test_get_projects_graphs_data(
    test_client: TestClient, project_manager: ProjectManager, mock_project, mock_snapshot
):
    """get /api/projects/{project_id}/{snapshot_id}/graphs_data/{graph_id}: fetch one additional graph."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)
    mock_snapshot.metrics_ids = [0]
    await project_manager.add_snapshot(ZERO_UUID, project.id, mock_snapshot)
    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 1

    r = test_client.get(f"/api/projects/{project.id}/{mock_snapshot.id}/graphs_data/MockMetric-1")
    r.raise_for_status()
    data = r.json()

    # "MockMetric-1" is the additional graph attached by MockMetricRenderer.
    assert data == {
        "additionalGraphs": [],
        "alertStats": None,
        "alerts": [],
        "alertsPosition": None,
        "details": "",
        "id": "MockMetric-1",
        "insights": [],
        "pageSize": 5,
        "params": {"counters": [{"label": "title2", "value": "text2"}]},
        "size": 2,
        "tabs": [],
        "title": "",
        "type": "counter",
        "widgets": [],
        "source_fingerprint": None,
        "linked_metrics": None,
    }


@pytest.mark.parametrize("report_format", ["html", "json"])
@pytest.mark.asyncio
async def test_download_snapshot(
    test_client: TestClient, project_manager: ProjectManager, mock_project, mock_snapshot, report_format
):
    """get /api/projects/{project_id}/{snapshot_id}/download in both html and json formats."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)
    mock_snapshot.metrics_ids = [0]
    await project_manager.add_snapshot(ZERO_UUID, project.id, mock_snapshot)
    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 1

    r = test_client.get(
        f"/api/projects/{project.id}/{mock_snapshot.id}/download", params={"report_format": report_format}
    )
    r.raise_for_status()
    if report_format == "json":
        data = r.json()
        # Timestamp is nondeterministic (set by the fixture at creation time),
        # so it is blanked out before comparing.
        data["timestamp"] = None
        assert data == {
            "metrics": [{"metric": "MockMetric", "result": {"value": 1}}],
            "timestamp": None,
            "version": evidently.__version__,
        }
    if report_format == "html":
        pass  # how should we validate it? not 500 seems good enough


@pytest.mark.asyncio
async def test_get_project_panels(test_client: TestClient, project_manager: ProjectManager, mock_project):
    """get /api/projects/{project_id}/dashboard/panels: round-trips the configured panel."""
    panel = DashboardPanelCounter(
        title="panel",
        filter=ReportFilter(metadata_values={}, tag_values=[], include_test_suites=True),
        agg=CounterAgg.NONE,
    )
    mock_project.dashboard.add_panel(panel)
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)

    r = test_client.get(f"/api/projects/{project.id}/dashboard/panels")
    r.raise_for_status()
    data = r.json()
    # Parse back through the polymorphic DashboardPanel type to compare models.
    assert parse_obj_as(List[DashboardPanel], data) == [panel]


@pytest.mark.asyncio
async def test_get_project_dashboard(test_client: TestClient, project_manager: ProjectManager, mock_project):
    """get /api/projects/{project_id}/dashboard: rendered widget payload for one counter panel."""
    panel = DashboardPanelCounter(
        title="panel",
        filter=ReportFilter(metadata_values={}, tag_values=[], include_test_suites=True),
        agg=CounterAgg.NONE,
    )
    mock_project.dashboard.add_panel(panel)
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)

    r = test_client.get(f"/api/projects/{project.id}/dashboard")
    r.raise_for_status()
    data = r.json()
    assert data == {
        "max_timestamp": None,
        "min_timestamp": None,
        "name": "",
        "widgets": [
            {
                "additionalGraphs": [],
                "alertStats": None,
                "alerts": [],
                "alertsPosition": None,
                "details": "",
                "id": str(panel.id),
                "insights": [],
                "pageSize": 5,
                "params": {"counters": [{"label": "panel", "value": ""}]},
                "size": 2,
                "tabs": [],
                "title": "",
                "type": "counter",
                "widgets": [],
                "source_fingerprint": None,
                "linked_metrics": None,
            }
        ],
    }


@pytest.mark.asyncio
async def test_reload_project(test_client: TestClient, project_manager: ProjectManager, mock_project, mock_snapshot):
    """get /api/projects/{project_id}/reload: picks up snapshots written directly to blob storage."""
    project = await project_manager.add_project(mock_project, ZERO_UUID, ZERO_UUID)
    mock_snapshot.metrics_ids = [0]
    await project_manager.add_snapshot(ZERO_UUID, project.id, mock_snapshot)
    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 1

    # Write a second snapshot file straight into the FS blob storage, bypassing
    # the manager, so only a reload can discover it.
    blob = project_manager.blob_storage
    assert isinstance(blob, FSSpecBlobStorage)
    snapshot_path = os.path.join(blob.base_path, blob.get_snapshot_blob_id(project.id, mock_snapshot))
    snapshot_id2 = new_id()
    snapshot2 = deepcopy(mock_snapshot)
    snapshot2.id = snapshot_id2
    snapshot_path2 = snapshot_path.replace(str(mock_snapshot.id), str(snapshot_id2))
    with open(snapshot_path2, "w") as f:
        f.write(json.dumps(snapshot2.dict(), indent=2, cls=NumpyEncoder))

    r = test_client.get(f"/api/projects/{project.id}/reload")
    r.raise_for_status()

    assert len(await project_manager.list_snapshots(ZERO_UUID, project.id)) == 2


def test_api_version(test_client):
    """get /api/version: reports a version and identifies the application."""
    response = test_client.get("/api/version")
    assert response.status_code == 200

    version_response = response.json()
    assert "version" in version_response
    assert version_response["application"] == "Evidently UI"