/ tests / test_google_ads_monitoring.py
test_google_ads_monitoring.py
  1  """Google Ads _monitoring.py ユニットテスト
  2  
  3  _MonitoringMixin の evaluate_delivery_goal / evaluate_cpa_goal /
  4  evaluate_cv_goal / diagnose_zero_conversions をモックベースでテストする。
  5  """
  6  
  7  from __future__ import annotations
  8  
  9  from unittest.mock import AsyncMock, MagicMock
 10  
 11  import pytest
 12  
 13  from mureo.google_ads._monitoring import _MonitoringMixin
 14  
 15  
 16  # ---------------------------------------------------------------------------
 17  # テスト用のモッククライアントクラス
 18  # ---------------------------------------------------------------------------
 19  
 20  
 21  class _MockMonitoringClient(_MonitoringMixin):
 22      """_MonitoringMixin をテスト可能にするモッククラス"""
 23  
 24      def __init__(self) -> None:
 25          self._customer_id = "1234567890"
 26          self._client = MagicMock()
 27  
 28      @staticmethod
 29      def _validate_id(value: str, field_name: str) -> str:
 30          if not value or not value.isdigit():
 31              raise ValueError(f"{field_name} は数値文字列である必要があります: {value}")
 32          return value
 33  
 34      async def get_campaign(self, campaign_id: str):
 35          return None
 36  
 37      async def get_performance_report(self, **kwargs):
 38          return []
 39  
 40      async def diagnose_campaign_delivery(self, campaign_id: str):
 41          return {}
 42  
 43      async def analyze_performance(self, campaign_id: str, period: str = "LAST_7_DAYS"):
 44          return {}
 45  
 46      async def investigate_cost_increase(self, campaign_id: str):
 47          return {}
 48  
 49      async def get_search_terms_report(self, **kwargs):
 50          return []
 51  
 52      async def list_conversion_actions(self):
 53          return []
 54  
 55  
 56  # ---------------------------------------------------------------------------
 57  # evaluate_delivery_goal テスト
 58  # ---------------------------------------------------------------------------
 59  
 60  
 61  @pytest.mark.unit
 62  class TestEvaluateDeliveryGoal:
 63      @pytest.fixture()
 64      def client(self) -> _MockMonitoringClient:
 65          return _MockMonitoringClient()
 66  
 67      @pytest.mark.asyncio
 68      async def test_healthy_campaign(self, client: _MockMonitoringClient) -> None:
 69          """正常な配信状態 → healthy"""
 70          client.get_campaign = AsyncMock(return_value={"status": "ENABLED"})
 71          client.diagnose_campaign_delivery = AsyncMock(
 72              return_value={
 73                  "issues": [],
 74                  "warnings": [],
 75              }
 76          )
 77          client.get_performance_report = AsyncMock(
 78              return_value=[{"metrics": {"impressions": 100, "clicks": 10}}]
 79          )
 80  
 81          result = await client.evaluate_delivery_goal("123")
 82          assert result["status"] == "healthy"
 83          assert "normally" in result["summary"]
 84  
 85      @pytest.mark.asyncio
 86      async def test_critical_no_impressions(self, client: _MockMonitoringClient) -> None:
 87          """インプレッション0 → critical"""
 88          client.get_campaign = AsyncMock(return_value={"status": "ENABLED"})
 89          client.diagnose_campaign_delivery = AsyncMock(
 90              return_value={
 91                  "issues": [],
 92                  "warnings": [],
 93              }
 94          )
 95          client.get_performance_report = AsyncMock(
 96              return_value=[{"metrics": {"impressions": 0, "clicks": 0}}]
 97          )
 98  
 99          result = await client.evaluate_delivery_goal("123")
100          assert result["status"] == "critical"
101          assert result.get("suggested_workflow") == "delivery_fix"
102  
103      @pytest.mark.asyncio
104      async def test_critical_with_issues(self, client: _MockMonitoringClient) -> None:
105          """診断で issues 検出 → critical"""
106          client.get_campaign = AsyncMock(return_value={"status": "ENABLED"})
107          client.diagnose_campaign_delivery = AsyncMock(
108              return_value={
109                  "issues": ["有効な広告がありません"],
110                  "warnings": [],
111              }
112          )
113          client.get_performance_report = AsyncMock(
114              return_value=[{"metrics": {"impressions": 50, "clicks": 5}}]
115          )
116  
117          result = await client.evaluate_delivery_goal("123")
118          assert result["status"] == "critical"
119  
120      @pytest.mark.asyncio
121      async def test_warning_with_warnings_only(
122          self, client: _MockMonitoringClient
123      ) -> None:
124          """診断で warnings のみ → warning"""
125          client.get_campaign = AsyncMock(return_value={"status": "ENABLED"})
126          client.diagnose_campaign_delivery = AsyncMock(
127              return_value={
128                  "issues": [],
129                  "warnings": ["地域ターゲティングが未設定"],
130              }
131          )
132          client.get_performance_report = AsyncMock(
133              return_value=[{"metrics": {"impressions": 50, "clicks": 5}}]
134          )
135  
136          result = await client.evaluate_delivery_goal("123")
137          assert result["status"] == "warning"
138  
139      @pytest.mark.asyncio
140      async def test_paused_campaign_critical(
141          self, client: _MockMonitoringClient
142      ) -> None:
143          """一時停止中 → critical"""
144          client.get_campaign = AsyncMock(return_value={"status": "PAUSED"})
145          client.diagnose_campaign_delivery = AsyncMock(
146              return_value={
147                  "issues": [],
148                  "warnings": [],
149              }
150          )
151          client.get_performance_report = AsyncMock(
152              return_value=[{"metrics": {"impressions": 0, "clicks": 0}}]
153          )
154  
155          result = await client.evaluate_delivery_goal("123")
156          assert result["status"] == "critical"
157  
158      @pytest.mark.asyncio
159      async def test_exception_handling(self, client: _MockMonitoringClient) -> None:
160          """各メソッドの例外は吸収される"""
161          client.get_campaign = AsyncMock(side_effect=RuntimeError("fail"))
162          client.diagnose_campaign_delivery = AsyncMock(side_effect=RuntimeError("fail"))
163          client.get_performance_report = AsyncMock(side_effect=RuntimeError("fail"))
164  
165          result = await client.evaluate_delivery_goal("123")
166          assert result["status"] in ("critical", "warning")
167  
168  
169  # ---------------------------------------------------------------------------
170  # evaluate_cpa_goal テスト
171  # ---------------------------------------------------------------------------
172  
173  
@pytest.mark.unit
class TestEvaluateCpaGoal:
    """evaluate_cpa_goal: CPA classification against a target value."""

    @pytest.fixture()
    def client(self) -> _MockMonitoringClient:
        return _MockMonitoringClient()

    @staticmethod
    def _wire(
        client: _MockMonitoringClient,
        *,
        cost: int,
        conversions: int,
        investigation: dict | None = None,
    ) -> None:
        """Stub the performance report and the cost-increase investigation."""
        client.get_performance_report = AsyncMock(
            return_value=[{"metrics": {"cost": cost, "conversions": conversions}}]
        )
        client.investigate_cost_increase = AsyncMock(
            return_value=investigation or {}
        )

    @pytest.mark.asyncio
    async def test_healthy_cpa(self, client: _MockMonitoringClient) -> None:
        """CPA within target (1000 vs 2000) is healthy."""
        self._wire(client, cost=10000, conversions=10)

        result = await client.evaluate_cpa_goal("123", 2000.0)
        assert result["status"] == "healthy"
        assert result["current_cpa"] == 1000.0

    @pytest.mark.asyncio
    async def test_warning_cpa(self, client: _MockMonitoringClient) -> None:
        """CPA up to 1.2x the target (1100 vs 1000) is a warning."""
        self._wire(client, cost=11000, conversions=10)

        result = await client.evaluate_cpa_goal("123", 1000.0)
        assert result["status"] == "warning"
        assert result["deviation_pct"] > 0

    @pytest.mark.asyncio
    async def test_critical_cpa(self, client: _MockMonitoringClient) -> None:
        """CPA beyond 1.2x the target (1500 vs 1000) is critical."""
        self._wire(client, cost=15000, conversions=10)

        result = await client.evaluate_cpa_goal("123", 1000.0)
        assert result["status"] == "critical"
        assert result.get("suggested_workflow") == "cpa_optimization"

    @pytest.mark.asyncio
    async def test_zero_conversions(self, client: _MockMonitoringClient) -> None:
        """No conversions: warning status and an undefined (None) CPA."""
        self._wire(client, cost=5000, conversions=0)

        result = await client.evaluate_cpa_goal("123", 1000.0)
        assert result["status"] == "warning"
        assert result["current_cpa"] is None

    @pytest.mark.asyncio
    async def test_wasteful_terms_extraction(
        self, client: _MockMonitoringClient
    ) -> None:
        """Only the top five wasteful search terms are surfaced."""
        self._wire(
            client,
            cost=5000,
            conversions=5,
            investigation={
                "wasteful_search_terms": [{"term": f"t{i}"} for i in range(10)]
            },
        )

        result = await client.evaluate_cpa_goal("123", 2000.0)
        assert len(result["wasteful_terms"]) == 5
244  
245  
246  # ---------------------------------------------------------------------------
247  # evaluate_cv_goal テスト
248  # ---------------------------------------------------------------------------
249  
250  
@pytest.mark.unit
class TestEvaluateCvGoal:
    """evaluate_cv_goal: conversion-volume classification and bottleneck detection."""

    @pytest.fixture()
    def client(self) -> _MockMonitoringClient:
        return _MockMonitoringClient()

    @staticmethod
    def _wire(
        client: _MockMonitoringClient,
        *,
        impressions: int,
        clicks: int,
        conversions: int,
        insights: list[str] | None = None,
    ) -> None:
        """Stub the performance report and the performance analysis."""
        client.get_performance_report = AsyncMock(
            return_value=[
                {
                    "metrics": {
                        "impressions": impressions,
                        "clicks": clicks,
                        "conversions": conversions,
                    }
                }
            ]
        )
        client.analyze_performance = AsyncMock(
            return_value={"insights": insights or []}
        )

    @pytest.mark.asyncio
    async def test_healthy_cv(self, client: _MockMonitoringClient) -> None:
        """Target met (70 CV over 7 days = 10/day) is healthy."""
        self._wire(client, impressions=1000, clicks=100, conversions=70)

        result = await client.evaluate_cv_goal("123", 10.0)
        assert result["status"] == "healthy"
        assert result["current_cv_daily"] == 10.0  # 70 / 7

    @pytest.mark.asyncio
    async def test_warning_cv(self, client: _MockMonitoringClient) -> None:
        """At or above 80% of target (9/day vs 10/day) is a warning."""
        self._wire(client, impressions=1000, clicks=100, conversions=63)

        result = await client.evaluate_cv_goal("123", 10.0)
        assert result["status"] == "warning"

    @pytest.mark.asyncio
    async def test_critical_cv(self, client: _MockMonitoringClient) -> None:
        """Below 80% of target (5/day vs 10/day) is critical."""
        self._wire(client, impressions=1000, clicks=100, conversions=35)

        result = await client.evaluate_cv_goal("123", 10.0)
        assert result["status"] == "critical"
        assert result.get("suggested_workflow") == "cv_increase"

    @pytest.mark.asyncio
    async def test_bottleneck_impression(self, client: _MockMonitoringClient) -> None:
        """Impression-related insight pins the bottleneck to 'impression'."""
        self._wire(
            client,
            impressions=10,
            clicks=5,
            conversions=0,
            insights=["インプレッションが不足しています"],
        )

        result = await client.evaluate_cv_goal("123", 10.0)
        assert result["bottleneck"] == "impression"

    @pytest.mark.asyncio
    async def test_bottleneck_ctr(self, client: _MockMonitoringClient) -> None:
        """Very low CTR (10 clicks / 10k impressions) pins the bottleneck to 'ctr'."""
        self._wire(client, impressions=10000, clicks=10, conversions=0)

        result = await client.evaluate_cv_goal("123", 10.0)
        assert result["bottleneck"] == "ctr"

    @pytest.mark.asyncio
    async def test_bottleneck_cvr(self, client: _MockMonitoringClient) -> None:
        """Very low CVR (1 CV / 500 clicks) pins the bottleneck to 'cvr'."""
        self._wire(client, impressions=10000, clicks=500, conversions=1)

        result = await client.evaluate_cv_goal("123", 10.0)
        assert result["bottleneck"] == "cvr"

    @pytest.mark.asyncio
    async def test_zero_target(self, client: _MockMonitoringClient) -> None:
        """A zero daily target yields a zero deviation (no division blow-up)."""
        self._wire(client, impressions=100, clicks=10, conversions=0)

        result = await client.evaluate_cv_goal("123", 0.0)
        assert result["deviation_pct"] == 0.0
351  
352  
353  # ---------------------------------------------------------------------------
354  # diagnose_zero_conversions テスト
355  # ---------------------------------------------------------------------------
356  
357  
@pytest.mark.unit
class TestDiagnoseZeroConversions:
    """diagnose_zero_conversions: tracking, funnel, and search-term checks."""

    @pytest.fixture()
    def client(self) -> _MockMonitoringClient:
        return _MockMonitoringClient()

    @staticmethod
    def _wire(
        client: _MockMonitoringClient,
        *,
        bidding: str,
        cv_actions: list[dict] | None,
        impressions: int,
        clicks: int,
        conversions: int,
        cost: int,
    ) -> None:
        """Stub every collaborator the diagnosis consults."""
        client.get_campaign = AsyncMock(return_value={"bidding_strategy": bidding})
        client.list_conversion_actions = AsyncMock(return_value=cv_actions or [])
        client.get_performance_report = AsyncMock(
            return_value=[
                {
                    "metrics": {
                        "impressions": impressions,
                        "clicks": clicks,
                        "conversions": conversions,
                        "cost": cost,
                    }
                }
            ]
        )
        client.diagnose_campaign_delivery = AsyncMock(
            return_value={"issues": [], "warnings": [], "recommendations": []}
        )

    @pytest.mark.asyncio
    async def test_no_cv_tracking_critical(self, client: _MockMonitoringClient) -> None:
        """Missing conversion tracking is critical and flagged on the report."""
        self._wire(
            client,
            bidding="MAXIMIZE_CONVERSIONS",
            cv_actions=[],
            impressions=100,
            clicks=10,
            conversions=0,
            cost=5000,
        )

        result = await client.diagnose_zero_conversions("123")
        assert result["status"] == "critical"
        assert result["conversion_tracking"]["has_issue"] is True

    @pytest.mark.asyncio
    async def test_no_delivery_bottleneck(self, client: _MockMonitoringClient) -> None:
        """Zero impressions pin the funnel bottleneck to 'no_delivery'."""
        self._wire(
            client,
            bidding="MAXIMIZE_CLICKS",
            cv_actions=[{"status": "ENABLED"}],
            impressions=0,
            clicks=0,
            conversions=0,
            cost=0,
        )

        result = await client.diagnose_zero_conversions("123")
        assert result["funnel"]["bottleneck"] == "no_delivery"

    @pytest.mark.asyncio
    async def test_no_clicks_bottleneck(self, client: _MockMonitoringClient) -> None:
        """Impressions without clicks pin the bottleneck to 'no_clicks'."""
        self._wire(
            client,
            bidding="MAXIMIZE_CLICKS",
            cv_actions=[{"status": "ENABLED"}],
            impressions=100,
            clicks=0,
            conversions=0,
            cost=0,
        )

        result = await client.diagnose_zero_conversions("123")
        assert result["funnel"]["bottleneck"] == "no_clicks"

    @pytest.mark.asyncio
    async def test_healthy_with_conversions(
        self, client: _MockMonitoringClient
    ) -> None:
        """Campaigns that do convert are reported healthy."""
        self._wire(
            client,
            bidding="MAXIMIZE_CLICKS",
            cv_actions=[{"status": "ENABLED"}],
            impressions=1000,
            clicks=100,
            conversions=10,
            cost=50000,
        )

        result = await client.diagnose_zero_conversions("123")
        assert result["status"] == "healthy"

    @pytest.mark.asyncio
    async def test_search_term_quality_high_waste(
        self, client: _MockMonitoringClient
    ) -> None:
        """Zero-CV search terms above 50% of spend show up in the quality report."""
        self._wire(
            client,
            bidding="MAXIMIZE_CLICKS",
            cv_actions=[{"status": "ENABLED"}],
            impressions=1000,
            clicks=100,
            conversions=0,
            cost=10000,
        )
        client.get_search_terms_report = AsyncMock(
            return_value=[
                {"search_term": "bad1", "metrics": {"conversions": 0, "cost": 6000}},
                {"search_term": "bad2", "metrics": {"conversions": 0, "cost": 2000}},
            ]
        )

        result = await client.diagnose_zero_conversions("123")
        assert result["search_term_quality"] is not None
        assert result["search_term_quality"]["zero_cv_cost"] == 8000
525  
526  
527  # ---------------------------------------------------------------------------
528  # _build_cv_recommendations テスト
529  # ---------------------------------------------------------------------------
530  
531  
@pytest.mark.unit
class TestBuildCvRecommendations:
    """_build_cv_recommendations: static recommended-action builder."""

    def test_all_issues(self) -> None:
        """Every issue present yields all four fix actions, ordered by priority."""
        recs = _MonitoringMixin._build_cv_recommendations(
            has_cv_issue=True,
            bidding_issue="入札戦略不整合",
            bottleneck="no_delivery",
            search_term_quality={"zero_cv_terms": 5},
            cost=5000.0,
        )
        names = {rec["action"] for rec in recs}
        expected = {
            "fix_cv_tracking",
            "fix_bidding_strategy",
            "add_negative_keywords",
            "fix_delivery",
        }
        assert expected <= names
        # The returned list must already be sorted by ascending priority.
        priorities = [rec["priority"] for rec in recs]
        assert priorities == sorted(priorities)

    def test_no_issues(self) -> None:
        """With no detected issues the generic improvement hints still appear."""
        recs = _MonitoringMixin._build_cv_recommendations(
            has_cv_issue=False,
            bidding_issue=None,
            bottleneck=None,
            search_term_quality=None,
            cost=0.0,
        )
        names = {rec["action"] for rec in recs}
        assert "improve_ads_and_keywords" in names
        assert "review_landing_page" in names