# test/python/testpipeline/testllm/testopencode.py
 1  """
 2  OpenCode module tests
 3  """
 4  
 5  import json
 6  import unittest
 7  
 8  from http.server import HTTPServer, BaseHTTPRequestHandler
 9  from threading import Thread
10  
11  from txtai.pipeline import LLM
12  
13  
class RequestHandler(BaseHTTPRequestHandler):
    """
    Test HTTP handler.
    """

    def do_POST(self):
        """
        POST request handler.

        Answers every POST with a fixed mock OpenCode-style JSON payload
        whose text part is "blue".
        """

        # Build the mock payload and serialize it to UTF-8 bytes
        payload = {"id": "0", "parts": [{"type": "text", "text": "blue"}]}
        body = json.dumps(payload).encode("utf-8")

        # Status line and headers
        self.send_response(200)
        self.send_header("content-type", "application/json")
        self.send_header("content-length", len(body))
        self.end_headers()

        # Response body
        self.wfile.write(body)
        self.wfile.flush()
38  
39  
class TestOpenCode(unittest.TestCase):
    """
    OpenCode tests.
    """

    @classmethod
    def setUpClass(cls):
        """
        Create mock http server.
        """

        cls.httpd = HTTPServer(("127.0.0.1", 8005), RequestHandler)

        # Daemon thread so the server never blocks interpreter exit
        server = Thread(target=cls.httpd.serve_forever, daemon=True)
        server.start()

    @classmethod
    def tearDownClass(cls):
        """
        Shutdown mock http server.
        """

        cls.httpd.shutdown()

        # Close the listening socket so the port is released for reruns
        cls.httpd.server_close()

    def testGeneration(self):
        """
        Test generation with OpenCode
        """

        # Test model generation with OpenCode, pointed at the local mock server
        model = LLM("opencode/big-pickle", url="http://127.0.0.1:8005")
        self.assertEqual(model("The sky is"), "blue")