# legacy/python/lib/1_basic_client.py
"""
Basic LangChain client example connecting to your Ollama server.
This demonstrates simple LLM interaction.
"""
 5  
import os

from langchain_community.llms import Ollama
 7  
 8  # Initialize Ollama client pointing to your server
 9  llm = Ollama(
10      model="gpt-oss:120b",
11      base_url="http://192.222.50.154:11434"
12  )
13  
def simple_query(prompt: str):
    """Forward *prompt* to the module-level LLM client and return its reply."""
    return llm.invoke(prompt)
18  
def _banner(title: str) -> None:
    """Print a 60-char section banner around *title* (matches original output)."""
    print("=" * 60)
    print(title)
    print("=" * 60)


def main() -> None:
    """Run the three demo interactions against the configured LLM.

    Performs network calls to the Ollama server; output is printed, nothing
    is returned.
    """
    # Example 1: Simple question
    _banner("Example 1: Simple Question")
    response = simple_query("Why is the sky blue? Explain in 2 sentences.")
    print(f"\nResponse: {response}\n")

    # Example 2: Code generation
    _banner("Example 2: Code Generation")
    response = simple_query("Write a Python function to calculate fibonacci numbers.")
    print(f"\nResponse: {response}\n")

    # Example 3: Streaming response — chunks are printed as they arrive,
    # flushed so the stream is visible in real time.
    _banner("Example 3: Streaming Response")
    print("\nStreaming response:")
    for chunk in llm.stream("Tell me a short joke about programming."):
        print(chunk, end="", flush=True)
    print("\n")


if __name__ == "__main__":
    main()