#!/bin/bash

# ECHO LLM Setup Script
# Installs Ollama and pulls all required models for ECHO agents

# -e: abort on any unhandled command failure; -u: error on unset variables.
# pipefail is deliberately NOT set: the model smoke test later pipes
# `ollama run` into `head -1`, and head's early exit (SIGPIPE in the first
# stage) would otherwise fail the whole script.
set -eu

echo "🚀 ECHO Local LLM Setup"
echo "======================="
echo ""
 11  
 12  # Colors for output
 13  GREEN='\033[0;32m'
 14  BLUE='\033[0;34m'
 15  YELLOW='\033[1;33m'
 16  NC='\033[0m' # No Color
 17  
 18  # Check if running on macOS
 19  if [[ "$OSTYPE" != "darwin"* ]]; then
 20      echo "${YELLOW}Warning: This script is optimized for macOS. For Linux, install Ollama from https://ollama.com${NC}"
 21      echo ""
 22  fi
 23  
 24  # Step 1: Install Ollama
 25  echo "${BLUE}Step 1: Installing Ollama...${NC}"
 26  if command -v ollama &> /dev/null; then
 27      echo "${GREEN}✓ Ollama is already installed${NC}"
 28      ollama --version
 29  else
 30      echo "Installing Ollama via Homebrew..."
 31      if ! command -v brew &> /dev/null; then
 32          echo "${YELLOW}Homebrew not found. Please install from https://brew.sh${NC}"
 33          echo "Or install Ollama manually from https://ollama.com"
 34          exit 1
 35      fi
 36      brew install ollama
 37      echo "${GREEN}✓ Ollama installed successfully${NC}"
 38  fi
 39  
 40  echo ""
 41  
 42  # Step 2: Start Ollama service
 43  echo "${BLUE}Step 2: Starting Ollama service...${NC}"
 44  if pgrep -x "ollama" > /dev/null; then
 45      echo "${GREEN}✓ Ollama service is already running${NC}"
 46  else
 47      echo "Starting Ollama service in the background..."
 48      ollama serve > /dev/null 2>&1 &
 49      sleep 3
 50      echo "${GREEN}✓ Ollama service started${NC}"
 51  fi
 52  
 53  echo ""
 54  
 55  # Step 3: Pull models for each agent
 56  echo "${BLUE}Step 3: Pulling LLM models for ECHO agents...${NC}"
 57  echo "This will download several GB of data and may take 15-30 minutes."
 58  echo ""
 59  
 60  # Define agent models (agent|model format)
 61  AGENT_MODELS=(
 62      "CEO|qwen2.5:14b"
 63      "CTO|deepseek-coder:33b"
 64      "CHRO|llama3.1:8b"
 65      "Operations Head|mistral:7b"
 66      "Product Manager|llama3.1:8b"
 67      "Senior Architect|deepseek-coder:33b"
 68      "UI/UX Engineer|llama3.2-vision:11b"
 69      "Senior Developer|deepseek-coder:6.7b"
 70      "Test Lead|codellama:13b"
 71  )
 72  
 73  # Get unique models
 74  UNIQUE_MODELS=$(for entry in "${AGENT_MODELS[@]}"; do echo "$entry" | cut -d'|' -f2; done | sort -u)
 75  
 76  echo "Models to install:"
 77  for model in $UNIQUE_MODELS; do
 78      echo "  - $model"
 79  done
 80  echo ""
 81  
 82  # Pull each unique model
 83  for model in $UNIQUE_MODELS; do
 84      echo "${YELLOW}Pulling $model...${NC}"
 85      if ollama list | grep -q "^${model}"; then
 86          echo "${GREEN}✓ $model already installed${NC}"
 87      else
 88          ollama pull "$model"
 89          echo "${GREEN}✓ $model installed successfully${NC}"
 90      fi
 91      echo ""
 92  done
 93  
 94  # Step 4: Verify installation
 95  echo "${BLUE}Step 4: Verifying installation...${NC}"
 96  echo ""
 97  
 98  echo "Installed models:"
 99  ollama list
100  echo ""
101  
102  # Step 5: Test a model
103  echo "${BLUE}Step 5: Testing model...${NC}"
104  echo "Testing with llama3.1:8b..."
105  echo ""
106  
107  TEST_RESPONSE=$(ollama run llama3.1:8b "Say 'ECHO LLM setup successful!' in one sentence." 2>&1 | head -1)
108  echo "Response: $TEST_RESPONSE"
109  echo ""
110  
# Step 6: Display the agent-to-model mapping as an aligned table.
echo "${GREEN}✅ Setup complete!${NC}"
echo ""
echo "Agent-Model Mapping:"
echo "--------------------"
printf "%-20s %s\n" "Agent" "Model"
echo "--------------------"
for entry in "${AGENT_MODELS[@]}"; do
    # Split "agent|model" in-shell instead of forking `cut` twice per entry.
    IFS='|' read -r agent model <<< "$entry"
    printf "%-20s %s\n" "$agent" "$model"
done
echo ""
124  
# Step 7: Point out the environment-variable overrides the agents honor.
cat <<EOF
${BLUE}Optional Configuration:${NC}

You can customize models per agent using environment variables:
  export CEO_MODEL=qwen2.5:14b
  export CTO_MODEL=deepseek-coder:33b

To disable LLM for specific agents:
  export CEO_LLM_ENABLED=false

To change Ollama endpoint:
  export OLLAMA_ENDPOINT=http://localhost:11434

EOF

# Step 8: Next steps for the user.
cat <<EOF
${BLUE}Next Steps:${NC}
1. Compile the shared library: cd shared && mix deps.get && mix compile
2. Compile and run an agent: cd apps/ceo && mix deps.get && mix escript.build
3. Test AI consultation: Use the 'ai_consult' tool in any agent

For more information, see CLAUDE.md

${GREEN}Happy building with ECHO! 🎉${NC}
EOF