  1  {
  2   "cells": [
  3    {
  4     "cell_type": "code",
  5     "execution_count": null,
  6     "metadata": {},
  7     "outputs": [],
  8     "source": [
  9      "%pip install -q openai==0.28"
 10     ]
 11    },
 12    {
 13     "cell_type": "code",
 14     "execution_count": 31,
 15     "metadata": {},
 16     "outputs": [
 17      {
 18       "name": "stdout",
 19       "output_type": "stream",
 20       "text": [
 21        "OPENAI_API_KEY: xxx-localai-api-key\n",
 22        "OPENAI_API_BASE: http://192.168.57.92:8080/\n"
 23       ]
 24      }
 25     ],
 26     "source": [
 27      "import openai\n",
 28      "import os\n",
 29      "\n",
 30      "# Set the OPENAI_API_KEY environment variable\n",
 31      "os.environ['OPENAI_API_KEY'] = 'localai-api-key'\n",
 32      "\n",
 33      "# Set the OPENAI_API_BASE environment variable\n",
 34      "#os.environ['OPENAI_API_BASE'] = 'http://192.168.57.92:8080/' #from wsl linux\n",
 35      "os.environ['OPENAI_API_BASE'] = 'http://localhost:8080/'\n",
 36      "\n",
 37      "\n",
 38      "# Verify that the environment variables are set\n",
 39      "print(\"OPENAI_API_KEY:\", os.environ.get('OPENAI_API_KEY'))\n",
 40      "print(\"OPENAI_API_BASE:\", os.environ.get('OPENAI_API_BASE'))"
 41     ]
 42    },
 43    {
 44     "cell_type": "code",
 45     "execution_count": 32,
 46     "metadata": {},
 47     "outputs": [],
 48     "source": [
 49      "\n",
 50      "# Send the conversation and available functions to GPT\n",
 51      "messages = [{\"role\": \"user\", \"content\": \"What's the weather like in Boston?\"}]\n",
 52      "functions = [\n",
 53      "    {\n",
 54      "        \"name\": \"get_current_weather\",\n",
 55      "        \"description\": \"Get the current weather in a given location\",\n",
 56      "        \"parameters\": {\n",
 57      "            \"type\": \"object\",\n",
 58      "            \"properties\": {\n",
 59      "                \"location\": {\n",
 60      "                    \"type\": \"string\",\n",
 61      "                    \"description\": \"The city and state, e.g. San Francisco, CA\",\n",
 62      "                },\n",
 63      "                \"unit\": {\"type\": \"string\", \"enum\": [\"celsius\", \"fahrenheit\"]},\n",
 64      "            },\n",
 65      "            \"required\": [\"location\"],\n",
 66      "        },\n",
 67      "    }\n",
 68      "]\n"
 69     ]
 70    },
 71    {
 72     "cell_type": "code",
 73     "execution_count": 33,
 74     "metadata": {},
 75     "outputs": [
 76      {
 77       "name": "stdout",
 78       "output_type": "stream",
 79       "text": [
 80        "{ \"arguments\": {\"location\": \"Boston\" ,\"unit\" :\"fahrenheit\"}, \"function\": \"get_current_weather\"} <|im_end|>\n"
 81       ]
 82      }
 83     ],
 84     "source": [
 85      "openai.api_key = os.environ.get(\"OPENAI_API_KEY\")\n",
 86      "openai.api_base = os.environ.get(\"OPENAI_API_BASE\")\n",
 87      "\n",
 88      "response = openai.ChatCompletion.create(\n",
 89      "    model=\"Hermes-2-Pro-Llama-3-8B-Q5_K_M.gguf\",\n",
 90      "    messages=messages,\n",
 91      "    functions=functions,\n",
  92      "    function_call=\"auto\"\n",
 93      ")\n",
 94      "\n",
 95      "print(response.choices[0].message[\"content\"])"
 96     ]
 97    },
 98    {
 99     "cell_type": "code",
100     "execution_count": null,
101     "metadata": {},
102     "outputs": [],
103     "source": []
104    }
105   ],
106   "metadata": {
107    "kernelspec": {
108     "display_name": "llm-env",
109     "language": "python",
110     "name": "python3"
111    },
112    "language_info": {
113     "codemirror_mode": {
114      "name": "ipython",
115      "version": 3
116     },
117     "file_extension": ".py",
118     "mimetype": "text/x-python",
119     "name": "python",
120     "nbconvert_exporter": "python",
121     "pygments_lexer": "ipython3",
122     "version": "3.10.12"
123    }
124   },
125   "nbformat": 4,
126   "nbformat_minor": 2
127  }