== Client test with HTTP POST ==

We write a simple Python 3 script:
{{{
#!python
| | 29 | import requests |
| | 30 | |
# Send a prompt to the Ollama API
def ask_ollama(prompt):
    """Send *prompt* to a local Ollama server and return the model's reply.

    Returns the generated text on success, or None on any HTTP or
    network failure (an error message is printed in that case).
    """
    # Ollama API endpoint (non-streaming /api/generate call)
    url = "http://192.168.19.15:11434/api/generate"

    # Payload for the API request
    payload = {
        "model": "deepseek-r1:8b",  # Replace with the correct model name
        "prompt": prompt,
        "stream": False,  # Set to True if you want streaming responses
    }

    try:
        # A timeout guards against blocking forever when the server is
        # unreachable; generation can be slow, so allow several minutes.
        response = requests.post(url, json=payload, timeout=300)

        # Check if the request was successful
        if response.status_code == 200:
            # Parse the JSON response; "response" holds the generated text
            result = response.json()
            return result.get("response", "No response from model")

        print(f"Error: {response.status_code} - {response.text}")
        return None
    except (requests.exceptions.RequestException, ValueError) as e:
        # RequestException covers connection/timeout failures;
        # ValueError covers a non-JSON body from response.json().
        print(f"Error sending request to Ollama: {e}")
        return None
| | 58 | |
# Script entry point: send one fixed arithmetic prompt and report the result.
if __name__ == "__main__":
    question = "What is 1 + 1?"

    print(f"Sending prompt: {question}")
    answer = ask_ollama(question)

    # ask_ollama returns None (or an empty string) on failure.
    if answer:
        print("Model Response:")
        print(answer)
    else:
        print("Failed to get a response from the model.")
}}}
| | 74 | |
| | 75 | |
| | 76 | |
| | 77 | |