Files
lab_local_models/goodmorning.py
2026-04-08 00:38:38 -04:00

37 lines
994 B
Python

import ollama
# Name of the local Ollama model to use (small 1B-parameter Llama 3.2 variant).
MODEL = "llama3.2:1b"
# Sent as the conversation's "system" message to steer the assistant's tone.
SYSTEM_PROMPT = """
You are a morning assistant. Be brief.
"""
def send_message(messages):
    """Stream the model's reply for *messages* to stdout and return it.

    Args:
        messages: Conversation history as a list of role/content dicts,
            in the format expected by ``ollama.chat``.

    Returns:
        The assistant's complete reply, assembled from the streamed chunks.
    """
    print("\nAssistant: ", end="", flush=True)
    pieces = []
    stream = ollama.chat(model=MODEL, messages=messages, stream=True)
    for chunk in stream:
        text = chunk.message.content
        # Echo each chunk immediately so the reply appears as it is generated.
        print(text, end="", flush=True)
        pieces.append(text)
    print()
    return "".join(pieces)
def run():
    """Run an interactive chat loop, accumulating the conversation history.

    The system prompt seeds the history; each user turn and assistant reply
    is appended so the model keeps context across turns. Exit by typing
    'quit' (any case) or pressing Ctrl-D / Ctrl-C.
    """
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    print("Good morning! Type 'quit' to exit.")
    while True:
        # Fix: exit cleanly on Ctrl-D (EOFError) or Ctrl-C (KeyboardInterrupt)
        # at the prompt instead of crashing with a traceback.
        try:
            user_input = input("\nYou: ")
        except (EOFError, KeyboardInterrupt):
            print()
            break
        if user_input.strip().lower() == "quit":
            break
        messages.append({"role": "user", "content": user_input})
        reply = send_message(messages)
        # Record the assistant's reply so the model sees it on the next turn.
        messages.append({"role": "assistant", "content": reply})
# Start the chat loop only when executed as a script, so importing this
# module for its helpers has no side effects.
if __name__ == "__main__":
    run()