I have been trying to use a Llama model through the Groq Cloud API but keep running into this error. I have tried set GROQ_API_KEY=api_key
And
$env:GROQ_API_KEY = "api_key"
But I haven't found any solution and I'm still getting the same error as in the title.
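A quick way to confirm whether load_dotenv() is actually picking up the key is to print the result of the lookup before creating the client. This is only a minimal sanity check; both variable names below come from the commands and code in this question, so adjust them to whatever name is used in the .env file:

from dotenv import load_dotenv
import os

load_dotenv()

# os.getenv returns None if the variable is not set in the environment or the .env file
print("MY_KEY present:", os.getenv("MY_KEY") is not None)
print("GROQ_API_KEY present:", os.getenv("GROQ_API_KEY") is not None)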
CODE:
from groq import Groq
from dotenv import load_dotenv
import os
# Load environment variables from .env file
load_dotenv()
# Retrieve the API key from the environment variable
api_key = os.getenv("MY_KEY")
# Initialize the Groq client with the API key
client = Groq(api_key=api_key)
# Prompt the user for input
user_input = input("Enter your message: ")
# Define the message list with the user's input
messages = [
{"role": "user", "content": user_input}
]
# Create a chat completion request
completion = client.chat.completions.create(
    model="llama-3.3-70b-versatile",
    messages=messages,
    temperature=1,
    max_completion_tokens=1310,
    top_p=1,
    stream=True,
    stop=None,
)
# Stream and print the response
for chunk in completion:
    print(chunk.choices[0].delta.content or "", end="")