from langchain.schema.runnable import RunnableParallel, RunnableLambda
import json
import re
# Define retrieval function
def retrieve_context(inputs):
    # Combine subject and topics into a single query for better retrieval
    query = f"{inputs['subject']} - {', '.join(inputs['topics'])}"
    retrieved_docs = retriever.invoke(query)
    print(retrieved_docs)  # debug: inspect the retrieved documents
    if not retrieved_docs:
        return "No relevant documents found."
    return "\n\n".join(doc.page_content for doc in retrieved_docs)
# Build the RAG chain: gather the inputs and retrieved context in parallel,
# then feed them to the prompt and the LLM
rag_chain = (
    RunnableParallel({
        "subject": lambda x: x["subject"],
        "topics": lambda x: x["topics"],
        "context": RunnableLambda(retrieve_context),
        # "response_json": lambda x: json.dumps(RESPONSE_JSON),
    })
    | mcq_prompt
    | llm
)
# Invoke the RAG Chain
response = rag_chain.invoke({
    "subject": "Chemistry",
    "topics": ["Structure of benzene", "From carbonyl compounds", "Aromatic hydrocarbon"],
})
# Extract the JSON part using regex
json_match = re.search(r"\[\s*{.*}\s*\]", response.content, re.DOTALL)  # find the JSON array in the response
if json_match:
    json_str = json_match.group(0)  # extract only the JSON content
else:
    print("Error: JSON not found in response")
    json_str = "[]"
# Parse JSON
try:
    mcq_data = json.loads(json_str)
except json.JSONDecodeError as e:
    print("Error parsing JSON:", e)
    mcq_data = []
# Store in a structured dictionary keyed by question number
mcq_dict = {f"Q{i+1}": item for i, item in enumerate(mcq_data)}
print(json.dumps(mcq_dict, indent=2))
The code above uses the LangChain framework with a RAG approach to have an LLM generate MCQ questions. I'm stuck on displaying the generated questions on the frontend: how do I connect this code to the Flask framework so the questions can be served and displayed?
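Is something along these lines the right direction? A minimal sketch of what I'm imagining (the module mcq_generator, function generate_mcqs, route /api/mcqs, and template mcqs.html are just placeholder names I made up, and I'm assuming the chain-building code above lives in an importable module):

# app.py -- rough sketch of exposing the MCQ generator through Flask
import json
import re

from flask import Flask, jsonify, render_template, request

# rag_chain is the chain built above; mcq_generator is a hypothetical module holding it
from mcq_generator import rag_chain

app = Flask(__name__)

def generate_mcqs(subject, topics):
    """Run the RAG chain and return {"Q1": {...}, "Q2": {...}, ...}."""
    response = rag_chain.invoke({"subject": subject, "topics": topics})
    match = re.search(r"\[\s*{.*}\s*\]", response.content, re.DOTALL)
    try:
        mcq_data = json.loads(match.group(0)) if match else []
    except json.JSONDecodeError:
        mcq_data = []
    return {f"Q{i+1}": item for i, item in enumerate(mcq_data)}

@app.route("/api/mcqs", methods=["POST"])
def api_mcqs():
    # Expects JSON such as {"subject": "Chemistry", "topics": ["Structure of benzene"]}
    data = request.get_json(force=True)
    return jsonify(generate_mcqs(data.get("subject", ""), data.get("topics", [])))

@app.route("/")
def index():
    # Server-rendered alternative: pass the questions to a Jinja2 template
    mcqs = generate_mcqs("Chemistry", ["Structure of benzene"])
    return render_template("mcqs.html", mcqs=mcqs)

if __name__ == "__main__":
    app.run(debug=True)

With the /api/mcqs endpoint the frontend could fetch the JSON and render it client-side, or the index route could render it server-side with a Jinja2 template that loops over mcqs.items(). Is this a reasonable way to connect the chain to Flask, or is there a better pattern?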