import json
from collections import Counter

from avala import Client
from langchain.prompts import ChatPromptTemplate
from langchain_openai import ChatOpenAI
client = Client()
llm = ChatOpenAI(model="gpt-4o")
# Fetch project and task stats
projects = list(client.projects.list())
project_summaries = []
for project in projects:
tasks = list(client.tasks.list(project=project.uid))
status_counts = {}
for task in tasks:
status = task.status or "unknown"
status_counts[status] = status_counts.get(status, 0) + 1
project_summaries.append({
"name": project.name,
"status": project.status,
"tasks": status_counts,
})
prompt = ChatPromptTemplate.from_messages([
("system", "You are a project analytics assistant. Answer questions about annotation project progress."),
("user", "Project data:\n{projects}\n\nQuestion: {question}"),
])
chain = prompt | llm
# Ask questions
questions = [
"Which project has the most pending tasks?",
"What's the overall completion rate across all projects?",
"Which projects might need more annotators?",
]
for q in questions:
answer = chain.invoke({"projects": project_summaries, "question": q})
print(f"Q: {q}\nA: {answer.content}\n")