# Store one document in the workspace. The idempotency_key makes this call
# safe to re-run: the same key will not create a duplicate artifact.
client.ingest(
    workspace_id="my-app",
    actor_id="system",
    artifact_type="document",
    raw_payload={
        "title": "Refund Policy",
        "content": "Refunds are accepted within 30 days of purchase. No refunds after 30 days.",
    },
    idempotency_key="doc_refund_policy_v1",
)
# Ask for a context pack scoped to the query. If the service has too little
# relevant memory it sets abstain_flag instead of returning weak context.
pack = client.context_pack(
    query="Can I get a refund for a 45-day-old purchase?",
    workspace_id="my-app",
    max_tokens=1024,
)

if pack["abstain_flag"]:
    print("Insufficient memory to answer.")
else:
    print(pack["context_text"])  # Ready to inject into your LLM
    print(f"Confidence: {pack['confidence']:.0%}")
    print(f"Receipt: {pack['receipt_id']}")
The `context_text` field is a ready-to-inject prompt string containing `[cite:N]` citation markers:
from openai import OpenAI

# Feed the context pack to the model as a system prompt. temperature=0 keeps
# the answer deterministic and grounded in the supplied context.
oai = OpenAI()
completion = oai.chat.completions.create(
    model="gpt-4o-mini",
    messages=[
        {
            "role": "system",
            "content": (
                "Answer using ONLY the provided context. "
                "Cite sources using [cite:N] format.\n\n" + pack["context_text"]
            ),
        },
        {"role": "user", "content": "Can I get a refund for a 45-day-old purchase?"},
    ],
    temperature=0,
)
print(completion.choices[0].message.content)