Class RagEnabledService
Wraps an AIService to intercept queries through the RAG pipeline before sending to the LLM. All calls go through IRagPipeline.ProcessAsync — the AIService itself is never modified.
public class RagEnabledService
- Inheritance
-
RagEnabledService
- Inherited Members
Methods
GetCompletionAsync(Message)
Processes a Message through RAG pipeline (extracts text content for retrieval).
public Task<string> GetCompletionAsync(Message message)
Parameters
message Message
Returns
GetCompletionAsync(Message, RagQueryOptions?, CancellationToken)
Processes a Message through RAG pipeline (extracts text content for retrieval) with per-request query overrides.
public Task<string> GetCompletionAsync(Message message, RagQueryOptions? options, CancellationToken cancellationToken = default)
Parameters
message Message
options RagQueryOptions
cancellationToken CancellationToken
Returns
GetCompletionAsync(string)
Processes the query through RAG pipeline, then sends the request message content to the LLM.
public Task<string> GetCompletionAsync(string query)
Parameters
query string
Returns
GetCompletionAsync(string, RagQueryOptions?, CancellationToken)
Processes the query through RAG pipeline with per-request query overrides, then sends the request message content to the LLM.
public Task<string> GetCompletionAsync(string query, RagQueryOptions? options, CancellationToken cancellationToken = default)
Parameters
query string
options RagQueryOptions
cancellationToken CancellationToken
Returns
RetrieveAsync(string, RagQueryOptions?, CancellationToken)
Performs RAG retrieval with per-request query overrides and returns the processed query (context + references) without calling the LLM.
public Task<RagProcessedQuery> RetrieveAsync(string query, RagQueryOptions? options, CancellationToken cancellationToken = default)
Parameters
query string
options RagQueryOptions
cancellationToken CancellationToken
Returns
RetrieveAsync(string, CancellationToken)
Performs RAG retrieval and returns the processed query (context + references) without calling the LLM. Useful for inspecting what context would be sent.
public Task<RagProcessedQuery> RetrieveAsync(string query, CancellationToken cancellationToken = default)
Parameters
query string
cancellationToken CancellationToken
Returns
StreamAsync(string, RagQueryOptions?, CancellationToken)
Streams the LLM response after RAG augmentation with per-request query overrides.
public IAsyncEnumerable<string> StreamAsync(string prompt, RagQueryOptions? options, CancellationToken cancellationToken = default)
Parameters
prompt string
options RagQueryOptions
cancellationToken CancellationToken
Returns
StreamAsync(string, CancellationToken)
Streams the LLM response after RAG augmentation.
public IAsyncEnumerable<string> StreamAsync(string prompt, CancellationToken cancellationToken = default)
Parameters
prompt string
cancellationToken CancellationToken
Returns
StreamOnceAsync(string, RagQueryOptions?, CancellationToken)
Streams the LLM response as a one-off query with per-request query overrides (no conversation history).
public IAsyncEnumerable<string> StreamOnceAsync(string prompt, RagQueryOptions? options, CancellationToken cancellationToken = default)
Parameters
prompt string
options RagQueryOptions
cancellationToken CancellationToken
Returns
StreamOnceAsync(string, CancellationToken)
Streams the LLM response as a one-off query (no conversation history).
public IAsyncEnumerable<string> StreamOnceAsync(string prompt, CancellationToken cancellationToken = default)
Parameters
prompt string
cancellationToken CancellationToken
Returns
WithoutRag()
Returns the underlying IAIService without RAG processing.
public IAIService WithoutRag()