AutoAI RAG Parameter Schema

Below you can find the enums and dataclasses used to specify more complex AutoAI RAG parameters.

For each dataclass, you can call the following methods:

  • get_sample_params()

Example:

AutoAIRAGModelParams.get_sample_params()
# {
#     "decoding_method": "sample",
#     "min_new_tokens": 5,
#     "max_new_tokens": 300,
#     "max_sequence_length": 4096,
# }
  • show()

Example:

AutoAIRAGModelParams.show()
# +---------------------+--------------------------------------+-----------------+
# | PARAMETER           | TYPE                                 | EXAMPLE VALUE   |
# +=====================+======================================+=================+
# | decoding_method     | str, TextGenDecodingMethod, NoneType | sample          |
# +---------------------+--------------------------------------+-----------------+
# | min_new_tokens      | int, NoneType                        | 5               |
# +---------------------+--------------------------------------+-----------------+
# | max_new_tokens      | int, NoneType                        | 300             |
# +---------------------+--------------------------------------+-----------------+
# | max_sequence_length | int, NoneType                        | 4096            |
# +---------------------+--------------------------------------+-----------------+

AutoAIRAGModelParams

class ibm_watsonx_ai.foundation_models.schema.AutoAIRAGModelParams(decoding_method=None, min_new_tokens=None, max_new_tokens=None, max_sequence_length=None, max_completion_tokens=None, temperature=None)

Bases: BaseSchema

Deprecated parameters:
  • decoding_method

  • min_new_tokens

  • max_new_tokens

  • max_sequence_length

decoding_method = None
max_completion_tokens = None
max_new_tokens = None
max_sequence_length = None
min_new_tokens = None
temperature = None
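
Because decoding_method, min_new_tokens, max_new_tokens, and max_sequence_length are deprecated, new code would typically set only max_completion_tokens and temperature. A minimal sketch (the values are illustrative, not defaults):

from ibm_watsonx_ai.foundation_models.schema import AutoAIRAGModelParams

# Only the non-deprecated fields are set; all fields are optional.
model_params = AutoAIRAGModelParams(
    max_completion_tokens=300,  # illustrative value
    temperature=0.7,            # illustrative value
)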

AutoAIRAGModelConfig

class ibm_watsonx_ai.foundation_models.schema.AutoAIRAGModelConfig(model_id, parameters=None, chat_template_messages=None, prompt_template_text=None, context_template_text=None, word_to_token_ratio=None)

Bases: BaseSchema

Deprecated parameters:
  • prompt_template_text

chat_template_messages = None
context_template_text = None
model_id
parameters = None
prompt_template_text = None
word_to_token_ratio = None
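
A minimal sketch of a foundation model configuration; the model_id is an illustrative placeholder, and parameters reuses the AutoAIRAGModelParams shown above:

from ibm_watsonx_ai.foundation_models.schema import (
    AutoAIRAGModelConfig,
    AutoAIRAGModelParams,
)

model_config = AutoAIRAGModelConfig(
    model_id="ibm/granite-3-8b-instruct",  # placeholder model id
    parameters=AutoAIRAGModelParams(max_completion_tokens=300, temperature=0.7),
)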

AutoAIRAGCustomModelConfig

class ibm_watsonx_ai.foundation_models.schema.AutoAIRAGCustomModelConfig(deployment_id, space_id=None, project_id=None, parameters=None, chat_template_messages=None, prompt_template_text=None, context_template_text=None, word_to_token_ratio=None)

Bases: BaseSchema

Deprecated parameters:
  • prompt_template_text

chat_template_messages = None
context_template_text = None
deployment_id
parameters = None
project_id = None
prompt_template_text = None
space_id = None
word_to_token_ratio = None
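
A minimal sketch for a custom (already deployed) model; the deployment and space IDs are placeholders, and presumably either space_id or project_id is passed depending on where the deployment resides:

from ibm_watsonx_ai.foundation_models.schema import AutoAIRAGCustomModelConfig

custom_model_config = AutoAIRAGCustomModelConfig(
    deployment_id="00000000-0000-0000-0000-000000000000",  # placeholder
    space_id="11111111-1111-1111-1111-111111111111",       # placeholder
)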

HybridRankerStrategy

class ibm_watsonx_ai.foundation_models.schema.HybridRankerStrategy(value)

Bases: StrEnum

RRF = 'rrf'
WEIGHTED = 'weighted'
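
Because HybridRankerStrategy derives from StrEnum, its members can be used interchangeably with their plain string values:

from ibm_watsonx_ai.foundation_models.schema import HybridRankerStrategy

HybridRankerStrategy.RRF == "rrf"
# True
HybridRankerStrategy.WEIGHTED.value
# 'weighted'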

AutoAIRAGHybridRankerParams

class ibm_watsonx_ai.foundation_models.schema.AutoAIRAGHybridRankerParams(strategy: str | ibm_watsonx_ai.foundation_models.schema._api.HybridRankerStrategy, sparse_vectors: dict[str, str] | None = None, alpha: float | None = None, k: int | None = None)

Bases: BaseSchema

alpha = None
k = None
sparse_vectors = None
strategy
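
A minimal sketch of a hybrid ranker configuration using the RRF strategy; the k value is illustrative (alpha presumably applies to the weighted strategy instead):

from ibm_watsonx_ai.foundation_models.schema import (
    AutoAIRAGHybridRankerParams,
    HybridRankerStrategy,
)

hybrid_ranker = AutoAIRAGHybridRankerParams(
    strategy=HybridRankerStrategy.RRF,  # the plain string "rrf" also works
    k=60,                               # illustrative value
)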

AutoAIRAGRetrievalConfig

class ibm_watsonx_ai.foundation_models.schema.AutoAIRAGRetrievalConfig(method: 'str | RetrievalMethod', number_of_chunks: int | None = None, window_size: int | None = None, hybrid_ranker: dict | ibm_watsonx_ai.foundation_models.schema._api.AutoAIRAGHybridRankerParams | None = None)

Bases: BaseSchema

hybrid_ranker = None
method
number_of_chunks = None
window_size = None
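
A minimal sketch of a retrieval configuration; "window" is an illustrative method value (see RetrievalMethod for the supported methods), the chunk counts are illustrative, and hybrid_ranker reuses the instance from above:

from ibm_watsonx_ai.foundation_models.schema import AutoAIRAGRetrievalConfig

retrieval_config = AutoAIRAGRetrievalConfig(
    method="window",     # illustrative; a RetrievalMethod member can also be passed
    number_of_chunks=5,  # illustrative value
    window_size=2,       # illustrative value
    hybrid_ranker=hybrid_ranker,
)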

AutoAIRAGLanguageConfig

class ibm_watsonx_ai.foundation_models.schema.AutoAIRAGLanguageConfig(auto_detect: bool | None = None)

Bases: BaseSchema

auto_detect = None
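
A minimal sketch enabling automatic language detection:

from ibm_watsonx_ai.foundation_models.schema import AutoAIRAGLanguageConfig

language_config = AutoAIRAGLanguageConfig(auto_detect=True)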

AutoAIRAGGenerationConfig

class ibm_watsonx_ai.foundation_models.schema.AutoAIRAGGenerationConfig(language: dict | ibm_watsonx_ai.foundation_models.schema._api.AutoAIRAGLanguageConfig | None = None, foundation_models: list[dict | ibm_watsonx_ai.foundation_models.schema._api.AutoAIRAGModelConfig | ibm_watsonx_ai.foundation_models.schema._api.AutoAIRAGCustomModelConfig] | None = None)

Bases: BaseSchema

foundation_models = None
language = None
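
A minimal sketch combining the pieces above into a generation configuration; foundation_models accepts a list of AutoAIRAGModelConfig and/or AutoAIRAGCustomModelConfig entries, and plain dicts are also accepted per the signature:

from ibm_watsonx_ai.foundation_models.schema import AutoAIRAGGenerationConfig

generation_config = AutoAIRAGGenerationConfig(
    language=language_config,          # AutoAIRAGLanguageConfig from above
    foundation_models=[model_config],  # AutoAIRAGModelConfig from above
)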

AutoAIRAGChatTemplateMessagesConfig

class ibm_watsonx_ai.foundation_models.schema.AutoAIRAGChatTemplateMessagesConfig(system_message_text: str, user_message_text: str)

Bases: BaseSchema

system_message_text
user_message_text
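
A minimal sketch of chat template messages; the message texts are illustrative placeholders, and presumably the resulting object can be supplied wherever chat_template_messages is expected (for example, in AutoAIRAGModelConfig):

from ibm_watsonx_ai.foundation_models.schema import AutoAIRAGChatTemplateMessagesConfig

chat_template = AutoAIRAGChatTemplateMessagesConfig(
    system_message_text="Answer the question using only the retrieved documents.",
    user_message_text="Summarize the key points from the provided context.",
)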