@set_service_action_metadata(endpoint=TextGenerationCreateEndpoint)
def create(
    self,
    *,
    model_id: Optional[str] = None,
    prompt_id: Optional[str] = None,
    input: Optional[str] = None,
    inputs: Optional[Union[list[str], str]] = None,
    parameters: Optional[ModelLike[TextGenerationParameters]] = None,
    moderations: Optional[ModelLike[ModerationParameters]] = None,
    data: Optional[ModelLike[PromptTemplateData]] = None,
    execution_options: Optional[ModelLike[CreateExecutionOptions]] = None,
) -> Generator[TextGenerationCreateResponse, None, None]:
    """
    Args:
        model_id: The ID of the model.
        prompt_id: The ID of the prompt which should be used.
        input: Prompt to process. It is recommended not to leave any trailing spaces.
        inputs: Prompt/prompts to process. It is recommended not to leave any trailing spaces.
        parameters: Parameters for text generation.
        moderations: Parameters for moderation.
        data: An optional data object for the underlying prompt.
        execution_options: An optional configuration of how the SDK should work (error handling, limits, callbacks, ...).

    Yields:
        TextGenerationCreateResponse object (server response without modification).

    Raises:
        ApiResponseException: In case of a known API error.
        ApiNetworkException: In case of an unhandled network error.
        ValidationError: In case the provided parameters are invalid.

    Note:
        To limit the number of concurrent requests or change the execution procedure, see the 'execution_options' parameter.
    """
    if inputs is not None and input is not None:
        raise ValueError("Either specify 'inputs' or 'input' parameter!")

    prompts: Optional[list[str]] = (
        cast_list(inputs) if inputs is not None else cast_list(input) if input is not None else None
    )
    if not prompts and not prompt_id:
        raise ValueError("At least one of the following parameters input/inputs/prompt_id must be specified!")

    metadata = get_service_action_metadata(self.create)
    parameters_formatted = to_model_optional(parameters, TextGenerationParameters)
    moderations_formatted = to_model_optional(moderations, ModerationParameters, copy=True)
    template_formatted = to_model_optional(data, PromptTemplateData)
    execution_options_formatted = to_model_instance(
        [self.config.create_execution_options, execution_options], CreateExecutionOptions
    )
    assert execution_options_formatted

    self._log_method_execution(
        "Generate Create",
        prompts=prompts,
        prompt_id=prompt_id,
        parameters=parameters_formatted,
        moderations=moderations_formatted,
        data=template_formatted,
        execution_options=execution_options_formatted,
    )

    if prompt_id is not None:
        with self._get_http_client() as client:
            http_response = client.post(
                url=self._get_endpoint(metadata.endpoint),
                params=_TextGenerationCreateParametersQuery().model_dump(),
                json=_TextGenerationCreateRequest(
                    input=None,
                    model_id=model_id,
                    moderations=moderations_formatted,
                    parameters=parameters_formatted,
                    prompt_id=prompt_id,
                    data=template_formatted,
                ).model_dump(),
            )
            yield TextGenerationCreateResponse(**http_response.json())
            return

    async def handler(
        batch_input: str, http_client: AsyncClient, limiter: BaseLimiter
    ) -> TextGenerationCreateResponse:
        self._log_method_execution("Generate Create - processing input", input=batch_input)

        async def handle_retry(ex: Exception):
            if isinstance(ex, HTTPStatusError) and ex.response.status_code == httpx.codes.TOO_MANY_REQUESTS:
                await limiter.report_error()

        async def handle_success(*args):
            await limiter.report_success()

        http_response = await http_client.post(
            url=self._get_endpoint(metadata.endpoint),
            extensions={
                BaseRetryTransport.Callback.Retry: handle_retry,
                BaseRetryTransport.Callback.Success: handle_success,
            },
            params=_TextGenerationCreateParametersQuery().model_dump(),
            json=_TextGenerationCreateRequest(
                input=batch_input,
                model_id=model_id,
                moderations=moderations_formatted,
                parameters=parameters_formatted,
                prompt_id=prompt_id,
                data=template_formatted,
            ).model_dump(),
        )
        response = TextGenerationCreateResponse(**http_response.json())
        if execution_options_formatted.callback:
            execution_options_formatted.callback(response)
        return response

    yield from execute_async(
        inputs=prompts,
        handler=handler,
        limiters=[
            self._concurrency_limiter,
            self._get_local_limiter(execution_options_formatted.concurrency_limit),
        ],
        http_client=self._get_async_http_client,
        ordered=execution_options_formatted.ordered,
        throw_on_error=execution_options_formatted.throw_on_error,
    )
@set_service_action_metadata(endpoint=TextGenerationStreamCreateEndpoint)
def create_stream(
    self,
    *,
    input: Optional[str] = None,
    model_id: Optional[str] = None,
    prompt_id: Optional[str] = None,
    parameters: Optional[ModelLike[TextGenerationParameters]] = None,
    moderations: Optional[ModelLike[ModerationParameters]] = None,
    data: Optional[ModelLike[PromptTemplateData]] = None,
) -> Generator[TextGenerationStreamCreateResponse, None, None]:
    """
    Yields:
        TextGenerationStreamCreateResponse (raw server response object).

    Raises:
        ApiResponseException: In case of a known API error.
        ApiNetworkException: In case of an unhandled network error.
        ValidationError: In case the provided parameters are invalid.
    """
    metadata = get_service_action_metadata(self.create_stream)
    parameters_formatted = to_model_optional(parameters, TextGenerationParameters)
    moderations_formatted = to_model_optional(moderations, ModerationParameters, copy=True)
    template_formatted = to_model_optional(data, PromptTemplateData)

    self._log_method_execution(
        "Generate Create Stream",
        input=input,
        parameters=parameters_formatted,
        moderations=moderations_formatted,
        template=template_formatted,
    )

    with self._get_http_client() as client:
        yield from generation_stream_handler(
            ResponseModel=TextGenerationStreamCreateResponse,
            logger=self._logger,
            generator=client.post_stream(
                url=self._get_endpoint(metadata.endpoint),
                params=_TextGenerationStreamCreateParametersQuery().model_dump(),
                json=_TextGenerationStreamCreateRequest(
                    input=input,
                    parameters=parameters_formatted,
                    model_id=model_id,
                    prompt_id=prompt_id,
                    moderations=moderations_formatted,
                    data=template_formatted,
                ).model_dump(),
            ),
        )
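# --- Editorial usage sketch (not part of the original module) -----------------
# Consuming the stream produced by `create_stream`. The service instance and the
# model id are assumptions; each yielded item is a raw
# TextGenerationStreamCreateResponse, and the `results[].generated_text` access
# below assumes the usual shape of that schema (chunks may carry no results,
# e.g. moderation-only events, hence the guards).
def _example_create_stream_usage(service) -> None:
    for chunk in service.create_stream(
        model_id="google/flan-t5-xl",  # illustrative model id
        input="Summarize HTTP streaming in one sentence.",
        parameters=TextGenerationParameters(max_new_tokens=50),  # field assumed
    ):
        for result in chunk.results or []:  # attribute assumed
            print(result.generated_text or "", end="", flush=True)
    print()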
@set_service_action_metadata(endpoint=TextGenerationComparisonCreateEndpoint)
def compare(
    self,
    *,
    request: TextGenerationComparisonCreateRequestRequest,
    compare_parameters: Optional[ModelLike[TextGenerationComparisonParameters]] = None,
    name: Optional[str] = None,
) -> TextGenerationComparisonCreateResponse:
    """
    Raises:
        ApiResponseException: In case of a known API error.
        ApiNetworkException: In case of an unhandled network error.
        ValidationError: In case the provided parameters are invalid.
    """
    metadata = get_service_action_metadata(self.compare)
    request_formatted = to_model_instance(request, TextGenerationComparisonCreateRequestRequest, copy=True)
    compare_parameters_formatted = to_model_instance(compare_parameters, TextGenerationComparisonParameters)

    self._log_method_execution(
        "Text Generation Compare",
        input=input,
        requests=request_formatted,
        parameters=compare_parameters_formatted,
    )

    with self._get_http_client() as client:
        http_response = client.post(
            url=self._get_endpoint(metadata.endpoint),
            params=_TextGenerationComparisonCreateParametersQuery().model_dump(),
            json=_TextGenerationComparisonCreateRequest(
                name=name,
                compare_parameters=compare_parameters_formatted,
                request=request_formatted,
            ).model_dump(),
        )
        return TextGenerationComparisonCreateResponse(**http_response.json())
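# --- Editorial usage sketch (not part of the original module) -----------------
# Running a comparison of generation settings over a single request via `compare`.
# The request and parameter classes come from the signature above, but the fields
# set on them (input, model_id, temperature sweep) and the `results` attribute on
# the returned TextGenerationComparisonCreateResponse are assumptions for
# illustration; consult the schema definitions before relying on them.
def _example_compare_usage(service) -> None:
    response = service.compare(
        name="temperature sweep",  # illustrative comparison name
        request=TextGenerationComparisonCreateRequestRequest(
            input="Explain overfitting in one sentence.",  # field assumed
            model_id="google/flan-t5-xl",  # field assumed
        ),
        compare_parameters=TextGenerationComparisonParameters(
            temperature=[0.0, 0.7, 1.5],  # assumed: lists of values to compare
        ),
    )
    for item in response.results or []:  # attribute assumed
        print(item)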