"""GradientContext and Gradient"""importuuidfromtypingimportAny,Optional,TYPE_CHECKINGifTYPE_CHECKING:fromadalflow.optim.parameterimportParameterfromdataclassesimportdataclass,fieldfromadalflow.core.base_data_classimportDataClass__all__=["GradientContext","Gradient"]
@dataclass
class GradientContext(DataClass):
    """GradientContext describes the component's function and traces its input and output.

    To get the component's function description, use GradientContext.to_yaml_signature().
    To get the data, use instance.to_yaml().
    """

    variable_desc: str = field(
        metadata={"desc": "The description of the target parameter"}
    )
    input_output: str = field(
        metadata={
            "desc": "The context of the gradient in form of a conversation indicating \
                the relation of the current parameter to the response parameter"
        }
    )
    response_desc: str = field(
        metadata={"desc": "The description of the response parameter"}
    )
    # input: Dict[str, Any] = field(
    #     metadata={"desc": "The input to the whole system"}, default=None
    # )
    # ground_truth: Any = field(
    #     metadata={"desc": "The ground truth of the response parameter"}, default=None
    # )
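
# Illustrative sketch (commented out, not part of the module): GradientContext is a
# DataClass, so per the docstring above an instance can be serialized with
# to_yaml() and the field descriptions retrieved with to_yaml_signature(). The
# field values below are made-up placeholders.
#
#   ctx = GradientContext(
#       variable_desc="System prompt of the generator being optimized",
#       input_output="<conversation trace relating this parameter to the response>",
#       response_desc="The evaluated response parameter",
#   )
#   print(GradientContext.to_yaml_signature())  # field descriptions
#   print(ctx.to_yaml())                        # instance data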
@dataclass
class Gradient(DataClass):
    __doc__ = r"""It handles gradients and feedback.

    It tracks the d_from_response_id / d_to_pred_id and the score of the whole response.

    If two gradients have the same data_id, different from_response_id, and the same
    from_response_component_id, this indicates a cycle component structure.
    """

    data_id: Optional[str] = None  # the id of the response from data in the dataset
    from_response_component_id: Optional[str] = (
        None  # the id of the component from which the gradient is calculated
    )
    order: Optional[int] = None  # the order of the gradient in the list of gradients

    from_response_id: Optional[str] = (
        None  # the id of the response from which the gradient is calculated
    )
    to_pred_id: Optional[str] = (
        None  # the id of the parameter to which the gradient is calculated and attached: d(from_response_id) / d(to_pred_id)
    )

    score: Optional[float] = None

    context: Optional[GradientContext] = None
    data: Any = None
    prompt: Optional[str] = None  # the LLM prompt to generate the gradient

    is_default_copy: bool = False  # whether the gradient is a default copy

    def __init__(
        self,
        *,
        from_response: "Parameter",
        to_pred: "Parameter",
        id: Optional[str] = None,  # the id of the gradient
        score: Optional[float] = None,
        data_id: Optional[str] = None,
        data: Any = None,
    ):
        self.id = id or str(uuid.uuid4())
        self._generate_name(from_response, to_pred)
        self.from_response_component_id = from_response.component_trace.id
        if not self.from_response_component_id:
            raise ValueError(
                "The from_response_component_id should not be None. Please ensure the component_trace is set."
            )
        self.from_response_id = from_response.id
        self.to_pred_id = to_pred.id
        self.score = score
        self.data_id = data_id
        if self.data_id is None:
            raise ValueError("The data_id should not be None.")
        self.data = data
        self.order = None

    def _generate_name(self, response: "Parameter", pred: "Parameter"):
        self.name = f"d_{response.name}_/_{pred.name}({response.id}_/_{pred.id})"
        self.role_desc = f"Gradient from {response.name} to {pred.name}"
    def __hash__(self):
        # Use immutable and unique attributes to compute the hash
        return hash((self.id, self.data_id, self.from_response_id, self.to_pred_id))

    def __eq__(self, other):
        # Ensure equality comparison is based on the same unique attributes
        if not isinstance(other, Gradient):
            return False
        return (
            self.id == other.id
            and self.data_id == other.data_id
            and self.from_response_id == other.from_response_id
            and self.to_pred_id == other.to_pred_id
        )
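

if __name__ == "__main__":
    # Minimal smoke-test sketch, not part of the library API. Gradient.__init__
    # expects real Parameter objects from adalflow.optim.parameter; for illustration
    # only, SimpleNamespace stand-ins expose the attributes the constructor actually
    # reads (`id`, `name`, and `component_trace.id`). All values are made up.
    from types import SimpleNamespace

    response = SimpleNamespace(
        id="response-1",
        name="llm_response",
        component_trace=SimpleNamespace(id="generator-1"),
    )
    pred = SimpleNamespace(id="pred-1", name="system_prompt")

    grad = Gradient(from_response=response, to_pred=pred, data_id="sample-0", score=0.5)
    same = Gradient(
        from_response=response, to_pred=pred, id=grad.id, data_id="sample-0", score=0.5
    )

    # Equality and hashing are based on (id, data_id, from_response_id, to_pred_id),
    # so gradients sharing those fields compare equal and collapse in a set.
    assert grad == same and len({grad, same}) == 1
    print(grad.name)  # d_llm_response_/_system_prompt(response-1_/_pred-1)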