"""Base Classes for AdalFlow Optimizers, including Optimizer, TextOptimizer, and DemoOptimizer."""fromtypingimportDict,Any,Union,Iterable,Sequence,Listfromadalflow.optim.parameterimportParameterfromadalflow.core.base_data_classimportDataClassParamsT=Union[Iterable[Parameter],Iterable[Dict[str,Any]]]
class Optimizer:
    __doc__ = r"""Base class for all optimizers."""

    proposing: bool = False
    params: ParamsT

    def propose(self, *args, **kwargs):
        raise NotImplementedError("propose method is not implemented")

    def step(self, *args, **kwargs):
        raise NotImplementedError("step method is not implemented")

    def revert(self, *args, **kwargs):
        raise NotImplementedError("revert method is not implemented")
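# --- Illustrative sketch, not part of the library: a minimal Optimizer
# subclass showing the propose/step/revert contract. The class name
# EchoProposeOptimizer and its backup logic are hypothetical, and we assume
# each Parameter exposes a mutable `data` attribute holding its value.
class EchoProposeOptimizer(Optimizer):
    def __init__(self, params: Iterable[Parameter]):
        self.params = list(params)
        self.proposing = False
        self._backup: Dict[int, Any] = {}

    def propose(self, *args, **kwargs):
        # Stash the current values so the proposal can be undone, then mutate them.
        for i, param in enumerate(self.params):
            self._backup[i] = param.data
            param.data = f"{param.data} (proposed)"
        self.proposing = True

    def step(self, *args, **kwargs):
        # Accept the proposal: keep the new values and drop the backups.
        self._backup.clear()
        self.proposing = False

    def revert(self, *args, **kwargs):
        # Reject the proposal: restore the stashed values.
        for i, param in enumerate(self.params):
            param.data = self._backup.get(i, param.data)
        self.proposing = False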
class TextOptimizer(Optimizer):
    __doc__ = r"""Base class for all text optimizers.

    Text optimizers use textual gradient descent, a variant of gradient descent that optimizes text directly.
    They generate new values for a given text prompt. This includes:
    - System prompt
    - Output format
    - Prompt template
    """

    def __init__(self, *args, **kwargs):
        pass

    def zero_grad(self):
        """Clear all the gradients of the parameters."""
        raise NotImplementedError("zero_grad method is not implemented")
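# --- Illustrative sketch, not part of the library: a TextOptimizer subclass
# that keeps textual "gradients" (feedback strings) per parameter in its own
# dict, so zero_grad only clears state owned by the optimizer. The name
# FeedbackTextOptimizer and the _feedback store are hypothetical.
class FeedbackTextOptimizer(TextOptimizer):
    def __init__(self, params: Iterable[Parameter]):
        super().__init__()
        self.params = list(params)
        self._feedback: Dict[int, List[str]] = {id(p): [] for p in self.params}

    def add_feedback(self, param: Parameter, feedback: str):
        # Record one piece of textual feedback ("gradient") for a parameter.
        self._feedback[id(param)].append(feedback)

    def zero_grad(self):
        # Clear all accumulated textual gradients before the next proposal.
        for key in self._feedback:
            self._feedback[key].clear()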
class DemoOptimizer(Optimizer):
    __doc__ = r"""Base class for all demo optimizers.

    Demo optimizers perform few-shot optimization: they sample raw examples from the train dataset or bootstrap examples from the model's output.
    They work with a sampler to generate new values for a given text prompt.
    If bootstrapping is used, a teacher generator is required to generate the examples.
    """

    _traces: Dict[str, Any]  # key: parameter_id (demo)
    dataset: Sequence[DataClass]
    _weighted: bool
    exclude_input_fields_from_bootstrap_demos: bool = False

    def __init__(
        self,
        weighted: bool = True,
        dataset: Sequence[DataClass] = None,
        exclude_input_fields_from_bootstrap_demos: bool = False,
        *args,
        **kwargs,
    ):
        self._weighted = weighted
        self.dataset = dataset
        self.exclude_input_fields_from_bootstrap_demos = (
            exclude_input_fields_from_bootstrap_demos
        )

    def config_shots(self, *args, **kwargs):
        r"""Initialize the samples for each parameter."""
        raise NotImplementedError("config_shots method is not implemented")

    def set_dataset(self, dataset: Sequence[DataClass]):
        r"""Set the dataset for the optimizer."""
        self.dataset = dataset

    def add_scores(self, ids: List[str], scores: List[float], *args, **kwargs):
        r"""Add scores to the optimizer."""
        raise NotImplementedError("add_scores method is not implemented")
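# --- Illustrative sketch, not part of the library: a DemoOptimizer subclass
# that samples raw few-shot examples from the dataset and tracks per-example
# scores. RandomDemoOptimizer, _shots, _scores, and sample_demos are
# hypothetical names; the scores are keyed by the ids passed to add_scores.
import random


class RandomDemoOptimizer(DemoOptimizer):
    def __init__(self, dataset: Sequence[DataClass] = None, **kwargs):
        super().__init__(dataset=dataset, **kwargs)
        self._shots: int = 0
        self._scores: Dict[str, float] = {}

    def config_shots(self, shots: int, *args, **kwargs):
        # Remember how many raw demos to sample for each proposal.
        self._shots = shots

    def add_scores(self, ids: List[str], scores: List[float], *args, **kwargs):
        # Track evaluation scores keyed by example id, e.g. to weight sampling.
        for example_id, score in zip(ids, scores):
            self._scores[example_id] = score

    def sample_demos(self) -> List[DataClass]:
        # Draw up to `self._shots` raw examples uniformly from the dataset.
        if not self.dataset or self._shots <= 0:
            return []
        return random.sample(list(self.dataset), k=min(self._shots, len(self.dataset)))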