3535from pytensor .compile import DeepCopyOp , Function , ProfileStats , get_mode
3636from pytensor .compile .sharedvalue import SharedVariable
3737from pytensor .graph .basic import Constant , Variable
38- from pytensor .graph .traversal import ancestors , graph_inputs
38+ from pytensor .graph .traversal import ancestors , explicit_graph_inputs , graph_inputs
3939from pytensor .tensor import as_tensor
4040from pytensor .tensor .math import variadic_add
4141from pytensor .tensor .random .op import RandomVariable
6262 convert_observed_data ,
6363 gradient ,
6464 hessian ,
65- inputvars ,
6665 join_nonshared_inputs ,
6766 rewrite_pregrad ,
6867)
@@ -588,6 +587,8 @@ def compile_logp(
588587 ) -> PointFunc :
589588 """Compiled log probability density function.
590589
590+ The function expects as input a dictionary with the same structure as self.initial_point()
591+
591592 Parameters
592593 ----------
593594 vars : list of random variables or potential terms, optional
@@ -599,7 +600,12 @@ def compile_logp(
599600 Whether to sum all logp terms or return elemwise logp for each variable.
600601 Defaults to True.
601602 """
602- return self .compile_fn (self .logp (vars = vars , jacobian = jacobian , sum = sum ), ** compile_kwargs )
603+ compile_kwargs .setdefault ("on_unused_input" , "ignore" )
604+ return self .compile_fn (
605+ inputs = self .value_vars ,
606+ outs = self .logp (vars = vars , jacobian = jacobian , sum = sum ),
607+ ** compile_kwargs ,
608+ )
603609
604610 def compile_dlogp (
605611 self ,
@@ -609,6 +615,9 @@ def compile_dlogp(
609615 ) -> PointFunc :
610616 """Compiled log probability density gradient function.
611617
618+ The function expects as input a dictionary with the same structure as self.initial_point()
619+
620+
612621 Parameters
613622 ----------
614623 vars : list of random variables or potential terms, optional
@@ -617,7 +626,12 @@ def compile_dlogp(
617626 jacobian : bool
618627 Whether to include jacobian terms in logprob graph. Defaults to True.
619628 """
620- return self .compile_fn (self .dlogp (vars = vars , jacobian = jacobian ), ** compile_kwargs )
629+ compile_kwargs .setdefault ("on_unused_input" , "ignore" )
630+ return self .compile_fn (
631+ inputs = self .value_vars ,
632+ outs = self .dlogp (vars = vars , jacobian = jacobian ),
633+ ** compile_kwargs ,
634+ )
621635
622636 def compile_d2logp (
623637 self ,
@@ -628,6 +642,8 @@ def compile_d2logp(
628642 ) -> PointFunc :
629643 """Compiled log probability density hessian function.
630644
645+ The function expects as input a dictionary with the same structure as self.initial_point()
646+
631647 Parameters
632648 ----------
633649 vars : list of random variables or potential terms, optional
@@ -636,8 +652,10 @@ def compile_d2logp(
636652 jacobian : bool
637653 Whether to include jacobian terms in logprob graph. Defaults to True.
638654 """
655+ compile_kwargs .setdefault ("on_unused_input" , "ignore" )
639656 return self .compile_fn (
640- self .d2logp (vars = vars , jacobian = jacobian , negate_output = negate_output ),
657+ inputs = self .value_vars ,
658+ outs = self .d2logp (vars = vars , jacobian = jacobian , negate_output = negate_output ),
641659 ** compile_kwargs ,
642660 )
643661
@@ -742,7 +760,7 @@ def dlogp(
742760 dlogp graph
743761 """
744762 if vars is None :
745- value_vars = None
763+ value_vars = self . continuous_value_vars
746764 else :
747765 if not isinstance (vars , list | tuple ):
748766 vars = [vars ]
@@ -782,7 +800,7 @@ def d2logp(
782800 d²logp graph
783801 """
784802 if vars is None :
785- value_vars = None
803+ value_vars = self . continuous_value_vars
786804 else :
787805 if not isinstance (vars , list | tuple ):
788806 vars = [vars ]
@@ -1616,7 +1634,7 @@ def compile_fn(
16161634 outs : Variable or sequence of Variables
16171635 PyTensor variable or iterable of PyTensor variables.
16181636 inputs : sequence of Variables, optional
1619- PyTensor input variables, defaults to pytensorf.inputvars(outs) .
1637+ PyTensor input variables. Required if there is more than one input.
16201638 mode
16211639 PyTensor compilation mode, default=None.
16221640 point_fn : bool
@@ -1630,7 +1648,11 @@ def compile_fn(
16301648 Compiled PyTensor function
16311649 """
16321650 if inputs is None :
1633- inputs = inputvars (outs )
1651+ inputs = list (explicit_graph_inputs (outs ))
1652+ if (not point_fn ) and len (inputs ) > 1 :
1653+ raise ValueError (
1654+ "compile_fn requires inputs to be specified when there is more than one input and point_fn is disabled."
1655+ )
16341656
16351657 with self :
16361658 fn = compile (
@@ -1793,7 +1815,7 @@ def point_logps(self, point=None, round_vals=2, **kwargs):
17931815 factor .name : np .round (np .asarray (factor_logp ), round_vals )
17941816 for factor , factor_logp in zip (
17951817 factors ,
1796- self .compile_fn (factor_logps_fn , ** kwargs )(point ),
1818+ self .compile_fn (inputs = self . value_vars , outs = factor_logps_fn , ** kwargs )(point ),
17971819 )
17981820 }
17991821
@@ -2126,8 +2148,8 @@ def compile_fn(
21262148 ----------
21272149 outs
21282150 PyTensor variable or iterable of PyTensor variables.
2129- inputs
2130- PyTensor input variables, defaults to pytensorf.inputvars(outs) .
2151+ inputs, optional
2152+ PyTensor input variables. Required if there is more than one input.
21312153 mode
21322154 PyTensor compilation mode, default=None.
21332155 point_fn : bool
0 commit comments