suppress errors in ax - batch 1
Differential Revision: D23969775

fbshipit-source-id: cb422b648033d3a5932ac6fb72a2d881bd3caa4a
generatedunixname89002005307016 authored and facebook-github-bot committed Sep 29, 2020
1 parent 38edc9e commit e3fd5a6
Showing 15 changed files with 25 additions and 78 deletions.
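
Most of the hunks below fall into two buckets: operator-related pyre-fixme[6] comments renumbered to pyre-fixme[58] (Pyre's "unsupported operand" error), and stale suppressions deleted outright. As a reminder of the convention, here is a minimal, hypothetical sketch of a Pyre inline suppression comment; none of the names come from Ax. By convention, # pyre-fixme[N] marks a suppression as debt to clean up later, while # pyre-ignore[N] marks it as intentional.

# Hypothetical illustration of a Pyre inline suppression; names are made up.
from typing import Optional

def clip(count: Optional[int], limit: int) -> int:
    # pyre-fixme[58]: `>` is not supported for operand types `Optional[int]`
    # and `int`.
    if count > limit:  # flagged because `count` may be None
        return limit
    return count or 0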
8 changes: 7 additions & 1 deletion ax/benchmark/botorch_methods.py
@@ -120,7 +120,13 @@ def make_basic_generation_strategy(
num_trials=-1,
model_kwargs={
"model_constructor": fixed_noise_gp_model_constructor,
"transforms": [Winsorize] + Cont_X_trans + Y_trans, # pyre-ignore[6]
# pyre-fixme[58]: `+` is not supported for operand types
# `List[typing.Type[Winsorize]]` and
# `List[typing.Type[ax.modelbridge.transforms.base.Transform]]`.
# pyre-fixme[58]: `+` is not supported for operand types
# `List[typing.Type[Winsorize]]` and
# `List[typing.Type[ax.modelbridge.transforms.base.Transform]]`.
"transforms": [Winsorize] + Cont_X_trans + Y_trans,
"transform_configs": {
"Winsorize": {
f"winsorization_{t}": v
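The duplicated [58] comment above suppresses two `+` operations on the same line: List is invariant in its element type, so concatenating a List[Type[Winsorize]] with a List[Type[Transform]] is rejected even though Winsorize subclasses Transform. A hypothetical sketch of the same error, and of the annotation that would avoid the suppression (names below are not from Ax):

from typing import List, Type

class Transform: ...
class Winsorize(Transform): ...
class UnitX(Transform): ...

winsorize_only: List[Type[Winsorize]] = [Winsorize]
cont_x_trans: List[Type[Transform]] = [UnitX]

# pyre-fixme[58]: `+` is not supported for operand types
# `List[Type[Winsorize]]` and `List[Type[Transform]]`.
transforms = winsorize_only + cont_x_trans

# Declaring the left operand with the wider element type removes the error.
head: List[Type[Transform]] = [Winsorize]
transforms_ok = head + cont_x_trans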
3 changes: 0 additions & 3 deletions ax/core/base_trial.py
@@ -398,7 +398,6 @@ def _set_generation_step_index(self, generation_step_index: Optional[int]) -> No
"Cannot add generator runs from different generation steps to a "
"single trial."
)
# pyre-fixme[8]: Attribute has type `None`; used as `Optional[int]`.
self._generation_step_index = generation_step_index

@abstractproperty
@@ -601,7 +600,6 @@ def _status(self) -> TrialStatus:
that the trial statuses mapping on the experiment is updated always when
a trial status is updated.
"""
# pyre-fixme[7]: Expected `TrialStatus` but got `None`.
return self.__status

@_status.setter
@@ -613,5 +611,4 @@ def _status(self, trial_status: TrialStatus) -> None:
assert self.index in self._experiment._trial_indices_by_status[self._status]
self._experiment._trial_indices_by_status[self._status].remove(self.index)
self._experiment._trial_indices_by_status[trial_status].add(self.index)
# pyre-fixme[8]: Attribute has type `None`; used as `TrialStatus`.
self.__status = trial_status
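
The [8] suppressions deleted above covered assignments to attributes whose type Pyre had inferred as the literal None. A minimal sketch of how that error arises and how an annotation resolves it (illustrative code, not from Ax):

from typing import Optional

class TrialLike:
    def __init__(self) -> None:
        self._index = None                           # inferred attribute type: None
        self._annotated_index: Optional[int] = None  # declared Optional[int]

    def set_index(self, index: Optional[int]) -> None:
        # pyre-fixme[8]: Attribute has type `None`; used as `Optional[int]`.
        self._index = index
        self._annotated_index = index  # fine: matches the annotation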
1 change: 0 additions & 1 deletion ax/core/experiment.py
@@ -269,7 +269,6 @@ def optimization_config(self, optimization_config: OptimizationConfig) -> None:
for metric_name in optimization_config.metrics.keys():
if metric_name in self._tracking_metrics:
self.remove_tracking_metric(metric_name)
# pyre-fixme[8]: Attribute has type `None`; used as `OptimizationConfig`.
self._optimization_config = optimization_config

@property
6 changes: 3 additions & 3 deletions ax/core/parameter.py
@@ -153,11 +153,11 @@ def _validate_range_param(
ParameterType.FLOAT,
):
raise ValueError("RangeParameter type must be int or float.")
# pyre-fixme[6]: `>=` is not supported for operand types `Union[None, bool,
# pyre-fixme[58]: `>=` is not supported for operand types `Union[None, bool,
# float, int, str]` and `Union[None, bool, float, int, str]`.
if lower >= upper:
raise ValueError("max must be strictly larger than min.")
# pyre-fixme[6]: `<=` is not supported for operand types `Union[None, bool,
# pyre-fixme[58]: `<=` is not supported for operand types `Union[None, bool,
# float, int, str]` and `int`.
if log_scale and lower <= 0:
raise ValueError("Cannot take log when min <= 0.")
@@ -236,7 +236,7 @@ def set_digits(self, digits: int) -> "RangeParameter":
# Re-scale min and max to new digits definition
cast_lower = self.cast(self._lower)
cast_upper = self.cast(self._upper)
# pyre-fixme[6]: `>=` is not supported for operand types `Union[None, bool,
# pyre-fixme[58]: `>=` is not supported for operand types `Union[None, bool,
# float, int, str]` and `Union[None, bool, float, int, str]`.
if cast_lower >= cast_upper:
raise ValueError(
4 changes: 2 additions & 2 deletions ax/modelbridge/cross_validation.py
@@ -144,8 +144,8 @@ def cross_validate_by_trial(model: ModelBridge, trial: int = -1) -> List[CVResul
for obs in training_data:
if obs.features.trial_index is None:
continue
# pyre-fixme[6]: `<` is not supported for operand types `Optional[np.int64]`
# and `int`.
# pyre-fixme[58]: `<` is not supported for operand types
# `Optional[np.int64]` and `int`.
elif obs.features.trial_index < trial:
cv_training_data.append(obs)
elif obs.features.trial_index == trial:
4 changes: 0 additions & 4 deletions ax/modelbridge/generation_strategy.py
@@ -277,8 +277,6 @@ def trial_indices_by_step(self) -> Dict[int, Set[int]]:
for trial_index, trial in self.experiment.trials.items():
if (
trial._generation_step_index is not None
# pyre-fixme[6]: `<=` is not supported for operand types `None` and
# `int`.
and trial._generation_step_index <= self._curr.index
):
trial_indices_by_step[trial._generation_step_index].add(trial_index)
@@ -592,8 +590,6 @@ def _save_seen_trial_indices(self) -> None:
has seen the data for). Useful when `use_update=True` for a given
generation step.
"""
# pyre-fixme[8]: Attribute has type `None`; used as `Dict[TrialStatus,
# Set[int]]`.
self._seen_trial_indices_by_status = deepcopy(
self.experiment.trial_indices_by_status
)
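The suppressions dropped above sat on a comparison that follows an explicit `is not None` check. Pyre narrows an Optional across an `and` chain, which is presumably why the comments are no longer needed once the attribute carries an Optional[int] annotation. Illustrative sketch (not Ax code):

from typing import Optional

def is_at_or_before(step_index: Optional[int], current_index: int) -> bool:
    # No suppression needed: `step_index` is narrowed to `int` after the check.
    return step_index is not None and step_index <= current_index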
25 changes: 3 additions & 22 deletions ax/models/discrete/thompson.py
@@ -59,23 +59,12 @@ def fit(
parameter_values: List[TParamValueList],
outcome_names: List[str],
) -> None:
# pyre-fixme[8]: Attribute has type `None`; used as
# `List[List[typing.Union[None, bool, float, int, str]]]`.
self.X = self._fit_X(Xs=Xs)
# pyre-fixme[8]: Attribute has type `None`; used as `List[List[float]]`.
# pyre-fixme[8]: Attribute has type `None`; used as `List[List[float]]`.
self.Ys, self.Yvars = self._fit_Ys_and_Yvars(
Ys=Ys, Yvars=Yvars, outcome_names=outcome_names
)
# pyre-fixme[8]: Attribute has type `None`; used as
# `List[Dict[List[typing.Union[None, bool, float, int, str]], Tuple[float,
# float]]]`.
self.X_to_Ys_and_Yvars = self._fit_X_to_Ys_and_Yvars(
# pyre-fixme[6]: Expected `List[List[typing.Union[None, bool, float,
# int, str]]]` for 1st param but got `None`.
X=self.X,
Ys=self.Ys,
Yvars=self.Yvars,
X=self.X, Ys=self.Ys, Yvars=self.Yvars
)

# pyre-fixme[56]: While applying decorator
@@ -95,7 +84,6 @@ def gen(
raise ValueError("ThompsonSampler requires objective weights.")

arms = self.X
# pyre-fixme[6]: Expected `Sized` for 1st param but got `None`.
k = len(arms)

weights = self._generate_weights(
@@ -105,10 +93,10 @@

# Second entry is used for tie-breaking
weighted_arms = [
# pyre-fixme[16]: `None` has no attribute `__getitem__`.
(weights[i], np.random.random(), arms[i])
for i in range(k)
# pyre-fixme[6]: Expected `float` for 1st param but got `Optional[float]`.
# pyre-fixme[58]: `>` is not supported for operand types `float` and
# `Optional[float]`.
if weights[i] > min_weight
]

@@ -134,12 +122,10 @@ def gen(
@copy_doc(DiscreteModel.predict)
def predict(self, X: List[TParamValueList]) -> Tuple[np.ndarray, np.ndarray]:
n = len(X) # number of parameterizations at which to make predictions
# pyre-fixme[6]: Expected `Sized` for 1st param but got `None`.
m = len(self.Ys) # number of outcomes
f = np.zeros((n, m)) # array of outcome predictions
cov = np.zeros((n, m, m)) # array of predictive covariances
predictX = [self._hash_TParamValueList(x) for x in X]
# pyre-fixme[6]: Expected `Iterable[Variable[_T]]` for 1st param but got `None`.
for i, X_to_Y_and_Yvar in enumerate(self.X_to_Ys_and_Yvars):
# iterate through outcomes
for j, x in enumerate(predictX):
@@ -182,7 +168,6 @@ def _generate_weights(
num_valid_samples = samples.shape[1]

winner_indices = np.argmax(samples, axis=0) # (num_samples,)
# pyre-fixme[6]: Expected `Sized` for 1st param but got `None`.
winner_counts = np.zeros(len(self.X)) # (k,)
for index in winner_indices:
winner_counts[index] += 1
@@ -195,15 +180,11 @@ def _produce_samples(
objective_weights: np.ndarray,
outcome_constraints: Optional[Tuple[np.ndarray, np.ndarray]],
) -> Tuple[np.ndarray, float]:
# pyre-fixme[6]: Expected `Sized` for 1st param but got `None`.
k = len(self.X)
samples_per_metric = np.zeros(
# pyre-fixme[6]: Expected `Sized` for 1st param but got `None`.
(k, num_samples, len(self.Ys))
) # k x num_samples x m
# pyre-fixme[6]: Expected `Iterable[Variable[_T]]` for 1st param but got `None`.
for i, Y in enumerate(self.Ys): # (k x 1)
# pyre-fixme[16]: `None` has no attribute `__getitem__`.
Yvar = self.Yvars[i] # (k x 1)
cov = np.diag(Yvar) # (k x k)
samples = np.random.multivariate_normal(
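Most of the deletions in this file sat on uses of attributes (self.X, self.Ys, self.Yvars) that start out as None and are only populated by fit. A common alternative to suppressing every individual use, sketched below with made-up names, is to annotate the attribute as Optional and narrow it once before use:

from typing import List, Optional

class SamplerLike:
    def __init__(self) -> None:
        self.X: Optional[List[List[float]]] = None

    def fit(self, X: List[List[float]]) -> None:
        self.X = X

    def num_arms(self) -> int:
        X = self.X
        assert X is not None, "fit() must be called before num_arms()"
        return len(X)  # X is narrowed to List[List[float]]; no pyre-fixme needed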
1 change: 0 additions & 1 deletion ax/models/random/sobol.py
@@ -67,7 +67,6 @@ def init_engine(self, n_tunable_features: int) -> SobolEngine:
self._engine = SobolEngine(
dimension=n_tunable_features, scramble=self.scramble, seed=self.seed
).fast_forward(self.init_position)
# pyre-fixme[7]: Expected `SobolEngine` but got `None`.
return self._engine

@property
6 changes: 2 additions & 4 deletions ax/models/torch/alebo.py
@@ -267,10 +267,8 @@ def get_map_model(
mll.train()
mll, info_dict = fit_gpytorch_scipy(mll, track_iterations=False, method="tnc")
logger.debug(info_dict)
# pyre-fixme[6]: Expected `List[botorch.optim.fit.OptimizationIteration]`
# for 1st param but got `float`.
# pyre-fixme[6]: Expected `List[botorch.optim.fit.OptimizationIteration]`
# for 1st param but got `float`.
# pyre-fixme[58]: `<` is not supported for operand types
# `Union[List[botorch.optim.fit.OptimizationIteration], float]` and `float`.
if info_dict["fopt"] < f_best:
f_best = float(info_dict["fopt"]) # pyre-ignore
sd_best = m.state_dict()
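The renumbered suppression above guards a lookup into info_dict, whose values Pyre types as a Union, so the `<` against a float is flagged until the value is narrowed or cast. A hypothetical sketch of the same pattern with an isinstance-based narrowing that avoids the suppression:

from typing import Dict, List, Union

def pick_best(info: Dict[str, Union[List[str], float]], f_best: float) -> float:
    fopt = info["fopt"]  # typed as Union[List[str], float]
    if isinstance(fopt, float) and fopt < f_best:  # narrowed; no suppression needed
        return fopt
    return f_best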
3 changes: 2 additions & 1 deletion ax/plot/table_view.py
@@ -144,7 +144,8 @@ def transpose(m):

records = [[name.replace(":", " : ") for name in metric_names]] + transpose(records)
colors = [["#ffffff"] * len(metric_names)] + transpose(colors)
# pyre-fixme[6]: Expected `List[str]` for 1st param but got `List[float]`.
# pyre-fixme[58]: `+` is not supported for operand types `List[str]` and
# `List[float]`.
header = [f"<b>{x}</b>" for x in [f"{arm_noun}s"] + arm_names]
column_widths = [300] + [150] * len(arm_names)

2 changes: 0 additions & 2 deletions ax/storage/json_store/decoders.py
@@ -71,7 +71,6 @@ def batch_trial_from_json(
batch._status_quo = status_quo
batch._status_quo_weight_override = status_quo_weight_override
batch.optimize_for_power = optimize_for_power
# pyre-fixme[8]: Attribute has type `None`; used as `Optional[int]`.
batch._generation_step_index = generation_step_index
batch._properties = properties
return batch
@@ -120,7 +119,6 @@ def trial_from_json(
trial._run_metadata = run_metadata or {}
trial._runner = runner
trial._num_arms_created = num_arms_created
# pyre-fixme[8]: Attribute has type `None`; used as `Optional[int]`.
trial._generation_step_index = generation_step_index
trial._properties = properties or {}
return trial
2 changes: 0 additions & 2 deletions ax/storage/sqa_store/decoder.py
@@ -751,8 +751,6 @@ def trial_from_sqa(self, trial_sqa: SQATrial, experiment: Experiment) -> BaseTri
trial._runner = (
self.runner_from_sqa(trial_sqa.runner) if trial_sqa.runner else None
)
# pyre-fixme[9]: _generation_step_index has type `None`; used as
# `Optional[int]`.
trial._generation_step_index = trial_sqa.generation_step_index
trial._properties = trial_sqa.properties or {}
return trial
27 changes: 0 additions & 27 deletions ax/utils/measurement/synthetic_functions.py
@@ -119,7 +119,6 @@ def domain(self) -> List[Tuple[float, ...]]:
where each element of the list is a tuple corresponding to the min
and max of the domain for that dimension.
"""
# pyre-fixme[7]: Expected `List[typing.Tuple[float, ...]]` but got `None`.
return self._domain

@property
@@ -130,7 +129,6 @@ def minimums(self) -> List[Tuple[float, ...]]:
Each element of the list is a d-tuple, where d is the dimensionality
of the inputs. There may be more than one global minimums.
"""
# pyre-fixme[7]: Expected `List[typing.Tuple[float, ...]]` but got `None`.
return self._minimums

@property
@@ -141,21 +139,18 @@ def maximums(self) -> List[Tuple[float, ...]]:
Each element of the list is a d-tuple, where d is the dimensionality
of the inputs. There may be more than one global minimums.
"""
# pyre-fixme[7]: Expected `List[typing.Tuple[float, ...]]` but got `None`.
return self._maximums

@property
@informative_failure_on_none
def fmin(self) -> float:
"""Value at global minimum(s)."""
# pyre-fixme[7]: Expected `float` but got `None`.
return self._fmin

@property
@informative_failure_on_none
def fmax(self) -> float:
"""Value at global minimum(s)."""
# pyre-fixme[7]: Expected `float` but got `None`.
return self._fmax

@classmethod
@@ -202,16 +197,10 @@ def from_botorch(
class Hartmann6(SyntheticFunction):
"""Hartmann6 function (6-dimensional with 1 global minimum)."""

# pyre-fixme[15]: `_required_dimensionality` overrides attribute defined in
# `SyntheticFunction` inconsistently.
_required_dimensionality = 6
_domain = [(0, 1) for i in range(6)]
# pyre-fixme[15]: `_minimums` overrides attribute defined in `SyntheticFunction`
# inconsistently.
_minimums = [(0.20169, 0.150011, 0.476874, 0.275332, 0.311652, 0.6573)]
_fmin = -3.32237
# pyre-fixme[15]: `_fmax` overrides attribute defined in `SyntheticFunction`
# inconsistently.
_fmax = 0.0
_alpha = np.array([1.0, 1.2, 3.0, 3.2])
_A = np.array(
@@ -277,18 +266,10 @@ def _f(self, X: np.ndarray) -> float:
class Branin(SyntheticFunction):
"""Branin function (2-dimensional with 3 global minima)."""

# pyre-fixme[15]: `_required_dimensionality` overrides attribute defined in
# `SyntheticFunction` inconsistently.
_required_dimensionality = 2
# pyre-fixme[15]: `_domain` overrides attribute defined in `SyntheticFunction`
# inconsistently.
_domain = [(-5, 10), (0, 15)]
_minimums = [(-np.pi, 12.275), (np.pi, 2.275), (9.42478, 2.475)]
# pyre-fixme[15]: `_fmin` overrides attribute defined in `SyntheticFunction`
# inconsistently.
_fmin = 0.397887
# pyre-fixme[15]: `_fmax` overrides attribute defined in `SyntheticFunction`
# inconsistently.
_fmax = 294.0

# pyre-fixme[56]: While applying decorator
@@ -308,18 +289,10 @@ def _f(self, X: np.ndarray) -> float:
class Aug_Branin(SyntheticFunction):
"""Augmented Branin function (3-dimensional with infinitely many global minima)."""

# pyre-fixme[15]: `_required_dimensionality` overrides attribute defined in
# `SyntheticFunction` inconsistently.
_required_dimensionality = 3
# pyre-fixme[15]: `_domain` overrides attribute defined in `SyntheticFunction`
# inconsistently.
_domain = [(-5, 10), (0, 15), (0, 1)]
_minimums = [(-np.pi, 12.275, 1), (np.pi, 2.275, 1), (9.42478, 2.475, 1)]
# pyre-fixme[15]: `_fmin` overrides attribute defined in `SyntheticFunction`
# inconsistently.
_fmin = 0.397887
# pyre-fixme[15]: `_fmax` overrides attribute defined in `SyntheticFunction`
# inconsistently.
_fmax = 294.0

# pyre-fixme[56]: While applying decorator
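The [15] suppressions removed above appear to follow one pattern: a base-class attribute initialised to None without an annotation is inferred as type None, so any subclass that assigns a concrete value overrides it "inconsistently". Illustrative sketch (not Ax code):

from typing import List, Tuple

class SyntheticFunctionBase:
    _domain = None  # un-annotated: Pyre infers the attribute type as `None`

class BraninLike(SyntheticFunctionBase):
    # pyre-fixme[15]: `_domain` overrides attribute defined in
    # `SyntheticFunctionBase` inconsistently.
    _domain: List[Tuple[float, float]] = [(-5.0, 10.0), (0.0, 15.0)]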
2 changes: 0 additions & 2 deletions ax/utils/testing/core_stubs.py
@@ -459,7 +459,6 @@ def get_batch_trial(abandon_arm: bool = True) -> BatchTrial:
batch.mark_arm_abandoned(batch.arms[0].name, "abandoned reason")
batch.runner = SyntheticRunner()
batch.set_status_quo_with_weight(status_quo=arms[0], weight=0.5)
# pyre-fixme[8]: Attribute has type `None`; used as `int`.
batch._generation_step_index = 0
return batch

@@ -513,7 +512,6 @@ def get_trial() -> Trial:
arm = get_arms_from_dict(get_arm_weights1())[0]
trial.add_arm(arm)
trial.runner = SyntheticRunner()
# pyre-fixme[8]: Attribute has type `None`; used as `int`.
trial._generation_step_index = 0
return trial

9 changes: 6 additions & 3 deletions ax/utils/testing/modeling_stubs.py
@@ -172,7 +172,7 @@ def transform_optimization_config(
fixed_features: ObservationFeatures,
) -> OptimizationConfig:
return ( # pyre-ignore[7]: pyre is right, this is a hack for testing.
# pyre-fixme[6]: `+` is not supported for operand types
# pyre-fixme[58]: `+` is not supported for operand types
# `OptimizationConfig` and `int`.
optimization_config + 1
if isinstance(optimization_config, int)
@@ -185,7 +185,10 @@ def transform_observation_features(
for obsf in observation_features:
if "x" in obsf.parameters:
obsf.parameters["x"] = (
not_none(obsf.parameters["x"]) + 1 # pyre-ignore[6]
# pyre-fixme[58]: `+` is not supported for operand types
# `Union[float, str]` and `int`.
not_none(obsf.parameters["x"])
+ 1
)
return observation_features

@@ -228,7 +231,7 @@ def transform_optimization_config(
fixed_features: ObservationFeatures,
) -> OptimizationConfig:
return (
# pyre-fixme[6]: `**` is not supported for operand types
# pyre-fixme[58]: `**` is not supported for operand types
# `OptimizationConfig` and `int`.
optimization_config ** 2
if isinstance(optimization_config, int)
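One placement detail visible in the transform_observation_features hunk above: a suppression written on its own line applies to the line that follows it, so when the one-liner not_none(obsf.parameters["x"]) + 1 is reflowed across several lines, the # pyre-fixme[58] comment moves inside the parenthesised expression, directly above the sub-expression Pyre reports on. A hypothetical sketch of the same placement:

from typing import Dict, Union

def bump(params: Dict[str, Union[float, str]]) -> None:
    params["x"] = (
        # pyre-fixme[58]: `+` is not supported for operand types
        # `Union[float, str]` and `int`.
        params["x"]
        + 1
    )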