Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

remove attrdict #118

Merged
merged 6 commits into from
Aug 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 1 addition & 2 deletions mango/domain/domain_space.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
import math
import numpy as np
from collections.abc import Callable
import warnings
from itertools import compress
from functools import cached_property
import warnings
Expand Down Expand Up @@ -272,7 +271,7 @@ def classify_parameters(param_dict: dict) -> (set, set, set, set):
try:
# this check takes care of numpy ints as well
all_int = all(
x == int(x) and type(x) != bool for x in param_dict[par]
x == int(x) and not isinstance(x, bool) for x in param_dict[par]
)
except (ValueError, TypeError):
all_int = False
Expand Down
2 changes: 0 additions & 2 deletions mango/domain/parameter_sampler.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
from abc import ABCMeta, abstractmethod

import warnings

import numpy as np
Expand Down
2 changes: 0 additions & 2 deletions mango/metatuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,6 @@
from tqdm.auto import tqdm
import random


## setting warnings to ignore for now
import warnings

warnings.filterwarnings("ignore")
Expand Down
10 changes: 5 additions & 5 deletions mango/optimizer/bayesian_learning.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,8 @@ def Get_Upper_Confidence_Bound(self, X):
return Value

"""
Returns the most optimal x along with mean value from the domain of x and making sure it is not a Duplicate (depending on closeness)
Returns the most optimal x along with mean value from the domain of x and
making sure it is not a Duplicate (depending on closeness)
used in batch setting: As mean is also returned
"""

Expand All @@ -128,7 +129,7 @@ def remove_duplicates(self, X, X_Sample, mu, Value):
# check if x_optimal is in X_Sample
check_closeness = self.closeness(x_optimal, X_Sample)

if check_closeness == False: # No close element to x_optimal in X_Sample
if check_closeness is False: # No close element to x_optimal in X_Sample
break

# we will look for next optimal value to try
Expand Down Expand Up @@ -159,7 +160,7 @@ def remove_duplicates_serial(self, X, X_Sample, Value):
# check if x_optimal is in X_Sample
check_closeness = self.closeness(x_optimal, X_Sample)

if check_closeness == False: # No close element to x_optimal in X_Sample
if check_closeness is False: # No close element to x_optimal in X_Sample
break

# we will look for next optimal value to try
Expand Down Expand Up @@ -287,7 +288,7 @@ def remove_duplicates_MetaTuner(self, X, X_Sample, mu, Value, Value_ext):
# check if x_optimal is in X_Sample
check_closeness = self.closeness(x_optimal, X_Sample)

if check_closeness == False: # No close element to x_optimal in X_Sample
if check_closeness is False: # No close element to x_optimal in X_Sample
break

# we will look for next optimal value to try
Expand Down Expand Up @@ -386,7 +387,6 @@ def get_next_batch_MetaTuner(

try:
self.surrogate.fit(X_temp, Y_temp)

except:
print("*" * 100)
print(X_temp)
Expand Down
1 change: 0 additions & 1 deletion mango/scheduler.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,6 @@ def wrapper(params_batch):


def celery(n_jobs, timeout=None):
import celery
from celery import exceptions

def decorator(func):
Expand Down
3 changes: 2 additions & 1 deletion mango/tuner.py
Original file line number Diff line number Diff line change
Expand Up @@ -246,7 +246,8 @@ def runBayesianOptimizer(self):

if len(Y_next_list) == 0:
# no values returned
# this is problematic if domain is small and same value is tried again in the next iteration as the optimizer would be stuck
# this is problematic if domain is small and same value is tried again
# in the next iteration as the optimizer would be stuck
continue

Y_next_batch = Y_next_list.reshape(len(Y_next_list), 1)
Expand Down
19 changes: 2 additions & 17 deletions poetry.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 1 addition & 2 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "arm-mango"
version = "1.5.0"
version = "1.5.1"
description = "parallel bayesian optimization over complex search spaces"
authors = ["Sandeep Singh Sandha <sandha.iitr@gmail.com>", "Mohit Aggarwal <mohitagg@gmail.com>"]
license = "Apache-2.0"
Expand All @@ -20,7 +20,6 @@ numpy = ">=1.17.0"
scipy = ">=1.4.1"
scikit_learn = ">=0.21.3"
tqdm = ">=4.36.1"
attrdict = ">=2.0.1"

[tool.poetry.group.dev.dependencies]
black = "^24.2.0"
Expand Down
Loading