Commit c5d40e5
Sean Sullivan committed:

fix pylint

1 parent b5f168d

4 files changed (+14 -14 lines)

elastic_datashader/config.py (+5 -5)
@@ -46,7 +46,7 @@ def load_datashader_headers(header_path_str: str) -> Dict[Any, Any]:
     try:
         loaded_yaml = yaml.safe_load(header_path.read_text(encoding='utf8'))
     except (OSError, IOError, yaml.YAMLError) as ex:
-        raise Exception(f"Failed to load HEADER_FILE from {header_path_str}") from ex
+        raise IOError(f"Failed to load HEADER_FILE from {header_path_str}") from ex
 
     if type(loaded_yaml) is not dict:
         raise ValueError(f"HEADER_FILE YAML should be a dict mapping, but received {loaded_yaml}")
@@ -60,7 +60,7 @@ def get_log_level(level_name: Optional[str]) -> int:
     level_value = getLevelName(level_name.upper())
 
     if type(level_value) is not int:
-        raise Exception(f"Invalid logging level {level_name}")
+        raise ValueError(f"Invalid logging level {level_name}")
 
     return level_value
 
@@ -78,13 +78,13 @@ def is_base64_encoded(value: str) -> bool:
 
 def check_config(c: Config) -> None:
     if not c.cache_path.exists():
-        raise Exception(f"DATASHADER_CACHE_DIRECTORY '{c.cache_path}' does not exist")
+        raise IOError(f"DATASHADER_CACHE_DIRECTORY '{c.cache_path}' does not exist")
 
     if not c.cache_path.is_dir():
-        raise Exception(f"DATASHADER_CACHE_DIRECTORY '{c.cache_path}' is not a directory")
+        raise IOError(f"DATASHADER_CACHE_DIRECTORY '{c.cache_path}' is not a directory")
 
     if c.api_key and not is_base64_encoded(c.api_key):
-        raise Exception(f"DATASHADER_ELASTIC_API_KEY '{c.api_key}' does not appear to be base64 encoded")
+        raise ValueError(f"DATASHADER_ELASTIC_API_KEY '{c.api_key}' does not appear to be base64 encoded")
 
 def config_from_env(env) -> Config:
     return Config(
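
Since check_config now raises IOError for filesystem problems and ValueError for a malformed API key (instead of a bare Exception), callers can branch on the failure mode. A minimal sketch, assuming a hypothetical Config instance named config:

    # Hypothetical caller: the specific exception types raised by check_config
    # let startup code distinguish filesystem errors from bad values.
    try:
        check_config(config)
    except IOError as ex:
        print(f"filesystem problem: {ex}")       # cache dir missing or not a directory
    except ValueError as ex:
        print(f"bad configuration value: {ex}")  # API key not base64 encoded

Note that in Python 3, IOError is an alias of OSError, so except OSError would catch the same cases.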

elastic_datashader/elastic.py (+3 -3)
@@ -382,7 +382,7 @@ def load_datashader_headers(header_file_path_str: Optional[str]) -> Dict[Any, Any]:
     try:
         loaded_yaml = yaml.safe_load(header_file_path.read_text(encoding='utf8'))
     except (OSError, IOError, yaml.YAMLError) as ex:
-        raise Exception(f"Failed to load HEADER_FILE from {header_file_path_str}") from ex
+        raise IOError(f"Failed to load HEADER_FILE from {header_file_path_str}") from ex
 
     if type(loaded_yaml) is not dict:
         raise ValueError(f"HEADER_FILE YAML should be a dict mapping, but received {loaded_yaml}")
@@ -576,14 +576,14 @@ def chunk_iter(iterable, chunk_size):
     chunks = [None] * chunk_size
     i = -1
     for i, v in enumerate(iterable):
-        idx = (i % chunk_size)
+        idx = i % chunk_size
         if idx == 0 and i > 0:
             i = -1
             yield (True, chunks)
         chunks[idx] = v
 
     if i >= 0:
-        last_written_idx = (i % chunk_size)
+        last_written_idx = i % chunk_size
         yield (False, chunks[0:last_written_idx+1])
 
 def bucket_noop(bucket, search):
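
From the context lines above, chunk_iter yields (flag, chunk) pairs: True alongside each full interior chunk and False alongside the final, possibly partial, chunk, reusing one backing list between yields. A short consumption sketch under those assumed semantics:

    # Iterate range(10) in chunks of 4; copy each chunk if it must outlive
    # the loop, since chunk_iter reuses the same list object between yields.
    for has_more, chunk in chunk_iter(range(10), 4):
        print(has_more, list(chunk))
    # expected output:
    # True [0, 1, 2, 3]
    # True [4, 5, 6, 7]
    # False [8, 9]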

elastic_datashader/parameters.py (+4 -4)
@@ -228,14 +228,14 @@ def get_time_bounds(now: datetime, from_time: Optional[str], to_time: Optional[str]
             start_time = convert_kibana_time(from_time, now, 'down')
         except ValueError as err:
             logger.exception("invalid from_time parameter")
-            raise Exception("invalid from_time parameter") from err
+            raise ValueError("invalid from_time parameter") from err
 
     if to_time:
         try:
             stop_time = convert_kibana_time(to_time, now, 'up')
         except ValueError as err:
             logger.exception("invalid to_time parameter")
-            raise Exception("invalid to_time parameter") from err
+            raise ValueError("invalid to_time parameter") from err
 
     if start_time and stop_time:
         start_time, stop_time = quantize_time_range(start_time, stop_time)
@@ -297,11 +297,11 @@ def extract_parameters(headers: Dict[Any, Any], query_params: Dict[Any, Any]) ->
     params["bucket_max"] = float(query_params.get("bucket_max", 1))
     params["timeOverlap"] = query_params.get("timeOverlap", "false") == "true"
     params["timeOverlapSize"] = query_params.get("timeOverlapSize", "auto")
-    params["debug"] = (query_params.get("debug", False) == 'true')
+    params["debug"] = query_params.get("debug", False) == 'true'
 
     if params["geopoint_field"] is None:
         logger.error("missing geopoint_field")
-        raise Exception("missing geopoint_field")
+        raise ValueError("missing geopoint_field")
 
     parameter_hash = get_parameter_hash(params)
     all_params = {**params, **unhashed_params}
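
The debug line above treats only the literal string 'true' as enabling the flag; any other value, or a missing key, evaluates to False. A quick illustration with hypothetical query parameter dicts:

    # Only the exact lowercase string 'true' turns the flag on.
    for qp in ({}, {"debug": "true"}, {"debug": "True"}, {"debug": "1"}):
        print(qp.get("debug", False) == 'true')
    # prints: False, True, False, False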

elastic_datashader/tilegen.py (+2 -2)
@@ -730,7 +730,7 @@ def generate_nonaggregated_tile(
         metrics.get("locations", 0),
         metrics.get("hits", 0),
     )
-    metrics["query_time"] = (s2 - s1)
+    metrics["query_time"] = s2 - s1
 
     estimated_points_per_tile = get_estimated_points_per_tile(span_range, global_bounds, z, global_doc_cnt)
 
@@ -1259,7 +1259,7 @@ def remap_bucket(bucket, search):
 
     s2 = time.time()
     logger.info("ES took %s (%s) for %s with %s searches", (s2 - s1), resp.total_took, len(df), resp.num_searches)
-    metrics["query_time"] = (s2 - s1)
+    metrics["query_time"] = s2 - s1
     metrics["query_took"] = resp.total_took
     metrics["num_searches"] = resp.num_searches
     metrics["aborted"] = resp.aborted
