fix flake8 errors
pindge committed Aug 16, 2022
1 parent 406e19f commit 4b51c90
Showing 7 changed files with 25 additions and 21 deletions.
4 changes: 2 additions & 2 deletions .pre-commit-config.yaml
@@ -6,8 +6,8 @@ repos:
   hooks:
   - id: shed
 # Python Linting
-- repo: https://gitlab.com/pycqa/flake8
-  rev: 3.9.2
+- repo: https://github.com/pycqa/flake8
+  rev: 4.0.1
   hooks:
   - id: flake8
     additional_dependencies:
5 changes: 4 additions & 1 deletion cubedash/_model.py
@@ -295,7 +295,10 @@ def enable_prometheus():
     )
 
     metrics = GunicornInternalPrometheusMetrics(app, group_by="endpoint")
-    _LOG.info(f"Prometheus metrics enabled : {metrics}")
+    _LOG.info(
+        "Prometheus metrics enabled : {metrics}",
+        extra=dict(metrics=metrics)
+    )
 
 
 @app.before_first_request
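The change above swaps an eagerly formatted f-string for a constant message plus `extra`, the kind of call that flake8's logging plugins (e.g. flake8-logging-format's G004 check) flag. Note that with the standard library's logging module, values passed via `extra` become attributes on the LogRecord rather than being substituted into the message, so the literal `{metrics}` placeholder stays as-is unless a formatter references the attribute. A minimal sketch of that behaviour, assuming `_LOG` is a stdlib `logging.Logger` (the handler setup here is illustrative, not from the repository):

import logging

handler = logging.StreamHandler()
# The formatter can surface attributes attached via `extra`;
# "{metrics}" inside the message string itself is not interpolated.
handler.setFormatter(logging.Formatter("%(message)s (metrics=%(metrics)s)"))

log = logging.getLogger("demo")
log.addHandler(handler)
log.setLevel(logging.INFO)

log.info("Prometheus metrics enabled : {metrics}", extra=dict(metrics="<registry>"))
# -> Prometheus metrics enabled : {metrics} (metrics=<registry>)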
8 changes: 4 additions & 4 deletions cubedash/_utils.py
@@ -407,7 +407,7 @@ def dataset_created(dataset: Dataset) -> Optional[datetime]:
     try:
         return default_utc(dc_utils.parse_time(value))
     except ValueError:
-        _LOG.warn("invalid_dataset.creation_dt", dataset_id=dataset.id, value=value)
+        _LOG.warning("invalid_dataset.creation_dt", dataset_id=dataset.id, value=value)
 
     return None
 
@@ -818,12 +818,12 @@ def dataset_shape(ds: Dataset) -> Tuple[Optional[Polygon], bool]:
         return None, False
 
     if extent is None:
-        log.warn("invalid_dataset.empty_extent")
+        log.warning("invalid_dataset.empty_extent")
         return None, False
     geom = shape(extent.to_crs(CRS(_TARGET_CRS)))
 
     if not geom.is_valid:
-        log.warn(
+        log.warning(
             "invalid_dataset.invalid_extent",
             reason_text=shapely.validation.explain_validity(geom),
         )
@@ -837,7 +837,7 @@ def dataset_shape(ds: Dataset) -> Tuple[Optional[Polygon], bool]:
         return clean, False
 
     if geom.is_empty:
-        _LOG.warn("invalid_dataset.empty_extent_geom", dataset_id=ds.id)
+        _LOG.warning("invalid_dataset.empty_extent_geom", dataset_id=ds.id)
         return None, False
 
     return geom, True
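The bulk of this commit is this rename from `.warn()` to `.warning()`. In the standard library, `Logger.warn` is an undocumented alias of `Logger.warning`, deprecated since Python 3.3, and calling it emits a `DeprecationWarning`; structlog-style loggers, which these `event_name, key=value` calls resemble, also use `warning` as the canonical spelling. A small stdlib demonstration (the logger name is illustrative):

import logging
import warnings

logging.basicConfig(level=logging.WARNING)
log = logging.getLogger("demo")

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    log.warn("old spelling")     # deprecated alias, still logs
    log.warning("new spelling")  # canonical method

print([str(w.message) for w in caught])
# ["The 'warn' method is deprecated, use 'warning' instead"]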
2 changes: 1 addition & 1 deletion cubedash/generate.py
@@ -131,7 +131,7 @@ def print_status(product_name=None, year=None, month=None, day=None, summary=Non
         log.warning("product.unsupported", reason=e.reason)
         return product_name, GenerateResult.UNSUPPORTED, None
     except Exception:
-        log.exception("product.error", exc_info=True)
+        log.exception("product.error")
         return product_name, GenerateResult.ERROR, None
     finally:
         store.index.close()
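`Logger.exception` already logs at ERROR level with `exc_info=True` implied, so the explicit keyword was redundant; this is the pattern flake8-logging-format reports as G202, assuming that plugin is among the (truncated) additional_dependencies above. A minimal sketch:

import logging

logging.basicConfig(level=logging.ERROR)
log = logging.getLogger("demo")

try:
    1 / 0
except Exception:
    # .exception() attaches the active traceback on its own;
    # adding exc_info=True on top of it is redundant.
    log.exception("product.error")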
6 changes: 3 additions & 3 deletions cubedash/summary/_model.py
@@ -273,7 +273,7 @@ def footprint_srid(self):
         epsg = self.footprint_crs.lower()
 
         if not epsg.startswith("epsg:"):
-            _LOG.warn("unsupported.to_srid", crs=self.footprint_crs)
+            _LOG.warning("unsupported.to_srid", crs=self.footprint_crs)
             return None
         return int(epsg.split(":")[1])

@@ -319,12 +319,12 @@ def _create_unified_footprint(
         # avoid non-noded intersection.
         # TODO: does shapely have a snap-to-grid?
         try:
-            _LOG.warn("summary.footprint.invalid_union", exc_info=True)
+            _LOG.warning("summary.footprint.invalid_union", exc_info=True)
             geometry_union = shapely.ops.unary_union(
                 [p.footprint_geometry.buffer(0.001) for p in with_valid_geometries]
             )
         except ValueError:
-            _LOG.warn("summary.footprint.invalid_buffered_union", exc_info=True)
+            _LOG.warning("summary.footprint.invalid_buffered_union", exc_info=True)
 
             # Attempt 3 at union: Recursive filter bad polygons first
             polygonlist = _polygon_chain(with_valid_geometries)
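For context, the unchanged lines around this hunk are a fallback chain for unioning dataset footprints: when a plain union fails on nearly coincident edges (a "non-noded intersection"), each polygon is padded with a tiny `buffer(0.001)` first, since buffering re-nodes rings and repairs self-intersections at the cost of slightly growing the footprint. A rough shapely illustration of that repair trick (the bowtie polygon is an invented example, not from the commit):

import shapely.ops
from shapely.geometry import Polygon

# A self-intersecting "bowtie" ring is invalid and can break naive unions.
bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])
print(bowtie.is_valid)  # False

# A small positive buffer re-nodes the ring and yields valid geometry.
patched = shapely.ops.unary_union([bowtie.buffer(0.001)])
print(patched.is_valid)  # True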
16 changes: 8 additions & 8 deletions cubedash/summary/_schema.py
@@ -304,7 +304,7 @@ def update_schema(engine: Engine) -> Set[PleaseRefresh]:
     refresh = set()
 
     if not pg_column_exists(engine, f"{CUBEDASH_SCHEMA}.product", "fixed_metadata"):
-        _LOG.warn("schema.applying_update.add_fixed_metadata")
+        _LOG.warning("schema.applying_update.add_fixed_metadata")
         engine.execute(
             f"""
         alter table {CUBEDASH_SCHEMA}.product add column fixed_metadata jsonb
@@ -316,20 +316,20 @@ def update_schema(engine: Engine) -> Set[PleaseRefresh]:
         engine,
         f"{CUBEDASH_SCHEMA}.{_COLLECTION_ITEMS_INDEX.name}",
     ):
-        _LOG.warn("schema.applying_update.add_collection_items_idx")
+        _LOG.warning("schema.applying_update.add_collection_items_idx")
         _COLLECTION_ITEMS_INDEX.create(engine)
 
     if not pg_exists(
         engine,
         f"{CUBEDASH_SCHEMA}.{_ALL_COLLECTIONS_ORDER_INDEX.name}",
     ):
-        _LOG.warn("schema.applying_update.add_all_collections_idx")
+        _LOG.warning("schema.applying_update.add_all_collections_idx")
         _ALL_COLLECTIONS_ORDER_INDEX.create(engine)
 
     if not pg_column_exists(
         engine, f"{CUBEDASH_SCHEMA}.time_overview", "product_refresh_time"
     ):
-        _LOG.warn("schema.applying_update.add_refresh_time")
+        _LOG.warning("schema.applying_update.add_refresh_time")
         engine.execute(
             f"""
         alter table {CUBEDASH_SCHEMA}.time_overview
@@ -340,7 +340,7 @@ def update_schema(engine: Engine) -> Set[PleaseRefresh]:
     if not pg_column_exists(
         engine, f"{CUBEDASH_SCHEMA}.product", "last_successful_summary"
     ):
-        _LOG.warn("schema.applying_update.add_summary_success_time")
+        _LOG.warning("schema.applying_update.add_summary_success_time")
         engine.execute(
             f"""
         alter table {CUBEDASH_SCHEMA}.product
@@ -363,7 +363,7 @@ def check_or_update_odc_schema(engine: Engine):
     try:
         # We can try to install it ourselves if we have permission, using ODC's code.
         if not pg_column_exists(engine, ODC_DATASET.fullname, "updated"):
-            _LOG.warn("schema.applying_update.add_odc_change_triggers")
+            _LOG.warning("schema.applying_update.add_odc_change_triggers")
             _utils.install_timestamp_trigger(engine)
     except ProgrammingError as e:
         # We don't have permission.
@@ -389,14 +389,14 @@ def check_or_update_odc_schema(engine: Engine):
     if not pg_index_exists(
         engine, ODC_DATASET.schema, ODC_DATASET.name, "ix_dataset_added"
     ):
-        _LOG.warn("schema.applying_update.add_odc_added_index")
+        _LOG.warning("schema.applying_update.add_odc_added_index")
         statements.append(
             f"create index ix_dataset_added on {ODC_DATASET.fullname}(added desc);"
         )
     if not pg_index_exists(
         engine, ODC_DATASET.schema, ODC_DATASET.name, "ix_dataset_type_changed"
     ):
-        _LOG.warn("schema.applying_update.add_odc_changed_index")
+        _LOG.warning("schema.applying_update.add_odc_changed_index")
         statements.append(
             f"create index ix_dataset_type_changed on "
             f"{ODC_DATASET.fullname}(dataset_type_ref, greatest(added, updated, archived) desc);"
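Both functions in this file follow the same idempotent-migration pattern: probe the live schema (`pg_column_exists`, `pg_index_exists`), log which update is being applied, then run the DDL, so the whole routine is safe to re-run. A generic sketch of that pattern; `column_exists` is a hypothetical stand-in for cubedash's own helper, the connection URL is illustrative, and `engine.execute` mirrors the SQLAlchemy 1.x style used in the diff:

from sqlalchemy import create_engine

engine = create_engine("postgresql:///datacube")  # illustrative URL

def column_exists(engine, schema: str, table: str, column: str) -> bool:
    # Generic stand-in for cubedash's pg_column_exists().
    row = engine.execute(
        "select 1 from information_schema.columns "
        "where table_schema = %s and table_name = %s and column_name = %s",
        (schema, table, column),
    ).fetchone()
    return row is not None

# Safe to run repeatedly: the DDL fires only while the column is missing.
if not column_exists(engine, "cubedash", "product", "fixed_metadata"):
    engine.execute("alter table cubedash.product add column fixed_metadata jsonb")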
5 changes: 3 additions & 2 deletions cubedash/summary/_stores.py
@@ -761,7 +761,8 @@ def _get_linked_products(
         ).fetchone()
 
         _LOG.info(
-            f"product.links.{kind}",
+            "product.links.{kind}",
+            extra=dict(kind=kind),
             product=product.name,
             linked=linked_product_names,
             sample_percentage=round(sample_percentage, 2),
@@ -1468,7 +1469,7 @@ def refresh(
             # Then choose the whole time range of the product to generate.
             log.info("product.generate_whole_range")
             if force:
-                log.warn("forcing_refresh")
+                log.warning("forcing_refresh")
 
                 # Regenerate the old months too, in case any have been deleted.
                 old_months = self._already_summarised_months(product_name)
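The first hunk here follows the same theme as the _model.py change: the log event name stops being an f-string (`f"product.links.{kind}"`) and the variable part moves into the event's data. Keeping event names constant makes logs greppable and satisfies the no-f-string lint rule. A sketch assuming a structlog-style logger, which the `event, key=value` call shape elsewhere in this diff suggests (the field values are invented):

import structlog

log = structlog.get_logger()

kind = "derived"  # illustrative value
# Static event name; variability is carried as structured fields.
log.info("product.links", kind=kind, product="ls8_ard", sample_percentage=5.0)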
