Add caller module #75

Merged
merged 1 commit on Aug 9, 2023
70 changes: 50 additions & 20 deletions src/autometrics/decorator.py
@@ -1,5 +1,5 @@
"""Autometrics module."""
from contextvars import ContextVar
from contextvars import ContextVar, Token
import time
import inspect

@@ -20,7 +20,8 @@
T = TypeVar("T")


caller_var: ContextVar[str] = ContextVar("caller", default="")
caller_module_var: ContextVar[str] = ContextVar("caller.module", default="")
caller_function_var: ContextVar[str] = ContextVar("caller.function", default="")


# Bare decorator usage
@@ -63,12 +64,19 @@ def track_start(function: str, module: str):
function=function, module=module, track_concurrency=track_concurrency
)

def track_result_ok(start_time: float, function: str, module: str, caller: str):
def track_result_ok(
start_time: float,
function: str,
module: str,
caller_module: str,
caller_function: str,
):
get_tracker().finish(
start_time,
function=function,
module=module,
caller=caller,
caller_module=caller_module,
caller_function=caller_function,
objective=objective,
track_concurrency=track_concurrency,
result=Result.OK,
@@ -78,13 +86,15 @@ def track_result_error(
start_time: float,
function: str,
module: str,
caller: str,
caller_module: str,
caller_function: str,
):
get_tracker().finish(
start_time,
function=function,
module=module,
caller=caller,
caller_module=caller_module,
caller_function=caller_function,
objective=objective,
track_concurrency=track_concurrency,
result=Result.ERROR,
@@ -100,16 +110,23 @@ def sync_decorator(func: Callable[P, T]) -> Callable[P, T]:
@wraps(func)
def sync_wrapper(*args: P.args, **kwds: P.kwargs) -> T:
start_time = time.time()
caller = caller_var.get()
context_token = None
caller_module = caller_module_var.get()
caller_function = caller_function_var.get()
context_token_module: Optional[Token] = None
context_token_function: Optional[Token] = None

try:
context_token = caller_var.set(func_name)
context_token_module = caller_module_var.set(module_name)
context_token_function = caller_function_var.set(func_name)
if track_concurrency:
track_start(module=module_name, function=func_name)
result = func(*args, **kwds)
track_result_ok(
start_time, function=func_name, module=module_name, caller=caller
start_time,
function=func_name,
module=module_name,
caller_module=caller_module,
caller_function=caller_function,
)

except Exception as exception:
@@ -118,14 +135,17 @@ def sync_wrapper(*args: P.args, **kwds: P.kwargs) -> T:
start_time,
function=func_name,
module=module_name,
caller=caller,
caller_module=caller_module,
caller_function=caller_function,
)
# Reraise exception
raise exception

finally:
if context_token is not None:
caller_var.reset(context_token)
if context_token_module is not None:
caller_module_var.reset(context_token_module)
if context_token_function is not None:
caller_function_var.reset(context_token_function)

return result

@@ -142,16 +162,23 @@ def async_decorator(func: Callable[P, Awaitable[T]]) -> Callable[P, Awaitable[T]]:
@wraps(func)
async def async_wrapper(*args: P.args, **kwds: P.kwargs) -> T:
start_time = time.time()
caller = caller_var.get()
context_token = None
caller_module = caller_module_var.get()
caller_function = caller_function_var.get()
context_token_module: Optional[Token] = None
context_token_function: Optional[Token] = None

try:
context_token = caller_var.set(func_name)
context_token_module = caller_module_var.set(module_name)
context_token_function = caller_function_var.set(func_name)
if track_concurrency:
track_start(module=module_name, function=func_name)
result = await func(*args, **kwds)
track_result_ok(
start_time, function=func_name, module=module_name, caller=caller
start_time,
function=func_name,
module=module_name,
caller_module=caller_module,
caller_function=caller_function,
)

except Exception as exception:
@@ -160,14 +187,17 @@ async def async_wrapper(*args: P.args, **kwds: P.kwargs) -> T:
start_time,
function=func_name,
module=module_name,
caller=caller,
caller_module=caller_module,
caller_function=caller_function,
)
# Reraise exception
raise exception

finally:
if context_token is not None:
caller_var.reset(context_token)
if context_token_module is not None:
caller_module_var.reset(context_token_module)
if context_token_function is not None:
caller_function_var.reset(context_token_function)

return result

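The heart of this file's change: the single `caller` ContextVar becomes a `caller.module` / `caller.function` pair, set before each wrapped call and restored afterwards via the tokens returned by `ContextVar.set`. A minimal standalone sketch of that token pattern (the `track` decorator here is a hypothetical stand-in for `@autometrics`, not this repo's code):

from contextvars import ContextVar, Token
from functools import wraps
from typing import Optional

caller_module_var: ContextVar[str] = ContextVar("caller.module", default="")
caller_function_var: ContextVar[str] = ContextVar("caller.function", default="")

def track(func):
    module_name = func.__module__
    func_name = func.__qualname__

    @wraps(func)
    def wrapper(*args, **kwargs):
        # Whatever the vars hold now was set by our (tracked) caller.
        caller_module = caller_module_var.get()
        caller_function = caller_function_var.get()
        print(f"{module_name}.{func_name} called by "
              f"{caller_module or '<none>'}.{caller_function or '<none>'}")
        token_m: Optional[Token] = None
        token_f: Optional[Token] = None
        try:
            # Advertise this function as the caller of any nested tracked calls.
            token_m = caller_module_var.set(module_name)
            token_f = caller_function_var.set(func_name)
            return func(*args, **kwargs)
        finally:
            # reset() restores the previous value rather than clearing it,
            # so sibling calls at the same level still see the right caller.
            if token_m is not None:
                caller_module_var.reset(token_m)
            if token_f is not None:
                caller_function_var.reset(token_f)
    return wrapper

@track
def inner():
    pass

@track
def outer():
    inner()  # inner reports outer as its caller

outer()  # outer reports an empty caller; inner reports outer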
2 changes: 1 addition & 1 deletion src/autometrics/test_caller.py
@@ -38,6 +38,6 @@ def bar():
assert blob is not None
data = blob.decode("utf-8")

expected = """function_calls_total{caller="test_caller_detection.<locals>.bar",function="test_caller_detection.<locals>.foo",module="autometrics.test_caller",objective_name="",objective_percentile="",result="ok"} 1.0"""
expected = """function_calls_total{caller_function="test_caller_detection.<locals>.bar",caller_module="autometrics.test_caller",function="test_caller_detection.<locals>.foo",module="autometrics.test_caller",objective_name="",objective_percentile="",result="ok"} 1.0"""
assert "wrapper" not in data
assert expected in data
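For context, the `blob` these assertions decode is a scrape of the metrics registry; the fixture that produces it is elided from this diff. A sketch assuming the `prometheus_client` default registry:

from prometheus_client import generate_latest

blob = generate_latest()     # bytes in Prometheus exposition format
data = blob.decode("utf-8")  # lines like the `expected` string above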
28 changes: 14 additions & 14 deletions src/autometrics/test_decorator.py
@@ -66,7 +66,7 @@ def test_basic(self):
assert blob is not None
data = blob.decode("utf-8")

total_count = f"""function_calls_total{{caller="",function="basic_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0"""
total_count = f"""function_calls_total{{caller_function="",caller_module="",function="basic_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0"""
assert total_count in data

for latency in ObjectiveLatency:
@@ -94,7 +94,7 @@ async def test_basic_async(self):
assert blob is not None
data = blob.decode("utf-8")

total_count = f"""function_calls_total{{caller="",function="basic_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0"""
total_count = f"""function_calls_total{{caller_function="",caller_module="",function="basic_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 1.0"""
assert total_count in data

for latency in ObjectiveLatency:
@@ -130,7 +130,7 @@ def test_objectives(self):
assert blob is not None
data = blob.decode("utf-8")

total_count = f"""function_calls_total{{caller="",function="{function_name}",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0"""
total_count = f"""function_calls_total{{caller_function="",caller_module="",function="{function_name}",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0"""
assert total_count in data

# Check the latency buckets
@@ -170,7 +170,7 @@ async def test_objectives_async(self):
assert blob is not None
data = blob.decode("utf-8")

total_count = f"""function_calls_total{{caller="",function="basic_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0"""
total_count = f"""function_calls_total{{caller_function="",caller_module="",function="basic_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 1.0"""
assert total_count in data

# Check the latency buckets
@@ -199,7 +199,7 @@ def test_exception(self):
assert blob is not None
data = blob.decode("utf-8")

total_count = f"""function_calls_total{{caller="",function="error_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0"""
total_count = f"""function_calls_total{{caller_function="",caller_module="",function="error_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0"""
assert total_count in data

for latency in ObjectiveLatency:
@@ -230,7 +230,7 @@ async def test_async_exception(self):
assert blob is not None
data = blob.decode("utf-8")

total_count = f"""function_calls_total{{caller="",function="error_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0"""
total_count = f"""function_calls_total{{caller_function="",caller_module="",function="error_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 1.0"""
assert total_count in data

for latency in ObjectiveLatency:
@@ -253,10 +253,10 @@ def test_initialize_counters_sync(self):
assert blob is not None
data = blob.decode("utf-8")

total_count_ok = f"""function_calls_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
total_count_ok = f"""function_calls_total{{caller_function="",caller_module="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
assert total_count_ok in data

total_count_error = f"""function_calls_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
total_count_error = f"""function_calls_total{{caller_function="",caller_module="",function="never_called_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
assert total_count_error in data

def test_initialize_counters_sync_with_objective(self):
Expand All @@ -273,10 +273,10 @@ def test_initialize_counters_sync_with_objective(self):
assert blob is not None
data = blob.decode("utf-8")

total_count_ok = f"""function_calls_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
total_count_ok = f"""function_calls_total{{caller_function="",caller_module="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
assert total_count_ok in data

total_count_error = f"""function_calls_total{{caller="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
total_count_error = f"""function_calls_total{{caller_function="",caller_module="",function="never_called_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
assert total_count_error in data

@pytest.mark.asyncio
@@ -290,10 +290,10 @@ async def test_initialize_counters_async(self):
assert blob is not None
data = blob.decode("utf-8")

total_count_ok = f"""function_calls_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
total_count_ok = f"""function_calls_total{{caller_function="",caller_module="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="ok"}} 0.0"""
assert total_count_ok in data

total_count_error = f"""function_calls_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
total_count_error = f"""function_calls_total{{caller_function="",caller_module="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="",objective_percentile="",result="error"}} 0.0"""
assert total_count_error in data

@pytest.mark.asyncio
@@ -311,8 +311,8 @@ async def test_initialize_counters_async_with_objective(self):
assert blob is not None
data = blob.decode("utf-8")

total_count_ok = f"""function_calls_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
total_count_ok = f"""function_calls_total{{caller_function="",caller_module="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="ok"}} 0.0"""
assert total_count_ok in data

total_count_error = f"""function_calls_total{{caller="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
total_count_error = f"""function_calls_total{{caller_function="",caller_module="",function="never_called_async_function",module="autometrics.test_decorator",objective_name="{objective_name}",objective_percentile="{success_rate.value}",result="error"}} 0.0"""
assert total_count_error in data
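These cases exercise the public decorator both bare and with an objective attached. A hedged usage sketch of the API the tests target (names follow the autometrics-py README of this period; verify against the repo):

from autometrics import autometrics
from autometrics.objectives import Objective, ObjectivePercentile

# An SLO: 99% of calls should succeed.
API_SLO = Objective("api", success_rate=ObjectivePercentile.P99)

@autometrics(objective=API_SLO)
def create_user():
    ...

create_user()  # increments function_calls_total{..., objective_name="api",
               # objective_percentile="99", ...}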
44 changes: 37 additions & 7 deletions src/autometrics/tracker/opentelemetry.py
@@ -80,7 +80,8 @@ def __count(
self,
function: str,
module: str,
caller: str,
caller_module: str,
caller_function: str,
objective: Optional[Objective],
exemplar: Optional[dict],
result: Result,
@@ -98,7 +99,8 @@
"function": function,
"module": module,
"result": result.value,
"caller": caller,
"caller.module": caller_module,
"caller.function": caller_function,
OBJECTIVE_NAME: objective_name,
OBJECTIVE_PERCENTILE: percentile,
},
@@ -164,7 +166,8 @@ def finish(
start_time: float,
function: str,
module: str,
caller: str,
caller_module: str,
caller_function: str,
result: Result = Result.OK,
objective: Optional[Objective] = None,
track_concurrency: Optional[bool] = False,
@@ -176,7 +179,15 @@
# https://github.com/autometrics-dev/autometrics-py/issues/41
# if os.getenv("AUTOMETRICS_EXEMPLARS") == "true":
# exemplar = get_exemplar()
self.__count(function, module, caller, objective, exemplar, result)
self.__count(
function,
module,
caller_module,
caller_function,
objective,
exemplar,
result,
)
self.__histogram(function, module, start_time, objective, exemplar)
if track_concurrency:
self.__up_down_counter_concurrency_instance.add(
@@ -194,6 +205,25 @@ def initialize_counters(
objective: Optional[Objective] = None,
):
"""Initialize tracking metrics for a function call at zero."""
caller = ""
self.__count(function, module, caller, objective, None, Result.OK, 0)
self.__count(function, module, caller, objective, None, Result.ERROR, 0)
caller_module = ""
caller_function = ""
self.__count(
function,
module,
caller_module,
caller_function,
objective,
None,
Result.OK,
0,
)
self.__count(
function,
module,
caller_module,
caller_function,
objective,
None,
Result.ERROR,
0,
)
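A note on naming: this tracker records the attributes as `caller.module` / `caller.function`, while the tests assert on `caller_module` / `caller_function`. That is expected: the OpenTelemetry Prometheus exporter sanitizes attribute names, mapping characters that are illegal in Prometheus labels (such as `.`) to `_`. A minimal hedged demonstration, assuming the `opentelemetry-sdk` and `opentelemetry-exporter-prometheus` packages and that the reader registers with prometheus_client's default registry:

from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.exporter.prometheus import PrometheusMetricReader
from prometheus_client import generate_latest

reader = PrometheusMetricReader()
meter = MeterProvider(metric_readers=[reader]).get_meter("demo")

counter = meter.create_counter("function_calls")
counter.add(1, {"caller.module": "app.api", "caller.function": "handler"})

# The exposition output carries caller_module="app.api",
# caller_function="handler" labels on function_calls_total.
print(generate_latest().decode("utf-8"))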