Move LLMUsageTracker to a cookbook (#4549)

* Move LLMUsageTracker to a cookbook

* rename, add to index

* fmt and remove test

* Update python/packages/autogen-core/docs/src/user-guide/core-user-guide/cookbook/llm-usage-logger.ipynb

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>

* Fix missing quotation marks in notebook

---------

Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
This commit is contained in:
Jack Gerrits 2024-12-04 14:49:39 -08:00 committed by GitHub
parent 1a448c10b9
commit d85a607da9
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
5 changed files with 129 additions and 92 deletions

View File

@ -18,4 +18,5 @@ local-llms-ollama-litellm
instrumenting
topic-subscription-scenarios
structured-output-agent
llm-usage-logger
```

View File

@ -0,0 +1,128 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Tracking LLM usage with a logger\n",
"\n",
    "The model clients included in AutoGen emit structured events that can be used to track model usage. This notebook demonstrates how to consume those events with a custom logging handler to track token usage.\n",
"\n",
"These events are logged to the logger with the name: :py:attr:`autogen_core.application.logging.EVENT_LOGGER_NAME`."
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import logging\n",
"\n",
"from autogen_core.application.logging.events import LLMCallEvent\n",
"\n",
"\n",
"class LLMUsageTracker(logging.Handler):\n",
" def __init__(self) -> None:\n",
" \"\"\"Logging handler that tracks the number of tokens used in the prompt and completion.\n",
"\n",
" Example:\n",
"\n",
" .. code-block:: python\n",
"\n",
" from autogen_core.application.logging import LLMUsageTracker, EVENT_LOGGER_NAME\n",
"\n",
" # Set up the logging configuration to use the custom handler\n",
" logger = logging.getLogger(EVENT_LOGGER_NAME)\n",
" logger.setLevel(logging.INFO)\n",
" llm_usage = LLMUsageTracker()\n",
" logger.handlers = [llm_usage]\n",
"\n",
" # ...\n",
"\n",
" print(llm_usage.prompt_tokens)\n",
" print(llm_usage.completion_tokens)\n",
"\n",
" \"\"\"\n",
" super().__init__()\n",
" self._prompt_tokens = 0\n",
" self._completion_tokens = 0\n",
"\n",
" @property\n",
" def tokens(self) -> int:\n",
" return self._prompt_tokens + self._completion_tokens\n",
"\n",
" @property\n",
" def prompt_tokens(self) -> int:\n",
" return self._prompt_tokens\n",
"\n",
" @property\n",
" def completion_tokens(self) -> int:\n",
" return self._completion_tokens\n",
"\n",
" def reset(self) -> None:\n",
" self._prompt_tokens = 0\n",
" self._completion_tokens = 0\n",
"\n",
" def emit(self, record: logging.LogRecord) -> None:\n",
" \"\"\"Emit the log record. To be used by the logging module.\"\"\"\n",
" try:\n",
" # Use the StructuredMessage if the message is an instance of it\n",
" if isinstance(record.msg, LLMCallEvent):\n",
" event = record.msg\n",
" self._prompt_tokens += event.prompt_tokens\n",
" self._completion_tokens += event.completion_tokens\n",
" except Exception:\n",
" self.handleError(record)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Then, this logger can be attached like any other Python logger and the values read after the model is run."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from autogen_core.application.logging import EVENT_LOGGER_NAME\n",
"\n",
"# Set up the logging configuration to use the custom handler\n",
"logger = logging.getLogger(EVENT_LOGGER_NAME)\n",
"logger.setLevel(logging.INFO)\n",
"llm_usage = LLMUsageTracker()\n",
"logger.handlers = [llm_usage]\n",
"\n",
"# client.create(...)\n",
"\n",
"print(llm_usage.prompt_tokens)\n",
"print(llm_usage.completion_tokens)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": ".venv",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

View File

@ -1,5 +1,3 @@
from ._llm_usage import LLMUsageTracker
ROOT_LOGGER_NAME = "autogen_core"
"""str: Logger name used for structured event logging"""
@ -14,5 +12,4 @@ __all__ = [
"ROOT_LOGGER_NAME",
"EVENT_LOGGER_NAME",
"TRACE_LOGGER_NAME",
"LLMUsageTracker",
]

View File

@ -1,57 +0,0 @@
import logging
from .events import LLMCallEvent
class LLMUsageTracker(logging.Handler):
    def __init__(self) -> None:
        """Logging handler that accumulates prompt and completion token counts
        from :class:`LLMCallEvent` records emitted by the model clients.

        Example:

            .. code-block:: python

                from autogen_core.application.logging import LLMUsageTracker, EVENT_LOGGER_NAME

                # Route structured events to the tracker.
                logger = logging.getLogger(EVENT_LOGGER_NAME)
                logger.setLevel(logging.INFO)
                llm_usage = LLMUsageTracker()
                logger.handlers = [llm_usage]

                # ...

                print(llm_usage.prompt_tokens)
                print(llm_usage.completion_tokens)

        """
        super().__init__()
        # Start with zeroed counters; reset() is the single place they are initialized.
        self.reset()

    @property
    def tokens(self) -> int:
        """Total tokens observed so far (prompt + completion)."""
        return self.prompt_tokens + self.completion_tokens

    @property
    def prompt_tokens(self) -> int:
        """Number of prompt tokens accumulated across all tracked calls."""
        return self._prompt_tokens

    @property
    def completion_tokens(self) -> int:
        """Number of completion tokens accumulated across all tracked calls."""
        return self._completion_tokens

    def reset(self) -> None:
        """Zero both counters so tracking can start over."""
        self._prompt_tokens = 0
        self._completion_tokens = 0

    def emit(self, record: logging.LogRecord) -> None:
        """Emit the log record. To be used by the logging module.

        Only records whose ``msg`` is an :class:`LLMCallEvent` contribute to the
        counters; any other record is ignored. Errors are routed to
        :meth:`logging.Handler.handleError` as the logging protocol expects.
        """
        try:
            event = record.msg
            # Ignore anything that is not a structured LLM call event.
            if not isinstance(event, LLMCallEvent):
                return
            self._prompt_tokens += event.prompt_tokens
            self._completion_tokens += event.completion_tokens
        except Exception:
            self.handleError(record)

View File

@ -1,32 +0,0 @@
import logging
from autogen_core.application.logging import EVENT_LOGGER_NAME, LLMUsageTracker
from autogen_core.application.logging.events import LLMCallEvent
def test_llm_usage() -> None:
    """Verify that LLMUsageTracker accumulates, resets, and resumes token counts."""
    # Route structured events from the event logger into our tracker.
    logger = logging.getLogger(EVENT_LOGGER_NAME)
    logger.setLevel(logging.INFO)
    tracker = LLMUsageTracker()
    logger.handlers = [tracker]

    # Counts accumulate across successive events.
    logger.info(LLMCallEvent(prompt_tokens=10, completion_tokens=20))
    assert (tracker.prompt_tokens, tracker.completion_tokens) == (10, 20)

    logger.info(LLMCallEvent(prompt_tokens=1, completion_tokens=1))
    assert (tracker.prompt_tokens, tracker.completion_tokens) == (11, 21)

    # reset() zeroes the counters, and tracking resumes afterwards.
    tracker.reset()
    assert (tracker.prompt_tokens, tracker.completion_tokens) == (0, 0)

    logger.info(LLMCallEvent(prompt_tokens=1, completion_tokens=1))
    assert (tracker.prompt_tokens, tracker.completion_tokens) == (1, 1)