👔 Project settings are now encapsulated in the `guidellm.config` module
* The module currently contains only an `__init__.py` file
* All `os.getenv` calls are replaced with the `pydantic_settings` facade
* The `GUIDELLM__` env prefix is set (see the sketch below)
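
For illustration, a minimal sketch with hypothetical values (assuming pydantic-settings v2 nested-delimiter behavior and the repo's `src/` layout on the import path) of how `GUIDELLM__`-prefixed variables reach the nested settings:

```python
import os

from config import Settings  # module added by this commit

# These could equally live in the .env file read via SettingsConfigDict.
os.environ["GUIDELLM__OPENAI__BASE_URL"] = "http://127.0.0.1:8080"
os.environ["GUIDELLM__LOGGING__CONSOLE_LOG_LEVEL"] = "DEBUG"

# Each "__"-delimited segment after the prefix selects a nested model, then a field.
settings = Settings()
assert settings.openai.base_url == "http://127.0.0.1:8080"
assert settings.logging.console_log_level == "DEBUG"
```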
Dmytro Parfeniuk committed Jul 24, 2024
1 parent 361f7c6 commit 9047075
Showing 10 changed files with 98 additions and 80 deletions.
4 changes: 2 additions & 2 deletions README.md
@@ -16,8 +16,8 @@ set -o allexport; source .env; set +o allexport

| Variable | Default Value | Description |
| --------------- | --------------------- | -------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| OPENAI_BASE_URL | http://127.0.0.1:8080 | The host where the `openai` library will make requests to. For running integration tests it is required to have the external OpenAI compatible server running. |
| OPENAI_API_KEY | invalid | [OpenAI Platform](https://platform.openai.com/api-keys) to create a new API key. This value is not used for tests. |
| GUIDELLM__OPENAI__BASE_URL | http://127.0.0.1:8080 | The base URL the `openai` client sends requests to. Running the integration tests requires an external OpenAI-compatible server at this address. |
| GUIDELLM__OPENAI__API_KEY | invalid | The OpenAI API key; create one on the [OpenAI Platform](https://platform.openai.com/api-keys). This value is not used for tests. |

</br>

1 change: 1 addition & 0 deletions pyproject.toml
@@ -31,6 +31,7 @@ dependencies = [
"numpy",
"openai",
"pydantic>=2.0.0",
"pydantic-settings>=2.0.0",
"pyyaml>=6.0.0",
"requests",
"transformers",
52 changes: 52 additions & 0 deletions src/config/__init__.py
@@ -0,0 +1,52 @@
from typing import Optional

from pydantic import BaseModel
from pydantic_settings import BaseSettings, SettingsConfigDict

__all__ = ["settings"]


class LoggingSettings(BaseModel):
    disabled: bool = False
    clear_loggers: bool = True
    console_log_level: Optional[str] = "INFO"
    log_file: Optional[str] = None
    log_file_level: Optional[str] = None


class OpenAISettings(BaseModel):
    # OpenAI API key.
    api_key: str = "invalid"

    # OpenAI-compatible server URL.
    # NOTE: the default value is the default address of the llama.cpp web server.
    base_url: str = "http://localhost:8080"


class Settings(BaseSettings):
    """
    All the settings are powered by pydantic_settings and can be
    populated from the .env file.

    The settings are populated in the following format:

    ```sh
    export GUIDELLM__LOGGING__DISABLED=true
    export GUIDELLM__OPENAI__API_KEY=******
    ```
    """

    model_config = SettingsConfigDict(
        env_prefix="GUIDELLM__",
        env_nested_delimiter="__",
        env_file=".env",
        extra="ignore",
    )

    logging: LoggingSettings = LoggingSettings()
    openai: OpenAISettings = OpenAISettings()


settings = Settings()
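
The new facade replaces direct `os.getenv` lookups with typed attribute access on a shared `settings` object. A minimal sketch of the consumption pattern (assuming `config` is importable from the repo's `src/` layout), mirroring the `openai.py` change further below:

```python
from config import settings

# Before this commit (illustrative):
#   api_key = os.getenv("OPENAI_API_KEY", None)
#   base_url = os.getenv("OPENAI_BASE_URL", None)

# After: typed access through the pydantic-settings facade.
api_key = settings.openai.api_key    # defaults to "invalid"
base_url = settings.openai.base_url  # defaults to "http://localhost:8080"
```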
4 changes: 2 additions & 2 deletions src/guidellm/__init__.py
@@ -3,6 +3,6 @@
evaluating and benchmarking large language models (LLMs).
"""

from .logger import LoggerConfig, configure_logger, logger
from .logger import configure_logger, logger

__all__ = ["logger", "configure_logger", "LoggerConfig"]
__all__ = ["logger", "configure_logger"]
12 changes: 5 additions & 7 deletions src/guidellm/backend/openai.py
@@ -1,12 +1,12 @@
import functools
import os
from typing import Any, Dict, Generator, List, Optional

from loguru import logger
from openai import OpenAI, Stream
from openai.types import Completion
from transformers import AutoTokenizer

from config import settings
from guidellm.backend import Backend, BackendEngine, GenerativeResponse
from guidellm.core import TextGenerationRequest

@@ -47,18 +47,16 @@ def __init__(

self.request_args = request_args

if not (_api_key := (openai_api_key or os.getenv("OPENAI_API_KEY", None))):
if not (_api_key := (openai_api_key or settings.openai.api_key)):
raise ValueError(
"`OPENAI_API_KEY` environment variable "
"`GUIDELLM__OPENAI__API_KEY` environment variable "
"or --openai-api-key CLI parameter "
"must be specify for the OpenAI backend"
)

if not (
_base_url := (internal_callback_url or os.getenv("OPENAI_BASE_URL", None))
):
if not (_base_url := (internal_callback_url or settings.openai.base_url)):
raise ValueError(
"`OPENAI_BASE_URL` environment variable "
"`GUIDELLM__OPENAI__BASE_URL` environment variable "
"or --openai-base-url CLI parameter "
"must be specify for the OpenAI backend"
)
74 changes: 17 additions & 57 deletions src/guidellm/logger.py
@@ -6,21 +6,22 @@
variables or direct function calls.
Environment Variables:
- GUIDELLM_LOG_DISABLED: Disable logging (default: false).
- GUIDELLM_CLEAR_LOGGERS: Clear existing loggers from loguru (default: true).
- GUIDELLM_LOG_LEVEL: Log level for console logging
- GUIDELLM__LOGGING__DISABLED: Disable logging (default: false).
- GUIDELLM__LOGGING__CLEAR_LOGGERS: Clear existing loggers
from loguru (default: true).
- GUIDELLM__LOGGING__CONSOLE_LOG_LEVEL: Log level for console logging
(default: none, options: DEBUG, INFO, WARNING, ERROR, CRITICAL).
- GUIDELLM_LOG_FILE: Path to the log file for file logging
- GUIDELLM__LOGGING__LOG_FILE: Path to the log file for file logging
(default: guidellm.log if log file level set else none)
- GUIDELLM_LOG_FILE_LEVEL: Log level for file logging
- GUIDELLM__LOGGING__LOG_FILE_LEVEL: Log level for file logging
(default: INFO if log file set else none).
Usage:
from config import LoggingSettings
from guidellm import logger, configure_logger

# Configure logging with default settings
configure_logger(
config=LoggerConfig(
config=LoggingSettings(
disabled=False,
clear_loggers=True,
console_log_level="DEBUG",
@@ -33,26 +34,16 @@
logger.info("This is an info message")
"""

import os
import sys
from dataclasses import dataclass
from typing import Optional

from loguru import logger

__all__ = ["LoggerConfig", "configure_logger", "logger"]
from config import LoggingSettings, settings

__all__ = ["configure_logger", "logger"]

@dataclass
class LoggerConfig:
disabled: bool = False
clear_loggers: bool = True
console_log_level: Optional[str] = "INFO"
log_file: Optional[str] = None
log_file_level: Optional[str] = None


def configure_logger(config: Optional[LoggerConfig] = None):
def configure_logger(config: LoggingSettings = settings.logging):
"""
Configure logging for guidellm.
This function sets up the console and file logging
@@ -64,56 +55,25 @@ def configure_logger(config: Optional[LoggerConfig] = None):
:type config: LoggingSettings
"""

_ENV_CONFIG = LoggerConfig(
disabled=os.getenv("GUIDELLM_LOG_DISABLED") == "true",
clear_loggers=os.getenv("GUIDELLM_CLEAR_LOGGERS") == "true",
console_log_level=os.getenv("GUIDELLM_LOG_LEVEL"),
log_file=os.getenv("GUIDELLM_LOG_FILE"),
log_file_level=os.getenv("GUIDELLM_LOG_FILE_LEVEL"),
)

if not config:
config = LoggerConfig()
# override from environment variables, if set
logger_config = LoggerConfig(
disabled=_ENV_CONFIG.disabled or config.disabled,
console_log_level=_ENV_CONFIG.console_log_level or config.console_log_level,
log_file=_ENV_CONFIG.log_file or config.log_file,
log_file_level=_ENV_CONFIG.log_file_level or config.log_file_level,
)

if logger_config.disabled:
if config.disabled:
logger.disable("guidellm")
return

logger.enable("guidellm")

if logger_config.clear_loggers:
if config.clear_loggers:
logger.remove()

if logger_config.console_log_level:
if config.console_log_level:
# log as a human readable string with the time, function, level, and message
logger.add(
sys.stdout,
level=logger_config.console_log_level.upper(),
level=config.console_log_level.upper(),
format="{time} | {function} | {level} - {message}",
)

if logger_config.log_file or logger_config.log_file_level:
log_file = logger_config.log_file or "guidellm.log"
log_file_level = logger_config.log_file_level or "INFO"
if config.log_file or config.log_file_level:
log_file = config.log_file or "guidellm.log"
log_file_level = config.log_file_level or "INFO"
# log as json to the file for easier parsing
logger.add(log_file, level=log_file_level.upper(), serialize=True)


# invoke logger setup on import with default values enabling console logging with INFO
# and disabling file logging
configure_logger(
config=LoggerConfig(
disabled=False,
clear_loggers=True,
console_log_level="INFO",
log_file=None,
log_file_level=None,
)
)
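
The reworked `configure_logger` defaults to the global `settings.logging` and also accepts an explicit `LoggingSettings`. A minimal usage sketch (assuming the `guidellm` and `config` packages are importable), mirroring how the unit tests further below drive it:

```python
from config import LoggingSettings
from guidellm import configure_logger, logger

# Default: picks up settings.logging, i.e. whatever GUIDELLM__LOGGING__* provides.
configure_logger()

# Explicit override with console and file sinks enabled.
configure_logger(
    config=LoggingSettings(
        console_log_level="DEBUG",
        log_file="run.log",
        log_file_level="DEBUG",
    )
)
logger.debug("Written to stdout and, as JSON, to run.log")
```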
6 changes: 6 additions & 0 deletions src/guidellm/main.py
@@ -3,6 +3,7 @@
from guidellm.backend import Backend
from guidellm.core import TextGenerationBenchmarkReport
from guidellm.executor import Executor
from guidellm.logger import configure_logger
from guidellm.request import (
EmulatedRequestGenerator,
FileRequestGenerator,
@@ -133,4 +134,9 @@ def print_report(report: TextGenerationBenchmarkReport):


if __name__ == "__main__":
# invoke logger setup at the entrypoint with default values,
# enabling console logging at INFO and disabling file logging
configure_logger()

# entrypoint
main()
5 changes: 2 additions & 3 deletions tests/conftest.py
@@ -1,9 +1,9 @@
import os
from typing import Callable, Optional

import pytest
from loguru import logger

from config import settings
from guidellm.backend import Backend, BackendEngine, OpenAIBackend


@@ -22,8 +22,7 @@ def inner_wrapper(*_, base_url: Optional[str] = None, **kwargs) -> OpenAIBackend
defaults = {
"backend_type": BackendEngine.OPENAI_SERVER,
"openai_api_key": "required but not used",
"internal_callback_url": base_url
or os.getenv("OPENAI_BASE_URL", "http://localhost:8080"),
"internal_callback_url": base_url or settings.openai.base_url,
}

defaults.update(kwargs)
7 changes: 4 additions & 3 deletions tests/integration/backend/test_openai_backend_submit.py
@@ -1,4 +1,3 @@
import os
import time
from typing import Callable

@@ -7,6 +6,7 @@
from openai.pagination import SyncPage
from openai.types import Model

from config import settings
from guidellm.backend import OpenAIBackend
from guidellm.core import TextGenerationRequest, TextGenerationResult

@@ -17,9 +17,10 @@ def openai_server_healthcheck():
Check if the openai server is running
"""

if not (openai_server := os.getenv("OPENAI_BASE_URL", None)):
if not (openai_server := settings.openai.base_url):
raise ValueError(
"Integration backend tests can't be run without OPENAI_BASE_URL specified"
"Integration backend tests can't be run without "
"GUIDELLM__OPENAI__BASE_URL specified"
)

try:
13 changes: 7 additions & 6 deletions tests/unit/test_logger.py
@@ -1,6 +1,7 @@
import pytest

from guidellm import LoggerConfig, configure_logger, logger
from config import LoggingSettings
from guidellm import configure_logger, logger


@pytest.fixture(autouse=True)
@@ -25,7 +26,7 @@ def test_default_logger_settings(capsys):

def test_configure_logger_console_settings(capsys):
# Test configuring the logger to change console log level
config = LoggerConfig(console_log_level="DEBUG")
config = LoggingSettings(console_log_level="DEBUG")
configure_logger(config=config)
logger.info("Info message")
logger.debug("Debug message")
@@ -38,7 +39,7 @@ def test_configure_logger_console_settings(capsys):
def test_configure_logger_file_settings(tmp_path):
# Test configuring the logger to log to a file
log_file = tmp_path / "test.log"
config = LoggerConfig(log_file=str(log_file), log_file_level="DEBUG")
config = LoggingSettings(log_file=str(log_file), log_file_level="DEBUG")
configure_logger(config=config)
logger.info("Info message")
logger.debug("Debug message")
@@ -52,7 +53,7 @@ def test_configure_logger_file_settings(tmp_path):
def test_configure_logger_console_and_file(capsys, tmp_path):
# Test configuring the logger to change both console and file settings
log_file = tmp_path / "test.log"
config = LoggerConfig(
config = LoggingSettings(
console_log_level="ERROR", log_file=str(log_file), log_file_level="INFO"
)
configure_logger(config=config)
@@ -75,7 +76,7 @@ def test_environment_variable_override(monkeypatch, capsys, tmp_path):
monkeypatch.setenv("GUIDELLM_LOG_FILE", str(tmp_path / "env_test.log"))
monkeypatch.setenv("GUIDELLM_LOG_FILE_LEVEL", "DEBUG")

configure_logger(config=LoggerConfig())
configure_logger(config=LoggingSettings())
logger.info("Info message")
logger.error("Error message")
logger.debug("Debug message")
@@ -96,7 +97,7 @@ def test_environment_variable_disable_logging(monkeypatch, capsys):
# Test environment variable to disable logging
monkeypatch.setenv("GUIDELLM_LOG_DISABLED", "true")

configure_logger(config=LoggerConfig())
configure_logger(config=LoggingSettings())
logger.info("Info message")
logger.error("Error message")
