
Commit 63f8408

nathan-weinberg authored and Elbehery committed
feat: add auto-generated CI documentation pre-commit hook (#2890)
Our CI is entirely undocumented; this commit adds a README.md file with a table of the current CI and what it does.

---------

Signed-off-by: Nathan Weinberg <[email protected]>
1 parent cf73146 commit 63f8408


71 files changed: +208 −195 lines changed


.pre-commit-config.yaml

Lines changed: 18 additions & 0 deletions
@@ -145,6 +145,24 @@ repos:
         pass_filenames: false
         require_serial: true
         files: ^.github/workflows/.*$
+      - id: check-logger-usage
+        name: Check for proper logger usage (use llama_stack.log instead)
+        entry: bash
+        language: system
+        types: [python]
+        pass_filenames: true
+        args:
+          - -c
+          - |
+            matches=$(grep -EnH '^[^#]*\b(import logging|from logging\b)' "$@" | grep -v '# allow-direct-logging' || true)
+            if [ -n "$matches" ]; then
+              # GitHub Actions annotation format
+              while IFS=: read -r file line_num rest; do
+                echo "::error file=$file,line=$line_num::Do not use 'import logging' or 'from logging import' in $file. Use the custom logger instead: from llama_stack.log import get_logger; logger = get_logger(name=__name__, category=\"core\"). If direct logging is truly needed, add: # allow-direct-logging"
+              done <<< "$matches"
+              exit 1
+            fi
+            exit 0
 
 ci:
   autofix_commit_msg: 🎨 [pre-commit.ci] Auto format from pre-commit.com hooks
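
The rest of the commit applies the convention this hook enforces: each touched module drops its direct use of the logging module and obtains a logger from llama_stack.log instead. A minimal before/after sketch, assuming only the get_logger signature shown in the hook's error message:

# Before: flagged by the check-logger-usage hook
# import logging
# log = logging.getLogger(__name__)

# After: use the project logger instead
from llama_stack.log import get_logger

logger = get_logger(name=__name__, category="core")
logger.error("Provider data not encoded as a JSON object!")  # call sites stay the same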

llama_stack/core/build.py

Lines changed: 4 additions & 4 deletions
@@ -5,7 +5,6 @@
 # the root directory of this source tree.
 
 import importlib.resources
-import logging
 import sys
 from pathlib import Path
 
@@ -17,11 +16,10 @@
 from llama_stack.core.external import load_external_apis
 from llama_stack.core.utils.exec import run_command
 from llama_stack.core.utils.image_types import LlamaStackImageType
+from llama_stack.log import get_logger
 from llama_stack.providers.datatypes import Api
 from llama_stack.templates.template import DistributionTemplate
 
-log = logging.getLogger(__name__)
-
 # These are the dependencies needed by the distribution server.
 # `llama-stack` is automatically installed by the installation script.
 SERVER_DEPENDENCIES = [
@@ -34,6 +32,8 @@
     "opentelemetry-exporter-otlp-proto-http",
 ]
 
+logger = get_logger(name=__name__, category="core")
+
 
 class ApiInput(BaseModel):
     api: Api
@@ -170,7 +170,7 @@ def build_image(
     return_code = run_command(args)
 
     if return_code != 0:
-        log.error(
+        logger.error(
            f"Failed to build target {image_name} with return code {return_code}",
        )
 

llama_stack/core/configure.py

Lines changed: 2 additions & 2 deletions
@@ -3,7 +3,6 @@
 #
 # This source code is licensed under the terms described in the LICENSE file in
 # the root directory of this source tree.
-import logging
 import textwrap
 from typing import Any
 
@@ -21,9 +20,10 @@
 from llama_stack.core.utils.config_dirs import EXTERNAL_PROVIDERS_DIR
 from llama_stack.core.utils.dynamic import instantiate_class_type
 from llama_stack.core.utils.prompt_for_config import prompt_for_config
+from llama_stack.log import get_logger
 from llama_stack.providers.datatypes import Api, ProviderSpec
 
-logger = logging.getLogger(__name__)
+logger = get_logger(name=__name__, category="core")
 
 
 def configure_single_provider(registry: dict[str, ProviderSpec], provider: Provider) -> Provider:

llama_stack/core/external.py

Lines changed: 1 addition & 1 deletion
@@ -11,7 +11,7 @@
 from llama_stack.core.datatypes import BuildConfig, StackRunConfig
 from llama_stack.log import get_logger
 
-logger = get_logger(name=__name__, category="core")
+logger = get_logger(name=__name__, category="distribution")
 
 
 def load_external_apis(config: StackRunConfig | BuildConfig | None) -> dict[Api, ExternalApiSpec]:

llama_stack/core/library_client.py

Lines changed: 4 additions & 2 deletions
@@ -7,7 +7,6 @@
 import asyncio
 import inspect
 import json
-import logging
 import os
 import sys
 from concurrent.futures import ThreadPoolExecutor
@@ -48,14 +47,15 @@
 from llama_stack.core.utils.config import redact_sensitive_fields
 from llama_stack.core.utils.context import preserve_contexts_async_generator
 from llama_stack.core.utils.exec import in_notebook
+from llama_stack.log import get_logger
 from llama_stack.providers.utils.telemetry.tracing import (
     CURRENT_TRACE_CONTEXT,
     end_trace,
     setup_logger,
     start_trace,
 )
 
-logger = logging.getLogger(__name__)
+logger = get_logger(name=__name__, category="distribution")
 
 T = TypeVar("T")
 
@@ -173,6 +173,8 @@ def _remove_root_logger_handlers(self):
         """
         Remove all handlers from the root logger. Needed to avoid polluting the console with logs.
         """
+        import logging  # allow-direct-logging
+
         root_logger = logging.getLogger()
 
         for handler in root_logger.handlers[:]:
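
library_client.py is the one place that still needs the standard-library module, since _remove_root_logger_handlers has to detach handlers from the root logger; the import is therefore done locally and tagged with the hook's escape-hatch comment. A minimal standalone sketch of the same pattern (the removeHandler loop body is an assumption, as the diff context stops at the for statement):

def remove_root_logger_handlers() -> None:
    # Local import, marked so the check-logger-usage hook skips this line
    import logging  # allow-direct-logging

    root_logger = logging.getLogger()
    for handler in root_logger.handlers[:]:
        # Presumably each handler is detached to keep console output clean
        root_logger.removeHandler(handler)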

llama_stack/core/providers.py

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@
 from .datatypes import StackRunConfig
 from .utils.config import redact_sensitive_fields
 
-logger = get_logger(name=__name__, category="core")
+logger = get_logger(name=__name__, category="distribution")
 
 
 class ProviderImplConfig(BaseModel):

llama_stack/core/request_headers.py

Lines changed: 5 additions & 5 deletions
@@ -6,19 +6,19 @@
 
 import contextvars
 import json
-import logging
 from contextlib import AbstractContextManager
 from typing import Any
 
 from llama_stack.core.datatypes import User
+from llama_stack.log import get_logger
 
 from .utils.dynamic import instantiate_class_type
 
-log = logging.getLogger(__name__)
-
 # Context variable for request provider data and auth attributes
 PROVIDER_DATA_VAR = contextvars.ContextVar("provider_data", default=None)
 
+logger = get_logger(name=__name__, category="distribution")
+
 
 class RequestProviderDataContext(AbstractContextManager):
     """Context manager for request provider data"""
@@ -61,7 +61,7 @@ def get_request_provider_data(self) -> Any:
             provider_data = validator(**val)
             return provider_data
         except Exception as e:
-            log.error(f"Error parsing provider data: {e}")
+            logger.error(f"Error parsing provider data: {e}")
             return None
 
 
@@ -83,7 +83,7 @@ def parse_request_provider_data(headers: dict[str, str]) -> dict[str, Any] | Non
     try:
         return json.loads(val)
     except json.JSONDecodeError:
-        log.error("Provider data not encoded as a JSON object!")
+        logger.error("Provider data not encoded as a JSON object!")
         return None
 
 

llama_stack/core/resolver.py

Lines changed: 1 addition & 1 deletion
@@ -54,7 +54,7 @@
     VectorDBsProtocolPrivate,
 )
 
-logger = get_logger(name=__name__, category="core")
+logger = get_logger(name=__name__, category="distribution")
 
 
 class InvalidProviderError(Exception):

llama_stack/core/routers/datasets.py

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@
 from llama_stack.log import get_logger
 from llama_stack.providers.datatypes import RoutingTable
 
-logger = get_logger(name=__name__, category="core")
+logger = get_logger(name=__name__, category="distribution")
 
 
 class DatasetIORouter(DatasetIO):

llama_stack/core/routers/eval_scoring.py

Lines changed: 1 addition & 1 deletion
@@ -16,7 +16,7 @@
 from llama_stack.log import get_logger
 from llama_stack.providers.datatypes import RoutingTable
 
-logger = get_logger(name=__name__, category="core")
+logger = get_logger(name=__name__, category="distribution")
 
 
 class ScoringRouter(Scoring):
