Merged
1 change: 1 addition & 0 deletions litellm/__init__.py
@@ -1377,6 +1377,7 @@ def add_known_models():
from .llms.github_copilot.responses.transformation import (
GithubCopilotResponsesAPIConfig,
)
+from .llms.github_copilot.embedding.transformation import GithubCopilotEmbeddingConfig
from .llms.nebius.chat.transformation import NebiusConfig
from .llms.wandb.chat.transformation import WandbConfig
from .llms.dashscope.chat.transformation import DashScopeChatConfig
6 changes: 2 additions & 4 deletions litellm/llms/github_copilot/chat/transformation.py
@@ -5,12 +5,10 @@
from litellm.types.llms.openai import AllMessageValues

from ..authenticator import Authenticator
-from ..common_utils import GetAPIKeyError
+from ..common_utils import GetAPIKeyError, GITHUB_COPILOT_API_BASE


class GithubCopilotConfig(OpenAIConfig):
GITHUB_COPILOT_API_BASE = "https://api.githubcopilot.com/"

def __init__(
self,
api_key: Optional[str] = None,
@@ -28,7 +26,7 @@ def _get_openai_compatible_provider_info(
custom_llm_provider: str,
) -> Tuple[Optional[str], Optional[str], str]:
dynamic_api_base = (
-            self.authenticator.get_api_base() or self.GITHUB_COPILOT_API_BASE
+            self.authenticator.get_api_base() or GITHUB_COPILOT_API_BASE
)
try:
dynamic_api_key = self.authenticator.get_api_key()
27 changes: 27 additions & 0 deletions litellm/llms/github_copilot/common_utils.py
@@ -2,11 +2,18 @@
Constants for Copilot integration
"""
from typing import Optional, Union
+from uuid import uuid4

import httpx

from litellm.llms.base_llm.chat.transformation import BaseLLMException

+# Constants
+COPILOT_VERSION = "0.26.7"
+EDITOR_PLUGIN_VERSION = f"copilot-chat/{COPILOT_VERSION}"
+USER_AGENT = f"GitHubCopilotChat/{COPILOT_VERSION}"
+API_VERSION = "2025-04-01"
+GITHUB_COPILOT_API_BASE = "https://api.githubcopilot.com"

class GithubCopilotError(BaseLLMException):
def __init__(
@@ -46,3 +53,23 @@ class RefreshAPIKeyError(GithubCopilotError):

class GetAPIKeyError(GithubCopilotError):
    pass
+
+
+def get_copilot_default_headers(api_key: str) -> dict:
+    """
+    Get default headers for the GitHub Copilot API.
+
+    Based on copilot-api's header configuration.
+    """
+    return {
+        "Authorization": f"Bearer {api_key}",
+        "content-type": "application/json",
+        "copilot-integration-id": "vscode-chat",
+        "editor-version": "vscode/1.95.0",  # Fixed version for stability
+        "editor-plugin-version": EDITOR_PLUGIN_VERSION,
+        "user-agent": USER_AGENT,
+        "openai-intent": "conversation-panel",
+        "x-github-api-version": API_VERSION,
+        "x-request-id": str(uuid4()),
+        "x-vscode-user-agent-library-version": "electron-fetch",
+    }
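
Note: `get_copilot_default_headers` is now shared by the responses and embedding configs, and every call mints a fresh `x-request-id` via `uuid4()`, so each request carries its own id. A minimal sketch (not part of the diff) of the merge behavior in `validate_environment`, where caller-supplied headers win on conflicts; the override value below is hypothetical:

```python
from litellm.llms.github_copilot.common_utils import get_copilot_default_headers

default_headers = get_copilot_default_headers(api_key="<copilot-token>")
user_headers = {"editor-version": "vscode/1.99.0"}  # hypothetical override

# Same merge order as validate_environment: user headers win on conflicts.
merged = {**default_headers, **user_headers}
assert merged["editor-version"] == "vscode/1.99.0"
assert merged["user-agent"].startswith("GitHubCopilotChat/")
```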
192 changes: 192 additions & 0 deletions litellm/llms/github_copilot/embedding/transformation.py
@@ -0,0 +1,192 @@
"""
GitHub Copilot Embedding API Configuration.

This module provides the configuration for GitHub Copilot's Embedding API.

Implementation based on analysis of the copilot-api project by caozhiyuan:
https://github.com/caozhiyuan/copilot-api
"""
from typing import TYPE_CHECKING, Any, Optional

import httpx

from litellm._logging import verbose_logger
from litellm.exceptions import AuthenticationError
from litellm.llms.base_llm.embedding.transformation import BaseEmbeddingConfig
from litellm.types.llms.openai import AllEmbeddingInputValues
from litellm.types.utils import EmbeddingResponse
from litellm.utils import convert_to_model_response_object

from ..authenticator import Authenticator
from ..common_utils import (
GetAPIKeyError,
GITHUB_COPILOT_API_BASE,
get_copilot_default_headers,
)

if TYPE_CHECKING:
from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj

LiteLLMLoggingObj = _LiteLLMLoggingObj
else:
LiteLLMLoggingObj = Any


class GithubCopilotEmbeddingConfig(BaseEmbeddingConfig):
"""
Configuration for GitHub Copilot's Embedding API.

Reference: https://api.githubcopilot.com/embeddings
"""

def __init__(self) -> None:
super().__init__()
self.authenticator = Authenticator()

def validate_environment(
self,
headers: dict,
model: str,
messages: list,
optional_params: dict,
litellm_params: dict,
api_key: Optional[str] = None,
api_base: Optional[str] = None,
) -> dict:
"""
Validate environment and set up headers for GitHub Copilot API.
"""
try:
# Get GitHub Copilot API key via OAuth
api_key = self.authenticator.get_api_key()

if not api_key:
raise AuthenticationError(
model=model,
llm_provider="github_copilot",
message="GitHub Copilot API key is required. Please authenticate via OAuth Device Flow.",
)

# Get default headers
default_headers = get_copilot_default_headers(api_key)

# Merge with existing headers (user's extra_headers take priority)
merged_headers = {**default_headers, **headers}

verbose_logger.debug(
f"GitHub Copilot Embedding API: Successfully configured headers for model {model}"
)

return merged_headers

except GetAPIKeyError as e:
raise AuthenticationError(
model=model,
llm_provider="github_copilot",
message=str(e),
)

def get_complete_url(
self,
api_base: Optional[str],
api_key: Optional[str],
model: str,
optional_params: dict,
litellm_params: dict,
stream: Optional[bool] = None,
) -> str:
"""
Get the complete URL for GitHub Copilot Embedding API endpoint.
"""
        # Prefer the authenticator's API base, then the provided api_base, then the default
api_base = (
self.authenticator.get_api_base()
or api_base
or GITHUB_COPILOT_API_BASE
)

# Remove trailing slashes
api_base = api_base.rstrip("/")

# Return the embeddings endpoint
return f"{api_base}/embeddings"

def transform_embedding_request(
self,
model: str,
input: AllEmbeddingInputValues,
optional_params: dict,
headers: dict,
) -> dict:
"""
Transform embedding request to GitHub Copilot format.
"""

# Ensure input is a list
if isinstance(input, str):
input = [input]

# Strip 'github_copilot/' prefix if present
if model.startswith("github_copilot/"):
model = model.replace("github_copilot/", "", 1)

return {
"model": model,
"input": input,
**optional_params,
}

def transform_embedding_response(
self,
model: str,
raw_response: httpx.Response,
model_response: EmbeddingResponse,
logging_obj: LiteLLMLoggingObj,
api_key: Optional[str],
request_data: dict,
optional_params: dict,
litellm_params: dict,
) -> EmbeddingResponse:
"""
Transform embedding response from GitHub Copilot format.
"""
logging_obj.post_call(original_response=raw_response.text)

# GitHub Copilot returns standard OpenAI-compatible embedding response
response_json = raw_response.json()

return convert_to_model_response_object(
response_object=response_json,
model_response_object=model_response,
response_type="embedding",
)

def get_supported_openai_params(self, model: str) -> list:
return [
"timeout",
"dimensions",
"encoding_format",
"user",
]

def map_openai_params(
self,
non_default_params: dict,
optional_params: dict,
model: str,
drop_params: bool,
) -> dict:
for param, value in non_default_params.items():
if param in self.get_supported_openai_params(model):
optional_params[param] = value
return optional_params

def get_error_class(
self, error_message: str, status_code: int, headers: Any
) -> Any:
from litellm.llms.openai.openai import OpenAIConfig

return OpenAIConfig().get_error_class(
error_message=error_message, status_code=status_code, headers=headers
)
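
If this lands as written, end-to-end usage should look roughly like the sketch below; the model name is an assumption, and `github_copilot` expects prior OAuth Device Flow authentication. Note that `transform_embedding_request` strips the `github_copilot/` prefix and wraps a bare string input into a list:

```python
import litellm

response = litellm.embedding(
    model="github_copilot/text-embedding-3-small",  # hypothetical model name
    input="hello world",  # wrapped into ["hello world"] by the config
    dimensions=256,  # one of the supported OpenAI params
)
print(response.data[0]["embedding"][:5])
```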

38 changes: 7 additions & 31 deletions litellm/llms/github_copilot/responses/transformation.py
@@ -8,7 +8,6 @@
https://github.com/caozhiyuan/copilot-api
"""
from typing import TYPE_CHECKING, Any, Dict, Optional, Union
-from uuid import uuid4

from litellm._logging import verbose_logger
from litellm.constants import DEFAULT_MAX_RECURSE_DEPTH
@@ -22,7 +21,11 @@
from litellm.types.utils import LlmProviders

from ..authenticator import Authenticator
-from ..common_utils import GetAPIKeyError
+from ..common_utils import (
+    GetAPIKeyError,
+    GITHUB_COPILOT_API_BASE,
+    get_copilot_default_headers,
+)

if TYPE_CHECKING:
from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj
Expand All @@ -31,12 +34,6 @@
else:
LiteLLMLoggingObj = Any

-# GitHub Copilot API Constants (from copilot-api)
-COPILOT_VERSION = "0.26.7"
-EDITOR_PLUGIN_VERSION = f"copilot-chat/{COPILOT_VERSION}"
-USER_AGENT = f"GitHubCopilotChat/{COPILOT_VERSION}"
-API_VERSION = "2025-04-01"
-

class GithubCopilotResponsesAPIConfig(OpenAIResponsesAPIConfig):
"""
@@ -55,8 +52,6 @@ class GithubCopilotResponsesAPIConfig(OpenAIResponsesAPIConfig):
Reference: https://api.githubcopilot.com/
"""

GITHUB_COPILOT_API_BASE = "https://api.githubcopilot.com"

def __init__(self) -> None:
super().__init__()
self.authenticator = Authenticator()
@@ -119,7 +114,7 @@ def validate_environment(
)

# Get default headers (from copilot-api configuration)
-        default_headers = self._get_default_headers(api_key)
+        default_headers = get_copilot_default_headers(api_key)

# Merge with existing headers (user's extra_headers take priority)
merged_headers = {**default_headers, **headers}
@@ -173,7 +168,7 @@ def get_complete_url(
api_base = (
api_base
or self.authenticator.get_api_base()
-            or self.GITHUB_COPILOT_API_BASE
+            or GITHUB_COPILOT_API_BASE
)

# Remove trailing slashes
@@ -184,25 +179,6 @@

# ==================== Helper Methods ====================

-    def _get_default_headers(self, api_key: str) -> Dict[str, str]:
-        """
-        Get default headers for GitHub Copilot Responses API.
-
-        Based on copilot-api's header configuration.
-        """
-        return {
-            "Authorization": f"Bearer {api_key}",
-            "content-type": "application/json",
-            "copilot-integration-id": "vscode-chat",
-            "editor-version": "vscode/1.95.0",  # Fixed version for stability
-            "editor-plugin-version": EDITOR_PLUGIN_VERSION,
-            "user-agent": USER_AGENT,
-            "openai-intent": "conversation-panel",
-            "x-github-api-version": API_VERSION,
-            "x-request-id": str(uuid4()),
-            "x-vscode-user-agent-library-version": "electron-fetch",
-        }
-
def _get_input_from_params(
self, litellm_params: Optional[GenericLiteLLMParams]
) -> Optional[Union[str, ResponseInputParam]]:
16 changes: 16 additions & 0 deletions litellm/main.py
@@ -4253,6 +4253,22 @@ def embedding( # noqa: PLR0915
headers=headers or extra_headers,
litellm_params=litellm_params_dict,
)
elif custom_llm_provider == "github_copilot":
api_key = (api_key or litellm.api_key)
response = base_llm_http_handler.embedding(
model=model,
input=input,
custom_llm_provider=custom_llm_provider,
api_base=api_base,
api_key=api_key,
logging_obj=logging,
timeout=timeout,
model_response=EmbeddingResponse(),
optional_params=optional_params,
client=client,
aembedding=aembedding,
litellm_params=litellm_params_dict,
)
elif (
model in litellm.open_ai_embedding_models
or custom_llm_provider == "openai"
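
Because the new branch forwards `aembedding`, the async path should work without extra wiring; a sketch under the same model-name assumption as above:

```python
import asyncio

import litellm


async def main() -> None:
    response = await litellm.aembedding(
        model="github_copilot/text-embedding-3-small",  # hypothetical model name
        input=["first chunk", "second chunk"],
    )
    print(len(response.data))


asyncio.run(main())
```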
2 changes: 2 additions & 0 deletions litellm/utils.py
@@ -7285,6 +7285,8 @@ def get_provider_embedding_config(
return litellm.SnowflakeEmbeddingConfig()
elif litellm.LlmProviders.COMETAPI == provider:
return litellm.CometAPIEmbeddingConfig()
+    elif litellm.LlmProviders.GITHUB_COPILOT == provider:
+        return litellm.GithubCopilotEmbeddingConfig()
elif litellm.LlmProviders.SAGEMAKER == provider:
from litellm.llms.sagemaker.embedding.transformation import (
SagemakerEmbeddingConfig,