Skip to content

Commit b4fb69b

Browse files
committed
feat(github-copilot): Add Embedding API support
1 parent 7f42b9b commit b4fb69b

File tree

9 files changed

+494
-36
lines changed

9 files changed

+494
-36
lines changed

litellm/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1377,6 +1377,7 @@ def add_known_models():
13771377
from .llms.github_copilot.responses.transformation import (
13781378
GithubCopilotResponsesAPIConfig,
13791379
)
1380+
from .llms.github_copilot.embedding.transformation import GithubCopilotEmbeddingConfig
13801381
from .llms.nebius.chat.transformation import NebiusConfig
13811382
from .llms.wandb.chat.transformation import WandbConfig
13821383
from .llms.dashscope.chat.transformation import DashScopeChatConfig

litellm/llms/github_copilot/chat/transformation.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -5,12 +5,10 @@
55
from litellm.types.llms.openai import AllMessageValues
66

77
from ..authenticator import Authenticator
8-
from ..common_utils import GetAPIKeyError
8+
from ..common_utils import GetAPIKeyError, GITHUB_COPILOT_API_BASE
99

1010

1111
class GithubCopilotConfig(OpenAIConfig):
12-
GITHUB_COPILOT_API_BASE = "https://api.githubcopilot.com/"
13-
1412
def __init__(
1513
self,
1614
api_key: Optional[str] = None,
@@ -28,7 +26,7 @@ def _get_openai_compatible_provider_info(
2826
custom_llm_provider: str,
2927
) -> Tuple[Optional[str], Optional[str], str]:
3028
dynamic_api_base = (
31-
self.authenticator.get_api_base() or self.GITHUB_COPILOT_API_BASE
29+
self.authenticator.get_api_base() or GITHUB_COPILOT_API_BASE
3230
)
3331
try:
3432
dynamic_api_key = self.authenticator.get_api_key()

litellm/llms/github_copilot/common_utils.py

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,18 @@
22
Constants for Copilot integration
33
"""
44
from typing import Optional, Union
5+
from uuid import uuid4
56

67
import httpx
78

89
from litellm.llms.base_llm.chat.transformation import BaseLLMException
910

11+
# Constants
12+
COPILOT_VERSION = "0.26.7"
13+
EDITOR_PLUGIN_VERSION = f"copilot-chat/{COPILOT_VERSION}"
14+
USER_AGENT = f"GitHubCopilotChat/{COPILOT_VERSION}"
15+
API_VERSION = "2025-04-01"
16+
GITHUB_COPILOT_API_BASE = "https://api.githubcopilot.com"
1017

1118
class GithubCopilotError(BaseLLMException):
1219
def __init__(
@@ -46,3 +53,23 @@ class RefreshAPIKeyError(GithubCopilotError):
4653

4754
class GetAPIKeyError(GithubCopilotError):
4855
pass
56+
57+
58+
def get_copilot_default_headers(api_key: str) -> dict:
    """
    Build the default HTTP headers shared by the GitHub Copilot configs
    (responses and embedding APIs both call this helper).

    Based on copilot-api's header configuration.

    Args:
        api_key: Short-lived Copilot API token obtained via the OAuth flow.

    Returns:
        dict: Header mapping with bearer auth, editor-identification
        headers, and a per-call ``x-request-id`` — a fresh UUID is
        generated on every invocation, so each request is individually
        traceable server-side.
    """
    return {
        "Authorization": f"Bearer {api_key}",
        "content-type": "application/json",
        "copilot-integration-id": "vscode-chat",
        "editor-version": "vscode/1.95.0",  # Fixed version for stability
        "editor-plugin-version": EDITOR_PLUGIN_VERSION,
        "user-agent": USER_AGENT,
        "openai-intent": "conversation-panel",
        "x-github-api-version": API_VERSION,
        "x-request-id": str(uuid4()),
        "x-vscode-user-agent-library-version": "electron-fetch",
    }
Lines changed: 188 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,188 @@
1+
"""
2+
GitHub Copilot Embedding API Configuration.
3+
4+
This module provides the configuration for GitHub Copilot's Embedding API.
5+
6+
Implementation based on analysis of the copilot-api project by caozhiyuan:
7+
https://github.com/caozhiyuan/copilot-api
8+
"""
9+
from typing import TYPE_CHECKING, Any, Optional
10+
11+
import httpx
12+
13+
from litellm._logging import verbose_logger
14+
from litellm.exceptions import AuthenticationError
15+
from litellm.llms.base_llm.embedding.transformation import BaseEmbeddingConfig
16+
from litellm.types.llms.openai import AllEmbeddingInputValues
17+
from litellm.types.utils import EmbeddingResponse
18+
from litellm.utils import convert_to_model_response_object
19+
20+
from ..authenticator import Authenticator
21+
from ..common_utils import (
22+
GetAPIKeyError,
23+
GITHUB_COPILOT_API_BASE,
24+
get_copilot_default_headers,
25+
)
26+
27+
if TYPE_CHECKING:
28+
from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj
29+
30+
LiteLLMLoggingObj = _LiteLLMLoggingObj
31+
else:
32+
LiteLLMLoggingObj = Any
33+
34+
35+
class GithubCopilotEmbeddingConfig(BaseEmbeddingConfig):
    """
    Configuration for GitHub Copilot's Embedding API.

    Reference: https://api.githubcopilot.com/embeddings

    Authentication is obtained through the GitHub Copilot OAuth device
    flow (via ``Authenticator``), not from a user-supplied API key; the
    response format is OpenAI-compatible.
    """

    def __init__(self) -> None:
        super().__init__()
        # Handles OAuth device flow, token caching, and refresh.
        self.authenticator = Authenticator()

    def validate_environment(
        self,
        headers: dict,
        model: str,
        messages: list,
        optional_params: dict,
        litellm_params: dict,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
    ) -> dict:
        """
        Resolve the Copilot token via OAuth and assemble request headers.

        The incoming ``api_key`` argument is intentionally ignored — the
        token always comes from the authenticator. Caller-supplied
        ``headers`` (extra_headers) override the defaults on merge.

        Raises:
            AuthenticationError: if no Copilot token can be obtained.
        """
        try:
            # Get GitHub Copilot API key via OAuth
            api_key = self.authenticator.get_api_key()

            if not api_key:
                raise AuthenticationError(
                    model=model,
                    llm_provider="github_copilot",
                    message="GitHub Copilot API key is required. Please authenticate via OAuth Device Flow.",
                )

            # Shared Copilot header set (same helper the responses config uses).
            default_headers = get_copilot_default_headers(api_key)

            # Merge with existing headers (user's extra_headers take priority)
            merged_headers = {**default_headers, **headers}

            verbose_logger.debug(
                f"GitHub Copilot Embedding API: Successfully configured headers for model {model}"
            )

            return merged_headers

        except GetAPIKeyError as e:
            # Surface auth failures as AuthenticationError, preserving the
            # original exception as the cause for debuggability.
            raise AuthenticationError(
                model=model,
                llm_provider="github_copilot",
                message=str(e),
            ) from e

    def get_complete_url(
        self,
        api_base: Optional[str],
        api_key: Optional[str],
        model: str,
        optional_params: dict,
        litellm_params: dict,
        stream: Optional[bool] = None,
    ) -> str:
        """
        Build the full URL for the Copilot embeddings endpoint.

        Precedence: an explicitly provided ``api_base`` wins, then the
        authenticator's dynamically discovered base, then the public
        default. (Matches GithubCopilotResponsesAPIConfig; previously the
        authenticator base incorrectly shadowed a caller-supplied
        ``api_base``.)
        """
        api_base = (
            api_base
            or self.authenticator.get_api_base()
            or GITHUB_COPILOT_API_BASE
        )

        # Remove trailing slashes so we never emit "//embeddings".
        api_base = api_base.rstrip("/")

        return f"{api_base}/embeddings"

    def transform_embedding_request(
        self,
        model: str,
        input: AllEmbeddingInputValues,
        optional_params: dict,
        headers: dict,
    ) -> dict:
        """
        Transform the embedding request into GitHub Copilot's (OpenAI-style)
        JSON body.
        """
        # Normalize a single string into the list form the API expects.
        if isinstance(input, str):
            input = [input]

        return {
            "model": model,
            "input": input,
            **optional_params,
        }

    def transform_embedding_response(
        self,
        model: str,
        raw_response: httpx.Response,
        model_response: EmbeddingResponse,
        logging_obj: LiteLLMLoggingObj,
        api_key: Optional[str],
        request_data: dict,
        optional_params: dict,
        litellm_params: dict,
    ) -> EmbeddingResponse:
        """
        Transform the raw HTTP response into an ``EmbeddingResponse``.

        GitHub Copilot returns a standard OpenAI-compatible embedding
        payload, so the generic converter is sufficient.
        """
        logging_obj.post_call(original_response=raw_response.text)

        response_json = raw_response.json()

        return convert_to_model_response_object(
            response_object=response_json,
            model_response_object=model_response,
            response_type="embedding",
        )

    def get_supported_openai_params(self, model: str) -> list:
        """OpenAI embedding params that are forwarded to Copilot."""
        return [
            "timeout",
            "dimensions",
            "encoding_format",
            "user",
        ]

    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        """
        Copy supported OpenAI params into ``optional_params``.

        Unsupported params are silently skipped regardless of
        ``drop_params`` (kept for interface compatibility).
        """
        # Hoist the supported-param lookup out of the loop.
        supported = set(self.get_supported_openai_params(model))
        for param, value in non_default_params.items():
            if param in supported:
                optional_params[param] = value
        return optional_params

    def get_error_class(
        self, error_message: str, status_code: int, headers: Any
    ) -> Any:
        """Delegate error mapping to the OpenAI config (compatible API)."""
        from litellm.llms.openai.openai import OpenAIConfig

        return OpenAIConfig().get_error_class(
            error_message=error_message, status_code=status_code, headers=headers
        )

litellm/llms/github_copilot/responses/transformation.py

Lines changed: 7 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,6 @@
88
https://github.com/caozhiyuan/copilot-api
99
"""
1010
from typing import TYPE_CHECKING, Any, Dict, Optional, Union
11-
from uuid import uuid4
1211

1312
from litellm._logging import verbose_logger
1413
from litellm.constants import DEFAULT_MAX_RECURSE_DEPTH
@@ -22,7 +21,11 @@
2221
from litellm.types.utils import LlmProviders
2322

2423
from ..authenticator import Authenticator
25-
from ..common_utils import GetAPIKeyError
24+
from ..common_utils import (
25+
GetAPIKeyError,
26+
GITHUB_COPILOT_API_BASE,
27+
get_copilot_default_headers,
28+
)
2629

2730
if TYPE_CHECKING:
2831
from litellm.litellm_core_utils.litellm_logging import Logging as _LiteLLMLoggingObj
@@ -31,12 +34,6 @@
3134
else:
3235
LiteLLMLoggingObj = Any
3336

34-
# GitHub Copilot API Constants (from copilot-api)
35-
COPILOT_VERSION = "0.26.7"
36-
EDITOR_PLUGIN_VERSION = f"copilot-chat/{COPILOT_VERSION}"
37-
USER_AGENT = f"GitHubCopilotChat/{COPILOT_VERSION}"
38-
API_VERSION = "2025-04-01"
39-
4037

4138
class GithubCopilotResponsesAPIConfig(OpenAIResponsesAPIConfig):
4239
"""
@@ -55,8 +52,6 @@ class GithubCopilotResponsesAPIConfig(OpenAIResponsesAPIConfig):
5552
Reference: https://api.githubcopilot.com/
5653
"""
5754

58-
GITHUB_COPILOT_API_BASE = "https://api.githubcopilot.com"
59-
6055
def __init__(self) -> None:
6156
super().__init__()
6257
self.authenticator = Authenticator()
@@ -119,7 +114,7 @@ def validate_environment(
119114
)
120115

121116
# Get default headers (from copilot-api configuration)
122-
default_headers = self._get_default_headers(api_key)
117+
default_headers = get_copilot_default_headers(api_key)
123118

124119
# Merge with existing headers (user's extra_headers take priority)
125120
merged_headers = {**default_headers, **headers}
@@ -173,7 +168,7 @@ def get_complete_url(
173168
api_base = (
174169
api_base
175170
or self.authenticator.get_api_base()
176-
or self.GITHUB_COPILOT_API_BASE
171+
or GITHUB_COPILOT_API_BASE
177172
)
178173

179174
# Remove trailing slashes
@@ -184,25 +179,6 @@ def get_complete_url(
184179

185180
# ==================== Helper Methods ====================
186181

187-
def _get_default_headers(self, api_key: str) -> Dict[str, str]:
188-
"""
189-
Get default headers for GitHub Copilot Responses API.
190-
191-
Based on copilot-api's header configuration.
192-
"""
193-
return {
194-
"Authorization": f"Bearer {api_key}",
195-
"content-type": "application/json",
196-
"copilot-integration-id": "vscode-chat",
197-
"editor-version": "vscode/1.95.0", # Fixed version for stability
198-
"editor-plugin-version": EDITOR_PLUGIN_VERSION,
199-
"user-agent": USER_AGENT,
200-
"openai-intent": "conversation-panel",
201-
"x-github-api-version": API_VERSION,
202-
"x-request-id": str(uuid4()),
203-
"x-vscode-user-agent-library-version": "electron-fetch",
204-
}
205-
206182
def _get_input_from_params(
207183
self, litellm_params: Optional[GenericLiteLLMParams]
208184
) -> Optional[Union[str, ResponseInputParam]]:

litellm/main.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4253,6 +4253,22 @@ def embedding( # noqa: PLR0915
42534253
headers=headers or extra_headers,
42544254
litellm_params=litellm_params_dict,
42554255
)
4256+
elif custom_llm_provider == "github_copilot":
4257+
api_key = (api_key or litellm.api_key)
4258+
response = base_llm_http_handler.embedding(
4259+
model=model,
4260+
input=input,
4261+
custom_llm_provider=custom_llm_provider,
4262+
api_base=api_base,
4263+
api_key=api_key,
4264+
logging_obj=logging,
4265+
timeout=timeout,
4266+
model_response=EmbeddingResponse(),
4267+
optional_params=optional_params,
4268+
client=client,
4269+
aembedding=aembedding,
4270+
litellm_params=litellm_params_dict,
4271+
)
42564272
elif (
42574273
model in litellm.open_ai_embedding_models
42584274
or custom_llm_provider == "openai"

litellm/utils.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7285,6 +7285,8 @@ def get_provider_embedding_config(
72857285
return litellm.SnowflakeEmbeddingConfig()
72867286
elif litellm.LlmProviders.COMETAPI == provider:
72877287
return litellm.CometAPIEmbeddingConfig()
7288+
elif litellm.LlmProviders.GITHUB_COPILOT == provider:
7289+
return litellm.GithubCopilotEmbeddingConfig()
72887290
elif litellm.LlmProviders.SAGEMAKER == provider:
72897291
from litellm.llms.sagemaker.embedding.transformation import (
72907292
SagemakerEmbeddingConfig,

0 commit comments

Comments
 (0)