Skip to content

Commit f550fc1

Browse files
author
张文超
committed
fix:增加返回体超时重试
1 parent b01bfd8 commit f550fc1

File tree

7 files changed

+240
-19
lines changed

7 files changed

+240
-19
lines changed

.github/workflows/release.yml

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -85,12 +85,6 @@ jobs:
8585
name: Claude Code Proxy ${{ steps.version.outputs.VERSION }}
8686
body: |
8787
# Claude Code Proxy ${{ steps.version.outputs.VERSION }}
88-
89-
## 🎉 新特性
90-
- 提供 OpenAI 兼容的 API 接口
91-
- 支持 Claude 3.5 Sonnet 模型
92-
- 支持流式响应
93-
- 支持多种部署方式
9488
9589
## 🔧 配置说明
9690

README.md

Lines changed: 11 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,9 @@ Claude Code Proxy SSY 是一个命令行工具,可以将Claude API转换为胜
1313

1414
## 📦 安装
1515

16+
### 使用前提(注册胜算云)
17+
注册 [胜算云](https://www.shengsuanyun.com) , 限时注册赠送免费额度
18+
1619
### 使用前提(安装Claude Code)
1720

1821
```shell
@@ -80,7 +83,7 @@ claudeproxy setup
8083
- 引导您输入胜算云 API 密钥
8184
- 获取可用模型列表
8285
- 让您选择大模型和小模型
83-
- 保存配置到 `~/.claudeproxy/.env`
86+
- 保存配置到 `~/.claudeproxy/config.json`
8487

8588
### 2. 启动服务
8689

@@ -151,7 +154,7 @@ claudeproxy clean
151154

152155
## ⚙️ 配置选项
153156

154-
默认配置保存在 `~/.claudeproxy/.env` 文件中:
157+
默认配置保存在 `~/.claudeproxy/config.json` 文件中:
155158

156159
```bash
157160
BASE_URL=https://router.shengsuanyun.com/api/v1
@@ -170,6 +173,12 @@ SMALL_MODEL_NAME=selected-small-model
170173

171174
您也可以通过环境变量覆盖这些设置。
172175

176+
## 🚀 使用 Claude Code
177+
178+
```bash
179+
claude
180+
```
181+
173182
## 🔧 开发
174183

175184
### 前置要求

internal/handlers/handlers.go

Lines changed: 43 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -123,6 +123,17 @@ func (h *Handler) CreateMessage(c *gin.Context) {
123123
"small_model": h.config.SmallModelName,
124124
}).Info("Model selection completed")
125125

126+
// Debug: configuration details
127+
h.logger.WithFields(logrus.Fields{
128+
"openai_base_url": h.config.OpenAIBaseURL,
129+
"app_name": h.config.AppName,
130+
"big_model": h.config.BigModelName,
131+
"small_model": h.config.SmallModelName,
132+
"open_claude_cache": h.config.OpenClaudeCache,
133+
"original_model": req.Model,
134+
"selected_model": openAIReq.Model,
135+
}).Debug("Configuration and model selection details")
136+
126137
// Handle streaming vs non-streaming
127138
if req.Stream {
128139
h.handleStreamingRequest(c, openAIReq, req.Model)
@@ -136,31 +147,53 @@ func (h *Handler) handleStreamingRequest(c *gin.Context, openAIReq *models.OpenA
136147
ctx, cancel := context.WithTimeout(c.Request.Context(), 60*time.Second)
137148
defer cancel()
138149

150+
h.logger.WithFields(logrus.Fields{
151+
"original_model": originalModel,
152+
"selected_model": openAIReq.Model,
153+
"request_type": "streaming",
154+
}).Debug("Starting streaming request")
155+
139156
// Make streaming request to OpenAI
140157
resp, err := h.openAIClient.CreateStreamingChatCompletion(ctx, openAIReq)
141158
if err != nil {
142-
h.logger.WithError(err).Error("Failed to create streaming completion")
159+
h.logger.WithFields(logrus.Fields{
160+
"error": err.Error(),
161+
}).Error("OpenAI streaming request failed")
143162
h.streamingService.HandleStreamingError(c, err)
144163
return
145164
}
146165

166+
h.logger.Debug("OpenAI streaming connection established")
167+
147168
// Stream the response
148169
if err := h.streamingService.StreamResponse(c, resp, originalModel); err != nil {
149-
h.logger.WithError(err).Error("Failed to stream response")
170+
h.logger.WithFields(logrus.Fields{
171+
"error": err.Error(),
172+
}).Error("Streaming response failed")
150173
h.streamingService.HandleStreamingError(c, err)
151174
return
152175
}
176+
177+
h.logger.Debug("Streaming request completed successfully")
153178
}
154179

155180
// handleNonStreamingRequest handles non-streaming message requests
156181
func (h *Handler) handleNonStreamingRequest(c *gin.Context, openAIReq *models.OpenAIRequest, originalModel string) {
157182
ctx, cancel := context.WithTimeout(c.Request.Context(), 60*time.Second)
158183
defer cancel()
159184

185+
h.logger.WithFields(logrus.Fields{
186+
"original_model": originalModel,
187+
"selected_model": openAIReq.Model,
188+
"request_type": "non_streaming",
189+
}).Debug("Starting non-streaming request")
190+
160191
// Make request to OpenAI
161192
openAIResp, err := h.openAIClient.CreateChatCompletion(ctx, openAIReq)
162193
if err != nil {
163-
h.logger.WithError(err).Error("Failed to create completion")
194+
h.logger.WithFields(logrus.Fields{
195+
"error": err.Error(),
196+
}).Error("OpenAI request failed")
164197
if apiErr, ok := err.(*models.APIError); ok {
165198
c.JSON(apiErr.HTTPStatus(), models.ErrorResponse{Error: apiErr})
166199
} else {
@@ -171,16 +204,22 @@ func (h *Handler) handleNonStreamingRequest(c *gin.Context, openAIReq *models.Op
171204
return
172205
}
173206

207+
h.logger.Debug("OpenAI request completed successfully")
208+
174209
// Convert response to Anthropic format
175210
anthropicResp, err := h.conversionService.ConvertOpenAIToAnthropic(openAIResp, originalModel)
176211
if err != nil {
177-
h.logger.WithError(err).Error("Failed to convert response")
212+
h.logger.WithFields(logrus.Fields{
213+
"error": err.Error(),
214+
}).Error("Response conversion failed")
178215
c.JSON(http.StatusInternalServerError, models.ErrorResponse{
179216
Error: models.NewInternalError("Failed to process response"),
180217
})
181218
return
182219
}
183220

221+
h.logger.Debug("Response conversion completed successfully")
222+
184223
// Log the response
185224
h.logger.WithFields(logrus.Fields{
186225
"response_id": anthropicResp.ID,

internal/server/server.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ func New(cfg *config.Config) *Server {
5656
// Create services
5757
openAIClient := services.NewOpenAIClient(cfg, logger)
5858
modelSelector := services.NewModelSelectorService(cfg, logger)
59-
conversionService := services.NewConversionService(modelSelector, cfg)
59+
conversionService := services.NewConversionService(modelSelector, cfg, logger)
6060
tokenService := services.NewTokenCountingService()
6161
streamingService := services.NewStreamingService(conversionService, logger)
6262

internal/services/conversion.go

Lines changed: 55 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,19 +9,23 @@ import (
99

1010
"claude-code-provider-proxy/internal/config"
1111
"claude-code-provider-proxy/internal/models"
12+
13+
"github.com/sirupsen/logrus"
1214
)
1315

1416
// ConversionService handles conversion between Anthropic and OpenAI formats
1517
type ConversionService struct {
1618
modelSelector *ModelSelectorService
1719
config *config.Config
20+
logger *logrus.Logger
1821
}
1922

2023
// NewConversionService creates a new conversion service
21-
func NewConversionService(modelSelector *ModelSelectorService, cfg *config.Config) *ConversionService {
24+
func NewConversionService(modelSelector *ModelSelectorService, cfg *config.Config, logger *logrus.Logger) *ConversionService {
2225
return &ConversionService{
2326
modelSelector: modelSelector,
2427
config: cfg,
28+
logger: logger,
2529
}
2630
}
2731

@@ -32,12 +36,29 @@ func (s *ConversionService) isClaudeModel(targetModelName string) bool {
3236

3337
// ConvertAnthropicToOpenAI converts an Anthropic request to OpenAI format
3438
func (s *ConversionService) ConvertAnthropicToOpenAI(req *models.AnthropicRequest, fallbackModel string) (*models.OpenAIRequest, error) {
39+
// Debug log: original Anthropic request
40+
if reqBytes, err := json.MarshalIndent(req, "", " "); err == nil {
41+
s.logger.WithFields(logrus.Fields{
42+
"original_request": string(reqBytes),
43+
}).Debug("Original Anthropic request")
44+
} else {
45+
s.logger.WithFields(logrus.Fields{
46+
"error": err.Error(),
47+
}).Error("Failed to marshal original request")
48+
}
49+
3550
// Use model selector to choose the appropriate model
3651
selectedModel := fallbackModel
3752
if s.modelSelector != nil {
3853
selectedModel = s.modelSelector.SelectModel(req.Model, req)
3954
}
4055

56+
s.logger.WithFields(logrus.Fields{
57+
"selected_model": selectedModel,
58+
"original_model": req.Model,
59+
"fallback_model": fallbackModel,
60+
}).Debug("Model selection completed")
61+
4162
openAIReq := &models.OpenAIRequest{
4263
Model: selectedModel,
4364
MaxTokens: req.MaxTokens, // Use the max_tokens from the original request
@@ -76,6 +97,17 @@ func (s *ConversionService) ConvertAnthropicToOpenAI(req *models.AnthropicReques
7697
}
7798
}
7899

100+
// Debug log: converted OpenAI request
101+
if openAIBytes, err := json.MarshalIndent(openAIReq, "", " "); err == nil {
102+
s.logger.WithFields(logrus.Fields{
103+
"converted_request": string(openAIBytes),
104+
}).Debug("Converted OpenAI request")
105+
} else {
106+
s.logger.WithFields(logrus.Fields{
107+
"error": err.Error(),
108+
}).Error("Failed to marshal converted request")
109+
}
110+
79111
return openAIReq, nil
80112
}
81113

@@ -581,6 +613,17 @@ func (s *ConversionService) convertToolChoice(choice *models.AnthropicToolChoice
581613

582614
// ConvertOpenAIToAnthropic converts OpenAI response to Anthropic format
583615
func (s *ConversionService) ConvertOpenAIToAnthropic(resp *models.OpenAIResponse, originalModel string) (*models.AnthropicResponse, error) {
616+
// Debug log: original OpenAI response
617+
if respBytes, err := json.MarshalIndent(resp, "", " "); err == nil {
618+
s.logger.WithFields(logrus.Fields{
619+
"original_response": string(respBytes),
620+
}).Debug("Original OpenAI response")
621+
} else {
622+
s.logger.WithFields(logrus.Fields{
623+
"error": err.Error(),
624+
}).Error("Failed to marshal original response")
625+
}
626+
584627
if len(resp.Choices) == 0 {
585628
return nil, fmt.Errorf("no choices in OpenAI response")
586629
}
@@ -606,6 +649,17 @@ func (s *ConversionService) ConvertOpenAIToAnthropic(resp *models.OpenAIResponse
606649
}
607650
anthropicResp.Content = content
608651

652+
// Debug log: converted Anthropic response
653+
if anthropicBytes, err := json.MarshalIndent(anthropicResp, "", " "); err == nil {
654+
s.logger.WithFields(logrus.Fields{
655+
"converted_response": string(anthropicBytes),
656+
}).Debug("Converted Anthropic response")
657+
} else {
658+
s.logger.WithFields(logrus.Fields{
659+
"error": err.Error(),
660+
}).Error("Failed to marshal converted response")
661+
}
662+
609663
return anthropicResp, nil
610664
}
611665

0 commit comments

Comments
 (0)