Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
47 commits
Select commit Hold shift + click to select a range
a6d73ee
Attempt to fix structured outputs for OpenAI
AggressivelyMeows May 3, 2025
b22713e
feat: enhance OpenAI schema handling and model parsing in completions…
AggressivelyMeows May 5, 2025
7e7455d
Merge remote-tracking branch 'origin/main' into llm-do
AggressivelyMeows May 5, 2025
e0612a1
Fix tests
AggressivelyMeows May 5, 2025
5c0975b
Enhance LLM response formatting and update model configurations
AggressivelyMeows May 6, 2025
9bd92a2
test: search
sam-lippert May 7, 2025
3b63bec
Update LLM provider endpoint and enhance response handling logic
AggressivelyMeows May 7, 2025
58e5c74
Merge branch 'llm-do' of https://github.com/drivly/ai into llm-do
AggressivelyMeows May 7, 2025
d9b548a
test: `models` and `providers`
sam-lippert May 7, 2025
2842b7d
Fix PDF support with OpenAI
AggressivelyMeows May 7, 2025
6cad430
Merge branch 'llm-do' of https://github.com/drivly/ai into llm-do
AggressivelyMeows May 7, 2025
ae18467
test: check fails
sam-lippert May 7, 2025
7b5edf8
test: move to sdk folder
sam-lippert May 8, 2025
d38eef1
Fix text streaming
AggressivelyMeows May 8, 2025
1b79a9c
Merge branch 'llm-do' of https://github.com/drivly/ai into llm-do
AggressivelyMeows May 8, 2025
254a8f7
test: add OpenRouter feature tests for llm.do service
devin-ai-integration[bot] May 8, 2025
07ebb0e
fix: update OpenRouter tests to fix TypeScript errors and add test image
devin-ai-integration[bot] May 8, 2025
93108fe
fix: handle Claude models with thinking capability correctly
devin-ai-integration[bot] May 8, 2025
9fc0566
refactor: format
sam-lippert May 8, 2025
443ae92
test: move to todo
sam-lippert May 8, 2025
ab901c4
feat: tools testing, fix tool schemas, add model and provider icons
AggressivelyMeows May 8, 2025
f5f7bbb
Merge branch 'llm-do' of https://github.com/drivly/ai into llm-do
AggressivelyMeows May 8, 2025
de32d1b
chore: remove TypeScript error suppression in build-models.ts
AggressivelyMeows May 8, 2025
4d5ffd1
test: models/providers
sam-lippert May 9, 2025
d56bd6c
chore: update dependencies and improve model handling
AggressivelyMeows May 12, 2025
f93e384
Merge branch 'llm-do' of https://github.com/drivly/ai into llm-do
AggressivelyMeows May 12, 2025
cc9618f
refactor: format
sam-lippert May 13, 2025
bdd2a27
feat: auto router passthrough
sam-lippert May 13, 2025
ad83a61
feat: enhance LLM request handling and model options
AggressivelyMeows May 13, 2025
4e2eb43
Merge branch 'llm-do' of https://github.com/drivly/ai into llm-do
AggressivelyMeows May 13, 2025
a2ffaa7
feat: enhance error handling and model integration in LLM processing
AggressivelyMeows May 14, 2025
51a5c8b
Merge remote-tracking branch 'origin/main' into llm-do
AggressivelyMeows May 14, 2025
8abbcfd
refactor: update LLM processing and model definitions
AggressivelyMeows May 15, 2025
b31b8d1
Merge remote-tracking branch 'origin/main' into llm-do
AggressivelyMeows May 15, 2025
31bf6a0
refactor: enhance model management and update constants
AggressivelyMeows May 15, 2025
3ae365f
refactor: enhance AI provider integration and model definitions
AggressivelyMeows May 15, 2025
a176aa5
chore: Fix types
AggressivelyMeows May 15, 2025
3e40458
refactor: enhance model options handling in AI provider and chat comp…
AggressivelyMeows May 15, 2025
0b2d082
Merge pull request #2255 from drivly/llm-do
AggressivelyMeows May 15, 2025
a7aa335
fix: Add defensive coding and error handling to domain processing loo…
devin-ai-integration[bot] May 15, 2025
17074ea
Merge pull request #2256 from drivly/devin/ENG-858-main-1747341474
nathanclevenger May 15, 2025
52dcf23
feat: add stripe integration
sam-lippert May 16, 2025
1788d9b
feat: add credit endpoint
sam-lippert May 16, 2025
f4e2966
refactor: remove logs
sam-lippert May 16, 2025
394eec3
ENG-859: Fix SDK binary generation and permissions
devin-ai-integration[bot] May 16, 2025
135ebc3
ENG-859: Fix SDK bin.js compilation
devin-ai-integration[bot] May 16, 2025
42abd0f
ENG-859: Fix SDK tsconfig.json configuration
devin-ai-integration[bot] May 16, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
18 changes: 17 additions & 1 deletion .vscode/launch.json
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
"version": "0.2.0",
"configurations": [
{
"name": "Next.js: debug full stack",
"name": "Next.js: debug full stack (Chrome)",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/node_modules/next/dist/bin/next",
Expand All @@ -20,6 +20,22 @@
},
"cwd": "${workspaceFolder}"
},
{
"name": "Next.js: debug full stack (Edge)",
"type": "node",
"request": "launch",
"program": "${workspaceFolder}/node_modules/next/dist/bin/next",
"runtimeArgs": ["--inspect"],
"skipFiles": ["<node_internals>/**"],
"serverReadyAction": {
"action": "debugWithEdge",
"killOnServerStop": true,
"pattern": "- Local:.+(https?://.+)",
"uriFormat": "%s",
"webRoot": "${workspaceFolder}"
},
"cwd": "${workspaceFolder}"
},
{
"type": "node",
"request": "launch",
Expand Down
40 changes: 40 additions & 0 deletions app/(apis)/llm/chat/completions/analytics.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
import { after } from 'next/server'
import config from '@/payload.config'
import { getPayload } from 'payload'

// The two analytics event categories emitted by the llm.do completions
// endpoint: one per tool invocation, one per completed completion request.
type EventType = 'llm.tool-use' | 'llm.completion'

// Must at least have a user, everything else is optional.
// The index signature allows arbitrary extra context (model, latency, ...).
// NOTE(review): values are presumably JSON-serializable since they are
// persisted via payload.create — confirm with callers.
type Metadata = {
  user: string,
  [key: string]: any
}

/**
 * Records one analytics event for the llm.do service in the `events`
 * collection, attributed to the "Do More Work" tenant.
 *
 * The write is deferred with Next.js `after` (the newer replacement for
 * `waitUntil`), so it runs once the response has been sent and never adds
 * latency to the request. Fire-and-forget: nothing is returned, and a
 * failed write is logged rather than thrown.
 *
 * @param type     - Event category ('llm.tool-use' | 'llm.completion').
 * @param metadata - Per-event context; must include the acting `user`.
 * @param data     - Arbitrary event payload stored alongside the metadata.
 */
export function createDataPoint(type: EventType, metadata: Metadata, data: Record<string, any>) {
  const recordEvent = async () => {
    const payload = await getPayload({ config })

    try {
      await payload.create({
        collection: 'events',
        data: {
          type,
          source: 'llm.do',
          data,
          metadata,
          // Do More Work tenant.
          tenant: '67eff7d61cb630b09c9de598',
        },
      })
    } catch (error) {
      // Analytics must never break the request path — log and move on.
      console.error('[ANALYTICS] Error creating log', error)
    }
  }

  // Runs after the response is done being sent.
  after(recordEvent)
}
Loading
Loading