Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 7 additions & 8 deletions conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,13 +43,12 @@ def _is_under_sample_root(path_obj: pathlib.Path) -> bool:
return False


def pytest_collect_file(parent, path):
def pytest_collect_file(parent: pytest.Collector, file_path: pathlib.Path):
"""
PyTest collection hook: decide whether *path* should become a test item.
`path` is a py.path.local object; convert to Path for easier checks.
PyTest collection hook: decide whether *file_path* should become a test item.
"""
if path.ext == ".py" and _is_under_sample_root(pathlib.Path(path)):
return SampleItem.from_parent(parent, name=path.basename, fspath=path)
if file_path.suffix == ".py" and _is_under_sample_root(file_path):
return SampleItem.from_parent(parent, name=file_path.name, path=file_path)


class SampleItem(pytest.Item):
Expand All @@ -60,11 +59,11 @@ class SampleItem(pytest.Item):

def runtest(self):
# Execute the script in its own namespace.
runpy.run_path(str(self.fspath))
runpy.run_path(str(self.path))

def repr_failure(self, excinfo):
# Nicely format any exception raised during runtest().
return f"Sample {self.fspath} failed:\n{excinfo.value}"
return f"Sample {self.path} failed:\n{excinfo.value}"

def reportinfo(self):
return self.fspath, 0, "sample script"
return self.path, 0, "sample script"
13 changes: 12 additions & 1 deletion migration/.dockerignore
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,18 @@ output/
__pycache__/
*.pyc
*.pyo
.pytest_cache/
.ruff_cache/
.mypy_cache/

# Migration output
migration_*.json
agent_*.json

# Local development environments
.venv/
venv/

# Coverage artifacts
.coverage
htmlcov/
122 changes: 74 additions & 48 deletions migration/.env.example
Original file line number Diff line number Diff line change
@@ -1,53 +1,79 @@
# V1 to V2 Migration Environment Configuration
# Copy this file to .env and fill in your values
# Copy this file to .env and fill in only the values you need.
#
# The rewritten migration tool is production-first:
# - Source assistant definitions come from the legacy API, a project endpoint,
# a project connection string, or Cosmos DB.
# - Target agent creation always uses Foundry Agent Service through
# AIProjectClient.create_version(...).
# - Runtime validation always uses conversations + responses.

# Azure Project Configuration (choose one method)
# Method 1: Project Endpoint
PROJECT_ENDPOINT_URL=https://your-project-name.cognitiveservices.azure.com
# -----------------------------------------------------------------------------
# Source input settings
# -----------------------------------------------------------------------------

# Method 2: Project Connection String
# PROJECT_CONNECTION_STRING=endpoint=https://your-project.cognitiveservices.azure.com;subscriptionid=your-sub-id;resourcegroupname=your-rg;projectname=your-project
# Legacy Assistants API input
AGENTS_HOST=eastus.api.azureml.ms
AGENTS_SUBSCRIPTION=<legacy-subscription-id>
AGENTS_RESOURCE_GROUP=<legacy-resource-group>
AGENTS_WORKSPACE=<legacy-workspace-name>
AGENTS_API_VERSION=2025-05-15-preview

# Cosmos DB Configuration (optional - for Cosmos input/output)
# Optional fallback name kept for compatibility with older wrappers
# ASSISTANT_API_VERSION=2025-05-15-preview

# Cosmos DB input
# Prefer COSMOS_CONNECTION_STRING, but COSMOS_DB_CONNECTION_STRING is still accepted.
# COSMOS_CONNECTION_STRING=AccountEndpoint=https://your-cosmos.documents.azure.com:443/;AccountKey=your-key==;
# COSMOS_DB_CONNECTION_STRING=AccountEndpoint=https://your-cosmos.documents.azure.com:443/;AccountKey=your-key==;
# COSMOS_DB_DATABASE_NAME=assistants
# COSMOS_DB_CONTAINER_NAME=v1_assistants

# OpenAI v1 API Configuration (optional - for v1 API input)
# ASSISTANT_API_BASE=https://api.openai.com/v1
# ASSISTANT_API_KEY=sk-your-openai-key
# ASSISTANT_API_VERSION=v1

# Azure OpenAI v1 Configuration (optional - alternative to OpenAI)
# ASSISTANT_API_BASE=https://your-aoai.openai.azure.com
# ASSISTANT_API_KEY=your-azure-openai-key
# ASSISTANT_API_VERSION=2024-02-15-preview

# v2 API Configuration (optional - for v2 API output)
# V2_API_BASE=https://your-v2-api.cognitiveservices.azure.com
# V2_API_KEY=your-v2-api-key
# V2_API_VERSION=2024-05-01-preview

# Azure Authentication (optional - for service principal auth)
# AZURE_TENANT_ID=your-tenant-id
# AZURE_CLIENT_ID=your-client-id
# AZURE_CLIENT_SECRET=your-client-secret
# AZURE_SUBSCRIPTION_ID=your-subscription-id
# AZURE_RESOURCE_GROUP=your-resource-group
# AZURE_PROJECT_NAME=your-project-name

# Example configurations for common scenarios:

# Scenario 1: Migrate from Azure AI Project to v2 API
# PROJECT_ENDPOINT_URL=https://myproject-eastus.cognitiveservices.azure.com
# V2_API_BASE=https://myproject-v2-eastus.cognitiveservices.azure.com
# V2_API_KEY=your-v2-key

# Scenario 2: Migrate from OpenAI to Cosmos DB
# ASSISTANT_API_BASE=https://api.openai.com/v1
# ASSISTANT_API_KEY=sk-your-key
# COSMOS_DB_CONNECTION_STRING=AccountEndpoint=https://...;AccountKey=...;

# Scenario 3: Migrate from Cosmos DB to Azure AI Project v2
# COSMOS_DB_CONNECTION_STRING=AccountEndpoint=https://...;AccountKey=...;
# PROJECT_ENDPOINT_URL=https://myproject.cognitiveservices.azure.com
COSMOS_DB_DATABASE_NAME=testDB2
COSMOS_DB_CONTAINER_NAME=testContainer1

# -----------------------------------------------------------------------------
# Production Foundry target settings
# -----------------------------------------------------------------------------

# Optional override when the production project endpoint does not match the
# default hostname construction from --production-resource.
# Example: https://your-resource.services.ai.azure.com/api/projects/your-project
# PRODUCTION_PROJECT_ENDPOINT=https://your-resource.services.ai.azure.com/api/projects/your-project

# Optional override for the project name used when building a production
# endpoint from --production-resource.
# PRODUCTION_PROJECT_NAME=your-project

# -----------------------------------------------------------------------------
# Authentication
# -----------------------------------------------------------------------------

# The docker auth wrappers normally populate these automatically.
# Set them only if you are running the script directly and already have tokens.
# AZ_TOKEN=<source-tenant-access-token>
# PRODUCTION_TOKEN=<production-tenant-access-token>

# Optional service-principal auth for DefaultAzureCredential fallback
# AZURE_TENANT_ID=<tenant-id>
# AZURE_CLIENT_ID=<client-id>
# AZURE_CLIENT_SECRET=<client-secret>

# Optional: request a prerelease azure-ai-projects build inside the container
# for project connection string scenarios.
# NEED_BETA_VERSION=true

# -----------------------------------------------------------------------------
# Example scenarios
# -----------------------------------------------------------------------------

# Source from legacy API, target an existing production Foundry project
# AGENTS_HOST=eastus.api.azureml.ms
# AGENTS_SUBSCRIPTION=<legacy-subscription-id>
# AGENTS_RESOURCE_GROUP=<legacy-resource-group>
# AGENTS_WORKSPACE=<legacy-workspace-name>
# PRODUCTION_PROJECT_ENDPOINT=https://nextgen-eastus.services.ai.azure.com/api/projects/nextgen-eastus

# Source from Cosmos DB, target a production Foundry project name override
# COSMOS_CONNECTION_STRING=AccountEndpoint=https://...;AccountKey=...;
# PRODUCTION_PROJECT_NAME=my-foundry-project

# Project connection string source with prerelease SDK request in Docker
# NEED_BETA_VERSION=true
30 changes: 30 additions & 0 deletions migration/CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
# Changelog

All notable changes to the migration solution are documented in this file.

## Released March 20th, 2026

### Changed

- Reworked migration output to target the current Foundry Agent Service model instead of legacy assistant-style write APIs.
- Standardized agent creation on `AIProjectClient.agents.create_version(...)` with explicit `PromptAgentDefinition` payloads.
- Moved runtime validation to `project.get_openai_client()` using conversations plus responses for post-migration smoke testing.
- Updated source ingestion so migrations can still read from the legacy Assistants API, project endpoints, project connection strings, or Cosmos DB while always writing to the Foundry Agent Service target.
- Refreshed Docker packaging and dependency guidance around `azure-ai-projects>=2.0.0`, with optional prerelease support for connection-string helper scenarios.
- Aligned `.env.example` with the rewritten production-first environment model and current variable names.
- Removed stale Docker wrapper passthrough for unused legacy variables such as `ASSISTANT_API_BASE`, `PROJECT_ENDPOINT_URL`, and `V2_API_BASE`.

### Added

- Offline pytest coverage for connection string parsing, tool translation, prompt definition building, orchestration flow, and conversations/responses validation behavior.
- Wrapper regression coverage for PowerShell `--help`, Bash syntax validation, and Bash `--help` execution in the migration test suite.
- Explicit unsupported-tool warnings and recommendations for `connected_agent`, `event_binding`, and `output_binding`.
- Optional post-migration test tool injection for function, MCP, computer use, image generation, and Azure Function scenarios.

### Fixed

- Modernized the repository pytest collection hook to avoid the pytest 9 deprecation warning.
- Updated Docker context ignores to exclude local virtual environments, test caches, and coverage artifacts.
- Added a true help-only path to the PowerShell Docker wrapper so usage validation does not require Docker or production arguments.
- Reworked the Bash Docker wrapper argument parsing and normalized shell compatibility so `bash -n` and help-mode validation succeed on Windows-hosted workspaces.
- Confirmed local migration validation with compile checks, wrapper smoke checks, and an expanded pytest suite passing with 11 tests.
13 changes: 6 additions & 7 deletions migration/Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -20,9 +20,8 @@ COPY requirements.txt .
# Install Python dependencies from requirements.txt
RUN pip install -r requirements.txt

# Copy the migration script and supporting files
# Copy the migration script
COPY v1_to_v2_migration.py .
COPY read_cosmos_data.py .

# Create a non-root user for security
RUN useradd -m -u 1000 migration
Expand All @@ -37,13 +36,13 @@ RUN apt-get update && apt-get install -y gosu && rm -rf /var/lib/apt/lists/*
RUN echo '#!/bin/bash\n\
set -e\n\
if [ "$NEED_BETA_VERSION" = "true" ]; then\n\
echo "🔧 Installing azure-ai-projects beta version for connection string support..."\n\
pip install --quiet --upgrade azure-ai-projects==1.0.0b10\n\
echo "✅ Beta version 1.0.0b10 installed"\n\
echo "Installing prerelease azure-ai-projects build for connection string support..."\n\
pip install --quiet --upgrade --pre azure-ai-projects\n\
echo "Prerelease azure-ai-projects installed"\n\
else\n\
echo "Using standard azure-ai-projects version 1.0.0"\n\
echo "Using stable azure-ai-projects 2.x from requirements.txt"\n\
fi\n\
echo "🔐 Switching to migration user..."\n\
echo "Switching to migration user..."\n\
chown -R migration:migration /app\n\
exec gosu migration python v1_to_v2_migration.py "$@"\n\
' > /app/entrypoint.sh && chmod +x /app/entrypoint.sh
Expand Down
Loading
Loading