Skip to content
This repository was archived by the owner on Nov 27, 2024. It is now read-only.

Commit b1d2358

Browse files
author
James Tayler
committed
ensure tests run sequentially
1 parent 3ac21b0 commit b1d2358

File tree

6 files changed

+21
-10
lines changed

6 files changed

+21
-10
lines changed
Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
namespace OnnxStack.IntegrationTests;

/// <summary>
/// Single xUnit collection that every integration test class joins via
/// <c>[Collection("IntegrationTests")]</c>, so tests in different classes
/// execute sequentially rather than in parallel.
/// </summary>
[CollectionDefinition("IntegrationTests")]
public class IntegrationTestCollection { }

OnnxStack.IntegrationTests/StableDiffusionTests.cs

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
using System.Security.Cryptography;
21
using FluentAssertions;
32
using FluentAssertions.Execution;
43
using Microsoft.Extensions.DependencyInjection;
@@ -13,13 +12,10 @@
1312
namespace OnnxStack.IntegrationTests;
1413

1514
/// <summary>
16-
/// These tests just run on CPU execution provider for now, but could switch it to CUDA and run on GPU
17-
/// if the necessary work is done to setup the docker container to allow GPU passthrough to the container.
18-
/// See https://blog.roboflow.com/use-the-gpu-in-docker/ for an example of how to do this.
19-
///
20-
/// Can then also setup a self-hosted runner in Github Actions to run the tests on your own GPU as part of the CI/CD pipeline.
15+
/// These tests could be run via a self-hosted runner in GitHub Actions, executing on your own GPU as part of the CI/CD pipeline.
2116
/// Maybe something like https://www.youtube.com/watch?v=rVq-SCNyxVc
2217
/// </summary>
18+
[Collection("IntegrationTests")]
2319
public class StableDiffusionTests
2420
{
2521
private readonly IStableDiffusionService _stableDiffusion;

OnnxStack.IntegrationTests/Usings.cs

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1,4 @@
global using Xunit;

// All integration tests must run sequentially so parallel runs do not overwhelm the GPU.
[assembly: CollectionBehavior(DisableTestParallelization = true)]

OnnxStackCore.sln

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "SolutionItems", "SolutionIt
1313
.gitignore = .gitignore
1414
docker-compose.yml = docker-compose.yml
1515
README.md = README.md
16-
run-docker-tests.sh = run-docker-tests.sh
16+
run-integration-tests-cuda.sh = run-integration-tests-cuda.sh
1717
EndProjectSection
1818
EndProject
1919
Global

run-docker-tests.sh

Lines changed: 0 additions & 2 deletions
This file was deleted.

run-integration-tests-cuda.sh

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
#!/bin/bash
# Prerequisites for running these integration tests:
#   - an NVIDIA GPU with sufficient VRAM
#   - NVIDIA drivers installed on the host system
#   - nvidia-container-toolkit installed on the host system
#     (see: https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/latest/install-guide.html)
# Note: nvidia-smi reports peak VRAM usage close to 24GB while the tests run.
docker-compose up --build

0 commit comments

Comments
 (0)