Skip to content

Commit 3a9cdf7

Browse files
samfreund and mcm001 authored
Verify native library integrity during extraction/load (#2368)
## Description We've hit a problem where the `CombinedRuntimeLoader` extracts native files, but gets interrupted in the middle. This is bad, cause all `CombinedRuntimeLoader` used to check a file was its existence. This change uses the hash of the file to verify that it's correct. This will be checked at runtime, everytime, if the file is extant. If this check fails, we will delete the extant file and attempt to reextract. We also check a newly extracted file, if that hash does not match we error. Note that this is reliant on PhotonVision/wpilib-tool-plugin#8 and should follow #2367 ## Meta Merge checklist: - [x] Pull Request title is [short, imperative summary](https://cbea.ms/git-commit/) of proposed changes - [x] The description documents the _what_ and _why_, including events that led to this PR - [ ] If this PR changes behavior or adds a feature, user documentation is updated - [ ] If this PR touches photon-serde, all messages have been regenerated and hashes have not changed unexpectedly - [ ] If this PR touches configuration, this is backwards compatible with all settings going back to the previous seasons's last release (seasons end after champs ends) - [ ] If this PR touches pipeline settings or anything related to data exchange, the frontend typing is updated - [ ] If this PR addresses a bug, a regression test for it is added --------- Co-authored-by: Matt M <matthew.morley.ca@gmail.com>
1 parent 95f637f commit 3a9cdf7

File tree

4 files changed

+117
-27
lines changed

4 files changed

+117
-27
lines changed

.github/workflows/build.yml

Lines changed: 25 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -443,9 +443,32 @@ jobs:
443443
- uses: actions/download-artifact@v7
444444
with:
445445
name: ${{ matrix.artifact-name }}
446-
- run: java -jar ${{ matrix.extraOpts }} *.jar --smoketest
446+
# The jar is run twice to exercise different code paths.
447+
- run: |
448+
echo "=== First run ==="
449+
java -jar ${{ matrix.extraOpts }} *.jar --smoketest
450+
echo "=== Checking for files to corrupt ==="
451+
find ~ -type f \( -name "*.so" -o -name "*.dylib" \) | head -20
452+
if [ -d ~/.wpilib ]; then
453+
echo "~/.wpilib directory exists"
454+
echo "Contents of ~/.wpilib:"
455+
find ~/.wpilib -type f \( -name "*.so" -o -name "*.dylib" \) | head -10
456+
RANDOM_FILE=$(find ~/.wpilib -type f \( -name "*.so" -o -name "*.dylib" \) | sort -R | head -n 1)
457+
if [ ! -z "$RANDOM_FILE" ]; then
458+
echo "Corrupting file: $RANDOM_FILE"
459+
echo "corrupted data" > "$RANDOM_FILE"
460+
else
461+
echo "No .so or .dylib files found in ~/.wpilib"
462+
fi
463+
else
464+
echo "~/.wpilib directory does not exist"
465+
fi
466+
echo "=== Second run ==="
467+
java -jar ${{ matrix.extraOpts }} *.jar --smoketest
447468
if: ${{ (matrix.os) != 'windows-latest' }}
448-
- run: ls *.jar | %{ Write-Host "Running $($_.Name)"; Start-Process "java" -ArgumentList "-jar `"$($_.FullName)`" --smoketest" -NoNewWindow -Wait; break }
469+
- run: |
470+
ls *.jar | %{ Write-Host "Running $($_.Name)"; Start-Process "java" -ArgumentList "-jar `"$($_.FullName)`" --smoketest" -NoNewWindow -Wait; break }
471+
ls *.jar | %{ Write-Host "Running $($_.Name)"; Start-Process "java" -ArgumentList "-jar `"$($_.FullName)`" --smoketest" -NoNewWindow -Wait; break }
449472
if: ${{ (matrix.os) == 'windows-latest' }}
450473
451474
build-image:

build.gradle

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ plugins {
55
id "com.diffplug.spotless" version "8.1.0"
66
id "edu.wpi.first.wpilib.repositories.WPILibRepositoriesPlugin" version "2020.2"
77
id "edu.wpi.first.GradleRIO" version "2026.2.1"
8-
id 'org.photonvision.tools.WpilibTools' version '2.3.3-photon'
8+
id 'org.photonvision.tools.WpilibTools' version '2.4.1-photon'
99
id 'com.google.protobuf' version '0.9.3' apply false
1010
id 'edu.wpi.first.GradleJni' version '1.1.0'
1111
id "org.ysb33r.doxygen" version "2.0.0" apply false

photon-targeting/src/main/java/org/photonvision/common/hardware/Platform.java

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,13 +17,13 @@
1717

1818
package org.photonvision.common.hardware;
1919

20-
import edu.wpi.first.util.CombinedRuntimeLoader;
2120
import java.io.BufferedReader;
2221
import java.io.File;
2322
import java.io.IOException;
2423
import java.nio.file.Files;
2524
import java.nio.file.Paths;
2625
import java.util.function.Supplier;
26+
import org.photonvision.jni.CombinedRuntimeLoader;
2727

2828
@SuppressWarnings({"unused", "doclint"})
2929
public enum Platform {

photon-targeting/src/main/java/org/photonvision/jni/CombinedRuntimeLoader.java

Lines changed: 90 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -18,24 +18,34 @@
1818

1919
package org.photonvision.jni;
2020

21-
import com.fasterxml.jackson.core.type.TypeReference;
2221
import com.fasterxml.jackson.databind.ObjectMapper;
22+
import java.io.BufferedInputStream;
2323
import java.io.File;
24+
import java.io.FileInputStream;
2425
import java.io.IOException;
2526
import java.nio.file.Files;
2627
import java.nio.file.Paths;
28+
import java.nio.file.StandardCopyOption;
29+
import java.security.DigestInputStream;
30+
import java.security.MessageDigest;
31+
import java.security.NoSuchAlgorithmException;
2732
import java.util.ArrayList;
28-
import java.util.HashMap;
33+
import java.util.HexFormat;
2934
import java.util.List;
3035
import java.util.Map;
3136
import java.util.Objects;
37+
import java.util.concurrent.CopyOnWriteArrayList;
3238

3339
/** Loads dynamic libraries for all platforms. */
3440
public final class CombinedRuntimeLoader {
3541
private CombinedRuntimeLoader() {}
3642

3743
private static String extractionDirectory;
3844

45+
private static final Object extractCompleteLock = new Object();
46+
private static boolean extractAndVerifyComplete = false;
47+
private static List<String> filesAlreadyExtracted = new CopyOnWriteArrayList<>();
48+
3949
/**
4050
* Returns library extraction directory.
4151
*
@@ -124,6 +134,27 @@ private static String getLoadErrorMessage(String libraryName, UnsatisfiedLinkErr
124134
return msg.toString();
125135
}
126136

137+
/**
138+
* Architecture-specific information containing file hashes for a specific CPU architecture (e.g.,
139+
* x86-64, arm64).
140+
*/
141+
public record ArchInfo(Map<String, String> fileHashes) {}
142+
143+
/**
144+
* Platform-specific information containing architectures for a specific OS platform (e.g., linux,
145+
* windows).
146+
*/
147+
public record PlatformInfo(Map<String, ArchInfo> architectures) {}
148+
149+
/** Overall resource information to be serialized */
150+
public record ResourceInformation(
151+
// Combined MD5 hash of all native resource files
152+
String hash,
153+
// Platform-specific native libraries organized by platform then architecture
154+
Map<String, PlatformInfo> platforms,
155+
// List of supported versions for these native resources
156+
List<String> versions) {}
157+
127158
/**
128159
* Extract a list of native libraries.
129160
*
@@ -133,48 +164,65 @@ private static String getLoadErrorMessage(String libraryName, UnsatisfiedLinkErr
133164
* @return List of all libraries that were extracted
134165
* @throws IOException Thrown if resource not found or file could not be extracted
135166
*/
136-
@SuppressWarnings("unchecked")
137167
public static <T> List<String> extractLibraries(Class<T> clazz, String resourceName)
138168
throws IOException {
139-
TypeReference<HashMap<String, Object>> typeRef = new TypeReference<>() {};
140169
ObjectMapper mapper = new ObjectMapper();
141-
Map<String, Object> map;
170+
ResourceInformation resourceInfo;
142171
try (var stream = clazz.getResourceAsStream(resourceName)) {
143-
map = mapper.readValue(stream, typeRef);
172+
resourceInfo = mapper.readValue(stream, ResourceInformation.class);
144173
}
145174

146175
var platformPath = Paths.get(getPlatformPath());
147176
var platform = platformPath.getName(0).toString();
148177
var arch = platformPath.getName(1).toString();
149178

150-
var platformMap = (Map<String, List<String>>) map.get(platform);
179+
var platformInfo = resourceInfo.platforms().get(platform);
180+
if (platformInfo == null) {
181+
throw new IOException("Platform " + platform + " not found in resource information");
182+
}
183+
184+
var archInfo = platformInfo.architectures().get(arch);
185+
if (archInfo == null) {
186+
throw new IOException("Architecture " + arch + " not found for platform " + platform);
187+
}
151188

152-
var fileList = platformMap.get(arch);
189+
// Map of <file to extract> to <hash we loaded from the JSON>
190+
Map<String, String> filenameToHash = archInfo.fileHashes();
153191

154192
var extractionPathString = getExtractionDirectory();
155193

156194
if (extractionPathString == null) {
157-
String hash = (String) map.get("hash");
195+
// Folder to extract to derived from overall hash
196+
String combinedHash = resourceInfo.hash();
158197

159198
var defaultExtractionRoot = getDefaultExtractionRoot();
160-
var extractionPath = Paths.get(defaultExtractionRoot, platform, arch, hash);
199+
var extractionPath = Paths.get(defaultExtractionRoot, platform, arch, combinedHash);
161200
extractionPathString = extractionPath.toString();
162201

163202
setExtractionDirectory(extractionPathString);
164203
}
165204

166205
List<String> extractedFiles = new ArrayList<>();
167206

168-
byte[] buffer = new byte[0x10000]; // 64K copy buffer
169-
170-
for (var file : fileList) {
207+
for (String file : filenameToHash.keySet()) {
171208
try (var stream = clazz.getResourceAsStream(file)) {
172209
Objects.requireNonNull(stream);
173210

174211
var outputFile = Paths.get(extractionPathString, new File(file).getName());
212+
213+
String fileHash = filenameToHash.get(file);
214+
175215
extractedFiles.add(outputFile.toString());
176216
if (outputFile.toFile().exists()) {
177-
continue;
217+
if (hashEm(outputFile.toFile()).equals(fileHash)) {
218+
continue;
219+
} else {
220+
// Hashes don't match, delete and re-extract
221+
System.err.println(
222+
outputFile.toAbsolutePath().toString()
223+
+ " failed validation - deleting and re-extracting");
224+
outputFile.toFile().delete();
225+
}
178226
}
179227
var parent = outputFile.getParent();
180228
if (parent == null) {
@@ -183,17 +231,32 @@ public static <T> List<String> extractLibraries(Class<T> clazz, String resourceN
183231
parent.toFile().mkdirs();
184232

185233
try (var os = Files.newOutputStream(outputFile)) {
186-
int readBytes;
187-
while ((readBytes = stream.read(buffer)) != -1) { // NOPMD
188-
os.write(buffer, 0, readBytes);
189-
}
234+
Files.copy(stream, outputFile, StandardCopyOption.REPLACE_EXISTING);
235+
}
236+
237+
if (!hashEm(outputFile.toFile()).equals(fileHash)) {
238+
throw new IOException("Hash of extracted file does not match expected hash");
190239
}
191240
}
192241
}
193242

194243
return extractedFiles;
195244
}
196245

246+
private static String hashEm(File f) throws IOException {
247+
try {
248+
MessageDigest fileHash = MessageDigest.getInstance("MD5");
249+
try (var dis =
250+
new DigestInputStream(new BufferedInputStream(new FileInputStream(f)), fileHash)) {
251+
dis.readAllBytes();
252+
}
253+
var ret = HexFormat.of().formatHex(fileHash.digest());
254+
return ret;
255+
} catch (NoSuchAlgorithmException e) {
256+
throw new IOException("Unable to verify extracted native files", e);
257+
}
258+
}
259+
197260
/**
198261
* Load a single library from a list of extracted files.
199262
*
@@ -229,12 +292,16 @@ public static void loadLibrary(String libraryName, List<String> extractedFiles)
229292
*/
230293
public static <T> void loadLibraries(Class<T> clazz, String... librariesToLoad)
231294
throws IOException {
232-
// Extract everything
233-
234-
var extractedFiles = extractLibraries(clazz, "/ResourceInformation.json");
295+
synchronized (extractCompleteLock) {
296+
if (extractAndVerifyComplete == false) {
297+
// Extract everything
298+
filesAlreadyExtracted = extractLibraries(clazz, "/ResourceInformation.json");
299+
extractAndVerifyComplete = true;
300+
}
235301

236-
for (var library : librariesToLoad) {
237-
loadLibrary(library, extractedFiles);
302+
for (var library : librariesToLoad) {
303+
loadLibrary(library, filesAlreadyExtracted);
304+
}
238305
}
239306
}
240307
}

0 commit comments

Comments (0)