Skip to content

Commit 054d921

Browse files
authored
Export InferenceProviderMappingEntry from types.ts (#1540)
Currently `InferenceProviderMappingEntry` is defined in a module that is not exported. This PR moves it to the `types.ts` module for better reusability (will be useful for huggingface/doc-builder#574 (comment))
1 parent 7e811fa commit 054d921

File tree

5 files changed: +16 −20 lines

packages/inference/src/lib/getInferenceProviderMapping.ts

Lines changed: 1 addition & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -2,23 +2,12 @@ import type { WidgetType } from "@huggingface/tasks";
22
import { HF_HUB_URL } from "../config.js";
33
import { HARDCODED_MODEL_INFERENCE_MAPPING } from "../providers/consts.js";
44
import { EQUIVALENT_SENTENCE_TRANSFORMERS_TASKS } from "../providers/hf-inference.js";
-import type { InferenceProvider, InferenceProviderOrPolicy, ModelId } from "../types.js";
+import type { InferenceProvider, InferenceProviderMappingEntry, InferenceProviderOrPolicy, ModelId } from "../types.js";
66
import { typedInclude } from "../utils/typedInclude.js";
77
import { InferenceClientHubApiError, InferenceClientInputError } from "../errors.js";
88

99
export const inferenceProviderMappingCache = new Map<ModelId, InferenceProviderMappingEntry[]>();
1010

-export interface InferenceProviderMappingEntry {
-	adapter?: string;
-	adapterWeightsPath?: string;
-	hfModelId: ModelId;
-	provider: string;
-	providerId: string;
-	status: "live" | "staging";
-	task: WidgetType;
-	type?: "single-model" | "tag-filter";
-}
-
2211
/**
2312
* Normalize inferenceProviderMapping to always return an array format.
2413
* This provides backward and forward compatibility for the API changes.

packages/inference/src/lib/makeRequestOptions.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,6 @@
11
import { HF_HEADER_X_BILL_TO, HF_HUB_URL } from "../config.js";
22
import { PACKAGE_NAME, PACKAGE_VERSION } from "../package.js";
-import type { InferenceTask, Options, RequestArgs } from "../types.js";
-import type { InferenceProviderMappingEntry } from "./getInferenceProviderMapping.js";
+import type { InferenceTask, InferenceProviderMappingEntry, Options, RequestArgs } from "../types.js";
54
import { getInferenceProviderMapping } from "./getInferenceProviderMapping.js";
65
import type { getProviderHelper } from "./getProviderHelper.js";
76
import { isUrl } from "./isUrl.js";

packages/inference/src/providers/consts.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
-import type { InferenceProviderMappingEntry } from "../lib/getInferenceProviderMapping.js";
-import type { InferenceProvider } from "../types.js";
+import type { InferenceProvider, InferenceProviderMappingEntry } from "../types.js";
32
import { type ModelId } from "../types.js";
43

54
/**

packages/inference/src/snippets/getInferenceSnippets.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -8,10 +8,9 @@ import {
88
} from "@huggingface/tasks";
99
import type { PipelineType, WidgetType } from "@huggingface/tasks";
1010
import type { ChatCompletionInputMessage, GenerationParameters } from "@huggingface/tasks";
-import type { InferenceProviderMappingEntry } from "../lib/getInferenceProviderMapping.js";
1211
import { getProviderHelper } from "../lib/getProviderHelper.js";
1312
import { makeRequestOptionsFromResolvedModel } from "../lib/makeRequestOptions.js";
-import type { InferenceProviderOrPolicy, InferenceTask, RequestArgs } from "../types.js";
+import type { InferenceProviderMappingEntry, InferenceProviderOrPolicy, InferenceTask, RequestArgs } from "../types.js";
1514
import { templates } from "./templates.exported.js";
1615

1716
export type InferenceSnippetOptions = {

packages/inference/src/types.ts

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
-import type { ChatCompletionInput, PipelineType } from "@huggingface/tasks";
-import type { InferenceProviderMappingEntry } from "./lib/getInferenceProviderMapping.js";
+import type { ChatCompletionInput, PipelineType, WidgetType } from "@huggingface/tasks";
32

43
/**
54
* HF model id, like "meta-llama/Llama-3.3-70B-Instruct"
@@ -63,6 +62,17 @@ export type InferenceProvider = (typeof INFERENCE_PROVIDERS)[number];
6362

6463
export type InferenceProviderOrPolicy = (typeof PROVIDERS_OR_POLICIES)[number];
6564

+export interface InferenceProviderMappingEntry {
+	adapter?: string;
+	adapterWeightsPath?: string;
+	hfModelId: ModelId;
+	provider: string;
+	providerId: string;
+	status: "live" | "staging";
+	task: WidgetType;
+	type?: "single-model" | "tag-filter";
+}
+
6676
export interface BaseArgs {
6777
/**
6878
* The access token to use. Without it, you'll get rate-limited quickly.

0 commit comments

Comments
 (0)