57 changes: 57 additions & 0 deletions codegen/NativeLocalAISpec.g.h
@@ -0,0 +1,57 @@
/*
* This file is auto-generated from a NativeModule spec file in js.
*
* This is a C++ Spec class that should be used with MakeTurboModuleProvider to register native modules
* in a way that also verifies at compile time that the native module matches the interface required
* by the TurboModule JS spec.
*/
#pragma once
// clang-format off

#include <NativeModules.h>
#include <tuple>

namespace ArtificialChatModules {

struct LocalAISpec_LocalAICapabilities {
bool isSupported;
bool hasNPU;
bool hasGPU;
std::optional<std::string> modelName;
};


inline winrt::Microsoft::ReactNative::FieldMap GetStructInfo(LocalAISpec_LocalAICapabilities*) noexcept {
winrt::Microsoft::ReactNative::FieldMap fieldMap {
{L"isSupported", &LocalAISpec_LocalAICapabilities::isSupported},
{L"hasNPU", &LocalAISpec_LocalAICapabilities::hasNPU},
{L"hasGPU", &LocalAISpec_LocalAICapabilities::hasGPU},
{L"modelName", &LocalAISpec_LocalAICapabilities::modelName},
};
return fieldMap;
}

struct LocalAISpec : winrt::Microsoft::ReactNative::TurboModuleSpec {
static constexpr auto methods = std::tuple{
SyncMethod<LocalAISpec_LocalAICapabilities() noexcept>{0, L"checkCapabilities"},
Method<void(std::string, std::optional<std::string>, Promise<std::string>) noexcept>{1, L"generateText"},
};

template <class TModule>
static constexpr void ValidateModule() noexcept {
constexpr auto methodCheckResults = CheckMethods<TModule, LocalAISpec>();

REACT_SHOW_METHOD_SPEC_ERRORS(
0,
"checkCapabilities",
" REACT_SYNC_METHOD(checkCapabilities) LocalAISpec_LocalAICapabilities checkCapabilities() noexcept { /* implementation */ }\n"
" REACT_SYNC_METHOD(checkCapabilities) static LocalAISpec_LocalAICapabilities checkCapabilities() noexcept { /* implementation */ }\n");
REACT_SHOW_METHOD_SPEC_ERRORS(
1,
"generateText",
" REACT_METHOD(generateText) void generateText(std::string prompt, std::optional<std::string> systemInstructions, ::React::ReactPromise<std::string> &&result) noexcept { /* implementation */ }\n"
" REACT_METHOD(generateText) static void generateText(std::string prompt, std::optional<std::string> systemInstructions, ::React::ReactPromise<std::string> &&result) noexcept { /* implementation */ }\n");
}
};

} // namespace ArtificialChatModules
100 changes: 69 additions & 31 deletions src/AiQuery.tsx
@@ -4,6 +4,10 @@ import {
OpenAiApi,
CallOpenAi,
} from './OpenAI';
import {
CallLocalAI,
IsLocalAIAvailable,
} from './LocalAI';
import { AiSection } from './AiResponse';
import {
ChatSource,
@@ -130,36 +134,66 @@ Respond with the image prompt string in the required format. Do not respond conv
React.useEffect(() => {
if (isRequestForImage === false) {
setIsLoading(true);
CallOpenAi({
api: OpenAiApi.ChatCompletion,
apiKey: settingsContext.apiKey,
instructions: settingsContext.systemInstructions,
identifier: 'TEXT-ANSWER:',
prompt: prompt,
options: {
endpoint: settingsContext.aiEndpoint,
chatModel: settingsContext.chatModel,
promptHistory: chatHistory.entries.
filter((entry) => { return entry.responses !== undefined && entry.id < id; }).
map((entry) => { return {role: entry.type == ChatSource.Human ? 'user' : 'assistant', 'content': entry.responses ? entry.responses[0] : ''}; }),
},
onError: error => {
onResponse({
prompt: prompt,
responses: [error] ?? [''],
contentType: ChatContent.Error});
},
onResult: result => {
onResponse({
prompt: prompt,
responses: result ?? [''],
contentType: ChatContent.Text});
},
onComplete: () => {
setIsLoading(false);
chatScroll.scrollToEnd();
},
});

// Check if user prefers local AI and it's available
const shouldUseLocalAI = settingsContext.useLocalAI && IsLocalAIAvailable();

if (shouldUseLocalAI) {
// Use local AI for text generation
CallLocalAI({
instructions: settingsContext.systemInstructions,
identifier: 'LOCAL-TEXT-ANSWER:',
prompt: prompt,
onError: error => {
onResponse({
prompt: prompt,
responses: [error] ?? [''],
contentType: ChatContent.Error});
},
onResult: result => {
onResponse({
prompt: prompt,
responses: result ?? [''],
contentType: ChatContent.Text});
},
onComplete: () => {
setIsLoading(false);
chatScroll.scrollToEnd();
},
});
} else {
// Use OpenAI for text generation
CallOpenAi({
api: OpenAiApi.ChatCompletion,
apiKey: settingsContext.apiKey,
instructions: settingsContext.systemInstructions,
identifier: 'TEXT-ANSWER:',
prompt: prompt,
options: {
endpoint: settingsContext.aiEndpoint,
chatModel: settingsContext.chatModel,
promptHistory: chatHistory.entries.
filter((entry) => { return entry.responses !== undefined && entry.id < id; }).
map((entry) => { return {role: entry.type == ChatSource.Human ? 'user' : 'assistant', 'content': entry.responses ? entry.responses[0] : ''}; }),
},
onError: error => {
onResponse({
prompt: prompt,
responses: [error] ?? [''],
contentType: ChatContent.Error});
},
onResult: result => {
onResponse({
prompt: prompt,
responses: result ?? [''],
contentType: ChatContent.Text});
},
onComplete: () => {
setIsLoading(false);
chatScroll.scrollToEnd();
},
});
}
} else {
if (isRequestForImage == true && imagePrompt !== undefined) {
setIsLoading(true);
@@ -206,7 +240,11 @@ Respond with the image prompt string in the required format. Do not respond conv
<Text style={styles.text}>Generating image...</Text>
)
) : (
<Text style={styles.text}>Generating text...</Text>
<Text style={styles.text}>
{settingsContext.useLocalAI && IsLocalAIAvailable()
? 'Generating text using local AI...'
: 'Generating text...'}
</Text>
)
) : (
<Text style={styles.text}>Done loading</Text>
3 changes: 3 additions & 0 deletions src/App.tsx
@@ -27,6 +27,7 @@ function App(): JSX.Element {
const [showAboutPopup, setShowAboutPopup] = React.useState(false);
const [readToMeVoice, setReadToMeVoice] = React.useState<string>('');
const [systemInstructions, setSystemInstructions] = React.useState<string>('The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly. You may use markdown syntax in the response as appropriate.');
const [useLocalAI, setUseLocalAI] = React.useState<boolean>(false);

const isDarkMode = currentTheme === 'dark';
const isHighContrast = false;
@@ -49,6 +50,8 @@ function App(): JSX.Element {
setReadToMeVoice: setReadToMeVoice,
systemInstructions: systemInstructions,
setSystemInstructions: setSystemInstructions,
useLocalAI: useLocalAI,
setUseLocalAI: setUseLocalAI,
};

const popups = {
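The Settings UI that would flip this new useLocalAI flag is not part of this diff, so the snippet below is only a sketch: the LocalAIToggle component, its props, and the Switch-based row are assumptions layered on top of the context fields added above.

import React from 'react';
import { Switch, Text, View } from 'react-native';
import { GetLocalAICapabilities, IsLocalAIAvailable } from './LocalAI';

// Hypothetical settings row (not in this change) showing how the new
// useLocalAI / setUseLocalAI context fields could drive a toggle.
type LocalAIToggleProps = {
  useLocalAI: boolean;
  setUseLocalAI: (value: boolean) => void;
};

function LocalAIToggle({useLocalAI, setUseLocalAI}: LocalAIToggleProps): JSX.Element {
  const available = IsLocalAIAvailable();
  const caps = GetLocalAICapabilities();
  return (
    <View>
      <Text>
        {available
          ? `Use local AI${caps.hasNPU ? ' (NPU)' : caps.hasGPU ? ' (GPU)' : ''}`
          : 'Local AI is not supported on this device'}
      </Text>
      <Switch
        value={useLocalAI && available}
        disabled={!available}
        onValueChange={setUseLocalAI}
      />
    </View>
  );
}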
81 changes: 81 additions & 0 deletions src/LocalAI.tsx
@@ -0,0 +1,81 @@
import NativeLocalAI from './NativeLocalAI';

type CallLocalAIType = {
instructions?: string;
identifier?: string;
prompt: string;
onError: (error: string) => void;
onResult: (results: string[]) => void;
onComplete: () => void;
};

const CallLocalAI = async ({
instructions,
identifier,
prompt,
onError,
onResult,
onComplete,
}: CallLocalAIType) => {
try {
if (!NativeLocalAI) {
onError('Local AI is not available on this platform');
onComplete();
return;
}

// Check if local AI is supported
const capabilities = NativeLocalAI.checkCapabilities();
if (!capabilities.isSupported) {
onError('Local AI is not supported on this device. Compatible NPU/GPU hardware required.');
onComplete();
return;
}

console.debug(`Start LocalAI ${identifier}"${prompt}"`);

const actualInstructions =
instructions ??
'The following is a conversation with an AI assistant. The assistant is helpful, creative, clever, and very friendly. You may use markdown syntax in the response as appropriate.';

const result = await NativeLocalAI.generateText(prompt, actualInstructions);

console.log(`LocalAI response: "${result}"`);
onResult([result]);
} catch (error) {
console.error('LocalAI error:', error);
onError(error instanceof Error ? error.message : 'Error generating local AI response');
} finally {
console.debug(`End LocalAI ${identifier}"${prompt}"`);
onComplete();
}
};

// Function to check if local AI is available
const IsLocalAIAvailable = (): boolean => {
if (!NativeLocalAI) {
return false;
}

try {
const capabilities = NativeLocalAI.checkCapabilities();
return capabilities.isSupported;
} catch {
return false;
}
};

// Function to get local AI capabilities info
const GetLocalAICapabilities = () => {
if (!NativeLocalAI) {
return { isSupported: false, hasNPU: false, hasGPU: false };
}

try {
return NativeLocalAI.checkCapabilities();
} catch {
return { isSupported: false, hasNPU: false, hasGPU: false };
}
};

export { CallLocalAI, IsLocalAIAvailable, GetLocalAICapabilities };
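GetLocalAICapabilities is exported here but not consumed elsewhere in this diff; a minimal sketch of one way it could be used as a startup diagnostic follows, with the helper name logLocalAISupport being an assumption.

import { GetLocalAICapabilities, IsLocalAIAvailable } from './LocalAI';

// Hypothetical diagnostic (not part of this change) that reports what the
// native module detected, which can help when debugging device support.
const logLocalAISupport = (): void => {
  if (!IsLocalAIAvailable()) {
    console.log('Local AI unavailable: native module missing or hardware unsupported');
    return;
  }
  const caps = GetLocalAICapabilities();
  console.log(`Local AI available (NPU: ${caps.hasNPU}, GPU: ${caps.hasGPU})`);
};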
18 changes: 18 additions & 0 deletions src/NativeLocalAI.ts
@@ -0,0 +1,18 @@
import type { TurboModule } from 'react-native';
import { TurboModuleRegistry } from 'react-native';

export interface LocalAICapabilities {
isSupported: boolean;
hasNPU: boolean;
hasGPU: boolean;
modelName?: string;
}

export interface Spec extends TurboModule {
checkCapabilities(): LocalAICapabilities;
generateText(prompt: string, systemInstructions?: string): Promise<string>;
}

export default TurboModuleRegistry.get<Spec>(
'LocalAI'
) as Spec | null;
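For reference, a minimal sketch of calling this spec directly, assuming the native module is registered under the 'LocalAI' name the TurboModuleRegistry lookup above expects; in the app these calls are wrapped by CallLocalAI in src/LocalAI.tsx.

import NativeLocalAI from './NativeLocalAI';

// Illustrative only: shows the shape of the two spec methods.
const runLocalAIDemo = async (): Promise<void> => {
  if (!NativeLocalAI) {
    // TurboModuleRegistry.get returned null: module not linked on this platform.
    return;
  }
  const caps = NativeLocalAI.checkCapabilities(); // synchronous TurboModule call
  if (!caps.isSupported) {
    return;
  }
  const text = await NativeLocalAI.generateText(
    'Explain what an NPU is in one sentence.', // prompt
    'You are a concise assistant.', // optional systemInstructions
  );
  console.log(text);
};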