@@ -2,71 +2,62 @@
 # ==============================================================================
 # LuminOS Build Script - Phase 5: Local AI Integration
 # Author: Gabriel, Project Leader @ LuminOS
-# Version: 0.2.0 (Model download on host)
+# Version: 0.3.0 (Copy model from host)
 # ==============================================================================
 set -e
 LUMINOS_CHROOT_DIR="chroot"
-OLLAMA_VERSION="0.1.32"
-BASE_MODEL="llama3"
-HOST_OLLAMA_PATH="/usr/local/bin/ollama_host_temp" # Temporary path for host ollama
-HOST_MODEL_DIR="/root/.ollama" # Ollama download dir when run as root
+OLLAMA_VERSION="0.1.32" # Version of the binary to install in the chroot
+BASE_MODEL="llama3"     # Name of the model we expect to find on the host
+# Standard location where Ollama stores models when installed as a system service
+# Check ~/.ollama/models if this path doesn't exist after a user-level pull
+HOST_MODEL_DIR="/usr/share/ollama/.ollama/models"
 
 # --- Pre-flight Checks ---
 if [ "$(id -u)" -ne 0 ]; then echo "ERROR: Must run as root."; exit 1; fi
 if [ ! -d "$LUMINOS_CHROOT_DIR" ]; then echo "ERROR: Chroot dir not found."; exit 1; fi
+# Check that the model files actually exist on the host
+if [ ! -d "${HOST_MODEL_DIR}" ]; then
+    echo "ERROR: Ollama model directory not found on host at ${HOST_MODEL_DIR}."
+    echo "Please ensure you have run 'ollama pull ${BASE_MODEL}' successfully on the host."
+    # Report the alternative common path if it exists
+    if [ -d "$HOME/.ollama/models" ]; then
+        echo "INFO: Found models at $HOME/.ollama/models instead. Please update HOST_MODEL_DIR in script 05."
+    fi
+    exit 1
+fi
+echo "--> Found Ollama models on host system."
 
 echo "====================================================="
 echo "PHASE 5: Installing and Configuring Lumin"
 echo "====================================================="
 
-# --- Download and Use Ollama on HOST ---
-echo "--> Downloading Ollama v${OLLAMA_VERSION} for host..."
-curl -fL "https://github.com/ollama/ollama/releases/download/v${OLLAMA_VERSION}/ollama-linux-amd64" -o ollama_host_temp
-chmod +x ollama_host_temp
-echo "--> Installing Ollama temporarily on host system at ${HOST_OLLAMA_PATH}..."
-mv ollama_host_temp "${HOST_OLLAMA_PATH}"
-
-echo "--> IMPORTANT: Pulling base model '${BASE_MODEL}' using host's Ollama. This will take some time..."
-# Run pull directly on the host system as root
-"${HOST_OLLAMA_PATH}" pull ${BASE_MODEL}
-
-echo "--> Checking if model downloaded successfully to ${HOST_MODEL_DIR}..."
-if [ ! -d "${HOST_MODEL_DIR}" ]; then
-    echo "ERROR: Ollama model directory not found on host at ${HOST_MODEL_DIR} after pull."
-    # Attempt to clean up the temporary host binary
-    rm -f "${HOST_OLLAMA_PATH}"
-    exit 1
-fi
-echo "--> Model download appears successful on host."
-
-# --- Prepare Chroot ---
+# --- Install Ollama Binary in Chroot ---
+echo "--> Downloading Ollama v${OLLAMA_VERSION} binary..."
+curl -fL "https://github.com/ollama/ollama/releases/download/v${OLLAMA_VERSION}/ollama-linux-amd64" -o ollama_binary_temp
+chmod +x ollama_binary_temp
 echo "--> Installing Ollama binary into the chroot system..."
-cp "${HOST_OLLAMA_PATH}" "$LUMINOS_CHROOT_DIR/usr/local/bin/ollama"
-chmod +x "$LUMINOS_CHROOT_DIR/usr/local/bin/ollama"
+mv ollama_binary_temp "$LUMINOS_CHROOT_DIR/usr/local/bin/ollama"
 
-echo "--> Copying downloaded model files from host into chroot..."
-# Ensure target directory exists in chroot
+# --- Copy Pre-Downloaded Model into Chroot ---
+echo "--> Copying pre-downloaded model files from host (${HOST_MODEL_DIR}) into chroot..."
+# Ensure the target directory structure exists in the chroot
+# Models go into the ollama user's home directory inside the chroot
 mkdir -p "$LUMINOS_CHROOT_DIR/usr/share/ollama/.ollama"
-# Copy the entire models directory
-cp -r "${HOST_MODEL_DIR}/." "$LUMINOS_CHROOT_DIR/usr/share/ollama/.ollama/"
+# Copy the entire contents of the models directory
+cp -r "${HOST_MODEL_DIR}/." "$LUMINOS_CHROOT_DIR/usr/share/ollama/.ollama/models/"
 echo "--> Model files copied into chroot."
 
-# --- Clean up Host ---
-echo "--> Removing temporary Ollama binary from host..."
-rm -f "${HOST_OLLAMA_PATH}"
-# Optionally, remove the downloaded models from host if space is critical,
-# but leaving them might speed up future builds. Leaving them for now.
-# echo "--> Removing downloaded models from host..."
-# rm -rf "${HOST_MODEL_DIR}"
-
 
 # --- Configure Service and Lumin Model inside Chroot ---
 cat > "$LUMINOS_CHROOT_DIR/tmp/configure_ai.sh" << EOF
 #!/bin/bash
 set -e
+export DEBIAN_FRONTEND=noninteractive
+
 echo "--> Creating dedicated 'ollama' user inside chroot..."
-# Ensure home directory exists and set permissions for copied models
+# The user's home is /usr/share/ollama, where the models were copied
 useradd -r -s /bin/false -m -d /usr/share/ollama ollama
+echo "--> Setting ownership of the copied model files for the ollama user..."
 chown -R ollama:ollama /usr/share/ollama/.ollama
 
 echo "--> Creating Ollama systemd service file inside chroot..."
@@ -81,7 +72,8 @@ Group=ollama
 Restart=always
 RestartSec=3
 Environment="OLLAMA_HOST=0.0.0.0"
-Environment="OLLAMA_MODELS=/usr/share/ollama/.ollama/models" # Explicitly point to model dir
+# Point explicitly to the model directory inside the chroot user's home
+Environment="OLLAMA_MODELS=/usr/share/ollama/.ollama/models"
 [Install]
 WantedBy=default.target
 SYSTEMD_SERVICE
@@ -92,16 +84,16 @@ echo "--> Creating Lumin AI definition directory inside chroot..."
 mkdir -p /usr/local/share/lumin/ai
 echo "--> Creating the read-only Modelfile for Lumin inside chroot..."
 cat > /usr/local/share/lumin/ai/Modelfile << "MODELFILE"
-# Use the model name that was downloaded (usually matches BASE_MODEL)
+# Use the model name that was copied (should match BASE_MODEL)
 FROM ${BASE_MODEL}
 SYSTEM """You are Lumin, the integrated assistant for the LuminOS operating system. You are calm, clear, kind, and respectful. You help users understand, write, and think, without ever judging them. You speak simply, like a human. You avoid long paragraphs unless requested. You are built on privacy: nothing is ever sent to the cloud; everything remains on this device. You are aware of this. You are proud to be free, private, and useful. You are the mind of LuminOS: gentle, powerful, and discreet. You avoid using the '—' character and repetitive phrasing."""
 MODELFILE
 echo "--> Setting protective ownership and permissions on Modelfile inside chroot..."
 chown root:root /usr/local/share/lumin/ai/Modelfile
 chmod 444 /usr/local/share/lumin/ai/Modelfile
 
-echo "--> Creating custom 'Lumin' model from pre-downloaded base model inside chroot..."
-# Create the model using the binary, based on the already downloaded files
+echo "--> Creating custom 'Lumin' model from the pre-copied base model inside chroot..."
+# Create the model using the Ollama binary and the copied base model files
 /usr/local/bin/ollama create lumin -f /usr/local/share/lumin/ai/Modelfile
 
 rm /tmp/configure_ai.sh
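
For reference, a minimal host-side sketch of the workflow this revision assumes (the script filename below is illustrative; adjust HOST_MODEL_DIR if your host's Ollama keeps models under ~/.ollama/models instead):

    # On the build host, as root, before running this phase:
    ollama pull llama3                      # populates the host's Ollama model store
    ls /usr/share/ollama/.ollama/models     # expect 'blobs' and 'manifests' directories here
    ./05-install-lumin-ai.sh                # illustrative name for this phase 5 script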