@@ -1,8 +1,8 @@
 #!/bin/bash
 set -e
 
-echo "====== LUMINOS MASTER BUILD SCRIPT (v6.2 - Model Cleaner) ======"
-if [ "$(id -u)" -ne 0 ]; then echo "ERROR: This script must be run as root."; exit 1; fi
+echo "====== LUMINOS MASTER BUILD SCRIPT (v6.1 - Smart Path Search) ======"
+if [ "$(id -u)" -ne 0 ]; then echo "ERROR: This script must be run as root"; exit 1; fi
 
 # --- 1. Define Directories & Vars ---
 BASE_DIR=$(dirname "$(readlink -f "$0")")
@@ -11,7 +11,6 @@ CHROOT_DIR="${WORK_DIR}/chroot"
 ISO_DIR="${WORK_DIR}/iso"
 AI_BUILD_DIR="${WORK_DIR}/ai_build"
 ISO_NAME="LuminOS-0.2.1-amd64.iso"
-REQUIRED_MODEL="llama3"
 
 # --- 2. Clean Up ---
 echo "--> Cleaning up previous build artifacts..."
@@ -33,93 +32,78 @@ echo "--> Installing build dependencies..."
 apt-get update
 apt-get install -y debootstrap squashfs-tools xorriso grub-pc-bin grub-efi-amd64-bin mtools curl rsync
 
-# --- 4. PREPARE AI (Smart & Clean) ---
+# --- 4. PREPARE AI (ON HOST) ---
 echo "====================================================="
 echo "PHASE 0: Preparing AI Models"
 echo "====================================================="
 TARGET_MODEL_DIR="${AI_BUILD_DIR}/models"
 mkdir -p "${TARGET_MODEL_DIR}"
 
-# Detect User Home
+# Detect the real user behind sudo to find their home
 REAL_USER="${SUDO_USER:-$USER}"
 USER_HOME=$(getent passwd "$REAL_USER" | cut -d: -f6)
 
+# List of all possible places the model might be
 POSSIBLE_LOCATIONS=(
     "${USER_HOME}/.ollama/models"
+    "/root/.ollama/models"
     "/usr/share/ollama/.ollama/models"
     "/var/lib/ollama/.ollama/models"
-    "/root/.ollama/models"
 )
 
 MODEL_FOUND=false
 
-# Strategy A: Copy existing models (Draft)
+# Strategy A: SEARCH existing models
+echo "--> Searching for existing models..."
 for LOC in "${POSSIBLE_LOCATIONS[@]}"; do
     if [ -d "$LOC" ]; then
+        echo "Checking $LOC..."
         SIZE_CHECK=$(du -s "$LOC" | cut -f1)
-        if [ "$SIZE_CHECK" -gt 1000000 ]; then
-            echo "INFO: Found models at $LOC. Copying to temp dir..."
+        if [ "$SIZE_CHECK" -gt 1000000 ]; then # Check if > 1GB
+            echo "SUCCESS: Found valid models at $LOC! Copying..."
             cp -r "${LOC}/." "${TARGET_MODEL_DIR}/"
             MODEL_FOUND=true
             break
         fi
     fi
 done
 
-echo "--> Downloading Ollama binary..."
-curl -fL "https://github.com/ollama/ollama/releases/download/v0.1.32/ollama-linux-amd64" -o "${AI_BUILD_DIR}/ollama"
-chmod +x "${AI_BUILD_DIR}/ollama"
-
-# Start Temp Server (Used for both downloading AND cleaning)
-export OLLAMA_MODELS="${TARGET_MODEL_DIR}"
-export HOME="${AI_BUILD_DIR}" # Redirect home to keep things clean
-
-echo "--> Starting temporary Ollama server to manage models..."
-"${AI_BUILD_DIR}/ollama" serve > "${AI_BUILD_DIR}/server.log" 2>&1 &
-OLLAMA_PID=$!
-echo "Waiting 10s for server..."
-sleep 10
-
-# Strategy B: Download if missing
+# Strategy B: DOWNLOAD if not found
 if [ "$MODEL_FOUND" = false ]; then
-    echo "--> Model not found locally. Downloading ${REQUIRED_MODEL}..."
-    "${AI_BUILD_DIR}/ollama" pull ${REQUIRED_MODEL}
-else
-    # Check if the specific required model is actually there
-    if ! "${AI_BUILD_DIR}/ollama" list | grep -q "${REQUIRED_MODEL}"; then
-        echo "--> Local cache found, but ${REQUIRED_MODEL} is missing. Downloading..."
-        "${AI_BUILD_DIR}/ollama" pull ${REQUIRED_MODEL}
+    echo "--> Model not found locally. Downloading..."
+
+    echo "--> Downloading Ollama binary..."
+    curl -fL "https://github.com/ollama/ollama/releases/download/v0.1.32/ollama-linux-amd64" -o "${AI_BUILD_DIR}/ollama"
+    chmod +x "${AI_BUILD_DIR}/ollama"
+
+    # Force HOME to our temp dir to control where models go
+    export HOME="${AI_BUILD_DIR}"
+
+    echo "--> Starting temporary Ollama server..."
+    "${AI_BUILD_DIR}/ollama" serve > "${AI_BUILD_DIR}/server.log" 2>&1 &
+    OLLAMA_PID=$!
+    echo "Waiting 10s for server..."
+    sleep 10
+
+    echo "--> Pulling base model (llama3)..."
+    "${AI_BUILD_DIR}/ollama" pull llama3
+
+    echo "--> Stopping server..."
+    kill ${OLLAMA_PID} || true
+
+    # Move from the temp HOME structure to our target
+    if [ -d "${AI_BUILD_DIR}/.ollama/models" ]; then
+        cp -r "${AI_BUILD_DIR}/.ollama/models/." "${TARGET_MODEL_DIR}/"
     fi
 fi
 
-# --- CLEANUP STEP (New in v6.2) ---
-echo "--> Cleaning up extraneous models to save ISO space..."
-# List all models, filter out the required one, and remove the rest
-EXISTING_MODELS=$("${AI_BUILD_DIR}/ollama" list | awk 'NR>1 {print $1}')
-
-for model in $EXISTING_MODELS; do
-    # Check if model matches required (allowing for :latest tag)
-    if [[ "$model" != "${REQUIRED_MODEL}" && "$model" != "${REQUIRED_MODEL}:latest" ]]; then
-        echo "--> Removing unused model from ISO build: $model"
-        "${AI_BUILD_DIR}/ollama" rm "$model"
-    else
-        echo "--> Keeping core model: $model"
-    fi
-done
-# ----------------------------------
-
-echo "--> Stopping temporary Ollama server..."
-kill ${OLLAMA_PID} || true
-wait ${OLLAMA_PID} || true
-
-
 # Final Verification
 SIZE_CHECK=$(du -s "${TARGET_MODEL_DIR}" | cut -f1)
 if [ "$SIZE_CHECK" -lt 1000000 ]; then
     echo "ERROR: Model preparation failed. Target directory is too small ($SIZE_CHECK KB)."
     exit 1
 else
-    echo "SUCCESS: AI Models prepared and cleaned (${SIZE_CHECK} KB)."
+    echo "SUCCESS: AI Models prepared (${SIZE_CHECK} KB)."
 fi
 
 
@@ -152,10 +136,15 @@ mkdir -p "${CHROOT_DIR}/usr/share/wallpapers/luminos"
 cp "${BASE_DIR}/assets/"* "${CHROOT_DIR}/usr/share/wallpapers/luminos/"
 
 echo "--> Injecting AI files into system..."
+# Ensure binary exists
+if [ ! -f "${AI_BUILD_DIR}/ollama" ]; then
+    curl -fL "https://github.com/ollama/ollama/releases/download/v0.1.32/ollama-linux-amd64" -o "${AI_BUILD_DIR}/ollama"
+    chmod +x "${AI_BUILD_DIR}/ollama"
+fi
 cp "${AI_BUILD_DIR}/ollama" "${CHROOT_DIR}/usr/local/bin/"
-# Create the directory structure exactly as Ollama expects
+
+# Copy models
 mkdir -p "${CHROOT_DIR}/usr/share/ollama/.ollama"
-# Copy the cleaned models
 cp -r "${TARGET_MODEL_DIR}" "${CHROOT_DIR}/usr/share/ollama/.ollama/"
 echo "--> AI Injection Complete."
 
0 commit comments