Skip to content

Commit e1480e8

Browse files
authored
fix(build): Smart search for AI models in user and root paths
1 parent d3a25b8 commit e1480e8

File tree

1 file changed

+44
-55
lines changed

1 file changed

+44
-55
lines changed

build.sh

Lines changed: 44 additions & 55 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
#!/bin/bash
22
set -e
33

4-
echo "====== LUMINOS MASTER BUILD SCRIPT (v6.2 - Model Cleaner) ======"
5-
if [ "$(id -u)" -ne 0 ]; then echo "ERROR: This script must be run as root."; exit 1; fi
4+
echo "====== LUMINOS MASTER BUILD SCRIPT (v6.1 - Smart Path Search) ======"
5+
if [ "$(id -u)" -ne 0 ]; then echo "ERROR: This script must be run as root"; exit 1; fi
66

77
# --- 1. Define Directories & Vars ---
88
BASE_DIR=$(dirname "$(readlink -f "$0")")
@@ -11,7 +11,6 @@ CHROOT_DIR="${WORK_DIR}/chroot"
1111
ISO_DIR="${WORK_DIR}/iso"
1212
AI_BUILD_DIR="${WORK_DIR}/ai_build"
1313
ISO_NAME="LuminOS-0.2.1-amd64.iso"
14-
REQUIRED_MODEL="llama3"
1514

1615
# --- 2. Clean Up ---
1716
echo "--> Cleaning up previous build artifacts..."
@@ -33,93 +32,78 @@ echo "--> Installing build dependencies..."
3332
apt-get update
3433
apt-get install -y debootstrap squashfs-tools xorriso grub-pc-bin grub-efi-amd64-bin mtools curl rsync
3534

36-
# --- 4. PREPARE AI (Smart & Clean) ---
35+
# --- 4. PREPARE AI (ON HOST) ---
3736
echo "====================================================="
3837
echo "PHASE 0: Preparing AI Models"
3938
echo "====================================================="
4039
TARGET_MODEL_DIR="${AI_BUILD_DIR}/models"
4140
mkdir -p "${TARGET_MODEL_DIR}"
4241

43-
# Detect User Home
42+
# Detect the real user behind sudo to find their home
4443
REAL_USER="${SUDO_USER:-$USER}"
4544
USER_HOME=$(getent passwd "$REAL_USER" | cut -d: -f6)
4645

46+
# List of all possible places the model might be
4747
POSSIBLE_LOCATIONS=(
4848
"${USER_HOME}/.ollama/models"
49+
"/root/.ollama/models"
4950
"/usr/share/ollama/.ollama/models"
5051
"/var/lib/ollama/.ollama/models"
51-
"/root/.ollama/models"
5252
)
5353

5454
MODEL_FOUND=false
5555

56-
# Strategy A: Copy existing models (Draft)
56+
# Strategy A: SEARCH existing models
57+
echo "--> Searching for existing models..."
5758
for LOC in "${POSSIBLE_LOCATIONS[@]}"; do
5859
if [ -d "$LOC" ]; then
60+
echo " Checking $LOC..."
5961
SIZE_CHECK=$(du -s "$LOC" | cut -f1)
60-
if [ "$SIZE_CHECK" -gt 1000000 ]; then
61-
echo "INFO: Found models at $LOC. Copying to temp dir..."
62+
if [ "$SIZE_CHECK" -gt 1000000 ]; then # Check if > 1GB
63+
echo "SUCCESS: Found valid models at $LOC! Copying..."
6264
cp -r "${LOC}/." "${TARGET_MODEL_DIR}/"
6365
MODEL_FOUND=true
6466
break
6567
fi
6668
fi
6769
done
6870

69-
echo "--> Downloading Ollama binary..."
70-
curl -fL "https://github.com/ollama/ollama/releases/download/v0.1.32/ollama-linux-amd64" -o "${AI_BUILD_DIR}/ollama"
71-
chmod +x "${AI_BUILD_DIR}/ollama"
72-
73-
# Start Temp Server (Used for both downloading AND cleaning)
74-
export OLLAMA_MODELS="${TARGET_MODEL_DIR}"
75-
export HOME="${AI_BUILD_DIR}" # Redirect home to keep things clean
76-
77-
echo "--> Starting temporary Ollama server to manage models..."
78-
"${AI_BUILD_DIR}/ollama" serve > "${AI_BUILD_DIR}/server.log" 2>&1 &
79-
OLLAMA_PID=$!
80-
echo "Waiting 10s for server..."
81-
sleep 10
82-
83-
# Strategy B: Download if missing
71+
# Strategy B: DOWNLOAD if not found
8472
if [ "$MODEL_FOUND" = false ]; then
85-
echo "--> Model not found locally. Downloading ${REQUIRED_MODEL}..."
86-
"${AI_BUILD_DIR}/ollama" pull ${REQUIRED_MODEL}
87-
else
88-
# Check if the specific required model is actually there
89-
if ! "${AI_BUILD_DIR}/ollama" list | grep -q "${REQUIRED_MODEL}"; then
90-
echo "--> Local cache found, but ${REQUIRED_MODEL} is missing. Downloading..."
91-
"${AI_BUILD_DIR}/ollama" pull ${REQUIRED_MODEL}
73+
echo "--> Model not found locally. Downloading..."
74+
75+
echo "--> Downloading Ollama binary..."
76+
curl -fL "https://github.com/ollama/ollama/releases/download/v0.1.32/ollama-linux-amd64" -o "${AI_BUILD_DIR}/ollama"
77+
chmod +x "${AI_BUILD_DIR}/ollama"
78+
79+
# Force HOME to our temp dir to control where models go
80+
export HOME="${AI_BUILD_DIR}"
81+
82+
echo "--> Starting temporary Ollama server..."
83+
"${AI_BUILD_DIR}/ollama" serve > "${AI_BUILD_DIR}/server.log" 2>&1 &
84+
OLLAMA_PID=$!
85+
echo "Waiting 10s for server..."
86+
sleep 10
87+
88+
echo "--> Pulling base model (llama3)..."
89+
"${AI_BUILD_DIR}/ollama" pull llama3
90+
91+
echo "--> Stopping server..."
92+
kill ${OLLAMA_PID} || true
93+
94+
# Move from the temp HOME structure to our target
95+
if [ -d "${AI_BUILD_DIR}/.ollama/models" ]; then
96+
cp -r "${AI_BUILD_DIR}/.ollama/models/." "${TARGET_MODEL_DIR}/"
9297
fi
9398
fi
9499

95-
# --- CLEANUP STEP (New in v6.2) ---
96-
echo "--> Cleaning up extraneous models to save ISO space..."
97-
# List all models, filter out the required one, and remove the rest
98-
EXISTING_MODELS=$("${AI_BUILD_DIR}/ollama" list | awk 'NR>1 {print $1}')
99-
100-
for model in $EXISTING_MODELS; do
101-
# Check if model matches required (allowing for :latest tag)
102-
if [[ "$model" != "${REQUIRED_MODEL}" && "$model" != "${REQUIRED_MODEL}:latest" ]]; then
103-
echo "--> Removing unused model from ISO build: $model"
104-
"${AI_BUILD_DIR}/ollama" rm "$model"
105-
else
106-
echo "--> Keeping core model: $model"
107-
fi
108-
done
109-
# ----------------------------------
110-
111-
echo "--> Stopping temporary Ollama server..."
112-
kill ${OLLAMA_PID} || true
113-
wait ${OLLAMA_PID} || true
114-
115-
116100
# Final Verification
117101
SIZE_CHECK=$(du -s "${TARGET_MODEL_DIR}" | cut -f1)
118102
if [ "$SIZE_CHECK" -lt 1000000 ]; then
119103
echo "ERROR: Model preparation failed. Target directory is too small ($SIZE_CHECK KB)."
120104
exit 1
121105
else
122-
echo "SUCCESS: AI Models prepared and cleaned (${SIZE_CHECK} KB)."
106+
echo "SUCCESS: AI Models prepared (${SIZE_CHECK} KB)."
123107
fi
124108

125109

@@ -152,10 +136,15 @@ mkdir -p "${CHROOT_DIR}/usr/share/wallpapers/luminos"
152136
cp "${BASE_DIR}/assets/"* "${CHROOT_DIR}/usr/share/wallpapers/luminos/"
153137

154138
echo "--> Injecting AI files into system..."
139+
# Ensure binary exists
140+
if [ ! -f "${AI_BUILD_DIR}/ollama" ]; then
141+
curl -fL "https://github.com/ollama/ollama/releases/download/v0.1.32/ollama-linux-amd64" -o "${AI_BUILD_DIR}/ollama"
142+
chmod +x "${AI_BUILD_DIR}/ollama"
143+
fi
155144
cp "${AI_BUILD_DIR}/ollama" "${CHROOT_DIR}/usr/local/bin/"
156-
# Create the directory structure exactly as Ollama expects
145+
146+
# Copy models
157147
mkdir -p "${CHROOT_DIR}/usr/share/ollama/.ollama"
158-
# Copy the cleaned models
159148
cp -r "${TARGET_MODEL_DIR}" "${CHROOT_DIR}/usr/share/ollama/.ollama/"
160149
echo "--> AI Injection Complete."
161150

0 commit comments

Comments (0)