Skip to content

Commit 209306c

Browse files
committed
Refactor Makefile to use GGML_PREFIX for library paths and installation
1 parent ddc3cb6 commit 209306c

File tree

1 file changed

+25
-20
lines changed

1 file changed

+25
-20
lines changed

Makefile

Lines changed: 25 additions & 20 deletions
Original file line number · Diff line number · Diff line change
@@ -38,6 +38,7 @@ BUILD_DIR := build
3838
DIST_DIR := dist
3939
LLAMA_DIR := modules/llama.cpp
4040
LLAMA_BUILD := $(LLAMA_DIR)/build
41+
GGML_PREFIX := $(BUILD_DIR)/ggml
4142
TEST_DIR := test
4243

4344
# Version from header
@@ -152,7 +153,6 @@ else ifeq ($(PLATFORM),ios-sim)
152153
LDFLAGS := -dynamiclib -isysroot $(SDK) -arch arm64 -arch x86_64 -miphonesimulator-version-min=14.0 -framework Security
153154
endif
154155

155-
# Base llama.cpp cmake options (minimal build - no curl, httplib, server, rpc)
156156
LLAMA_OPTIONS := $(LLAMA) \
157157
-DCMAKE_BUILD_TYPE=Release \
158158
-DBUILD_SHARED_LIBS=OFF \
@@ -167,22 +167,22 @@ LLAMA_OPTIONS := $(LLAMA) \
167167
# Conditional: Local embedding engine (llama.cpp)
168168
ifeq ($(OMIT_LOCAL_ENGINE),0)
169169
# Include llama.cpp
170-
INCLUDES += -I$(LLAMA_DIR)/include -I$(LLAMA_DIR)/ggml/include
170+
INCLUDES += -I$(GGML_PREFIX)/include
171171
C_SOURCES += $(SRC_DIR)/dbmem-lembed.c
172172

173-
# llama.cpp static libraries (base set)
174-
LLAMA_LIBS := $(LLAMA_BUILD)/src/libllama.a \
175-
$(LLAMA_BUILD)/ggml/src/libggml.a \
176-
$(LLAMA_BUILD)/ggml/src/libggml-base.a \
177-
$(LLAMA_BUILD)/ggml/src/libggml-cpu.a \
173+
# llama.cpp static libraries
174+
LLAMA_LIBS := $(GGML_PREFIX)/lib/libllama.a \
175+
$(GGML_PREFIX)/lib/libggml.a \
176+
$(GGML_PREFIX)/lib/libggml-base.a \
177+
$(GGML_PREFIX)/lib/libggml-cpu.a \
178178
$(LLAMA_BUILD)/common/libcommon.a
179179

180180
# Platform-specific llama.cpp settings
181181
ifeq ($(PLATFORM),macos)
182182
LLAMA_OPTIONS += -DGGML_NATIVE=OFF -DGGML_OPENMP=OFF -DCMAKE_OSX_DEPLOYMENT_TARGET=11.0
183183
# Add Metal and BLAS libraries for macOS (cmake auto-detects and builds these)
184-
LLAMA_LIBS += $(LLAMA_BUILD)/ggml/src/ggml-metal/libggml-metal.a
185-
LLAMA_LIBS += $(LLAMA_BUILD)/ggml/src/ggml-blas/libggml-blas.a
184+
LLAMA_LIBS += $(GGML_PREFIX)/lib/libggml-metal.a
185+
LLAMA_LIBS += $(GGML_PREFIX)/lib/libggml-blas.a
186186
ifeq ($(ARCH),x86_64)
187187
LLAMA_OPTIONS += -DCMAKE_OSX_ARCHITECTURES="x86_64"
188188
else ifeq ($(ARCH),arm64)
@@ -220,24 +220,24 @@ ifeq ($(OMIT_LOCAL_ENGINE),0)
220220
else ifeq ($(PLATFORM),ios)
221221
LLAMA_OPTIONS += -DGGML_NATIVE=OFF -DGGML_OPENMP=OFF -DCMAKE_SYSTEM_NAME=iOS -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0
222222
# Add Metal and BLAS libraries for iOS
223-
LLAMA_LIBS += $(LLAMA_BUILD)/ggml/src/ggml-metal/libggml-metal.a
224-
LLAMA_LIBS += $(LLAMA_BUILD)/ggml/src/ggml-blas/libggml-blas.a
223+
LLAMA_LIBS += $(GGML_PREFIX)/lib/libggml-metal.a
224+
LLAMA_LIBS += $(GGML_PREFIX)/lib/libggml-blas.a
225225
LDFLAGS := -dynamiclib -isysroot $(SDK) -arch arm64 -miphoneos-version-min=14.0 \
226226
-framework Metal -framework Foundation -framework Accelerate -framework CoreFoundation -framework Security \
227227
-ldl -lpthread -lm -headerpad_max_install_names
228228
else ifeq ($(PLATFORM),ios-sim)
229229
LLAMA_OPTIONS += -DGGML_NATIVE=OFF -DGGML_OPENMP=OFF -DCMAKE_SYSTEM_NAME=iOS -DCMAKE_OSX_SYSROOT=iphonesimulator -DCMAKE_OSX_DEPLOYMENT_TARGET=14.0 '-DCMAKE_OSX_ARCHITECTURES=x86_64;arm64'
230230
# Add Metal and BLAS libraries for iOS simulator
231-
LLAMA_LIBS += $(LLAMA_BUILD)/ggml/src/ggml-metal/libggml-metal.a
232-
LLAMA_LIBS += $(LLAMA_BUILD)/ggml/src/ggml-blas/libggml-blas.a
231+
LLAMA_LIBS += $(GGML_PREFIX)/lib/libggml-metal.a
232+
LLAMA_LIBS += $(GGML_PREFIX)/lib/libggml-blas.a
233233
LDFLAGS := -dynamiclib -isysroot $(SDK) -arch arm64 -arch x86_64 -miphonesimulator-version-min=14.0 \
234234
-framework Metal -framework Foundation -framework Accelerate -framework CoreFoundation -framework Security \
235235
-ldl -lpthread -lm -headerpad_max_install_names
236236
endif
237237

238238
# Backend-specific libraries (detected from LLAMA cmake flags for explicit overrides)
239239
ifneq (,$(findstring GGML_VULKAN=ON,$(LLAMA)))
240-
LLAMA_LIBS += $(LLAMA_BUILD)/ggml/src/ggml-vulkan/libggml-vulkan.a
240+
LLAMA_LIBS += $(GGML_PREFIX)/lib/libggml-vulkan.a
241241
ifeq ($(PLATFORM),windows)
242242
ifdef VULKAN_SDK
243243
LDFLAGS += -L$(VULKAN_SDK)/lib -lvulkan-1
@@ -253,7 +253,7 @@ ifeq ($(OMIT_LOCAL_ENGINE),0)
253253
endif
254254
endif
255255
ifneq (,$(findstring GGML_OPENCL=ON,$(LLAMA)))
256-
LLAMA_LIBS += $(LLAMA_BUILD)/ggml/src/ggml-opencl/libggml-opencl.a
256+
LLAMA_LIBS += $(GGML_PREFIX)/lib/libggml-opencl.a
257257
LDFLAGS += -lOpenCL
258258
endif
259259

@@ -295,17 +295,22 @@ extension: $(BUILD_DEPS) $(TARGET)
295295

296296
# Build llama.cpp (only if not omitted)
297297
.PHONY: llama
298-
llama: $(LLAMA_BUILD)/src/libllama.a
298+
llama: $(GGML_PREFIX)/lib/libllama.a
299299

300-
$(LLAMA_BUILD)/src/libllama.a:
300+
$(GGML_PREFIX)/lib/libllama.a:
301301
@echo "Building llama.cpp with options: $(LLAMA_OPTIONS)"
302-
@mkdir -p $(LLAMA_BUILD)
302+
@mkdir -p $(LLAMA_BUILD) $(GGML_PREFIX)
303303
cmake -B $(LLAMA_BUILD) $(LLAMA_OPTIONS) $(LLAMA_DIR)
304304
cmake --build $(LLAMA_BUILD) --config Release -j$(CPUS)
305+
cmake --install $(LLAMA_BUILD) --prefix $(GGML_PREFIX)
305306
@echo "llama.cpp build complete"
306307

307-
# All LLAMA_LIBS are built by the same cmake command as libllama.a
308-
$(LLAMA_BUILD)/ggml/src/libggml.a $(LLAMA_BUILD)/ggml/src/libggml-base.a $(LLAMA_BUILD)/ggml/src/libggml-cpu.a $(LLAMA_BUILD)/common/libcommon.a $(LLAMA_BUILD)/ggml/src/ggml-metal/libggml-metal.a $(LLAMA_BUILD)/ggml/src/ggml-blas/libggml-blas.a $(LLAMA_BUILD)/ggml/src/ggml-vulkan/libggml-vulkan.a $(LLAMA_BUILD)/ggml/src/ggml-opencl/libggml-opencl.a: $(LLAMA_BUILD)/src/libllama.a
308+
# All LLAMA_LIBS are installed by cmake --install
309+
$(GGML_PREFIX)/lib/libggml.a $(GGML_PREFIX)/lib/libggml-base.a $(GGML_PREFIX)/lib/libggml-cpu.a $(GGML_PREFIX)/lib/libggml-metal.a $(GGML_PREFIX)/lib/libggml-blas.a $(GGML_PREFIX)/lib/libggml-vulkan.a $(GGML_PREFIX)/lib/libggml-opencl.a: $(GGML_PREFIX)/lib/libllama.a
310+
@:
311+
312+
# libcommon.a is not installed, reference it from build dir
313+
$(LLAMA_BUILD)/common/libcommon.a: $(GGML_PREFIX)/lib/libllama.a
309314
@:
310315

311316
# Create directories

0 commit comments

Comments (0)