Skip to content

Commit 81a343c

Browse files
committed
Add Windows platform support for llama.cpp build with cmake --install paths
1 parent 127af1d commit 81a343c

File tree

1 file changed

+27
-0
lines changed

1 file changed

+27
-0
lines changed

Makefile

Lines changed: 27 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -204,6 +204,13 @@ ifeq ($(OMIT_LOCAL_ENGINE),0)
204204
# Target Windows 7+ (0x0601) - llama.cpp core doesn't need newer APIs
205205
LLAMA_OPTIONS += -DGGML_NATIVE=OFF -DGGML_OPENMP=OFF -DCMAKE_CXX_FLAGS="-D_WIN32_WINNT=0x0601"
206206
# NOTE(review): ':=' clobbers any LDFLAGS inherited from the environment or an
# earlier assignment; presumably intentional for the Windows DLL link (static
# libstdc++/winpthread, bcrypt for entropy) — confirm nothing relies on
# appending to LDFLAGS with '+='.
LDFLAGS := -shared -lbcrypt -static-libgcc -Wl,--push-state,-Bstatic,-lstdc++,-lwinpthread,--pop-state
207+
# Windows: use cmake --install paths
# Staging prefix populated by `cmake --install` (see the Windows llama rule).
GGML_PREFIX := $(BUILD_DIR)/ggml
208+
# Static archives the extension links against, taken from the install prefix.
# NOTE(review): naming looks inconsistent — llama is listed as `libllama.a` but
# the ggml archives lack the `lib` prefix (`ggml.a`, not `libggml.a`), whereas
# the Unix branch of this file links `libggml.a`/`libggml-base.a`/`libggml-cpu.a`
# from the build tree. A MinGW `cmake --install` keeps the `lib` prefix when
# copying archives, so verify these paths against the actual install layout;
# otherwise the final link will fail on missing files.
# libcommon.a is taken from the build tree, not the install prefix —
# presumably `common` is not exported by `cmake --install`; confirm.
LLAMA_LIBS := $(GGML_PREFIX)/lib/libllama.a \
209+
$(GGML_PREFIX)/lib/ggml.a \
210+
$(GGML_PREFIX)/lib/ggml-base.a \
211+
$(GGML_PREFIX)/lib/ggml-cpu.a \
212+
$(LLAMA_BUILD)/common/libcommon.a
213+
207214
else ifeq ($(PLATFORM),android)
208215
# Android NDK cmake toolchain
209216
ANDROID_OPTIONS := -DCMAKE_TOOLCHAIN_FILE=$(ANDROID_NDK)/build/cmake/android.toolchain.cmake \
@@ -295,6 +302,25 @@ extension: $(BUILD_DEPS) $(TARGET)
295302

296303
# Build llama.cpp (only if not omitted)
297304
.PHONY: llama
305+
ifeq ($(PLATFORM),windows)
306+
# Windows: use cmake --install for consistent library paths
# `llama` is a phony aggregate; the installed libllama.a acts as the sentinel
# file for the whole cmake configure/build/install cycle below.
llama: $(GGML_PREFIX)/lib/libllama.a
307+
308+
# NOTE(review): this rule has no prerequisites, so once the sentinel exists it
# never re-runs — edits to the llama.cpp sources will not trigger a rebuild
# without deleting the file (or a clean). Confirm that is intended (the Unix
# branch's libllama.a rule appears to follow the same pattern).
$(GGML_PREFIX)/lib/libllama.a:
309+
@echo "Building llama.cpp with options: $(LLAMA_OPTIONS)"
310+
@mkdir -p $(LLAMA_BUILD) $(GGML_PREFIX)
311+
# Configure, build, then stage headers/libraries into $(GGML_PREFIX).
cmake -B $(LLAMA_BUILD) $(LLAMA_OPTIONS) $(LLAMA_DIR)
312+
cmake --build $(LLAMA_BUILD) --config Release -j$(CPUS)
313+
cmake --install $(LLAMA_BUILD) --prefix $(GGML_PREFIX)
314+
@echo "llama.cpp build complete"
315+
316+
# Windows LLAMA_LIBS dependencies
# These archives are produced/installed by the same cmake run as libllama.a;
# the empty `@:` recipe merely declares them up to date once the sentinel
# exists, so make does not look for another way to build them.
# NOTE(review): the unprefixed names here (`ggml.a` vs the `libggml.a` used by
# the Unix branch below) must match what `cmake --install` actually writes —
# MinGW static archives normally keep the `lib` prefix; verify.
317+
$(GGML_PREFIX)/lib/ggml.a $(GGML_PREFIX)/lib/ggml-base.a $(GGML_PREFIX)/lib/ggml-cpu.a: $(GGML_PREFIX)/lib/libllama.a
318+
@:
319+
# libcommon.a stays in the build tree (it is not installed); same sentinel.
$(LLAMA_BUILD)/common/libcommon.a: $(GGML_PREFIX)/lib/libllama.a
320+
@:
321+
else
322+
# Unix: use build directory paths directly
323+
298324
llama: $(LLAMA_BUILD)/src/libllama.a
299325

300326
$(LLAMA_BUILD)/src/libllama.a:
@@ -307,6 +333,7 @@ $(LLAMA_BUILD)/src/libllama.a:
307333
# All LLAMA_LIBS are built by the same cmake command as libllama.a
# (same empty-recipe sentinel pattern as the Windows branch above).
308334
$(LLAMA_BUILD)/ggml/src/libggml.a $(LLAMA_BUILD)/ggml/src/libggml-base.a $(LLAMA_BUILD)/ggml/src/libggml-cpu.a $(LLAMA_BUILD)/common/libcommon.a $(LLAMA_BUILD)/ggml/src/ggml-metal/libggml-metal.a $(LLAMA_BUILD)/ggml/src/ggml-blas/libggml-blas.a $(LLAMA_BUILD)/ggml/src/ggml-vulkan/libggml-vulkan.a $(LLAMA_BUILD)/ggml/src/ggml-opencl/libggml-opencl.a: $(LLAMA_BUILD)/src/libllama.a
309335
@:
336+
endif
310337

311338
# Create directories
312339
$(BUILD_DIR):

0 commit comments

Comments (0)