@@ -89,7 +89,7 @@ ifeq ($(BUILD_TYPE),hipblas)
ROCM_HOME ?= /opt/rocm
export CXX=$(ROCM_HOME)/llvm/bin/clang++
export CC=$(ROCM_HOME)/llvm/bin/clang
- # Llama-stable has no hipblas support, so override it here.
+ # llama-ggml has no hipblas support, so override it here.
export STABLE_BUILD_TYPE=
GPU_TARGETS ?= gfx900,gfx90a,gfx1030,gfx1031,gfx1100
AMDGPU_TARGETS ?= "$(GPU_TARGETS)"
@@ -124,7 +124,7 @@ ifeq ($(findstring tts,$(GO_TAGS)),tts)
OPTIONAL_GRPC+=backend-assets/grpc/piper
endif

- ALL_GRPC_BACKENDS=backend-assets/grpc/langchain-huggingface backend-assets/grpc/falcon-ggml backend-assets/grpc/bert-embeddings backend-assets/grpc/llama backend-assets/grpc/llama-cpp backend-assets/grpc/llama-stable backend-assets/grpc/gpt4all backend-assets/grpc/dolly backend-assets/grpc/gpt2 backend-assets/grpc/gptj backend-assets/grpc/gptneox backend-assets/grpc/mpt backend-assets/grpc/replit backend-assets/grpc/starcoder backend-assets/grpc/rwkv backend-assets/grpc/whisper $(OPTIONAL_GRPC)
+ ALL_GRPC_BACKENDS=backend-assets/grpc/langchain-huggingface backend-assets/grpc/falcon-ggml backend-assets/grpc/bert-embeddings backend-assets/grpc/llama backend-assets/grpc/llama-cpp backend-assets/grpc/llama-ggml backend-assets/grpc/gpt4all backend-assets/grpc/dolly backend-assets/grpc/gpt2 backend-assets/grpc/gptj backend-assets/grpc/gptneox backend-assets/grpc/mpt backend-assets/grpc/replit backend-assets/grpc/starcoder backend-assets/grpc/rwkv backend-assets/grpc/whisper $(OPTIONAL_GRPC)
GRPC_BACKENDS?=$(ALL_GRPC_BACKENDS) $(OPTIONAL_GRPC)

# If empty, then we build all
@@ -203,20 +203,20 @@ go-llama:
	git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama
	cd go-llama && git checkout -b build $(GOLLAMA_VERSION) && git submodule update --init --recursive --depth 1

- go-llama-stable:
- 	git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama-stable
- 	cd go-llama-stable && git checkout -b build $(GOLLAMA_STABLE_VERSION) && git submodule update --init --recursive --depth 1
+ go-llama-ggml:
+ 	git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama-ggml
+ 	cd go-llama-ggml && git checkout -b build $(GOLLAMA_STABLE_VERSION) && git submodule update --init --recursive --depth 1

go-llama/libbinding.a: go-llama
	$(MAKE) -C go-llama BUILD_TYPE=$(BUILD_TYPE) libbinding.a

- go-llama-stable/libbinding.a: go-llama-stable
- 	$(MAKE) -C go-llama-stable BUILD_TYPE=$(STABLE_BUILD_TYPE) libbinding.a
+ go-llama-ggml/libbinding.a: go-llama-ggml
+ 	$(MAKE) -C go-llama-ggml BUILD_TYPE=$(STABLE_BUILD_TYPE) libbinding.a

go-piper/libpiper_binding.a: go-piper
	$(MAKE) -C go-piper libpiper_binding.a example/main

- get-sources: go-llama go-llama-stable go-ggml-transformers gpt4all go-piper go-rwkv whisper.cpp go-bert go-stable-diffusion
+ get-sources: go-llama go-llama-ggml go-ggml-transformers gpt4all go-piper go-rwkv whisper.cpp go-bert go-stable-diffusion
	touch $@

replace:
@@ -235,7 +235,7 @@ prepare-sources: get-sources replace
rebuild: ## Rebuilds the project
	$(GOCMD) clean -cache
	$(MAKE) -C go-llama clean
- 	$(MAKE) -C go-llama-stable clean
+ 	$(MAKE) -C go-llama-ggml clean
	$(MAKE) -C gpt4all/gpt4all-bindings/golang/ clean
	$(MAKE) -C go-ggml-transformers clean
	$(MAKE) -C go-rwkv clean
@@ -253,7 +253,7 @@ clean: ## Remove build related file
	rm -f prepare
	rm -rf ./go-llama
	rm -rf ./gpt4all
- 	rm -rf ./go-llama-stable
+ 	rm -rf ./go-llama-ggml
	rm -rf ./go-gpt2
	rm -rf ./go-stable-diffusion
	rm -rf ./go-ggml-transformers
@@ -440,10 +440,10 @@ ifeq ($(BUILD_TYPE),metal)
	cp backend/cpp/llama/llama.cpp/build/bin/ggml-metal.metal backend-assets/grpc/
endif

- backend-assets/grpc/llama-stable: backend-assets/grpc go-llama-stable/libbinding.a
- 	$(GOCMD) mod edit -replace github.com/go-skynet/go-llama.cpp=$(shell pwd)/go-llama-stable
- 	CGO_LDFLAGS="$(CGO_LDFLAGS)" C_INCLUDE_PATH=$(shell pwd)/go-llama-stable LIBRARY_PATH=$(shell pwd)/go-llama \
- 	$(GOCMD) build -ldflags "$(LD_FLAGS)" -tags "$(GO_TAGS)" -o backend-assets/grpc/llama-stable ./backend/go/llm/llama-stable/
+ backend-assets/grpc/llama-ggml: backend-assets/grpc go-llama-ggml/libbinding.a
+ 	$(GOCMD) mod edit -replace github.com/go-skynet/go-llama.cpp=$(shell pwd)/go-llama-ggml
+ 	CGO_LDFLAGS="$(CGO_LDFLAGS)" C_INCLUDE_PATH=$(shell pwd)/go-llama-ggml LIBRARY_PATH=$(shell pwd)/go-llama \
+ 	$(GOCMD) build -ldflags "$(LD_FLAGS)" -tags "$(GO_TAGS)" -o backend-assets/grpc/llama-ggml ./backend/go/llm/llama-ggml/


backend-assets/grpc/gpt4all: backend-assets/grpc backend-assets/gpt4all gpt4all/gpt4all-bindings/golang/libgpt4all.a
	CGO_LDFLAGS="$(CGO_LDFLAGS)" C_INCLUDE_PATH=$(shell pwd)/gpt4all/gpt4all-bindings/golang/ LIBRARY_PATH=$(shell pwd)/gpt4all/gpt4all-bindings/golang/ \
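For reference, a minimal usage sketch of the renamed target (assuming the Makefile is invoked from the repository root, as elsewhere in the build):

    # builds only the renamed gRPC backend defined in the diff above
    make backend-assets/grpc/llama-ggml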