Skip to content

Commit 3d532d0

Browse files
committed
refactor: rename llama-stable to llama-ggml
1 parent ad0e30b commit 3d532d0

File tree

6 files changed

+20
-20
lines changed

6 files changed

+20
-20
lines changed

.gitignore

+1-1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
# go-llama build artifacts
22
go-llama
3-
go-llama-stable
3+
go-llama-ggml
44
/gpt4all
55
go-stable-diffusion
66
go-piper

Makefile

+14-14
Original file line numberDiff line numberDiff line change
@@ -89,7 +89,7 @@ ifeq ($(BUILD_TYPE),hipblas)
8989
ROCM_HOME ?= /opt/rocm
9090
export CXX=$(ROCM_HOME)/llvm/bin/clang++
9191
export CC=$(ROCM_HOME)/llvm/bin/clang
92-
# Llama-stable has no hipblas support, so override it here.
92+
# llama-ggml has no hipblas support, so override it here.
9393
export STABLE_BUILD_TYPE=
9494
GPU_TARGETS ?= gfx900,gfx90a,gfx1030,gfx1031,gfx1100
9595
AMDGPU_TARGETS ?= "$(GPU_TARGETS)"
@@ -124,7 +124,7 @@ ifeq ($(findstring tts,$(GO_TAGS)),tts)
124124
OPTIONAL_GRPC+=backend-assets/grpc/piper
125125
endif
126126

127-
ALL_GRPC_BACKENDS=backend-assets/grpc/langchain-huggingface backend-assets/grpc/falcon-ggml backend-assets/grpc/bert-embeddings backend-assets/grpc/llama backend-assets/grpc/llama-cpp backend-assets/grpc/llama-stable backend-assets/grpc/gpt4all backend-assets/grpc/dolly backend-assets/grpc/gpt2 backend-assets/grpc/gptj backend-assets/grpc/gptneox backend-assets/grpc/mpt backend-assets/grpc/replit backend-assets/grpc/starcoder backend-assets/grpc/rwkv backend-assets/grpc/whisper $(OPTIONAL_GRPC)
127+
ALL_GRPC_BACKENDS=backend-assets/grpc/langchain-huggingface backend-assets/grpc/falcon-ggml backend-assets/grpc/bert-embeddings backend-assets/grpc/llama backend-assets/grpc/llama-cpp backend-assets/grpc/llama-ggml backend-assets/grpc/gpt4all backend-assets/grpc/dolly backend-assets/grpc/gpt2 backend-assets/grpc/gptj backend-assets/grpc/gptneox backend-assets/grpc/mpt backend-assets/grpc/replit backend-assets/grpc/starcoder backend-assets/grpc/rwkv backend-assets/grpc/whisper $(OPTIONAL_GRPC)
128128
GRPC_BACKENDS?=$(ALL_GRPC_BACKENDS) $(OPTIONAL_GRPC)
129129

130130
# If empty, then we build all
@@ -203,20 +203,20 @@ go-llama:
203203
git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama
204204
cd go-llama && git checkout -b build $(GOLLAMA_VERSION) && git submodule update --init --recursive --depth 1
205205

206-
go-llama-stable:
207-
git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama-stable
208-
cd go-llama-stable && git checkout -b build $(GOLLAMA_STABLE_VERSION) && git submodule update --init --recursive --depth 1
206+
go-llama-ggml:
207+
git clone --recurse-submodules https://github.com/go-skynet/go-llama.cpp go-llama-ggml
208+
cd go-llama-ggml && git checkout -b build $(GOLLAMA_STABLE_VERSION) && git submodule update --init --recursive --depth 1
209209

210210
go-llama/libbinding.a: go-llama
211211
$(MAKE) -C go-llama BUILD_TYPE=$(BUILD_TYPE) libbinding.a
212212

213-
go-llama-stable/libbinding.a: go-llama-stable
214-
$(MAKE) -C go-llama-stable BUILD_TYPE=$(STABLE_BUILD_TYPE) libbinding.a
213+
go-llama-ggml/libbinding.a: go-llama-ggml
214+
$(MAKE) -C go-llama-ggml BUILD_TYPE=$(STABLE_BUILD_TYPE) libbinding.a
215215

216216
go-piper/libpiper_binding.a: go-piper
217217
$(MAKE) -C go-piper libpiper_binding.a example/main
218218

219-
get-sources: go-llama go-llama-stable go-ggml-transformers gpt4all go-piper go-rwkv whisper.cpp go-bert go-stable-diffusion
219+
get-sources: go-llama go-llama-ggml go-ggml-transformers gpt4all go-piper go-rwkv whisper.cpp go-bert go-stable-diffusion
220220
touch $@
221221

222222
replace:
@@ -235,7 +235,7 @@ prepare-sources: get-sources replace
235235
rebuild: ## Rebuilds the project
236236
$(GOCMD) clean -cache
237237
$(MAKE) -C go-llama clean
238-
$(MAKE) -C go-llama-stable clean
238+
$(MAKE) -C go-llama-ggml clean
239239
$(MAKE) -C gpt4all/gpt4all-bindings/golang/ clean
240240
$(MAKE) -C go-ggml-transformers clean
241241
$(MAKE) -C go-rwkv clean
@@ -253,7 +253,7 @@ clean: ## Remove build related file
253253
rm -f prepare
254254
rm -rf ./go-llama
255255
rm -rf ./gpt4all
256-
rm -rf ./go-llama-stable
256+
rm -rf ./go-llama-ggml
257257
rm -rf ./go-gpt2
258258
rm -rf ./go-stable-diffusion
259259
rm -rf ./go-ggml-transformers
@@ -440,10 +440,10 @@ ifeq ($(BUILD_TYPE),metal)
440440
cp backend/cpp/llama/llama.cpp/build/bin/ggml-metal.metal backend-assets/grpc/
441441
endif
442442

443-
backend-assets/grpc/llama-stable: backend-assets/grpc go-llama-stable/libbinding.a
444-
$(GOCMD) mod edit -replace github.com/go-skynet/go-llama.cpp=$(shell pwd)/go-llama-stable
445-
CGO_LDFLAGS="$(CGO_LDFLAGS)" C_INCLUDE_PATH=$(shell pwd)/go-llama-stable LIBRARY_PATH=$(shell pwd)/go-llama \
446-
$(GOCMD) build -ldflags "$(LD_FLAGS)" -tags "$(GO_TAGS)" -o backend-assets/grpc/llama-stable ./backend/go/llm/llama-stable/
443+
backend-assets/grpc/llama-ggml: backend-assets/grpc go-llama-ggml/libbinding.a
444+
$(GOCMD) mod edit -replace github.com/go-skynet/go-llama.cpp=$(shell pwd)/go-llama-ggml
445+
CGO_LDFLAGS="$(CGO_LDFLAGS)" C_INCLUDE_PATH=$(shell pwd)/go-llama-ggml LIBRARY_PATH=$(shell pwd)/go-llama-ggml \
446+
$(GOCMD) build -ldflags "$(LD_FLAGS)" -tags "$(GO_TAGS)" -o backend-assets/grpc/llama-ggml ./backend/go/llm/llama-ggml/
447447

448448
backend-assets/grpc/gpt4all: backend-assets/grpc backend-assets/gpt4all gpt4all/gpt4all-bindings/golang/libgpt4all.a
449449
CGO_LDFLAGS="$(CGO_LDFLAGS)" C_INCLUDE_PATH=$(shell pwd)/gpt4all/gpt4all-bindings/golang/ LIBRARY_PATH=$(shell pwd)/gpt4all/gpt4all-bindings/golang/ \

api/api_test.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -301,7 +301,7 @@ var _ = Describe("API test", func() {
301301
response := postModelApplyRequest("http://127.0.0.1:9090/models/apply", modelApplyRequest{
302302
URL: "github:go-skynet/model-gallery/openllama_3b.yaml",
303303
Name: "openllama_3b",
304-
Overrides: map[string]interface{}{"backend": "llama-stable", "mmap": true, "f16": true, "context_size": 128},
304+
Overrides: map[string]interface{}{"backend": "llama-ggml", "mmap": true, "f16": true, "context_size": 128},
305305
})
306306

307307
Expect(response["uuid"]).ToNot(BeEmpty(), fmt.Sprint(response))

pkg/model/initializers.go

+4-4
Original file line numberDiff line numberDiff line change
@@ -15,8 +15,8 @@ import (
1515
)
1616

1717
const (
18-
LlamaBackend = "llama"
19-
LlamaStableBackend = "llama-stable"
18+
GoLlamaBackend = "llama"
19+
LlamaGGML = "llama-ggml"
2020
LLamaCPP = "llama-cpp"
2121
StarcoderBackend = "starcoder"
2222
GPTJBackend = "gptj"
@@ -41,8 +41,8 @@ const (
4141

4242
var AutoLoadBackends []string = []string{
4343
LLamaCPP,
44-
LlamaStableBackend,
45-
LlamaBackend,
44+
LlamaGGML,
45+
GoLlamaBackend,
4646
Gpt4All,
4747
GPTNeoXBackend,
4848
BertEmbeddingsBackend,

0 commit comments

Comments
 (0)