gnu: llama-cpp: Update to 0.0.0-2.fed0108.
As the last update was in February, and there have been new changes for new models and memory-leak fixes since then. * gnu/packages/machine-learning.scm (llama-cpp): Update to 0.0.0-2.fed0108. [arguments]<#:phases>: Add 'disable-unrunable-tests' phase. Change replacement of 'install to be a post-install 'install-main' phase so libraries and header files are normally installed. Change-Id: Ia6532945f588747c4d58fdb7611d6a9c3c596705 Signed-off-by: Christopher Baines <mail@cbaines.net>
parent
cc212af83e
commit
0b3f2f28c6
|
@ -524,8 +524,8 @@ Performance is achieved by using the LLVM JIT compiler.")
|
||||||
(deprecated-package "guile-aiscm-next" guile-aiscm))
|
(deprecated-package "guile-aiscm-next" guile-aiscm))
|
||||||
|
|
||||||
(define-public llama-cpp
|
(define-public llama-cpp
|
||||||
(let ((commit "03bf161eb6dea6400ee49c6dc6b69bdcfa9fd3fc")
|
(let ((commit "fed0108491a3a3cbec6c6480dc8667ffff9d7659")
|
||||||
(revision "1"))
|
(revision "2"))
|
||||||
(package
|
(package
|
||||||
(name "llama-cpp")
|
(name "llama-cpp")
|
||||||
(version (git-version "0.0.0" revision commit))
|
(version (git-version "0.0.0" revision commit))
|
||||||
|
@ -537,7 +537,7 @@ Performance is achieved by using the LLVM JIT compiler.")
|
||||||
(commit commit)))
|
(commit commit)))
|
||||||
(file-name (git-file-name name version))
|
(file-name (git-file-name name version))
|
||||||
(sha256
|
(sha256
|
||||||
(base32 "1ag1jash84hasz10h0piw72a8ginm8kzvhihbzzljz96gq2kjm88"))))
|
(base32 "16rm9gy0chd6k07crm8rkl2j3hg7y7h0km7k6c8q7bmm2jrd64la"))))
|
||||||
(build-system cmake-build-system)
|
(build-system cmake-build-system)
|
||||||
(arguments
|
(arguments
|
||||||
(list
|
(list
|
||||||
|
@ -559,6 +559,14 @@ Performance is achieved by using the LLVM JIT compiler.")
|
||||||
(guix build python-build-system))
|
(guix build python-build-system))
|
||||||
#:phases
|
#:phases
|
||||||
#~(modify-phases %standard-phases
|
#~(modify-phases %standard-phases
|
||||||
|
(add-after 'unpack 'disable-unrunable-tests
|
||||||
|
;; test-eval-callback downloads ML model from network, cannot
|
||||||
|
;; run in Guix build environment
|
||||||
|
(lambda _
|
||||||
|
(substitute* '("examples/eval-callback/CMakeLists.txt")
|
||||||
|
(("add_test") "#add_test"))
|
||||||
|
(substitute* '("examples/eval-callback/CMakeLists.txt")
|
||||||
|
(("set_property") "#set_property"))))
|
||||||
(add-before 'install 'install-python-scripts
|
(add-before 'install 'install-python-scripts
|
||||||
(lambda _
|
(lambda _
|
||||||
(let ((bin (string-append #$output "/bin/")))
|
(let ((bin (string-append #$output "/bin/")))
|
||||||
|
@ -581,7 +589,7 @@ Performance is achieved by using the LLVM JIT compiler.")
|
||||||
(make-script "convert"))))
|
(make-script "convert"))))
|
||||||
(add-after 'install-python-scripts 'wrap-python-scripts
|
(add-after 'install-python-scripts 'wrap-python-scripts
|
||||||
(assoc-ref python:%standard-phases 'wrap))
|
(assoc-ref python:%standard-phases 'wrap))
|
||||||
(replace 'install
|
(add-after 'install 'install-main
|
||||||
(lambda _
|
(lambda _
|
||||||
(copy-file "bin/main" (string-append #$output "/bin/llama")))))))
|
(copy-file "bin/main" (string-append #$output "/bin/llama")))))))
|
||||||
(inputs (list python))
|
(inputs (list python))
|
||||||
|
|
Reference in New Issue