public inbox for gentoo-commits@lists.gentoo.org
* [gentoo-commits] repo/proj/guru:dev commit in: sci-misc/llama-cpp/
@ 2025-01-27 11:52 Sergey Alirzaev
  0 siblings, 0 replies; 6+ messages in thread
From: Sergey Alirzaev @ 2025-01-27 11:52 UTC (permalink / raw)
  To: gentoo-commits

commit:     e933540939add62cd4de6a464ddf73f912c423e6
Author:     Sergey Alirzaev <l29ah <AT> riseup <DOT> net>
AuthorDate: Mon Jan 27 11:52:32 2025 +0000
Commit:     Sergey Alirzaev <zl29ah <AT> gmail <DOT> com>
CommitDate: Mon Jan 27 11:52:32 2025 +0000
URL:        https://gitweb.gentoo.org/repo/proj/guru.git/commit/?id=e9335409

sci-misc/llama-cpp: new package, add 0_pre4564, 9999

Signed-off-by: Sergey Alirzaev <l29ah <AT> riseup.net>

 sci-misc/llama-cpp/llama-cpp-0_pre4564.ebuild | 43 +++++++++++++++++++++++++++
 sci-misc/llama-cpp/llama-cpp-9999.ebuild      | 43 +++++++++++++++++++++++++++
 2 files changed, 86 insertions(+)

diff --git a/sci-misc/llama-cpp/llama-cpp-0_pre4564.ebuild b/sci-misc/llama-cpp/llama-cpp-0_pre4564.ebuild
new file mode 100644
index 000000000..5d2ad01ea
--- /dev/null
+++ b/sci-misc/llama-cpp/llama-cpp-0_pre4564.ebuild
@@ -0,0 +1,43 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+inherit cmake llvm
+
+LLVM_MAX_SLOT=16
+
+EGIT_REPO_URI="https://github.com/ggerganov/llama.cpp.git"
+inherit git-r3
+
+if [[ "${PV}" != "9999" ]]; then
+	KEYWORDS="~amd64 ~arm64"
+	EGIT_COMMIT="b${PV#0_pre}"
+fi
+
+DESCRIPTION="Port of Facebook's LLaMA model in C/C++"
+HOMEPAGE="https://github.com/ggerganov/llama.cpp"
+
+LICENSE="MIT"
+SLOT="0"
+IUSE="cublas tests tools"
+CPU_FLAGS_X86=( avx avx2 f16c )
+
+DEPEND="
+	cublas? ( dev-util/nvidia-cuda-toolkit )"
+RDEPEND="${DEPEND}"
+BDEPEND="${DEPEND}"
+
+src_configure() {
+	local mycmakeargs=(
+		-DLLAMA_CUBLAS="$(usex cublas)"
+		-DLLAMA_BUILD_TESTS="$(usex tests)"
+		-DLLAMA_BUILD_SERVER=OFF
+		-DCMAKE_SKIP_BUILD_RPATH=ON
+		-DBUILD_NUMBER="1"
+	)
+	if use cublas ; then
+		addpredict /dev/nvidiactl
+	fi
+	cmake_src_configure
+}

diff --git a/sci-misc/llama-cpp/llama-cpp-9999.ebuild b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
new file mode 100644
index 000000000..5d2ad01ea
--- /dev/null
+++ b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
@@ -0,0 +1,43 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+inherit cmake llvm
+
+LLVM_MAX_SLOT=16
+
+EGIT_REPO_URI="https://github.com/ggerganov/llama.cpp.git"
+inherit git-r3
+
+if [[ "${PV}" != "9999" ]]; then
+	KEYWORDS="~amd64 ~arm64"
+	EGIT_COMMIT="b${PV#0_pre}"
+fi
+
+DESCRIPTION="Port of Facebook's LLaMA model in C/C++"
+HOMEPAGE="https://github.com/ggerganov/llama.cpp"
+
+LICENSE="MIT"
+SLOT="0"
+IUSE="cublas tests tools"
+CPU_FLAGS_X86=( avx avx2 f16c )
+
+DEPEND="
+	cublas? ( dev-util/nvidia-cuda-toolkit )"
+RDEPEND="${DEPEND}"
+BDEPEND="${DEPEND}"
+
+src_configure() {
+	local mycmakeargs=(
+		-DLLAMA_CUBLAS="$(usex cublas)"
+		-DLLAMA_BUILD_TESTS="$(usex tests)"
+		-DLLAMA_BUILD_SERVER=OFF
+		-DCMAKE_SKIP_BUILD_RPATH=ON
+		-DBUILD_NUMBER="1"
+	)
+	if use cublas ; then
+		addpredict /dev/nvidiactl
+	fi
+	cmake_src_configure
+}



* [gentoo-commits] repo/proj/guru:dev commit in: sci-misc/llama-cpp/
@ 2025-01-27 15:10 Takuya Wakazono
  0 siblings, 0 replies; 6+ messages in thread
From: Takuya Wakazono @ 2025-01-27 15:10 UTC (permalink / raw)
  To: gentoo-commits

commit:     db9e23974f8408184fce018d6d95643883bb8dab
Author:     Takuya Wakazono <pastalian46 <AT> gmail <DOT> com>
AuthorDate: Mon Jan 27 15:06:01 2025 +0000
Commit:     Takuya Wakazono <pastalian46 <AT> gmail <DOT> com>
CommitDate: Mon Jan 27 15:10:33 2025 +0000
URL:        https://gitweb.gentoo.org/repo/proj/guru.git/commit/?id=db9e2397

Revert "sci-misc/llama-cpp: new package, add 0_pre4564, 9999"

This reverts commit e933540939add62cd4de6a464ddf73f912c423e6.

This ebuild has too many issues.
- missing manifest
- use of deprecated eclass
- broken dependency
- undescribed USE flags
- visible keyword for vcs version

Please get advice on a GitHub PR or IRC before committing; a brief sketch of
how these points are usually handled follows below.

Signed-off-by: Takuya Wakazono <pastalian46 <AT> gmail.com>
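
As a rough illustration (not the committed fix), the points above are usually
addressed along the following lines in an ebuild; the resubmission later in
this thread (commit ab2a7918) does essentially this, and the command names are
the standard Portage tools rather than anything from this commit:

# Regenerate the missing Manifest for the distfile:
#   pkgdev manifest    (or: ebuild llama-cpp-0_pre4564.ebuild manifest)

# Keyword only snapshot versions and leave the 9999 live ebuild unkeyworded;
# fetch a tagged tarball instead of inheriting git-r3 unconditionally:
if [[ "${PV}" != "9999" ]]; then
	KEYWORDS="~amd64"
	MY_PV="b${PV#0_pre}"
	SRC_URI="https://github.com/ggerganov/llama.cpp/archive/refs/tags/${MY_PV}.tar.gz -> ${P}.tar.gz"
	S="${WORKDIR}/llama.cpp-${MY_PV}"
else
	inherit git-r3
	EGIT_REPO_URI="https://github.com/ggerganov/llama.cpp.git"
fi

# Every flag in IUSE also needs a <flag name="..."> description in
# metadata.xml; the resubmission sidestepped this by dropping the flags.

The deprecated llvm eclass and the broken cublas dependency were likewise
simply dropped in the resubmission.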

 sci-misc/llama-cpp/llama-cpp-0_pre4564.ebuild | 43 ---------------------------
 sci-misc/llama-cpp/llama-cpp-9999.ebuild      | 43 ---------------------------
 2 files changed, 86 deletions(-)

diff --git a/sci-misc/llama-cpp/llama-cpp-0_pre4564.ebuild b/sci-misc/llama-cpp/llama-cpp-0_pre4564.ebuild
deleted file mode 100644
index 5d2ad01ea..000000000
--- a/sci-misc/llama-cpp/llama-cpp-0_pre4564.ebuild
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2023 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=8
-
-inherit cmake llvm
-
-LLVM_MAX_SLOT=16
-
-EGIT_REPO_URI="https://github.com/ggerganov/llama.cpp.git"
-inherit git-r3
-
-if [[ "${PV}" != "9999" ]]; then
-	KEYWORDS="~amd64 ~arm64"
-	EGIT_COMMIT="b${PV#0_pre}"
-fi
-
-DESCRIPTION="Port of Facebook's LLaMA model in C/C++"
-HOMEPAGE="https://github.com/ggerganov/llama.cpp"
-
-LICENSE="MIT"
-SLOT="0"
-IUSE="cublas tests tools"
-CPU_FLAGS_X86=( avx avx2 f16c )
-
-DEPEND="
-	cublas? ( dev-util/nvidia-cuda-toolkit )"
-RDEPEND="${DEPEND}"
-BDEPEND="${DEPEND}"
-
-src_configure() {
-	local mycmakeargs=(
-		-DLLAMA_CUBLAS="$(usex cublas)"
-		-DLLAMA_BUILD_TESTS="$(usex tests)"
-		-DLLAMA_BUILD_SERVER=OFF
-		-DCMAKE_SKIP_BUILD_RPATH=ON
-		-DBUILD_NUMBER="1"
-	)
-	if use cublas ; then
-		addpredict /dev/nvidiactl
-	fi
-	cmake_src_configure
-}

diff --git a/sci-misc/llama-cpp/llama-cpp-9999.ebuild b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
deleted file mode 100644
index 5d2ad01ea..000000000
--- a/sci-misc/llama-cpp/llama-cpp-9999.ebuild
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2023 Gentoo Authors
-# Distributed under the terms of the GNU General Public License v2
-
-EAPI=8
-
-inherit cmake llvm
-
-LLVM_MAX_SLOT=16
-
-EGIT_REPO_URI="https://github.com/ggerganov/llama.cpp.git"
-inherit git-r3
-
-if [[ "${PV}" != "9999" ]]; then
-	KEYWORDS="~amd64 ~arm64"
-	EGIT_COMMIT="b${PV#0_pre}"
-fi
-
-DESCRIPTION="Port of Facebook's LLaMA model in C/C++"
-HOMEPAGE="https://github.com/ggerganov/llama.cpp"
-
-LICENSE="MIT"
-SLOT="0"
-IUSE="cublas tests tools"
-CPU_FLAGS_X86=( avx avx2 f16c )
-
-DEPEND="
-	cublas? ( dev-util/nvidia-cuda-toolkit )"
-RDEPEND="${DEPEND}"
-BDEPEND="${DEPEND}"
-
-src_configure() {
-	local mycmakeargs=(
-		-DLLAMA_CUBLAS="$(usex cublas)"
-		-DLLAMA_BUILD_TESTS="$(usex tests)"
-		-DLLAMA_BUILD_SERVER=OFF
-		-DCMAKE_SKIP_BUILD_RPATH=ON
-		-DBUILD_NUMBER="1"
-	)
-	if use cublas ; then
-		addpredict /dev/nvidiactl
-	fi
-	cmake_src_configure
-}



* [gentoo-commits] repo/proj/guru:dev commit in: sci-misc/llama-cpp/
@ 2025-01-29  0:11 Sergey Alirzaev
  0 siblings, 0 replies; 6+ messages in thread
From: Sergey Alirzaev @ 2025-01-29  0:11 UTC (permalink / raw)
  To: gentoo-commits

commit:     ab2a79180fb2b2d9bbe7531d0280d7099c773937
Author:     Sergey Alirzaev <l29ah <AT> riseup <DOT> net>
AuthorDate: Wed Jan 29 00:11:11 2025 +0000
Commit:     Sergey Alirzaev <zl29ah <AT> gmail <DOT> com>
CommitDate: Wed Jan 29 00:11:11 2025 +0000
URL:        https://gitweb.gentoo.org/repo/proj/guru.git/commit/?id=ab2a7918

sci-misc/llama-cpp: new package, add 0_pre4576, 9999

Signed-off-by: Sergey Alirzaev <l29ah <AT> riseup.net>
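
For reference, the snapshot version maps onto an upstream release tag by
stripping the 0_pre prefix; the following is a small worked example of the
parameter expansion used in the ebuilds below (the echo line is illustrative,
not part of the commit):

PV="0_pre4576"
MY_PV="b${PV#0_pre}"   # ${PV#0_pre} drops the "0_pre" prefix, so MY_PV="b4576"
echo "${MY_PV}"        # prints: b4576
# SRC_URI therefore resolves to .../archive/refs/tags/b4576.tar.gz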

 sci-misc/llama-cpp/Manifest                   |  1 +
 sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild | 33 +++++++++++++++++++++++++++
 sci-misc/llama-cpp/llama-cpp-9999.ebuild      | 33 +++++++++++++++++++++++++++
 sci-misc/llama-cpp/metadata.xml               | 11 +++++++++
 4 files changed, 78 insertions(+)

diff --git a/sci-misc/llama-cpp/Manifest b/sci-misc/llama-cpp/Manifest
new file mode 100644
index 000000000..b7c2a37bb
--- /dev/null
+++ b/sci-misc/llama-cpp/Manifest
@@ -0,0 +1 @@
+DIST llama-cpp-0_pre4576.tar.gz 20506059 BLAKE2B 8f011811e4df1f8d0c26b19f96a709980e078dc7e769b33cbbb03a852a29b489f80c8a1e298fecea53997068f6b7897e4536ba5db289aa445a1a6f16f98adce3 SHA512 21150721524283454ab53e370fdaf4e766f89fbb8d4b43072b10657d8c8b686630616cddbae7954147a2ba0360ad20c4643761f3774481e13a7b180812935c4e

diff --git a/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild b/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
new file mode 100644
index 000000000..18a0a72d9
--- /dev/null
+++ b/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
@@ -0,0 +1,33 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+inherit cmake
+
+if [[ "${PV}" != "9999" ]]; then
+	KEYWORDS="~amd64"
+	MY_PV="b${PV#0_pre}"
+	S="${WORKDIR}/llama.cpp-${MY_PV}"
+	SRC_URI="https://github.com/ggerganov/llama.cpp/archive/refs/tags/${MY_PV}.tar.gz -> ${P}.tar.gz"
+else
+	inherit git-r3
+	EGIT_REPO_URI="https://github.com/ggerganov/llama.cpp.git"
+fi
+
+DESCRIPTION="Port of Facebook's LLaMA model in C/C++"
+HOMEPAGE="https://github.com/ggerganov/llama.cpp"
+
+LICENSE="MIT"
+SLOT="0"
+CPU_FLAGS_X86=( avx avx2 f16c )
+
+src_configure() {
+	local mycmakeargs=(
+		-DLLAMA_BUILD_TESTS=OFF
+		-DLLAMA_BUILD_SERVER=ON
+		-DCMAKE_SKIP_BUILD_RPATH=ON
+		-DBUILD_NUMBER="1"
+	)
+	cmake_src_configure
+}

diff --git a/sci-misc/llama-cpp/llama-cpp-9999.ebuild b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
new file mode 100644
index 000000000..18a0a72d9
--- /dev/null
+++ b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
@@ -0,0 +1,33 @@
+# Copyright 2023 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+inherit cmake
+
+if [[ "${PV}" != "9999" ]]; then
+	KEYWORDS="~amd64"
+	MY_PV="b${PV#0_pre}"
+	S="${WORKDIR}/llama.cpp-${MY_PV}"
+	SRC_URI="https://github.com/ggerganov/llama.cpp/archive/refs/tags/${MY_PV}.tar.gz -> ${P}.tar.gz"
+else
+	inherit git-r3
+	EGIT_REPO_URI="https://github.com/ggerganov/llama.cpp.git"
+fi
+
+DESCRIPTION="Port of Facebook's LLaMA model in C/C++"
+HOMEPAGE="https://github.com/ggerganov/llama.cpp"
+
+LICENSE="MIT"
+SLOT="0"
+CPU_FLAGS_X86=( avx avx2 f16c )
+
+src_configure() {
+	local mycmakeargs=(
+		-DLLAMA_BUILD_TESTS=OFF
+		-DLLAMA_BUILD_SERVER=ON
+		-DCMAKE_SKIP_BUILD_RPATH=ON
+		-DBUILD_NUMBER="1"
+	)
+	cmake_src_configure
+}

diff --git a/sci-misc/llama-cpp/metadata.xml b/sci-misc/llama-cpp/metadata.xml
new file mode 100644
index 000000000..af5a5e03e
--- /dev/null
+++ b/sci-misc/llama-cpp/metadata.xml
@@ -0,0 +1,11 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE pkgmetadata SYSTEM "https://www.gentoo.org/dtd/metadata.dtd">
+<pkgmetadata>
+	<upstream>
+		<remote-id type="github">ggerganov/llama.cpp</remote-id>
+	</upstream>
+	<maintainer type="person">
+		<email>l29ah@riseup.net</email>
+		<name>Sergey Alirzaev</name>
+	</maintainer>
+</pkgmetadata>



* [gentoo-commits] repo/proj/guru:dev commit in: sci-misc/llama-cpp/
@ 2025-01-29  0:14 Sergey Alirzaev
  0 siblings, 0 replies; 6+ messages in thread
From: Sergey Alirzaev @ 2025-01-29  0:14 UTC (permalink / raw)
  To: gentoo-commits

commit:     76de14abfe565eb3b06b1925f1995436f1dc322d
Author:     Sergey Alirzaev <l29ah <AT> riseup <DOT> net>
AuthorDate: Wed Jan 29 00:14:22 2025 +0000
Commit:     Sergey Alirzaev <zl29ah <AT> gmail <DOT> com>
CommitDate: Wed Jan 29 00:14:22 2025 +0000
URL:        https://gitweb.gentoo.org/repo/proj/guru.git/commit/?id=76de14ab

sci-misc/llama-cpp: hopefully satisfy email-checker.py

Signed-off-by: Sergey Alirzaev <l29ah <AT> riseup.net>

 sci-misc/llama-cpp/metadata.xml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/sci-misc/llama-cpp/metadata.xml b/sci-misc/llama-cpp/metadata.xml
index af5a5e03e..53f72119c 100644
--- a/sci-misc/llama-cpp/metadata.xml
+++ b/sci-misc/llama-cpp/metadata.xml
@@ -5,7 +5,7 @@
 		<remote-id type="github">ggerganov/llama.cpp</remote-id>
 	</upstream>
 	<maintainer type="person">
-		<email>l29ah@riseup.net</email>
+		<email>zl29ah@gmail.com</email>
 		<name>Sergey Alirzaev</name>
 	</maintainer>
 </pkgmetadata>



* [gentoo-commits] repo/proj/guru:dev commit in: sci-misc/llama-cpp/
@ 2025-02-01 11:21 Sergey Alirzaev
  0 siblings, 0 replies; 6+ messages in thread
From: Sergey Alirzaev @ 2025-02-01 11:21 UTC (permalink / raw)
  To: gentoo-commits

commit:     b228272c6c4d9a2d51f473a6357c7341c1531396
Author:     Sergey Alirzaev <l29ah <AT> riseup <DOT> net>
AuthorDate: Sat Feb  1 11:19:51 2025 +0000
Commit:     Sergey Alirzaev <zl29ah <AT> gmail <DOT> com>
CommitDate: Sat Feb  1 11:19:51 2025 +0000
URL:        https://gitweb.gentoo.org/repo/proj/guru.git/commit/?id=b228272c

sci-misc/llama-cpp: strip march=native

Closes: https://bugs.gentoo.org/949130
Signed-off-by: Sergey Alirzaev <l29ah <AT> riseup.net>

 sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild | 1 +
 sci-misc/llama-cpp/llama-cpp-9999.ebuild      | 1 +
 2 files changed, 2 insertions(+)

diff --git a/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild b/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
index 18a0a72d9..39210dddf 100644
--- a/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
+++ b/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
@@ -27,6 +27,7 @@ src_configure() {
 		-DLLAMA_BUILD_TESTS=OFF
 		-DLLAMA_BUILD_SERVER=ON
 		-DCMAKE_SKIP_BUILD_RPATH=ON
+		-DGGML_NATIVE=0	# don't set march
 		-DBUILD_NUMBER="1"
 	)
 	cmake_src_configure

diff --git a/sci-misc/llama-cpp/llama-cpp-9999.ebuild b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
index 18a0a72d9..39210dddf 100644
--- a/sci-misc/llama-cpp/llama-cpp-9999.ebuild
+++ b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
@@ -27,6 +27,7 @@ src_configure() {
 		-DLLAMA_BUILD_TESTS=OFF
 		-DLLAMA_BUILD_SERVER=ON
 		-DCMAKE_SKIP_BUILD_RPATH=ON
+		-DGGML_NATIVE=0	# don't set march
 		-DBUILD_NUMBER="1"
 	)
 	cmake_src_configure



* [gentoo-commits] repo/proj/guru:dev commit in: sci-misc/llama-cpp/
@ 2025-02-08 16:59 Alexey Korepanov
  0 siblings, 0 replies; 6+ messages in thread
From: Alexey Korepanov @ 2025-02-08 16:59 UTC (permalink / raw)
  To: gentoo-commits

commit:     fb105c916a2ee882686351f01f149e71594261cf
Author:     Alexey Korepanov <kaikaikai <AT> yandex <DOT> ru>
AuthorDate: Sat Feb  8 16:49:15 2025 +0000
Commit:     Alexey Korepanov <kaikaikai <AT> yandex <DOT> ru>
CommitDate: Sat Feb  8 16:49:15 2025 +0000
URL:        https://gitweb.gentoo.org/repo/proj/guru.git/commit/?id=fb105c91

sci-misc/llama-cpp: add dependencies on curl and numpy

Signed-off-by: Alexey Korepanov <kaikaikai <AT> yandex.ru>

 sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild | 9 +++++++++
 sci-misc/llama-cpp/llama-cpp-9999.ebuild      | 9 +++++++++
 2 files changed, 18 insertions(+)

diff --git a/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild b/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
index 39210dddf..8b9c1438d 100644
--- a/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
+++ b/sci-misc/llama-cpp/llama-cpp-0_pre4576.ebuild
@@ -21,6 +21,14 @@ HOMEPAGE="https://github.com/ggerganov/llama.cpp"
 LICENSE="MIT"
 SLOT="0"
 CPU_FLAGS_X86=( avx avx2 f16c )
+IUSE="curl"
+
+# curl is needed for pulling models from huggingface
+# numpy is used by convert_hf_to_gguf.py
+DEPEND="curl? ( net-misc/curl:= )"
+RDEPEND="${DEPEND}
+	dev-python/numpy
+"
 
 src_configure() {
 	local mycmakeargs=(
@@ -28,6 +36,7 @@ src_configure() {
 		-DLLAMA_BUILD_SERVER=ON
 		-DCMAKE_SKIP_BUILD_RPATH=ON
 		-DGGML_NATIVE=0	# don't set march
+		-DLLAMA_CURL=$(usex curl ON OFF)
 		-DBUILD_NUMBER="1"
 	)
 	cmake_src_configure

diff --git a/sci-misc/llama-cpp/llama-cpp-9999.ebuild b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
index 39210dddf..8b9c1438d 100644
--- a/sci-misc/llama-cpp/llama-cpp-9999.ebuild
+++ b/sci-misc/llama-cpp/llama-cpp-9999.ebuild
@@ -21,6 +21,14 @@ HOMEPAGE="https://github.com/ggerganov/llama.cpp"
 LICENSE="MIT"
 SLOT="0"
 CPU_FLAGS_X86=( avx avx2 f16c )
+IUSE="curl"
+
+# curl is needed for pulling models from huggingface
+# numpy is used by convert_hf_to_gguf.py
+DEPEND="curl? ( net-misc/curl:= )"
+RDEPEND="${DEPEND}
+	dev-python/numpy
+"
 
 src_configure() {
 	local mycmakeargs=(
@@ -28,6 +36,7 @@ src_configure() {
 		-DLLAMA_BUILD_SERVER=ON
 		-DCMAKE_SKIP_BUILD_RPATH=ON
 		-DGGML_NATIVE=0	# don't set march
+		-DLLAMA_CURL=$(usex curl ON OFF)
 		-DBUILD_NUMBER="1"
 	)
 	cmake_src_configure



end of thread, other threads: [~2025-02-08 16:59 UTC | newest]

Thread overview: 6+ messages (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2025-02-08 16:59 [gentoo-commits] repo/proj/guru:dev commit in: sci-misc/llama-cpp/ Alexey Korepanov
  -- strict thread matches above, loose matches on Subject: below --
2025-02-01 11:21 Sergey Alirzaev
2025-01-29  0:14 Sergey Alirzaev
2025-01-29  0:11 Sergey Alirzaev
2025-01-27 15:10 Takuya Wakazono
2025-01-27 11:52 Sergey Alirzaev

This is a public inbox; see the mirroring instructions for how to clone and
mirror all data and code used for this inbox.