* [gentoo-commits] repo/proj/guru:master commit in: dev-ml/ollama/files/, dev-ml/ollama/
@ 2024-06-19 7:15 Florian Schmaus
0 siblings, 0 replies; only message in thread
From: Florian Schmaus @ 2024-06-19 7:15 UTC (permalink / raw)
To: gentoo-commits
commit: 7a33665aae8f672c7013b6e1939eece68f77b3af
Author: Vitaly Zdanevich <zdanevich.vitaly <AT> ya <DOT> ru>
AuthorDate: Tue Jun 18 22:49:59 2024 +0000
Commit: Florian Schmaus <flow <AT> gentoo <DOT> org>
CommitDate: Tue Jun 18 22:49:59 2024 +0000
URL: https://gitweb.gentoo.org/repo/proj/guru.git/commit/?id=7a33665a
dev-ml/ollama: new package
From https://gpo.zugaina.org/dev-ml/ollama
Signed-off-by: Vitaly Zdanevich <zdanevich.vitaly <AT> ya.ru>
dev-ml/ollama/files/ollama | 33 +++++++++++++++++++++++
dev-ml/ollama/metadata.xml | 15 +++++++++++
dev-ml/ollama/ollama-9999.ebuild | 56 ++++++++++++++++++++++++++++++++++++++++
3 files changed, 104 insertions(+)
diff --git a/dev-ml/ollama/files/ollama b/dev-ml/ollama/files/ollama
new file mode 100644
index 000000000..ea8442e32
--- /dev/null
+++ b/dev-ml/ollama/files/ollama
@@ -0,0 +1,33 @@
+#!/sbin/openrc-run
+
+description="Ollama Service"
+command="/usr/bin/ollama"
+command_args="serve"
+command_user="root"
+command_group="users"
+command_background="yes"
+pidfile="/run/ollama.pid"
+log="/var/log/ollama.log"
+
+# Environment variable pointing to models location
+export OLLAMA_MODELS="/usr/share/ollama/models"
+
+# Ollama allows cross-origin requests from 127.0.0.1 and 0.0.0.0 by default.
+# Additional origins can be configured with OLLAMA_ORIGINS.
+# export OLLAMA_ORIGINS="<ip>"
+
+start() {
+ ebegin "Starting $description"
+ exec >> >(logger -t "$RC_SVCNAME Start daemon" -p daemon.info)
+ start-stop-daemon --start --background --user "$command_user" --group "$command_group" \
+ --pidfile "$pidfile" --make-pidfile --exec "$command" -1 "$log" -2 "$log" -- $command_args
+ eend $?
+}
+
+stop() {
+ ebegin "Stopping $description"
+ exec >> >(logger -t "$RC_SVCNAME Stop daemon" -p daemon.info)
+ start-stop-daemon --stop --pidfile "$pidfile"
+ eend $?
+}
+
diff --git a/dev-ml/ollama/metadata.xml b/dev-ml/ollama/metadata.xml
new file mode 100644
index 000000000..64c220557
--- /dev/null
+++ b/dev-ml/ollama/metadata.xml
@@ -0,0 +1,15 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE pkgmetadata SYSTEM "https://www.gentoo.org/dtd/metadata.dtd">
+<pkgmetadata>
+ <maintainer type="person" proxied="yes">
+ <email>zdanevich.vitaly@ya.ru</email>
+ <name>Vitaly Zdanevich</name>
+ </maintainer>
+ <use>
+ <flag name="nvidia">Enable support for NVIDIA GPUs (CUDA)</flag>
+ <flag name="amd">Enable support for AMD GPUs (ROCm)</flag>
+ </use>
+ <upstream>
+ <remote-id type="github">ollama/ollama</remote-id>
+ </upstream>
+</pkgmetadata>
diff --git a/dev-ml/ollama/ollama-9999.ebuild b/dev-ml/ollama/ollama-9999.ebuild
new file mode 100644
index 000000000..5e58c7a4f
--- /dev/null
+++ b/dev-ml/ollama/ollama-9999.ebuild
@@ -0,0 +1,56 @@
+# Copyright 2024 Gentoo Authors
+# Distributed under the terms of the GNU General Public License v2
+
+EAPI=8
+
+inherit git-r3 go-module
+
+DESCRIPTION="Get up and running with Llama 3, Mistral, Gemma, and other language models"
+HOMEPAGE="https://ollama.com"
+EGIT_REPO_URI="https://github.com/ollama/ollama.git"
+LICENSE="MIT"
+SLOT="0"
+
+IUSE="nvidia amd"
+
+BDEPEND="
+ >=dev-lang/go-1.21.0
+ >=dev-build/cmake-3.24
+ >=sys-devel/gcc-11.4.0
+ nvidia? ( dev-util/nvidia-cuda-toolkit )
+ amd? (
+ sci-libs/clblast
+ dev-libs/rocm-opencl-runtime
+ )
+"
+
+DEPEND="${BDEPEND}"
+
+pkg_pretend() {
+	if use amd || use nvidia; then
+		ewarn "WARNING: AMD & Nvidia support in this ebuild are experimental"
+	fi
+	if use amd; then
+		einfo "If you run into issues, especially compiling dev-libs/rocm-opencl-runtime"
+		einfo "you may try the docker image here https://github.com/ROCm/ROCm-docker"
+		einfo "and follow instructions here"
+		einfo "https://rocm.docs.amd.com/projects/install-on-linux/en/latest/how-to/docker.html"
+	fi
+}
+
+src_unpack() {
+ git-r3_src_unpack
+ go-module_live_vendor
+}
+
+src_compile() {
+ ego generate ./...
+ ego build .
+}
+
+src_install() {
+ dobin ollama
+ doinitd "${FILESDIR}"/ollama
+}
+
+pkg_postinst() {
+ chmod 0755 "${EROOT}"/etc/init.d/ollama || die
+}
^ permalink raw reply related [flat|nested] only message in thread
only message in thread, other threads:[~2024-06-19 7:15 UTC | newest]
Thread overview: (only message) (download: mbox.gz / follow: Atom feed)
-- links below jump to the message on this page --
2024-06-19 7:15 [gentoo-commits] repo/proj/guru:master commit in: dev-ml/ollama/files/, dev-ml/ollama/ Florian Schmaus
This is a public inbox, see mirroring instructions
for how to clone and mirror all data and code used for this inbox