Skip to content

Commit e4cf551

Browse files
authored
Merge pull request #428 from SouthernMethodistUniversity/update_ollama
Update ollama
2 parents 904060a + 359ebea commit e4cf551

7 files changed

Lines changed: 133 additions & 1 deletion

File tree

containers/ollama/build_container.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44

55
# TODO: MP should pull a CUDA enabled version instead?
66
# specify version
7-
VERSION="0.13.2"
7+
VERSION="0.17.7"
88
TAG=${VERSION}
99
IMAGE_NAME="ollama"
1010
MODULE_FOLDER="testing/ollama"
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
-- Lmod modulefile: exposes Ollama 0.13.2 via an Apptainer container image.
help([[
Name: Ollama
Version: 0.13.2
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])
-- Metadata shown by `module whatis`.
whatis("Name: Ollama")
whatis("Version: ollama:0.13.2")
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
-- Lmod family: only one "Ollama" module may be loaded at a time.
family("Ollama")

-- The container runtime is needed to run the image below.
always_load('apptainer')
-- Apptainer image providing this Ollama version.
local sif_file = '/hpc/m3/containers/ollama/ollama_0.13.2.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Source shell helpers into the user's session (script contents not visible here).
-- NOTE(review): this 0.13.2 module sources from hpc_docs/utils/ollama_scripts,
-- while the 0.15.1/0.17.7 modules use /hpc/m3/apps/ollama/helper_scripts —
-- confirm the legacy path is intentional.
source_sh("bash", "/hpc/m3/hpc_docs/utils/ollama_scripts/ollama.sh")
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
-- Lmod modulefile: exposes Ollama 0.15.1 via an Apptainer container image.
help([[
Name: Ollama
Version: 0.15.1
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])
-- Metadata shown by `module whatis`.
whatis("Name: Ollama")
whatis("Version: ollama:0.15.1")
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
-- Lmod family: only one "Ollama" module may be loaded at a time.
family("Ollama")

-- The container runtime is needed to run the image below.
always_load('apptainer')
-- Apptainer image providing this Ollama version.
local sif_file = '/hpc/m3/containers/ollama/ollama_0.15.1.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Source shell helpers into the user's session (script contents not visible here).
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
-- Lmod modulefile: exposes Ollama 0.17.7 via an Apptainer container image.
help([[
Name: Ollama
Version: 0.17.7
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])
-- Metadata shown by `module whatis`.
whatis("Name: Ollama")
whatis("Version: ollama:0.17.7")
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
-- Lmod family: only one "Ollama" module may be loaded at a time.
family("Ollama")

-- The container runtime is needed to run the image below.
always_load('apptainer')
-- Apptainer image providing this Ollama version.
local sif_file = '/hpc/m3/containers/ollama/ollama_0.17.7.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Source shell helpers into the user's session (script contents not visible here).
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
-- Lmod modulefile: exposes Ollama 0.13.2 via an Apptainer container image.
help([[
Name: Ollama
Version: 0.13.2
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])
-- Metadata shown by `module whatis`.
whatis("Name: Ollama")
whatis("Version: ollama:0.13.2")
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
-- Lmod family: only one "Ollama" module may be loaded at a time.
family("Ollama")

-- The container runtime is needed to run the image below.
always_load('apptainer')
-- Apptainer image providing this Ollama version.
local sif_file = '/hpc/m3/containers/ollama/ollama_0.13.2.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Source shell helpers into the user's session (script contents not visible here).
-- NOTE(review): this 0.13.2 module sources from hpc_docs/utils/ollama_scripts,
-- while the 0.15.1/0.17.7 modules use /hpc/m3/apps/ollama/helper_scripts —
-- confirm the legacy path is intentional.
source_sh("bash", "/hpc/m3/hpc_docs/utils/ollama_scripts/ollama.sh")
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
-- Lmod modulefile: exposes Ollama 0.15.1 via an Apptainer container image.
help([[
Name: Ollama
Version: 0.15.1
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])
-- Metadata shown by `module whatis`.
whatis("Name: Ollama")
whatis("Version: ollama:0.15.1")
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
-- Lmod family: only one "Ollama" module may be loaded at a time.
family("Ollama")

-- The container runtime is needed to run the image below.
always_load('apptainer')
-- Apptainer image providing this Ollama version.
local sif_file = '/hpc/m3/containers/ollama/ollama_0.15.1.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Source shell helpers into the user's session (script contents not visible here).
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
Lines changed: 22 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,22 @@
-- Lmod modulefile: exposes Ollama 0.17.7 via an Apptainer container image.
help([[
Name: Ollama
Version: 0.17.7
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])
-- Metadata shown by `module whatis`.
whatis("Name: Ollama")
whatis("Version: ollama:0.17.7")
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
-- Lmod family: only one "Ollama" module may be loaded at a time.
family("Ollama")

-- The container runtime is needed to run the image below.
always_load('apptainer')
-- Apptainer image providing this Ollama version.
local sif_file = '/hpc/m3/containers/ollama/ollama_0.17.7.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Source shell helpers into the user's session (script contents not visible here).
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")

0 commit comments

Comments (0)