File tree Expand file tree Collapse file tree
m3/applications/testing/ollama Expand file tree Collapse file tree Original file line number Diff line number Diff line change 44
# TODO: MP should pull a CUDA enabled version instead?
# Single source of truth for the Ollama release being containerised;
# TAG tracks VERSION so the image tag always matches the release.
VERSION="0.17.7"
TAG=${VERSION}
IMAGE_NAME="ollama"
MODULE_FOLDER="testing/ollama"
-- Lmod modulefile: Ollama 0.13.2, provided via an Apptainer (.sif) container.
local version = "0.13.2"

help([[
Name: Ollama
Version: 0.13.2
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])

whatis("Name: Ollama")
-- Derive module metadata from the single version declaration above.
whatis("Version: ollama:" .. version)
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
family("Ollama")

-- Apptainer is required to launch the container image below.
always_load('apptainer')
local sif_file = '/hpc/m3/containers/ollama/ollama_' .. version .. '.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- NOTE(review): the 0.15.1/0.17.7 modules source
-- /hpc/m3/apps/ollama/helper_scripts/ollama.sh instead; confirm this older
-- hpc_docs path is still the intended one for 0.13.2.
source_sh("bash", "/hpc/m3/hpc_docs/utils/ollama_scripts/ollama.sh")
-- Lmod modulefile: Ollama 0.15.1, provided via an Apptainer (.sif) container.
local version = "0.15.1"

help([[
Name: Ollama
Version: 0.15.1
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])

whatis("Name: Ollama")
-- Derive module metadata from the single version declaration above.
whatis("Version: ollama:" .. version)
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
family("Ollama")

-- Apptainer is required to launch the container image below.
always_load('apptainer')
local sif_file = '/hpc/m3/containers/ollama/ollama_' .. version .. '.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Helper script defines the user-facing ollama wrapper commands.
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
-- Lmod modulefile: Ollama 0.17.7, provided via an Apptainer (.sif) container.
local version = "0.17.7"

help([[
Name: Ollama
Version: 0.17.7
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])

whatis("Name: Ollama")
-- Derive module metadata from the single version declaration above.
whatis("Version: ollama:" .. version)
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
family("Ollama")

-- Apptainer is required to launch the container image below.
always_load('apptainer')
local sif_file = '/hpc/m3/containers/ollama/ollama_' .. version .. '.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Helper script defines the user-facing ollama wrapper commands.
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
-- Lmod modulefile: Ollama 0.13.2, provided via an Apptainer (.sif) container.
local version = "0.13.2"

help([[
Name: Ollama
Version: 0.13.2
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])

whatis("Name: Ollama")
-- Derive module metadata from the single version declaration above.
whatis("Version: ollama:" .. version)
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
family("Ollama")

-- Apptainer is required to launch the container image below.
always_load('apptainer')
local sif_file = '/hpc/m3/containers/ollama/ollama_' .. version .. '.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- NOTE(review): the 0.15.1/0.17.7 modules source
-- /hpc/m3/apps/ollama/helper_scripts/ollama.sh instead; confirm this older
-- hpc_docs path is still the intended one for 0.13.2.
source_sh("bash", "/hpc/m3/hpc_docs/utils/ollama_scripts/ollama.sh")
-- Lmod modulefile: Ollama 0.15.1, provided via an Apptainer (.sif) container.
local version = "0.15.1"

help([[
Name: Ollama
Version: 0.15.1
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])

whatis("Name: Ollama")
-- Derive module metadata from the single version declaration above.
whatis("Version: ollama:" .. version)
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
family("Ollama")

-- Apptainer is required to launch the container image below.
always_load('apptainer')
local sif_file = '/hpc/m3/containers/ollama/ollama_' .. version .. '.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Helper script defines the user-facing ollama wrapper commands.
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
-- Lmod modulefile: Ollama 0.17.7, provided via an Apptainer (.sif) container.
local version = "0.17.7"

help([[
Name: Ollama
Version: 0.17.7
Website: https://ollama.com/

Ollama is an open-source tool that allows you to run large language models (LLMs) like Llama 3 and Mistral directly on your local machine

]])

whatis("Name: Ollama")
-- Derive module metadata from the single version declaration above.
whatis("Version: ollama:" .. version)
whatis("Category: Ollama")
whatis("URL: https://hub.docker.com/r/ollama/ollama")
whatis("Description: Provides access to Ollama through a container built with Apptainer")
family("Ollama")

-- Apptainer is required to launch the container image below.
always_load('apptainer')
local sif_file = '/hpc/m3/containers/ollama/ollama_' .. version .. '.sif'

setenv('CONTAINER_IMAGE', sif_file)
-- Helper script defines the user-facing ollama wrapper commands.
source_sh("bash", "/hpc/m3/apps/ollama/helper_scripts/ollama.sh")
You can’t perform that action at this time.
0 commit comments