#!/bin/sh
# =============================================================================
#   Ollama / DeepSeek-R1 — build-from-source and quick-test notes
#   (original decorative ASCII banner preserved as a plain comment header)
# =============================================================================

# Building seems to be straightforward on Ubuntu 24.04 LTS
git clone https://github.com/ollama/ollama/
cd ollama
make -j8

# Once the build is finished, run a server instance in one shell session
./ollama serve

# Run the DeepSeek model
# https://ollama.com/library/deepseek-r1
# Will download about 4.7 GB first
./ollama run deepseek-r1:7b

# Don't expect super-fast AI, but it works quite well