{ "cells": [ { "cell_type": "markdown", "metadata": { "colab_type": "text", "id": "view-in-github" }, "source": [] }, { "cell_type": "code", "execution_count": null, "metadata": { "cellView": "form", "id": "uJS9i_Dltv8Y" }, "outputs": [], "source": [ "#@title v-- Enter your model below and then click this to start Koboldcpp\r\n", "\r\n", "Model = \"https://huggingface.co/TheBloke/Airoboros-L2-13B-2.2-GGUF/resolve/main/airoboros-l2-13b-2.2.Q4_K_M.gguf\" #@param [\"\"]{allow-input: true}\r\n", "Layers = 43 #@param [43]{allow-input: true}\r\n", "\r\n", "%cd /content\r\n", "!git clone https://github.com/LostRuins/koboldcpp\r\n", "%cd /content/koboldcpp\r\n", "!make LLAMA_CUBLAS=1\r\n", "\r\n", "!wget $Model -O model.ggml\r\n", "!wget -c https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64\r\n", "!chmod +x cloudflared-linux-amd64\r\n", "!nohup ./cloudflared-linux-amd64 tunnel --url http://localhost:5001 &\r\n", "!sleep 10\r\n", "!cat nohup.out\r\n", "!python koboldcpp.py model.ggml --usecublas 0 mmq --gpulayers $Layers --hordeconfig concedo\r\n" ] } ], "metadata": { "accelerator": "GPU", "colab": { "authorship_tag": "", "gpuType": "T4", "include_colab_link": true, "private_outputs": true, "provenance": [] }, "kernelspec": { "display_name": "Python 3", "name": "python3" }, "language_info": { "name": "python" } }, "nbformat": 4, "nbformat_minor": 0 }