Showing 1 changed file with 255 additions and 0 deletions.
@@ -0,0 +1,255 @@
{
"cells": [
{
"cell_type": "markdown",
"id": "d465baef-080f-4cdb-b1c3-465e4e1ce305",
"metadata": {},
"source": [
"# Running a local LLM with ollama\n",
"\n",
"[Ollama](https://ollama.com/) is probably the easiest way to run an LLM on your local machine.\n",
"\n",
"To run the code in this notebook on your machine, you will need [ollama-python](https://github.com/ollama/ollama-python).\n",
"\n",
"If you want to run this notebook on Google Colab, you can follow the instructions below."
]
},
{
"cell_type": "markdown",
"id": "720847c4-1c9a-4ea9-8de5-2a0d198a4af8",
"metadata": {},
"source": [
"## Instructions to run Ollama on Colab\n",
"- First, install colab-xterm:\n",
"```\n",
"!pip install colab-xterm  # https://pypi.org/project/colab-xterm/\n",
"%load_ext colabxterm\n",
"```\n",
"- Start an xterm and, inside the terminal, install Ollama, then start the server and pull a model:\n",
"```\n",
"%xterm\n",
"# curl https://ollama.ai/install.sh | sh\n",
"# ollama serve & ollama pull mistral\n",
"```\n",
"- In a cell, check that your model is available:\n",
"```\n",
"!ollama list\n",
"```\n",
"- In a cell, install ollama-python:\n",
"```\n",
"!pip install ollama\n",
"```\n",
"\n",
"Alternatively, you can pull and check the model directly from Python with ollama-python, as sketched in the next cells."
]
},
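{
"cell_type": "markdown",
"id": "pull-model-from-python-md",
"metadata": {},
"source": [
"Optionally, once the Ollama server is running, you can pull and inspect models from Python instead of the shell. The next cell is a minimal sketch using ollama-python's `pull` and `list` helpers; it assumes the server is reachable at its default local address and reuses the same `mistral` model as in the instructions above."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "pull-model-from-python-code",
"metadata": {},
"outputs": [],
"source": [
"import ollama\n",
"\n",
"# Pull the model through the running Ollama server (a no-op if it is already present).\n",
"ollama.pull('mistral')\n",
"\n",
"# List the models the server knows about, to confirm the pull succeeded.\n",
"print(ollama.list())"
]
},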
{
"cell_type": "markdown",
"id": "13e9b490-6c0a-4563-bbf7-03c2bb268449",
"metadata": {},
"source": [
"## Check that everything is running fine"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "dd16d74c-ed5e-4826-952e-f1809e48fae6",
"metadata": {},
"outputs": [],
"source": [
"from ollama import chat\n",
"from ollama import ChatResponse\n",
"from IPython import display"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1f804638-4101-46d4-86eb-c58e4c005394",
"metadata": {},
"outputs": [],
"source": [
"response: ChatResponse = chat(model='mistral', messages=[\n",
"  {\n",
"    'role': 'user',\n",
"    'content': 'Why is the sky blue?',\n",
"  },\n",
"])\n",
"\n",
"print(response.message.content)"
]
},
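{
"cell_type": "markdown",
"id": "streaming-chat-md",
"metadata": {},
"source": [
"The call above returns the whole answer at once. ollama-python also accepts `stream=True`, in which case `chat` returns an iterator of partial responses, so you can print tokens as they are generated. A minimal sketch, assuming the same `mistral` model:"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "streaming-chat-code",
"metadata": {},
"outputs": [],
"source": [
"# Stream the answer instead of waiting for the full response.\n",
"stream = chat(model='mistral', messages=[\n",
"  {\n",
"    'role': 'user',\n",
"    'content': 'Why is the sky blue?',\n",
"  },\n",
"], stream=True)\n",
"\n",
"for chunk in stream:\n",
"  # Each chunk carries the next piece of the assistant's message.\n",
"  print(chunk.message.content, end='', flush=True)"
]
},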
{
"cell_type": "markdown",
"id": "0c81abfc-3a45-4d7a-ae0c-ad2f73f63783",
"metadata": {},
"source": [
"## Choose your model (you will need to pull it first) and make a nice website!"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fd80bb2c-d17f-4957-93ea-4abf89a1579f",
"metadata": {},
"outputs": [],
"source": [
"model = 'mistral'"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "99eb2596-ad7a-4e12-9dab-957c4d2e3819",
"metadata": {},
"outputs": [],
"source": [
"prompt_0 = \"Write a webpage for the course: Large Language Models : Introduction and Applications for Code given by Nathanaël FIJALKOW and Marc LELARGE in the Master MVA. I will pipe your output directly to a file, so just give me ```html\\n[page here]\\n```.\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "1e4c077a-fd5c-4f20-b1df-c33d506ec61c",
"metadata": {},
"outputs": [],
"source": [
"response_0: ChatResponse = chat(model=model, messages=[\n",
"  {\n",
"    'role': 'user',\n",
"    'content': prompt_0,\n",
"  },\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "ccef4c65-7df9-45dc-95eb-4fe001f46177",
"metadata": {},
"outputs": [],
"source": [
"print(response_0.message.content)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "43f79f75-f0c9-40c6-8686-7aedde090f12",
"metadata": {},
"outputs": [],
"source": [
"# Extract the HTML between the ``` fences, dropping the language tag on the opening line.\n",
"page = response_0.message.content.split(\"```\")[1].partition(\"\\n\")[2]"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "a729c73f-1b21-475f-b381-bd25fc2158c6",
"metadata": {},
"outputs": [],
"source": [
"display.HTML(page)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "6ad3fa84-d37b-45fc-95e2-afaf8618fc5f",
"metadata": {},
"outputs": [],
"source": [
"def parse_html(response):\n",
"    # Keep only what sits between the first pair of ``` fences and drop the\n",
"    # language tag (e.g. ```html) on the opening line.\n",
"    return response.message.content.split(\"```\")[1].partition(\"\\n\")[2]\n",
"\n",
"def write_html_file(file_name, content):\n",
"    try:\n",
"        with open(file_name, 'w') as file:\n",
"            file.write(content)\n",
"        print(f\"HTML file '{file_name}' created successfully.\")\n",
"    except Exception as e:\n",
"        print(f\"An error occurred while writing the HTML file: {e}\")"
]
},
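{
"cell_type": "markdown",
"id": "robust-extraction-md",
"metadata": {},
"source": [
"`parse_html` assumes the model always wraps the page in a single triple-backtick fence; small local models do not always comply. The cell below sketches a hypothetical, slightly more defensive helper (`extract_html`, not part of the original notebook): it looks for a fenced block with a regular expression and falls back to the raw message when none is found."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "robust-extraction-code",
"metadata": {},
"outputs": [],
"source": [
"import re\n",
"\n",
"def extract_html(response):\n",
"    # Hypothetical, more defensive variant of parse_html: return the content of\n",
"    # the first fenced block if there is one, otherwise the whole message.\n",
"    text = response.message.content\n",
"    match = re.search(r\"```(?:html)?\\s*\\n(.*?)```\", text, flags=re.DOTALL)\n",
"    return match.group(1) if match else text"
]
},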
{
"cell_type": "code",
"execution_count": null,
"id": "5446b7bf-67c7-4de2-a06e-543bc93af478",
"metadata": {},
"outputs": [],
"source": [
"write_html_file('webpage_mistral_v0.html', page)"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "60eddf60-b0ba-4f3c-a977-c916a9706e91",
"metadata": {},
"outputs": [],
"source": [
"prompt_1 = \"Add more detail and better html and css. I will pipe your output directly to a file, so just give me ```html\\n[page here]\\n``` so that it fits in one file.\""
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "b3e07184-84ae-48c9-ac04-4c912bbeb484",
"metadata": {},
"outputs": [],
"source": [
"# Continue the conversation: the first prompt, the model's first answer,\n",
"# and then the follow-up instruction asking for a nicer page.\n",
"response_1: ChatResponse = chat(model=model, messages=[\n",
"  {\n",
"    'role': 'user',\n",
"    'content': prompt_0,\n",
"  },\n",
"  response_0.message,\n",
"  {\n",
"    'role': 'user',\n",
"    'content': prompt_1\n",
"  },\n",
"])"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "fe439346-e76e-4381-a7d5-df88472203c8",
"metadata": {},
"outputs": [],
"source": [
"new_page = parse_html(response_1)"
]
},
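{
"cell_type": "markdown",
"id": "preview-refined-page-md",
"metadata": {},
"source": [
"As with the first draft, you can preview the refined page inline before writing it to disk."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "preview-refined-page-code",
"metadata": {},
"outputs": [],
"source": [
"display.HTML(new_page)"
]
},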
{
"cell_type": "code",
"execution_count": null,
"id": "5360dbf9-7782-4b90-845e-ec759ca8a9ea",
"metadata": {},
"outputs": [],
"source": [
"write_html_file('webpage_mistral_v1.html', new_page)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "chess",
"language": "python",
"name": "chess"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 5
}