srush HF staff committed on
Commit
f5ec828
1 Parent(s): 0e4ded8

Upload with huggingface_hub

__pycache__/bash.cpython-310.pyc CHANGED
Binary files a/__pycache__/bash.cpython-310.pyc and b/__pycache__/bash.cpython-310.pyc differ
 
__pycache__/chat.cpython-310.pyc CHANGED
Binary files a/__pycache__/chat.cpython-310.pyc and b/__pycache__/chat.cpython-310.pyc differ
 
__pycache__/gatsby.cpython-310.pyc CHANGED
Binary files a/__pycache__/gatsby.cpython-310.pyc and b/__pycache__/gatsby.cpython-310.pyc differ
 
__pycache__/math_demo.cpython-310.pyc CHANGED
Binary files a/__pycache__/math_demo.cpython-310.pyc and b/__pycache__/math_demo.cpython-310.pyc differ
 
__pycache__/ner.cpython-310.pyc CHANGED
Binary files a/__pycache__/ner.cpython-310.pyc and b/__pycache__/ner.cpython-310.pyc differ
 
__pycache__/pal.cpython-310.pyc CHANGED
Binary files a/__pycache__/pal.cpython-310.pyc and b/__pycache__/pal.cpython-310.pyc differ
 
__pycache__/qa.cpython-310.pyc CHANGED
Binary files a/__pycache__/qa.cpython-310.pyc and b/__pycache__/qa.cpython-310.pyc differ
 
__pycache__/selfask.cpython-310.pyc ADDED
Binary file (1.92 kB).
 
__pycache__/stats.cpython-310.pyc CHANGED
Binary files a/__pycache__/stats.cpython-310.pyc and b/__pycache__/stats.cpython-310.pyc differ
 
app.ipynb ADDED
@@ -0,0 +1,106 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "6d7e0fa0",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "6bc68605",
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "import gradio as gr\n",
22
+ "from chat import gradio as chat\n",
23
+ "from ner import gradio as ner\n",
24
+ "from math_demo import gradio as math_demo\n",
25
+ "from bash import gradio as bash\n",
26
+ "from pal import gradio as pal\n",
27
+ "from gatsby import gradio as gatsby\n",
28
+ "from qa import gradio as qa\n",
29
+ "from stats import gradio as stats\n",
30
+ "from selfask import gradio as selfask\n",
31
+ "from backtrack import gradio as backtrack"
32
+ ]
33
+ },
34
+ {
35
+ "cell_type": "code",
36
+ "execution_count": null,
37
+ "id": "a0a69443",
38
+ "metadata": {},
39
+ "outputs": [],
40
+ "source": [
41
+ "CSS = \"\"\"\n",
42
+ "#clean div.form {border: 0px} \n",
43
+ "#response {border: 0px; background: #ffeec6} \n",
44
+ "#prompt {border: 0px;background: aliceblue} \n",
45
+ "#json {border: 0px} \n",
46
+ "#result {border: 0px; background: #c5e0e5} \n",
47
+ "#inner {margin: 10px; padding: 10px; font-size: 20px; } \n",
48
+ "#inner textarea {border: 0px}\n",
49
+ "div.gradio-container {color: black}\n",
50
+ "span.head {font-size: 60pt; font-family: cursive;}\n",
51
+ "body {\n",
52
+ " --text-sm: 15px;\n",
53
+ " --text-md: 18px;\n",
54
+ " --text-lg: 20px;\n",
55
+ " --input-text-size: 20px;\n",
56
+ " --section-text-size: 20px;\n",
57
+ "}\n",
58
+ "\"\"\""
59
+ ]
60
+ },
61
+ {
62
+ "cell_type": "code",
63
+ "execution_count": null,
64
+ "id": "7eabd4da",
65
+ "metadata": {},
66
+ "outputs": [],
67
+ "source": []
68
+ },
69
+ {
70
+ "cell_type": "code",
71
+ "execution_count": null,
72
+ "id": "ea4326bb",
73
+ "metadata": {},
74
+ "outputs": [],
75
+ "source": [
76
+ "with gr.Blocks(css=CSS, theme=gr.themes.Monochrome()) as demo:\n",
77
+ " gr.HTML(\"<center style='background:#B6B7BA'> <span class='head'>Mini</span><img src='https://user-images.githubusercontent.com/35882/227017900-0cacdfb7-37e2-47b1-9347-a233810d3544.png' width='20%' style='display:inline'><span class='head'>Chain</span></center><center> <br><a href='https://github.com/srush/minichain'>[library]</a> </center>\")\n",
78
+ "\n",
79
+ " gr.TabbedInterface([math_demo, qa, chat, gatsby, ner, bash, pal, stats, selfask, backtrack],\n",
80
+ " [\"Math\", \"QA\", \"Chat\", \"Book\", \"NER\", \"Bash\", \"PAL\", \"Stats\", \"SelfAsk\", \"Backtrack\"],\n",
81
+ " css = CSS)"
82
+ ]
83
+ },
84
+ {
85
+ "cell_type": "code",
86
+ "execution_count": null,
87
+ "id": "a7c58fbd",
88
+ "metadata": {
89
+ "lines_to_next_cell": 2
90
+ },
91
+ "outputs": [],
92
+ "source": [
93
+ "demo.launch()"
94
+ ]
95
+ }
96
+ ],
97
+ "metadata": {
98
+ "jupytext": {
99
+ "cell_metadata_filter": "-all",
100
+ "main_language": "python",
101
+ "notebook_metadata_filter": "-all"
102
+ }
103
+ },
104
+ "nbformat": 4,
105
+ "nbformat_minor": 5
106
+ }
app.py CHANGED
@@ -8,6 +8,7 @@ from gatsby import gradio as gatsby
  from qa import gradio as qa
  from stats import gradio as stats
  from selfask import gradio as selfask
+ from backtrack import gradio as backtrack

  CSS = """
  #clean div.form {border: 0px}
@@ -21,8 +22,8 @@ div.gradio-container {color: black}
  span.head {font-size: 60pt; font-family: cursive;}
  body {
    --text-sm: 15px;
-   --text-md: 20px;
-   --text-lg: 22px;
+   --text-md: 18px;
+   --text-lg: 20px;
    --input-text-size: 20px;
    --section-text-size: 20px;
  }
@@ -31,10 +32,10 @@ body {


  with gr.Blocks(css=CSS, theme=gr.themes.Monochrome()) as demo:
-     gr.HTML("<center style='background:#B6B7BA'> <span class='head'>Mini</span><img src='https://user-images.githubusercontent.com/35882/227017900-0cacdfb7-37e2-47b1-9347-a233810d3544.png' width='20%' style='display:inline'><span class='head'>Chain</span> <br><a href='https://github.com/srush/minichain'>[code]</a> <a href='https://user-images.githubusercontent.com/35882/218286642-67985b6f-d483-49be-825b-f62b72c469cd.png'>[docs]</a></center>")
+     gr.HTML("<center style='background:#B6B7BA'> <span class='head'>Mini</span><img src='https://user-images.githubusercontent.com/35882/227017900-0cacdfb7-37e2-47b1-9347-a233810d3544.png' width='20%' style='display:inline'><span class='head'>Chain</span></center><center> <br><a href='https://github.com/srush/minichain'>[library]</a> </center>")

-     gr.TabbedInterface([math_demo, qa, chat, gatsby, ner, bash, pal, stats, selfask],
-                        ["Math", "QA", "Chat", "Book", "NER", "Bash", "PAL", "Stats", "SelfAsk"],
+     gr.TabbedInterface([math_demo, qa, chat, gatsby, ner, bash, pal, stats, selfask, backtrack],
+                        ["Math", "QA", "Chat", "Book", "NER", "Bash", "PAL", "Stats", "SelfAsk", "Backtrack"],
                         css = CSS)

  demo.launch()
backtrack.ipynb ADDED
@@ -0,0 +1,126 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "20a65655",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "261f764b",
18
+ "metadata": {
19
+ "tags": [
20
+ "hide_inp"
21
+ ]
22
+ },
23
+ "outputs": [],
24
+ "source": [
25
+ "desc = \"\"\"\n",
26
+ "### Backtrack on Failure\n",
27
+ "\n",
28
+ "Chain that backtracks on failure. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/backtrack.ipynb)\n",
29
+ "\n",
30
+ "\"\"\""
31
+ ]
32
+ },
33
+ {
34
+ "cell_type": "code",
35
+ "execution_count": null,
36
+ "id": "b6617fa6",
37
+ "metadata": {
38
+ "lines_to_next_cell": 1
39
+ },
40
+ "outputs": [],
41
+ "source": [
42
+ "from minichain import prompt, Mock, show, OpenAI\n",
43
+ "import minichain"
44
+ ]
45
+ },
46
+ {
47
+ "cell_type": "code",
48
+ "execution_count": null,
49
+ "id": "36c42ea3",
50
+ "metadata": {
51
+ "lines_to_next_cell": 1
52
+ },
53
+ "outputs": [],
54
+ "source": [
55
+ "@prompt(Mock([\"dog\", \"blue\", \"cat\"]))\n",
56
+ "def prompt_generation(model):\n",
57
+ " return model(\"\")"
58
+ ]
59
+ },
60
+ {
61
+ "cell_type": "code",
62
+ "execution_count": null,
63
+ "id": "283167b5",
64
+ "metadata": {
65
+ "lines_to_next_cell": 1
66
+ },
67
+ "outputs": [],
68
+ "source": [
69
+ "@prompt(OpenAI(), template=\"Answer 'yes' if {{query}} is a color. Answer:\")\n",
70
+ "def prompt_validation(model, x):\n",
71
+ " out = model(dict(query=x))\n",
72
+ " if out.strip().lower().startswith(\"yes\"):\n",
73
+ " return x\n",
74
+ " return model.fail(1)"
75
+ ]
76
+ },
77
+ {
78
+ "cell_type": "code",
79
+ "execution_count": null,
80
+ "id": "76e99eee",
81
+ "metadata": {
82
+ "lines_to_next_cell": 1
83
+ },
84
+ "outputs": [],
85
+ "source": [
86
+ "def run():\n",
87
+ " x = prompt_generation()\n",
88
+ " return prompt_validation(x)"
89
+ ]
90
+ },
91
+ {
92
+ "cell_type": "code",
93
+ "execution_count": null,
94
+ "id": "45a2e713",
95
+ "metadata": {},
96
+ "outputs": [],
97
+ "source": [
98
+ "gradio = show(run,\n",
99
+ " examples = [],\n",
100
+ " subprompts=[prompt_generation, prompt_validation],\n",
101
+ " out_type=\"markdown\"\n",
102
+ " )"
103
+ ]
104
+ },
105
+ {
106
+ "cell_type": "code",
107
+ "execution_count": null,
108
+ "id": "24791bf0",
109
+ "metadata": {},
110
+ "outputs": [],
111
+ "source": [
112
+ "if __name__ == \"__main__\":\n",
113
+ " gradio.launch()"
114
+ ]
115
+ }
116
+ ],
117
+ "metadata": {
118
+ "jupytext": {
119
+ "cell_metadata_filter": "tags,-all",
120
+ "main_language": "python",
121
+ "notebook_metadata_filter": "-all"
122
+ }
123
+ },
124
+ "nbformat": 4,
125
+ "nbformat_minor": 5
126
+ }
backtrack.py ADDED
@@ -0,0 +1,38 @@
+ # + tags=["hide_inp"]
+ desc = """
+ ### Backtrack on Failure
+
+ Chain that backtracks on failure. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/backtrack.ipynb)
+
+ """
+ # -
+
+ # $
+ from minichain import prompt, Mock, show, OpenAI
+ import minichain
+
+ @prompt(Mock(["dog", "blue", "cat"]))
+ def prompt_generation(model):
+     return model("")
+
+ @prompt(OpenAI(), template="Answer 'yes' if {{query}} is a color. Answer:")
+ def prompt_validation(model, x):
+     out = model(dict(query=x))
+     if out.strip().lower().startswith("yes"):
+         return x
+     return model.fail(1)
+
+ def run():
+     x = prompt_generation()
+     return prompt_validation(x)
+ # $
+
+ gradio = show(run,
+               examples = [],
+               subprompts=[prompt_generation, prompt_validation],
+               code=open("backtrack.py", "r").read().split("$")[1].strip().strip("#").strip(),
+               out_type="markdown"
+               )
+
+ if __name__ == "__main__":
+     gradio.launch()
backtrack.py~ ADDED
@@ -0,0 +1,34 @@
+ # + tags=["hide_inp"]
+ desc = """
+ ### Backtrack on Failure
+
+ Chain that backtracks on failure. [[Code](https://github.com/srush/MiniChain/blob/main/examples/backtrack.py)]
+
+ """
+ # -
+
+
+ from minichain import prompt, Mock, show
+ import minichain
+
+ @prompt(Mock(["red", "blue"]))
+ def prompt_function1(model, x):
+     return model(x)
+
+ @prompt(Mock(["b"]), template_file="test.pmpt.tpl")
+ def prompt_function2(model, x):
+     if x == "red":
+         return model.fail(1)
+     return model(dict(x=x))
+
+ def run(query):
+     x = prompt_function1(query)
+     return prompt_function2(prompt_function2(x))
+
+
+ demo = show(run,
+             examples=["a"],
+             subprompts=[prompt_function1, prompt_function2, prompt_function2])
+
+ if __name__ == "__main__":
+     demo.launch()
base.ipynb ADDED
@@ -0,0 +1,113 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "99422218",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "markdown",
16
+ "id": "f8d55ae3",
17
+ "metadata": {},
18
+ "source": [
19
+ "Prompt from ...\n"
20
+ ]
21
+ },
22
+ {
23
+ "cell_type": "code",
24
+ "execution_count": null,
25
+ "id": "a2c96300",
26
+ "metadata": {},
27
+ "outputs": [],
28
+ "source": [
29
+ "prompt = \"\"\"\n",
30
+ "Question: Who lived longer, Muhammad Ali or Alan Turing?\n",
31
+ "Are follow up questions needed here: Yes.\n",
32
+ "Follow up: How old was Muhammad Ali when he died?\n",
33
+ "Intermediate answer: Muhammad Ali was 74 years old when he died.\n",
34
+ "Follow up: How old was Alan Turing when he died?\n",
35
+ "Intermediate answer: Alan Turing was 41 years old when he died.\n",
36
+ "So the final answer is: Muhammad Ali\n",
37
+ "\n",
38
+ "Question: When was the founder of craigslist born?\n",
39
+ "Are follow up questions needed here: Yes.\n",
40
+ "Follow up: Who was the founder of craigslist?\n",
41
+ "Intermediate answer: Craigslist was founded by Craig Newmark.\n",
42
+ "Follow up: When was Craig Newmark born?\n",
43
+ "Intermediate answer: Craig Newmark was born on December 6, 1952.\n",
44
+ "So the final answer is: December 6, 1952\n",
45
+ "\n",
46
+ "Question: Who was the maternal grandfather of George Washington?\n",
47
+ "Are follow up questions needed here: Yes.\n",
48
+ "Follow up: Who was the mother of George Washington?\n",
49
+ "Intermediate answer: The mother of George Washington was Mary Ball Washington.\n",
50
+ "Follow up: Who was the father of Mary Ball Washington?\n",
51
+ "Intermediate answer: The father of Mary Ball Washington was Joseph Ball.\n",
52
+ "So the final answer is: Joseph Ball\n",
53
+ "\n",
54
+ "Question: Are both the directors of Jaws and Casino Royale from the same country?\n",
55
+ "Are follow up questions needed here: Yes.\n",
56
+ "Follow up: Who is the director of Jaws?\n",
57
+ "Intermediate answer: The director of Jaws is Steven Spielberg.\n",
58
+ "Follow up: Where is Steven Spielberg from?\n",
59
+ "Intermediate answer: The United States.\n",
60
+ "Follow up: Who is the director of Casino Royale?\n",
61
+ "Intermediate answer: The director of Casino Royale is Martin Campbell.\n",
62
+ "Follow up: Where is Martin Campbell from?\n",
63
+ "Intermediate answer: New Zealand.\n",
64
+ "So the final answer is: No\n",
65
+ "\n",
66
+ "Question: {{input}}\n",
67
+ "Are followup questions needed here: {% if agent_scratchpad %}Yes{%else%}No{% endif %}.\n",
68
+ "\"\"\""
69
+ ]
70
+ },
71
+ {
72
+ "cell_type": "code",
73
+ "execution_count": null,
74
+ "id": "2c142da2",
75
+ "metadata": {
76
+ "lines_to_next_cell": 1
77
+ },
78
+ "outputs": [],
79
+ "source": [
80
+ "import jinja2"
81
+ ]
82
+ },
83
+ {
84
+ "cell_type": "code",
85
+ "execution_count": null,
86
+ "id": "a26d9cd8",
87
+ "metadata": {},
88
+ "outputs": [],
89
+ "source": [
90
+ "class SelfAsk:\n",
91
+ " def render(self, input: str, agent_scratchpad: bool):\n",
92
+ " return jinja.render(prompt, dict(input=input,\n",
93
+ " agent_scatchpad=agent_scratchpad))\n",
94
+ "\n",
95
+ " def parse(self, response: str):\n",
96
+ " pass\n",
97
+ "\n",
98
+ "\n",
99
+ " def stop(self):\n",
100
+ " return []"
101
+ ]
102
+ }
103
+ ],
104
+ "metadata": {
105
+ "jupytext": {
106
+ "cell_metadata_filter": "-all",
107
+ "main_language": "python",
108
+ "notebook_metadata_filter": "-all"
109
+ }
110
+ },
111
+ "nbformat": 4,
112
+ "nbformat_minor": 5
113
+ }
bash.ipynb CHANGED
@@ -2,15 +2,21 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
- "id": "d1d5ec44",
7
  "metadata": {
8
- "execution": {
9
- "iopub.execute_input": "2023-03-22T17:00:17.499299Z",
10
- "iopub.status.busy": "2023-03-22T17:00:17.498973Z",
11
- "iopub.status.idle": "2023-03-22T17:00:17.506839Z",
12
- "shell.execute_reply": "2023-03-22T17:00:17.506087Z"
13
- },
14
  "tags": [
15
  "hide_inp"
16
  ]
@@ -21,32 +27,17 @@
21
  "desc = \"\"\"\n",
22
  "### Bash Command Suggestion\n",
23
  "\n",
24
- "Chain that asks for a command-line question and then runs the bash command. [[Code](https://github.com/srush/MiniChain/blob/main/examples/bash.py)]\n",
25
  "\n",
26
  "(Adapted from LangChain [BashChain](https://langchain.readthedocs.io/en/latest/modules/chains/examples/llm_bash.html))\n",
27
  "\"\"\""
28
  ]
29
  },
30
- {
31
- "cell_type": "markdown",
32
- "id": "94100dff",
33
- "metadata": {},
34
- "source": [
35
- "$"
36
- ]
37
- },
38
  {
39
  "cell_type": "code",
40
- "execution_count": 2,
41
- "id": "b2ae061e",
42
- "metadata": {
43
- "execution": {
44
- "iopub.execute_input": "2023-03-22T17:00:17.509606Z",
45
- "iopub.status.busy": "2023-03-22T17:00:17.509377Z",
46
- "iopub.status.idle": "2023-03-22T17:00:18.808162Z",
47
- "shell.execute_reply": "2023-03-22T17:00:18.807504Z"
48
- }
49
- },
50
  "outputs": [],
51
  "source": [
52
  "from minichain import show, prompt, OpenAI, Bash"
@@ -54,15 +45,9 @@
54
  },
55
  {
56
  "cell_type": "code",
57
- "execution_count": 3,
58
- "id": "9c440e40",
59
  "metadata": {
60
- "execution": {
61
- "iopub.execute_input": "2023-03-22T17:00:18.810785Z",
62
- "iopub.status.busy": "2023-03-22T17:00:18.810465Z",
63
- "iopub.status.idle": "2023-03-22T17:00:18.814003Z",
64
- "shell.execute_reply": "2023-03-22T17:00:18.813511Z"
65
- },
66
  "lines_to_next_cell": 1
67
  },
68
  "outputs": [],
@@ -75,15 +60,9 @@
75
  },
76
  {
77
  "cell_type": "code",
78
- "execution_count": 4,
79
- "id": "74115a3f",
80
  "metadata": {
81
- "execution": {
82
- "iopub.execute_input": "2023-03-22T17:00:18.816250Z",
83
- "iopub.status.busy": "2023-03-22T17:00:18.815868Z",
84
- "iopub.status.idle": "2023-03-22T17:00:18.818697Z",
85
- "shell.execute_reply": "2023-03-22T17:00:18.818263Z"
86
- },
87
  "lines_to_next_cell": 1
88
  },
89
  "outputs": [],
@@ -95,16 +74,9 @@
95
  },
96
  {
97
  "cell_type": "code",
98
- "execution_count": 5,
99
- "id": "88210a7d",
100
- "metadata": {
101
- "execution": {
102
- "iopub.execute_input": "2023-03-22T17:00:18.820762Z",
103
- "iopub.status.busy": "2023-03-22T17:00:18.820577Z",
104
- "iopub.status.idle": "2023-03-22T17:00:18.823279Z",
105
- "shell.execute_reply": "2023-03-22T17:00:18.822838Z"
106
- }
107
- },
108
  "outputs": [],
109
  "source": [
110
  "def bash(query):\n",
@@ -112,49 +84,21 @@
112
  ]
113
  },
114
  {
115
- "cell_type": "markdown",
116
- "id": "99d969c2",
 
117
  "metadata": {},
118
- "source": [
119
- "$"
120
- ]
121
  },
122
  {
123
  "cell_type": "code",
124
- "execution_count": 6,
125
- "id": "fe4ffa7a",
126
  "metadata": {
127
- "execution": {
128
- "iopub.execute_input": "2023-03-22T17:00:18.825360Z",
129
- "iopub.status.busy": "2023-03-22T17:00:18.825177Z",
130
- "iopub.status.idle": "2023-03-22T17:00:19.158113Z",
131
- "shell.execute_reply": "2023-03-22T17:00:19.157509Z"
132
- },
133
  "lines_to_next_cell": 2
134
  },
135
- "outputs": [
136
- {
137
- "name": "stdout",
138
- "output_type": "stream",
139
- "text": [
140
- "Running on local URL: http://127.0.0.1:7861\n",
141
- "\n",
142
- "To create a public link, set `share=True` in `launch()`.\n"
143
- ]
144
- },
145
- {
146
- "data": {
147
- "text/html": [
148
- "<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
149
- ],
150
- "text/plain": [
151
- "<IPython.core.display.HTML object>"
152
- ]
153
- },
154
- "metadata": {},
155
- "output_type": "display_data"
156
- }
157
- ],
158
  "source": [
159
  "gradio = show(bash,\n",
160
  " subprompts=[cli_prompt, bash_run],\n",
@@ -163,7 +107,6 @@
163
  " \"Please write a bash script that prints 'Hello World' to the console.\"],\n",
164
  " out_type=\"markdown\",\n",
165
  " description=desc,\n",
166
- " code=open(\"bash.py\", \"r\").read().split(\"$\")[1].strip().strip(\"#\").strip(),\n",
167
  " )\n",
168
  "if __name__ == \"__main__\":\n",
169
  " gradio.launch()"
@@ -172,24 +115,9 @@
172
  ],
173
  "metadata": {
174
  "jupytext": {
175
- "cell_metadata_filter": "tags,-all"
176
- },
177
- "kernelspec": {
178
- "display_name": "minichain",
179
- "language": "python",
180
- "name": "minichain"
181
- },
182
- "language_info": {
183
- "codemirror_mode": {
184
- "name": "ipython",
185
- "version": 3
186
- },
187
- "file_extension": ".py",
188
- "mimetype": "text/x-python",
189
- "name": "python",
190
- "nbconvert_exporter": "python",
191
- "pygments_lexer": "ipython3",
192
- "version": "3.10.6"
193
  }
194
  },
195
  "nbformat": 4,
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "4ed306df",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "1c9aac71",
18
  "metadata": {
19
+ "lines_to_next_cell": 2,
 
 
 
 
 
20
  "tags": [
21
  "hide_inp"
22
  ]
 
27
  "desc = \"\"\"\n",
28
  "### Bash Command Suggestion\n",
29
  "\n",
30
+ "Chain that asks for a command-line question and then runs the bash command. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/bash.ipynb)\n",
31
  "\n",
32
  "(Adapted from LangChain [BashChain](https://langchain.readthedocs.io/en/latest/modules/chains/examples/llm_bash.html))\n",
33
  "\"\"\""
34
  ]
35
  },
 
 
 
 
 
 
 
 
36
  {
37
  "cell_type": "code",
38
+ "execution_count": null,
39
+ "id": "780fdf4d",
40
+ "metadata": {},
 
 
 
 
 
 
 
41
  "outputs": [],
42
  "source": [
43
  "from minichain import show, prompt, OpenAI, Bash"
 
45
  },
46
  {
47
  "cell_type": "code",
48
+ "execution_count": null,
49
+ "id": "6e8d844b",
50
  "metadata": {
 
 
 
 
 
 
51
  "lines_to_next_cell": 1
52
  },
53
  "outputs": [],
 
60
  },
61
  {
62
  "cell_type": "code",
63
+ "execution_count": null,
64
+ "id": "e529c0da",
65
  "metadata": {
 
 
 
 
 
 
66
  "lines_to_next_cell": 1
67
  },
68
  "outputs": [],
 
74
  },
75
  {
76
  "cell_type": "code",
77
+ "execution_count": null,
78
+ "id": "9894b4b8",
79
+ "metadata": {},
 
 
 
 
 
 
 
80
  "outputs": [],
81
  "source": [
82
  "def bash(query):\n",
 
84
  ]
85
  },
86
  {
87
+ "cell_type": "code",
88
+ "execution_count": null,
89
+ "id": "721196f5",
90
  "metadata": {},
91
+ "outputs": [],
92
+ "source": []
 
93
  },
94
  {
95
  "cell_type": "code",
96
+ "execution_count": null,
97
+ "id": "d18f754f",
98
  "metadata": {
 
 
 
 
 
 
99
  "lines_to_next_cell": 2
100
  },
101
+ "outputs": [],
102
  "source": [
103
  "gradio = show(bash,\n",
104
  " subprompts=[cli_prompt, bash_run],\n",
 
107
  " \"Please write a bash script that prints 'Hello World' to the console.\"],\n",
108
  " out_type=\"markdown\",\n",
109
  " description=desc,\n",
 
110
  " )\n",
111
  "if __name__ == \"__main__\":\n",
112
  " gradio.launch()"
 
115
  ],
116
  "metadata": {
117
  "jupytext": {
118
+ "cell_metadata_filter": "tags,-all",
119
+ "main_language": "python",
120
+ "notebook_metadata_filter": "-all"
121
  }
122
  },
123
  "nbformat": 4,
bash.py CHANGED
@@ -3,7 +3,7 @@
  desc = """
  ### Bash Command Suggestion

- Chain that asks for a command-line question and then runs the bash command. [[Code](https://github.com/srush/MiniChain/blob/main/examples/bash.py)]
+ Chain that asks for a command-line question and then runs the bash command. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/bash.ipynb)

  (Adapted from LangChain [BashChain](https://langchain.readthedocs.io/en/latest/modules/chains/examples/llm_bash.html))
  """
chat.ipynb ADDED
@@ -0,0 +1,173 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "62e745e2",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "8b7656d3",
18
+ "metadata": {
19
+ "lines_to_next_cell": 0,
20
+ "tags": [
21
+ "hide_inp"
22
+ ]
23
+ },
24
+ "outputs": [],
25
+ "source": [
26
+ "desc = \"\"\"\n",
27
+ "### Chat\n",
28
+ "\n",
29
+ "A chat-like example for multi-turn chat with state. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/chat.ipynb)\n",
30
+ "\n",
31
+ "(Adapted from [LangChain](https://langchain.readthedocs.io/en/latest/modules/memory/examples/chatgpt_clone.html)'s version of this [blog post](https://www.engraved.blog/building-a-virtual-machine-inside/).)\n",
32
+ "\n",
33
+ "\"\"\""
34
+ ]
35
+ },
36
+ {
37
+ "cell_type": "code",
38
+ "execution_count": null,
39
+ "id": "1c551f62",
40
+ "metadata": {
41
+ "lines_to_next_cell": 2
42
+ },
43
+ "outputs": [],
44
+ "source": []
45
+ },
46
+ {
47
+ "cell_type": "code",
48
+ "execution_count": null,
49
+ "id": "4ad7d54a",
50
+ "metadata": {},
51
+ "outputs": [],
52
+ "source": [
53
+ "from dataclasses import dataclass, replace\n",
54
+ "from typing import List, Tuple\n",
55
+ "from minichain import OpenAI, prompt, show"
56
+ ]
57
+ },
58
+ {
59
+ "cell_type": "markdown",
60
+ "id": "46da3feb",
61
+ "metadata": {},
62
+ "source": [
63
+ "Generic stateful Memory"
64
+ ]
65
+ },
66
+ {
67
+ "cell_type": "code",
68
+ "execution_count": null,
69
+ "id": "3f3fdeaa",
70
+ "metadata": {
71
+ "lines_to_next_cell": 1
72
+ },
73
+ "outputs": [],
74
+ "source": [
75
+ "MEMORY = 2"
76
+ ]
77
+ },
78
+ {
79
+ "cell_type": "code",
80
+ "execution_count": null,
81
+ "id": "7bb9b6f9",
82
+ "metadata": {
83
+ "lines_to_next_cell": 1
84
+ },
85
+ "outputs": [],
86
+ "source": [
87
+ "@dataclass\n",
88
+ "class State:\n",
89
+ " memory: List[Tuple[str, str]]\n",
90
+ " human_input: str = \"\"\n",
91
+ "\n",
92
+ " def push(self, response: str) -> \"State\":\n",
93
+ " memory = self.memory if len(self.memory) < MEMORY else self.memory[1:]\n",
94
+ " return State(memory + [(self.human_input, response)])"
95
+ ]
96
+ },
97
+ {
98
+ "cell_type": "markdown",
99
+ "id": "06bfc54f",
100
+ "metadata": {},
101
+ "source": [
102
+ "Chat prompt with memory"
103
+ ]
104
+ },
105
+ {
106
+ "cell_type": "code",
107
+ "execution_count": null,
108
+ "id": "abc6322d",
109
+ "metadata": {},
110
+ "outputs": [],
111
+ "source": [
112
+ "@prompt(OpenAI(), template_file=\"chat.pmpt.tpl\")\n",
113
+ "def chat_prompt(model, state: State) -> State:\n",
114
+ " out = model(state)\n",
115
+ " result = out.split(\"Assistant:\")[-1]\n",
116
+ " return state.push(result)"
117
+ ]
118
+ },
119
+ {
120
+ "cell_type": "code",
121
+ "execution_count": null,
122
+ "id": "3e2d2251",
123
+ "metadata": {},
124
+ "outputs": [],
125
+ "source": [
126
+ "examples = [\n",
127
+ " \"ls ~\",\n",
128
+ " \"cd ~\",\n",
129
+ " \"{Please make a file jokes.txt inside and put some jokes inside}\",\n",
130
+ " \"\"\"echo -e \"x=lambda y:y*5+3;print('Result:' + str(x(6)))\" > run.py && python3 run.py\"\"\",\n",
131
+ " \"\"\"echo -e \"print(list(filter(lambda x: all(x%d for d in range(2,x)),range(2,3**10)))[:10])\" > run.py && python3 run.py\"\"\",\n",
132
+ " \"\"\"echo -e \"echo 'Hello from Docker\" > entrypoint.sh && echo -e \"FROM ubuntu:20.04\\nCOPY entrypoint.sh entrypoint.sh\\nENTRYPOINT [\\\"/bin/sh\\\",\\\"entrypoint.sh\\\"]\">Dockerfile && docker build . -t my_docker_image && docker run -t my_docker_image\"\"\",\n",
133
+ " \"nvidia-smi\"\n",
134
+ "]"
135
+ ]
136
+ },
137
+ {
138
+ "cell_type": "code",
139
+ "execution_count": null,
140
+ "id": "da9746f9",
141
+ "metadata": {},
142
+ "outputs": [],
143
+ "source": [
144
+ "gradio = show(lambda command, state: chat_prompt(replace(state, human_input=command)),\n",
145
+ " initial_state=State([]),\n",
146
+ " subprompts=[chat_prompt],\n",
147
+ " examples=examples,\n",
148
+ " out_type=\"json\",\n",
149
+ " description=desc,\n",
150
+ ")\n",
151
+ "if __name__ == \"__main__\":\n",
152
+ " gradio.launch()"
153
+ ]
154
+ },
155
+ {
156
+ "cell_type": "code",
157
+ "execution_count": null,
158
+ "id": "db848300",
159
+ "metadata": {},
160
+ "outputs": [],
161
+ "source": []
162
+ }
163
+ ],
164
+ "metadata": {
165
+ "jupytext": {
166
+ "cell_metadata_filter": "tags,-all",
167
+ "main_language": "python",
168
+ "notebook_metadata_filter": "-all"
169
+ }
170
+ },
171
+ "nbformat": 4,
172
+ "nbformat_minor": 5
173
+ }
chat.py CHANGED
@@ -2,7 +2,7 @@
  desc = """
  ### Chat

- A chat-like example for multi-turn chat with state. [[Code](https://github.com/srush/MiniChain/blob/main/examples/chat.py)]
+ A chat-like example for multi-turn chat with state. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/chat.ipynb)

  (Adapted from [LangChain](https://langchain.readthedocs.io/en/latest/modules/memory/examples/chatgpt_clone.html)'s version of this [blog post](https://www.engraved.blog/building-a-virtual-machine-inside/).)

color.ipynb ADDED
@@ -0,0 +1,96 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "b56870fe",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "markdown",
16
+ "id": "bef4e04e",
17
+ "metadata": {},
18
+ "source": [
19
+ "Answer a math problem with code.\n",
20
+ "Adapted from Dust [maths-generate-code](https://dust.tt/spolu/a/d12ac33169)"
21
+ ]
22
+ },
23
+ {
24
+ "cell_type": "code",
25
+ "execution_count": null,
26
+ "id": "0bcf8e75",
27
+ "metadata": {},
28
+ "outputs": [],
29
+ "source": [
30
+ "from minichain import Backend, JinjaPrompt, Prompt, start_chain, SimplePrompt, show_log"
31
+ ]
32
+ },
33
+ {
34
+ "cell_type": "markdown",
35
+ "id": "715fd76e",
36
+ "metadata": {},
37
+ "source": [
38
+ "Prompt that asks LLM for code from math."
39
+ ]
40
+ },
41
+ {
42
+ "cell_type": "code",
43
+ "execution_count": null,
44
+ "id": "72e3d463",
45
+ "metadata": {
46
+ "lines_to_next_cell": 2
47
+ },
48
+ "outputs": [],
49
+ "source": [
50
+ "class ColorPrompt(Prompt[str, bool]):\n",
51
+ " def parse(inp: str) -> str:\n",
52
+ " return f\"Answer 'Yes' if this is a color, {inp}. Answer:\"\n",
53
+ "\n",
54
+ " def parse(out: str, inp) -> bool:\n",
55
+ " # Encode the parsing logic\n",
56
+ " return out.strip() == \"Yes\" \n",
57
+ "ColorPrompt().show({\"inp\": \"dog\"}, \"No\")"
58
+ ]
59
+ },
60
+ {
61
+ "cell_type": "code",
62
+ "execution_count": null,
63
+ "id": "77cbf319",
64
+ "metadata": {
65
+ "lines_to_next_cell": 2
66
+ },
67
+ "outputs": [],
68
+ "source": [
69
+ "with start_chain(\"color\") as backend:\n",
70
+ " question = 'What is the sum of the powers of 3 (3^i) that are smaller than 100?'\n",
71
+ " prompt = MathPrompt(backend.OpenAI()).chain(SimplePrompt(backend.Python()))\n",
72
+ " result = prompt({\"question\": question})\n",
73
+ " print(result)"
74
+ ]
75
+ },
76
+ {
77
+ "cell_type": "code",
78
+ "execution_count": null,
79
+ "id": "72cdff4c",
80
+ "metadata": {},
81
+ "outputs": [],
82
+ "source": [
83
+ "show_log(\"math.log\")"
84
+ ]
85
+ }
86
+ ],
87
+ "metadata": {
88
+ "jupytext": {
89
+ "cell_metadata_filter": "-all",
90
+ "main_language": "python",
91
+ "notebook_metadata_filter": "-all"
92
+ }
93
+ },
94
+ "nbformat": 4,
95
+ "nbformat_minor": 5
96
+ }
gatsby.ipynb CHANGED
@@ -2,15 +2,21 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
- "id": "5339272e",
7
  "metadata": {
8
- "execution": {
9
- "iopub.execute_input": "2023-03-22T17:00:42.285169Z",
10
- "iopub.status.busy": "2023-03-22T17:00:42.284973Z",
11
- "iopub.status.idle": "2023-03-22T17:00:42.291455Z",
12
- "shell.execute_reply": "2023-03-22T17:00:42.290871Z"
13
- },
14
  "tags": [
15
  "hide_inp"
16
  ]
@@ -20,32 +26,17 @@
20
  "desc = \"\"\"\n",
21
  "### Book QA\n",
22
  "\n",
23
- "Chain that does question answering with Hugging Face embeddings. [[Code](https://github.com/srush/MiniChain/blob/main/examples/gatsby.py)]\n",
24
  "\n",
25
  "(Adapted from the [LlamaIndex example](https://github.com/jerryjliu/gpt_index/blob/main/examples/gatsby/TestGatsby.ipynb).)\n",
26
  "\"\"\""
27
  ]
28
  },
29
- {
30
- "cell_type": "markdown",
31
- "id": "f966010d",
32
- "metadata": {},
33
- "source": [
34
- "$"
35
- ]
36
- },
37
  {
38
  "cell_type": "code",
39
- "execution_count": 2,
40
- "id": "5391c476",
41
- "metadata": {
42
- "execution": {
43
- "iopub.execute_input": "2023-03-22T17:00:42.293927Z",
44
- "iopub.status.busy": "2023-03-22T17:00:42.293738Z",
45
- "iopub.status.idle": "2023-03-22T17:00:43.695402Z",
46
- "shell.execute_reply": "2023-03-22T17:00:43.694722Z"
47
- }
48
- },
49
  "outputs": [],
50
  "source": [
51
  "import datasets\n",
@@ -55,7 +46,7 @@
55
  },
56
  {
57
  "cell_type": "markdown",
58
- "id": "5b1b2a82",
59
  "metadata": {},
60
  "source": [
61
  "Load data with embeddings (computed beforehand)"
@@ -63,46 +54,12 @@
63
  },
64
  {
65
  "cell_type": "code",
66
- "execution_count": 3,
67
- "id": "a54cf84e",
68
  "metadata": {
69
- "execution": {
70
- "iopub.execute_input": "2023-03-22T17:00:43.698121Z",
71
- "iopub.status.busy": "2023-03-22T17:00:43.697792Z",
72
- "iopub.status.idle": "2023-03-22T17:00:43.730349Z",
73
- "shell.execute_reply": "2023-03-22T17:00:43.729747Z"
74
- },
75
  "lines_to_next_cell": 1
76
  },
77
- "outputs": [
78
- {
79
- "data": {
80
- "application/vnd.jupyter.widget-view+json": {
81
- "model_id": "86d0a2ceb7ad4f99978e37c2719f2960",
82
- "version_major": 2,
83
- "version_minor": 0
84
- },
85
- "text/plain": [
86
- " 0%| | 0/1 [00:00<?, ?it/s]"
87
- ]
88
- },
89
- "metadata": {},
90
- "output_type": "display_data"
91
- },
92
- {
93
- "data": {
94
- "text/plain": [
95
- "Dataset({\n",
96
- " features: ['passages', 'embeddings'],\n",
97
- " num_rows: 52\n",
98
- "})"
99
- ]
100
- },
101
- "execution_count": 3,
102
- "metadata": {},
103
- "output_type": "execute_result"
104
- }
105
- ],
106
  "source": [
107
  "gatsby = datasets.load_from_disk(\"gatsby\")\n",
108
  "gatsby.add_faiss_index(\"embeddings\")"
@@ -110,7 +67,7 @@
110
  },
111
  {
112
  "cell_type": "markdown",
113
- "id": "a9a08061",
114
  "metadata": {},
115
  "source": [
116
  "Fast KNN retieval prompt"
@@ -118,15 +75,9 @@
118
  },
119
  {
120
  "cell_type": "code",
121
- "execution_count": 4,
122
- "id": "3e69cd6b",
123
  "metadata": {
124
- "execution": {
125
- "iopub.execute_input": "2023-03-22T17:00:43.735462Z",
126
- "iopub.status.busy": "2023-03-22T17:00:43.735139Z",
127
- "iopub.status.idle": "2023-03-22T17:00:43.738420Z",
128
- "shell.execute_reply": "2023-03-22T17:00:43.737964Z"
129
- },
130
  "lines_to_next_cell": 1
131
  },
132
  "outputs": [],
@@ -140,15 +91,9 @@
140
  },
141
  {
142
  "cell_type": "code",
143
- "execution_count": 5,
144
- "id": "14e22d0a",
145
  "metadata": {
146
- "execution": {
147
- "iopub.execute_input": "2023-03-22T17:00:43.740824Z",
148
- "iopub.status.busy": "2023-03-22T17:00:43.740339Z",
149
- "iopub.status.idle": "2023-03-22T17:00:43.743342Z",
150
- "shell.execute_reply": "2023-03-22T17:00:43.742905Z"
151
- },
152
  "lines_to_next_cell": 1
153
  },
154
  "outputs": [],
@@ -161,17 +106,9 @@
161
  },
162
  {
163
  "cell_type": "code",
164
- "execution_count": 6,
165
- "id": "bfca0bea",
166
- "metadata": {
167
- "execution": {
168
- "iopub.execute_input": "2023-03-22T17:00:43.745377Z",
169
- "iopub.status.busy": "2023-03-22T17:00:43.745056Z",
170
- "iopub.status.idle": "2023-03-22T17:00:43.747768Z",
171
- "shell.execute_reply": "2023-03-22T17:00:43.747352Z"
172
- },
173
- "lines_to_next_cell": 2
174
- },
175
  "outputs": [],
176
  "source": [
177
  "def gatsby(query):\n",
@@ -180,50 +117,21 @@
180
  ]
181
  },
182
  {
183
- "cell_type": "markdown",
184
- "id": "159b0b85",
 
185
  "metadata": {
186
  "lines_to_next_cell": 2
187
  },
188
- "source": [
189
- "$"
190
- ]
191
  },
192
  {
193
  "cell_type": "code",
194
- "execution_count": 7,
195
- "id": "8e3f74d0",
196
- "metadata": {
197
- "execution": {
198
- "iopub.execute_input": "2023-03-22T17:00:43.749935Z",
199
- "iopub.status.busy": "2023-03-22T17:00:43.749750Z",
200
- "iopub.status.idle": "2023-03-22T17:00:44.094814Z",
201
- "shell.execute_reply": "2023-03-22T17:00:44.094179Z"
202
- }
203
- },
204
- "outputs": [
205
- {
206
- "name": "stdout",
207
- "output_type": "stream",
208
- "text": [
209
- "Running on local URL: http://127.0.0.1:7861\n",
210
- "\n",
211
- "To create a public link, set `share=True` in `launch()`.\n"
212
- ]
213
- },
214
- {
215
- "data": {
216
- "text/html": [
217
- "<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
218
- ],
219
- "text/plain": [
220
- "<IPython.core.display.HTML object>"
221
- ]
222
- },
223
- "metadata": {},
224
- "output_type": "display_data"
225
- }
226
- ],
227
  "source": [
228
  "gradio = show(gatsby,\n",
229
  " subprompts=[get_neighbors, ask],\n",
@@ -231,432 +139,17 @@
231
  " \"What did the narrator do after getting back to Chicago?\"],\n",
232
  " keys={\"HF_KEY\"},\n",
233
  " description=desc,\n",
234
- " code=open(\"gatsby.py\", \"r\").read().split(\"$\")[1].strip().strip(\"#\").strip()\n",
235
  " )\n",
236
  "if __name__ == \"__main__\":\n",
237
  " gradio.launch()"
238
  ]
239
- },
240
- {
241
- "cell_type": "code",
242
- "execution_count": null,
243
- "id": "fdbe3ca1",
244
- "metadata": {},
245
- "outputs": [],
246
- "source": []
247
- },
248
- {
249
- "cell_type": "code",
250
- "execution_count": 8,
251
- "id": "7e96a1dc",
252
- "metadata": {
253
- "execution": {
254
- "iopub.execute_input": "2023-03-22T17:00:44.097451Z",
255
- "iopub.status.busy": "2023-03-22T17:00:44.097193Z",
256
- "iopub.status.idle": "2023-03-22T17:00:44.100135Z",
257
- "shell.execute_reply": "2023-03-22T17:00:44.099538Z"
258
- },
259
- "tags": [
260
- "hide_inp"
261
- ]
262
- },
263
- "outputs": [],
264
- "source": [
265
- "# QAPrompt().show({\"question\": \"Who was Gatsby?\", \"docs\": [\"doc1\", \"doc2\", \"doc3\"]}, \"\")\n",
266
- "# # -\n",
267
- "\n",
268
- "# show_log(\"gatsby.log\")"
269
- ]
270
  }
271
  ],
272
  "metadata": {
273
  "jupytext": {
274
- "cell_metadata_filter": "tags,-all"
275
- },
276
- "kernelspec": {
277
- "display_name": "minichain",
278
- "language": "python",
279
- "name": "minichain"
280
- },
281
- "language_info": {
282
- "codemirror_mode": {
283
- "name": "ipython",
284
- "version": 3
285
- },
286
- "file_extension": ".py",
287
- "mimetype": "text/x-python",
288
- "name": "python",
289
- "nbconvert_exporter": "python",
290
- "pygments_lexer": "ipython3",
291
- "version": "3.10.6"
292
- },
293
- "widgets": {
294
- "application/vnd.jupyter.widget-state+json": {
295
- "state": {
296
- "044c0c7d8f264a629d9a834ad7a3d02b": {
297
- "model_module": "@jupyter-widgets/controls",
298
- "model_module_version": "2.0.0",
299
- "model_name": "FloatProgressModel",
300
- "state": {
301
- "_dom_classes": [],
302
- "_model_module": "@jupyter-widgets/controls",
303
- "_model_module_version": "2.0.0",
304
- "_model_name": "FloatProgressModel",
305
- "_view_count": null,
306
- "_view_module": "@jupyter-widgets/controls",
307
- "_view_module_version": "2.0.0",
308
- "_view_name": "ProgressView",
309
- "bar_style": "success",
310
- "description": "",
311
- "description_allow_html": false,
312
- "layout": "IPY_MODEL_71c01d11477848a0a65eb6d8fa5c290c",
313
- "max": 1.0,
314
- "min": 0.0,
315
- "orientation": "horizontal",
316
- "style": "IPY_MODEL_b2191e94a5124ecaa10fcbba69552cad",
317
- "tabbable": null,
318
- "tooltip": null,
319
- "value": 1.0
320
- }
321
- },
322
- "0d3429e6725e49a0a63d578b0b46a1b8": {
323
- "model_module": "@jupyter-widgets/base",
324
- "model_module_version": "2.0.0",
325
- "model_name": "LayoutModel",
326
- "state": {
327
- "_model_module": "@jupyter-widgets/base",
328
- "_model_module_version": "2.0.0",
329
- "_model_name": "LayoutModel",
330
- "_view_count": null,
331
- "_view_module": "@jupyter-widgets/base",
332
- "_view_module_version": "2.0.0",
333
- "_view_name": "LayoutView",
334
- "align_content": null,
335
- "align_items": null,
336
- "align_self": null,
337
- "border_bottom": null,
338
- "border_left": null,
339
- "border_right": null,
340
- "border_top": null,
341
- "bottom": null,
342
- "display": null,
343
- "flex": null,
344
- "flex_flow": null,
345
- "grid_area": null,
346
- "grid_auto_columns": null,
347
- "grid_auto_flow": null,
348
- "grid_auto_rows": null,
349
- "grid_column": null,
350
- "grid_gap": null,
351
- "grid_row": null,
352
- "grid_template_areas": null,
353
- "grid_template_columns": null,
354
- "grid_template_rows": null,
355
- "height": null,
356
- "justify_content": null,
357
- "justify_items": null,
358
- "left": null,
359
- "margin": null,
360
- "max_height": null,
361
- "max_width": null,
362
- "min_height": null,
363
- "min_width": null,
364
- "object_fit": null,
365
- "object_position": null,
366
- "order": null,
367
- "overflow": null,
368
- "padding": null,
369
- "right": null,
370
- "top": null,
371
- "visibility": null,
372
- "width": null
373
- }
374
- },
375
- "2d0cb296ceba4f28afac033d606f3f80": {
376
- "model_module": "@jupyter-widgets/base",
377
- "model_module_version": "2.0.0",
378
- "model_name": "LayoutModel",
379
- "state": {
380
- "_model_module": "@jupyter-widgets/base",
381
- "_model_module_version": "2.0.0",
382
- "_model_name": "LayoutModel",
383
- "_view_count": null,
384
- "_view_module": "@jupyter-widgets/base",
385
- "_view_module_version": "2.0.0",
386
- "_view_name": "LayoutView",
387
- "align_content": null,
388
- "align_items": null,
389
- "align_self": null,
390
- "border_bottom": null,
391
- "border_left": null,
392
- "border_right": null,
393
- "border_top": null,
394
- "bottom": null,
395
- "display": null,
396
- "flex": null,
397
- "flex_flow": null,
398
- "grid_area": null,
399
- "grid_auto_columns": null,
400
- "grid_auto_flow": null,
401
- "grid_auto_rows": null,
402
- "grid_column": null,
403
- "grid_gap": null,
404
- "grid_row": null,
405
- "grid_template_areas": null,
406
- "grid_template_columns": null,
407
- "grid_template_rows": null,
408
- "height": null,
409
- "justify_content": null,
410
- "justify_items": null,
411
- "left": null,
412
- "margin": null,
413
- "max_height": null,
414
- "max_width": null,
415
- "min_height": null,
416
- "min_width": null,
417
- "object_fit": null,
418
- "object_position": null,
419
- "order": null,
420
- "overflow": null,
421
- "padding": null,
422
- "right": null,
423
- "top": null,
424
- "visibility": null,
425
- "width": null
426
- }
427
- },
428
- "306271182a3b4f51bfd69ea6467b7394": {
429
- "model_module": "@jupyter-widgets/base",
430
- "model_module_version": "2.0.0",
431
- "model_name": "LayoutModel",
432
- "state": {
433
- "_model_module": "@jupyter-widgets/base",
434
- "_model_module_version": "2.0.0",
435
- "_model_name": "LayoutModel",
436
- "_view_count": null,
437
- "_view_module": "@jupyter-widgets/base",
438
- "_view_module_version": "2.0.0",
439
- "_view_name": "LayoutView",
440
- "align_content": null,
441
- "align_items": null,
442
- "align_self": null,
443
- "border_bottom": null,
444
- "border_left": null,
445
- "border_right": null,
446
- "border_top": null,
447
- "bottom": null,
448
- "display": null,
449
- "flex": null,
450
- "flex_flow": null,
451
- "grid_area": null,
452
- "grid_auto_columns": null,
453
- "grid_auto_flow": null,
454
- "grid_auto_rows": null,
455
- "grid_column": null,
456
- "grid_gap": null,
457
- "grid_row": null,
458
- "grid_template_areas": null,
459
- "grid_template_columns": null,
460
- "grid_template_rows": null,
461
- "height": null,
462
- "justify_content": null,
463
- "justify_items": null,
464
- "left": null,
465
- "margin": null,
466
- "max_height": null,
467
- "max_width": null,
468
- "min_height": null,
469
- "min_width": null,
470
- "object_fit": null,
471
- "object_position": null,
472
- "order": null,
473
- "overflow": null,
474
- "padding": null,
475
- "right": null,
476
- "top": null,
477
- "visibility": null,
478
- "width": null
479
- }
480
- },
481
- "71c01d11477848a0a65eb6d8fa5c290c": {
482
- "model_module": "@jupyter-widgets/base",
483
- "model_module_version": "2.0.0",
484
- "model_name": "LayoutModel",
485
- "state": {
486
- "_model_module": "@jupyter-widgets/base",
487
- "_model_module_version": "2.0.0",
488
- "_model_name": "LayoutModel",
489
- "_view_count": null,
490
- "_view_module": "@jupyter-widgets/base",
491
- "_view_module_version": "2.0.0",
492
- "_view_name": "LayoutView",
493
- "align_content": null,
494
- "align_items": null,
495
- "align_self": null,
496
- "border_bottom": null,
497
- "border_left": null,
498
- "border_right": null,
499
- "border_top": null,
500
- "bottom": null,
501
- "display": null,
502
- "flex": null,
503
- "flex_flow": null,
504
- "grid_area": null,
505
- "grid_auto_columns": null,
506
- "grid_auto_flow": null,
507
- "grid_auto_rows": null,
508
- "grid_column": null,
509
- "grid_gap": null,
510
- "grid_row": null,
511
- "grid_template_areas": null,
512
- "grid_template_columns": null,
513
- "grid_template_rows": null,
514
- "height": null,
515
- "justify_content": null,
516
- "justify_items": null,
517
- "left": null,
518
- "margin": null,
519
- "max_height": null,
520
- "max_width": null,
521
- "min_height": null,
522
- "min_width": null,
523
- "object_fit": null,
524
- "object_position": null,
525
- "order": null,
526
- "overflow": null,
527
- "padding": null,
528
- "right": null,
529
- "top": null,
530
- "visibility": null,
531
- "width": null
532
- }
533
- },
534
- "86d0a2ceb7ad4f99978e37c2719f2960": {
535
- "model_module": "@jupyter-widgets/controls",
536
- "model_module_version": "2.0.0",
537
- "model_name": "HBoxModel",
538
- "state": {
539
- "_dom_classes": [],
540
- "_model_module": "@jupyter-widgets/controls",
541
- "_model_module_version": "2.0.0",
542
- "_model_name": "HBoxModel",
543
- "_view_count": null,
544
- "_view_module": "@jupyter-widgets/controls",
545
- "_view_module_version": "2.0.0",
546
- "_view_name": "HBoxView",
547
- "box_style": "",
548
- "children": [
549
- "IPY_MODEL_f4226b39d8114ae291c8c1f4150b97b3",
550
- "IPY_MODEL_044c0c7d8f264a629d9a834ad7a3d02b",
551
- "IPY_MODEL_cf381c0f6e0d41408fdff78193c8e003"
552
- ],
553
- "layout": "IPY_MODEL_0d3429e6725e49a0a63d578b0b46a1b8",
554
- "tabbable": null,
555
- "tooltip": null
556
- }
557
- },
558
- "b2191e94a5124ecaa10fcbba69552cad": {
559
- "model_module": "@jupyter-widgets/controls",
560
- "model_module_version": "2.0.0",
561
- "model_name": "ProgressStyleModel",
562
- "state": {
563
- "_model_module": "@jupyter-widgets/controls",
564
- "_model_module_version": "2.0.0",
565
- "_model_name": "ProgressStyleModel",
566
- "_view_count": null,
567
- "_view_module": "@jupyter-widgets/base",
568
- "_view_module_version": "2.0.0",
569
- "_view_name": "StyleView",
570
- "bar_color": null,
571
- "description_width": ""
572
- }
573
- },
574
- "bb8de7546e2d4a3d8c4b53a3170a9aa3": {
575
- "model_module": "@jupyter-widgets/controls",
576
- "model_module_version": "2.0.0",
577
- "model_name": "HTMLStyleModel",
578
- "state": {
579
- "_model_module": "@jupyter-widgets/controls",
580
- "_model_module_version": "2.0.0",
581
- "_model_name": "HTMLStyleModel",
582
- "_view_count": null,
583
- "_view_module": "@jupyter-widgets/base",
584
- "_view_module_version": "2.0.0",
585
- "_view_name": "StyleView",
586
- "background": null,
587
- "description_width": "",
588
- "font_size": null,
589
- "text_color": null
590
- }
591
- },
592
- "cf381c0f6e0d41408fdff78193c8e003": {
593
- "model_module": "@jupyter-widgets/controls",
594
- "model_module_version": "2.0.0",
595
- "model_name": "HTMLModel",
596
- "state": {
597
- "_dom_classes": [],
598
- "_model_module": "@jupyter-widgets/controls",
599
- "_model_module_version": "2.0.0",
600
- "_model_name": "HTMLModel",
601
- "_view_count": null,
602
- "_view_module": "@jupyter-widgets/controls",
603
- "_view_module_version": "2.0.0",
604
- "_view_name": "HTMLView",
605
- "description": "",
606
- "description_allow_html": false,
607
- "layout": "IPY_MODEL_2d0cb296ceba4f28afac033d606f3f80",
608
- "placeholder": "​",
609
- "style": "IPY_MODEL_e239bdcb8e0f48f9b0cec18f6280bd8a",
610
- "tabbable": null,
611
- "tooltip": null,
612
- "value": " 1/1 [00:00&lt;00:00, 101.96it/s]"
613
- }
614
- },
615
- "e239bdcb8e0f48f9b0cec18f6280bd8a": {
616
- "model_module": "@jupyter-widgets/controls",
617
- "model_module_version": "2.0.0",
618
- "model_name": "HTMLStyleModel",
619
- "state": {
620
- "_model_module": "@jupyter-widgets/controls",
621
- "_model_module_version": "2.0.0",
622
- "_model_name": "HTMLStyleModel",
623
- "_view_count": null,
624
- "_view_module": "@jupyter-widgets/base",
625
- "_view_module_version": "2.0.0",
626
- "_view_name": "StyleView",
627
- "background": null,
628
- "description_width": "",
629
- "font_size": null,
630
- "text_color": null
631
- }
632
- },
633
- "f4226b39d8114ae291c8c1f4150b97b3": {
634
- "model_module": "@jupyter-widgets/controls",
635
- "model_module_version": "2.0.0",
636
- "model_name": "HTMLModel",
637
- "state": {
638
- "_dom_classes": [],
639
- "_model_module": "@jupyter-widgets/controls",
640
- "_model_module_version": "2.0.0",
641
- "_model_name": "HTMLModel",
642
- "_view_count": null,
643
- "_view_module": "@jupyter-widgets/controls",
644
- "_view_module_version": "2.0.0",
645
- "_view_name": "HTMLView",
646
- "description": "",
647
- "description_allow_html": false,
648
- "layout": "IPY_MODEL_306271182a3b4f51bfd69ea6467b7394",
649
- "placeholder": "​",
650
- "style": "IPY_MODEL_bb8de7546e2d4a3d8c4b53a3170a9aa3",
651
- "tabbable": null,
652
- "tooltip": null,
653
- "value": "100%"
654
- }
655
- }
656
- },
657
- "version_major": 2,
658
- "version_minor": 0
659
- }
660
  }
661
  },
662
  "nbformat": 4,
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "bf80e010",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "a54429c0",
18
  "metadata": {
19
+ "lines_to_next_cell": 2,
 
 
 
 
 
20
  "tags": [
21
  "hide_inp"
22
  ]
 
26
  "desc = \"\"\"\n",
27
  "### Book QA\n",
28
  "\n",
29
+ "Chain that does question answering with Hugging Face embeddings. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/gatsby.ipynb)\n",
30
  "\n",
31
  "(Adapted from the [LlamaIndex example](https://github.com/jerryjliu/gpt_index/blob/main/examples/gatsby/TestGatsby.ipynb).)\n",
32
  "\"\"\""
33
  ]
34
  },
 
 
 
 
 
 
 
 
35
  {
36
  "cell_type": "code",
37
+ "execution_count": null,
38
+ "id": "71c343bb",
39
+ "metadata": {},
 
 
 
 
 
 
 
40
  "outputs": [],
41
  "source": [
42
  "import datasets\n",
 
46
  },
47
  {
48
  "cell_type": "markdown",
49
+ "id": "564552bf",
50
  "metadata": {},
51
  "source": [
52
  "Load data with embeddings (computed beforehand)"
 
54
  },
55
  {
56
  "cell_type": "code",
57
+ "execution_count": null,
58
+ "id": "75ba221e",
59
  "metadata": {
 
 
 
 
 
 
60
  "lines_to_next_cell": 1
61
  },
62
+ "outputs": [],
63
  "source": [
64
  "gatsby = datasets.load_from_disk(\"gatsby\")\n",
65
  "gatsby.add_faiss_index(\"embeddings\")"
 
67
  },
68
  {
69
  "cell_type": "markdown",
70
+ "id": "ede57fd4",
71
  "metadata": {},
72
  "source": [
73
  "Fast KNN retieval prompt"
 
75
  },
76
  {
77
  "cell_type": "code",
78
+ "execution_count": null,
79
+ "id": "71300bf7",
80
  "metadata": {
 
 
 
 
 
 
81
  "lines_to_next_cell": 1
82
  },
83
  "outputs": [],
 
91
  },
92
  {
93
  "cell_type": "code",
94
+ "execution_count": null,
95
+ "id": "c311126e",
96
  "metadata": {
 
 
 
 
 
 
97
  "lines_to_next_cell": 1
98
  },
99
  "outputs": [],
 
106
  },
107
  {
108
  "cell_type": "code",
109
+ "execution_count": null,
110
+ "id": "18db52b6",
111
+ "metadata": {},
 
 
 
 
 
 
 
 
112
  "outputs": [],
113
  "source": [
114
  "def gatsby(query):\n",
 
117
  ]
118
  },
119
  {
120
+ "cell_type": "code",
121
+ "execution_count": null,
122
+ "id": "2ec09500",
123
  "metadata": {
124
  "lines_to_next_cell": 2
125
  },
126
+ "outputs": [],
127
+ "source": []
 
128
  },
129
  {
130
  "cell_type": "code",
131
+ "execution_count": null,
132
+ "id": "2b5a5f98",
133
+ "metadata": {},
134
+ "outputs": [],
135
  "source": [
136
  "gradio = show(gatsby,\n",
137
  " subprompts=[get_neighbors, ask],\n",
 
139
  " \"What did the narrator do after getting back to Chicago?\"],\n",
140
  " keys={\"HF_KEY\"},\n",
141
  " description=desc,\n",
 
142
  " )\n",
143
  "if __name__ == \"__main__\":\n",
144
  " gradio.launch()"
145
  ]
146
  }
147
  ],
148
  "metadata": {
149
  "jupytext": {
150
+ "cell_metadata_filter": "tags,-all",
151
+ "main_language": "python",
152
+ "notebook_metadata_filter": "-all"
153
  }
154
  },
155
  "nbformat": 4,
gatsby.py CHANGED
@@ -2,7 +2,7 @@
2
  desc = """
3
  ### Book QA
4
 
5
- Chain that does question answering with Hugging Face embeddings. [[Code](https://github.com/srush/MiniChain/blob/main/examples/gatsby.py)]
6
 
7
  (Adapted from the [LlamaIndex example](https://github.com/jerryjliu/gpt_index/blob/main/examples/gatsby/TestGatsby.ipynb).)
8
  """
@@ -50,11 +50,3 @@ gradio = show(gatsby,
50
  )
51
  if __name__ == "__main__":
52
  gradio.launch()
53
-
54
-
55
-
56
- # + tags=["hide_inp"]
57
- # QAPrompt().show({"question": "Who was Gatsby?", "docs": ["doc1", "doc2", "doc3"]}, "")
58
- # # -
59
-
60
- # show_log("gatsby.log")
 
2
  desc = """
3
  ### Book QA
4
 
5
+ Chain that does question answering with Hugging Face embeddings. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/gatsby.ipynb)
6
 
7
  (Adapted from the [LlamaIndex example](https://github.com/jerryjliu/gpt_index/blob/main/examples/gatsby/TestGatsby.ipynb).)
8
  """
 
50
  )
51
  if __name__ == "__main__":
52
  gradio.launch()
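Note: the retrieval step in gatsby.ipynb above (and in qa.ipynb below) relies on the FAISS support built into Hugging Face datasets. A minimal self-contained sketch of that pattern, with a toy three-passage corpus and toy 3-d embeddings standing in for the precomputed ones loaded from disk (the toy data and the "content" column name are assumptions, not the repo's data):

import numpy as np
from datasets import Dataset

# Toy corpus; the real notebooks load a dataset with a precomputed "embeddings" column.
docs = Dataset.from_dict({
    "content": ["passage one", "passage two", "passage three"],
    "embeddings": [[1.0, 0.0, 0.0], [0.0, 1.0, 0.0], [0.0, 0.0, 1.0]],
})
docs.add_faiss_index("embeddings")            # same call as in gatsby.ipynb / qa.ipynb

# Embed the query with the same encoder used for the corpus (here just a toy vector),
# then pull the k nearest passages; these are what get stuffed into the QA prompt.
query_vec = np.array([0.0, 1.0, 0.0], dtype=np.float32)
scores, hits = docs.get_nearest_examples("embeddings", query_vec, k=2)
print(hits["content"])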
 
 
math_demo.ipynb ADDED
@@ -0,0 +1,130 @@
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "3284e5e6",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "3fbd7251",
18
+ "metadata": {
19
+ "lines_to_next_cell": 2,
20
+ "tags": [
21
+ "hide_inp"
22
+ ]
23
+ },
24
+ "outputs": [],
25
+ "source": [
26
+ "desc = \"\"\"\n",
27
+ "### Word Problem Solver\n",
28
+ "\n",
29
+ "Chain that solves a math word problem by first generating and then running Python code. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/math_demo.ipynb)\n",
30
+ "\n",
31
+ "(Adapted from Dust [maths-generate-code](https://dust.tt/spolu/a/d12ac33169))\n",
32
+ "\"\"\""
33
+ ]
34
+ },
35
+ {
36
+ "cell_type": "code",
37
+ "execution_count": null,
38
+ "id": "444f17d4",
39
+ "metadata": {},
40
+ "outputs": [],
41
+ "source": [
42
+ "from minichain import show, prompt, OpenAI, Python"
43
+ ]
44
+ },
45
+ {
46
+ "cell_type": "code",
47
+ "execution_count": null,
48
+ "id": "0a0b7432",
49
+ "metadata": {
50
+ "lines_to_next_cell": 1
51
+ },
52
+ "outputs": [],
53
+ "source": [
54
+ "@prompt(OpenAI(), template_file=\"math.pmpt.tpl\")\n",
55
+ "def math_prompt(model, question):\n",
56
+ " \"Prompt to call GPT with a Jinja template\"\n",
57
+ " return model(dict(question=question))"
58
+ ]
59
+ },
60
+ {
61
+ "cell_type": "code",
62
+ "execution_count": null,
63
+ "id": "78c1c973",
64
+ "metadata": {
65
+ "lines_to_next_cell": 1
66
+ },
67
+ "outputs": [],
68
+ "source": [
69
+ "@prompt(Python(), template=\"import math\\n{{code}}\")\n",
70
+ "def python(model, code):\n",
71
+ " \"Prompt to call Python interpreter\"\n",
72
+ " code = \"\\n\".join(code.strip().split(\"\\n\")[1:-1])\n",
73
+ " return int(model(dict(code=code)))"
74
+ ]
75
+ },
76
+ {
77
+ "cell_type": "code",
78
+ "execution_count": null,
79
+ "id": "94491cd9",
80
+ "metadata": {},
81
+ "outputs": [],
82
+ "source": [
83
+ "def math_demo(question):\n",
84
+ " \"Chain them together\"\n",
85
+ " return python(math_prompt(question))"
86
+ ]
87
+ },
88
+ {
89
+ "cell_type": "code",
90
+ "execution_count": null,
91
+ "id": "d2e019aa",
92
+ "metadata": {
93
+ "lines_to_next_cell": 0,
94
+ "tags": [
95
+ "hide_inp"
96
+ ]
97
+ },
98
+ "outputs": [],
99
+ "source": [
100
+ "gradio = show(math_demo,\n",
101
+ " examples=[\"What is the sum of the powers of 3 (3^i) that are smaller than 100?\",\n",
102
+ " \"What is the sum of the 10 first positive integers?\",],\n",
103
+ " # \"Carla is downloading a 200 GB file. She can download 2 GB/minute, but 40% of the way through the download, the download fails. Then Carla has to restart the download from the beginning. How long did it take her to download the file in minutes?\"],\n",
104
+ " subprompts=[math_prompt, python],\n",
105
+ " out_type=\"json\",\n",
106
+ " description=desc,\n",
107
+ " )\n",
108
+ "if __name__ == \"__main__\":\n",
109
+ " gradio.launch()"
110
+ ]
111
+ },
112
+ {
113
+ "cell_type": "code",
114
+ "execution_count": null,
115
+ "id": "69b50b81",
116
+ "metadata": {},
117
+ "outputs": [],
118
+ "source": []
119
+ }
120
+ ],
121
+ "metadata": {
122
+ "jupytext": {
123
+ "cell_metadata_filter": "tags,-all",
124
+ "main_language": "python",
125
+ "notebook_metadata_filter": "-all"
126
+ }
127
+ },
128
+ "nbformat": 4,
129
+ "nbformat_minor": 5
130
+ }
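A note on the python() prompt in math_demo.ipynb above: the line code = "\n".join(code.strip().split("\n")[1:-1]) simply drops the first and last lines of the model reply, i.e. the fence lines the model is expected to wrap its code in. A stand-alone illustration of just that slicing (the sample reply string is made up):

# Made-up model reply wrapped in Markdown-style code fences.
raw = "```python\nx = sum(3**i for i in range(5) if 3**i < 100)\nprint(x)\n```"
inner = "\n".join(raw.strip().split("\n")[1:-1])   # same slicing as in math_demo.ipynb
print(inner)                                       # the bare statements the Python() prompt then executes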
math_demo.py CHANGED
@@ -2,7 +2,7 @@
2
  desc = """
3
  ### Word Problem Solver
4
 
5
- Chain that solves a math word problem by first generating and then running Python code. [[Code](https://github.com/srush/MiniChain/blob/main/examples/math.py)]
6
 
7
  (Adapted from Dust [maths-generate-code](https://dust.tt/spolu/a/d12ac33169))
8
  """
@@ -29,8 +29,8 @@ def math_demo(question):
29
  return python(math_prompt(question))
30
 
31
  # $
32
-
33
- # + tags=["hide_inp"]
34
  gradio = show(math_demo,
35
  examples=["What is the sum of the powers of 3 (3^i) that are smaller than 100?",
36
  "What is the sum of the 10 first positive integers?",],
@@ -43,4 +43,4 @@ gradio = show(math_demo,
43
  if __name__ == "__main__":
44
  gradio.launch()
45
  # -
46
-
 
2
  desc = """
3
  ### Word Problem Solver
4
 
5
+ Chain that solves a math word problem by first generating and then running Python code. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/math_demo.ipynb)
6
 
7
  (Adapted from Dust [maths-generate-code](https://dust.tt/spolu/a/d12ac33169))
8
  """
 
29
  return python(math_prompt(question))
30
 
31
  # $
32
+
33
+ # + tags=["hide_inp"]
34
  gradio = show(math_demo,
35
  examples=["What is the sum of the powers of 3 (3^i) that are smaller than 100?",
36
  "What is the sum of the 10 first positive integers?",],
 
43
  if __name__ == "__main__":
44
  gradio.launch()
45
  # -
46
+
math_prompts.ipynb ADDED
@@ -0,0 +1,205 @@
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "622575b8",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "markdown",
16
+ "id": "83603f3b",
17
+ "metadata": {
18
+ "lines_to_next_cell": 2
19
+ },
20
+ "source": [
21
+ "Copyright 2022 PAL Authors. All rights reserved.\n",
22
+ "\n",
23
+ "Licensed under the Apache License, Version 2.0 (the \"License\");\n",
24
+ "you may not use this file except in compliance with the License.\n",
25
+ "You may obtain a copy of the License at\n",
26
+ "\n",
27
+ " http://www.apache.org/licenses/LICENSE-2.0\n",
28
+ "\n",
29
+ "Unless required by applicable law or agreed to in writing, software\n",
30
+ "distributed under the License is distributed on an \"AS IS\" BASIS,\n",
31
+ "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n",
32
+ "See the License for the specific language governing permissions and\n",
33
+ "limitations under the License."
34
+ ]
35
+ },
36
+ {
37
+ "cell_type": "code",
38
+ "execution_count": null,
39
+ "id": "493da3c0",
40
+ "metadata": {},
41
+ "outputs": [],
42
+ "source": [
43
+ "MATH_PROMPT = '''\n",
44
+ "Q: Olivia has $23. She bought five bagels for $3 each. How much money does she have left?\n",
45
+ "\n",
46
+ "# solution in Python:\n",
47
+ "\n",
48
+ "\n",
49
+ "def solution():\n",
50
+ " \"\"\"Olivia has $23. She bought five bagels for $3 each. How much money does she have left?\"\"\"\n",
51
+ " money_initial = 23\n",
52
+ " bagels = 5\n",
53
+ " bagel_cost = 3\n",
54
+ " money_spent = bagels * bagel_cost\n",
55
+ " money_left = money_initial - money_spent\n",
56
+ " result = money_left\n",
57
+ " return result\n",
58
+ "\n",
59
+ "\n",
60
+ "\n",
61
+ "\n",
62
+ "\n",
63
+ "Q: Michael had 58 golf balls. On tuesday, he lost 23 golf balls. On wednesday, he lost 2 more. How many golf balls did he have at the end of wednesday?\n",
64
+ "\n",
65
+ "# solution in Python:\n",
66
+ "\n",
67
+ "\n",
68
+ "def solution():\n",
69
+ " \"\"\"Michael had 58 golf balls. On tuesday, he lost 23 golf balls. On wednesday, he lost 2 more. How many golf balls did he have at the end of wednesday?\"\"\"\n",
70
+ " golf_balls_initial = 58\n",
71
+ " golf_balls_lost_tuesday = 23\n",
72
+ " golf_balls_lost_wednesday = 2\n",
73
+ " golf_balls_left = golf_balls_initial - golf_balls_lost_tuesday - golf_balls_lost_wednesday\n",
74
+ " result = golf_balls_left\n",
75
+ " return result\n",
76
+ "\n",
77
+ "\n",
78
+ "\n",
79
+ "\n",
80
+ "\n",
81
+ "Q: There were nine computers in the server room. Five more computers were installed each day, from monday to thursday. How many computers are now in the server room?\n",
82
+ "\n",
83
+ "# solution in Python:\n",
84
+ "\n",
85
+ "\n",
86
+ "def solution():\n",
87
+ " \"\"\"There were nine computers in the server room. Five more computers were installed each day, from monday to thursday. How many computers are now in the server room?\"\"\"\n",
88
+ " computers_initial = 9\n",
89
+ " computers_per_day = 5\n",
90
+ " num_days = 4 # 4 days between monday and thursday\n",
91
+ " computers_added = computers_per_day * num_days\n",
92
+ " computers_total = computers_initial + computers_added\n",
93
+ " result = computers_total\n",
94
+ " return result\n",
95
+ "\n",
96
+ "\n",
97
+ "\n",
98
+ "\n",
99
+ "\n",
100
+ "Q: Shawn has five toys. For Christmas, he got two toys each from his mom and dad. How many toys does he have now?\n",
101
+ "\n",
102
+ "# solution in Python:\n",
103
+ "\n",
104
+ "\n",
105
+ "def solution():\n",
106
+ " \"\"\"Shawn has five toys. For Christmas, he got two toys each from his mom and dad. How many toys does he have now?\"\"\"\n",
107
+ " toys_initial = 5\n",
108
+ " mom_toys = 2\n",
109
+ " dad_toys = 2\n",
110
+ " total_received = mom_toys + dad_toys\n",
111
+ " total_toys = toys_initial + total_received\n",
112
+ " result = total_toys\n",
113
+ " return result\n",
114
+ "\n",
115
+ "\n",
116
+ "\n",
117
+ "\n",
118
+ "\n",
119
+ "Q: Jason had 20 lollipops. He gave Denny some lollipops. Now Jason has 12 lollipops. How many lollipops did Jason give to Denny?\n",
120
+ "\n",
121
+ "# solution in Python:\n",
122
+ "\n",
123
+ "\n",
124
+ "def solution():\n",
125
+ " \"\"\"Jason had 20 lollipops. He gave Denny some lollipops. Now Jason has 12 lollipops. How many lollipops did Jason give to Denny?\"\"\"\n",
126
+ " jason_lollipops_initial = 20\n",
127
+ " jason_lollipops_after = 12\n",
128
+ " denny_lollipops = jason_lollipops_initial - jason_lollipops_after\n",
129
+ " result = denny_lollipops\n",
130
+ " return result\n",
131
+ "\n",
132
+ "\n",
133
+ "\n",
134
+ "\n",
135
+ "\n",
136
+ "Q: Leah had 32 chocolates and her sister had 42. If they ate 35, how many pieces do they have left in total?\n",
137
+ "\n",
138
+ "# solution in Python:\n",
139
+ "\n",
140
+ "\n",
141
+ "def solution():\n",
142
+ " \"\"\"Leah had 32 chocolates and her sister had 42. If they ate 35, how many pieces do they have left in total?\"\"\"\n",
143
+ " leah_chocolates = 32\n",
144
+ " sister_chocolates = 42\n",
145
+ " total_chocolates = leah_chocolates + sister_chocolates\n",
146
+ " chocolates_eaten = 35\n",
147
+ " chocolates_left = total_chocolates - chocolates_eaten\n",
148
+ " result = chocolates_left\n",
149
+ " return result\n",
150
+ "\n",
151
+ "\n",
152
+ "\n",
153
+ "\n",
154
+ "\n",
155
+ "Q: If there are 3 cars in the parking lot and 2 more cars arrive, how many cars are in the parking lot?\n",
156
+ "\n",
157
+ "# solution in Python:\n",
158
+ "\n",
159
+ "\n",
160
+ "def solution():\n",
161
+ " \"\"\"If there are 3 cars in the parking lot and 2 more cars arrive, how many cars are in the parking lot?\"\"\"\n",
162
+ " cars_initial = 3\n",
163
+ " cars_arrived = 2\n",
164
+ " total_cars = cars_initial + cars_arrived\n",
165
+ " result = total_cars\n",
166
+ " return result\n",
167
+ "\n",
168
+ "\n",
169
+ "\n",
170
+ "\n",
171
+ "\n",
172
+ "Q: There are 15 trees in the grove. Grove workers will plant trees in the grove today. After they are done, there will be 21 trees. How many trees did the grove workers plant today?\n",
173
+ "\n",
174
+ "# solution in Python:\n",
175
+ "\n",
176
+ "\n",
177
+ "def solution():\n",
178
+ " \"\"\"There are 15 trees in the grove. Grove workers will plant trees in the grove today. After they are done, there will be 21 trees. How many trees did the grove workers plant today?\"\"\"\n",
179
+ " trees_initial = 15\n",
180
+ " trees_after = 21\n",
181
+ " trees_added = trees_after - trees_initial\n",
182
+ " result = trees_added\n",
183
+ " return result\n",
184
+ "\n",
185
+ "\n",
186
+ "\n",
187
+ "\n",
188
+ "\n",
189
+ "Q: {question}\n",
190
+ "\n",
191
+ "# solution in Python:\n",
192
+ "'''.strip() + '\\n\\n\\n'"
193
+ ]
194
+ }
195
+ ],
196
+ "metadata": {
197
+ "jupytext": {
198
+ "cell_metadata_filter": "-all",
199
+ "main_language": "python",
200
+ "notebook_metadata_filter": "-all"
201
+ }
202
+ },
203
+ "nbformat": 4,
204
+ "nbformat_minor": 5
205
+ }
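The few-shot prompt above ends with an unfilled {question} slot. A sketch of how such a prompt is typically consumed in the PAL setup: fill the slot, send it to the model, then execute the returned solution() and read off its value. The filled question and the fake model reply below are illustrations only, not output from the repo:

question = "If 5 computers are added each day for 4 days, how many are added in total?"
filled = MATH_PROMPT.format(question=question)   # MATH_PROMPT from the cell above; `filled` is what would go to the LM

# Hand-written stand-in for the model's reply.
generated = "def solution():\n    computers_per_day = 5\n    num_days = 4\n    return computers_per_day * num_days\n"
namespace = {}
exec(generated, namespace)          # run the generated program
print(namespace["solution"]())      # -> 20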
ner.ipynb CHANGED
@@ -2,15 +2,21 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
- "id": "6b40c8cc",
 
 
 
 
7
  "metadata": {
8
- "execution": {
9
- "iopub.execute_input": "2023-03-22T17:00:46.670809Z",
10
- "iopub.status.busy": "2023-03-22T17:00:46.670088Z",
11
- "iopub.status.idle": "2023-03-22T17:00:46.679313Z",
12
- "shell.execute_reply": "2023-03-22T17:00:46.678767Z"
13
- },
14
  "tags": [
15
  "hide_inp"
16
  ]
@@ -21,31 +27,17 @@
21
  "desc = \"\"\"\n",
22
  "### Named Entity Recognition\n",
23
  "\n",
24
- "Chain that does named entity recognition with arbitrary labels. [[Code](https://github.com/srush/MiniChain/blob/main/examples/ner.py)]\n",
25
  "\n",
26
  "(Adapted from [promptify](https://github.com/promptslab/Promptify/blob/main/promptify/prompts/nlp/templates/ner.jinja)).\n",
27
  "\"\"\""
28
  ]
29
  },
30
- {
31
- "cell_type": "markdown",
32
- "id": "f6e33520",
33
- "metadata": {},
34
- "source": [
35
- "$"
36
- ]
37
- },
38
  {
39
  "cell_type": "code",
40
- "execution_count": 2,
41
- "id": "de258da8",
42
  "metadata": {
43
- "execution": {
44
- "iopub.execute_input": "2023-03-22T17:00:46.681769Z",
45
- "iopub.status.busy": "2023-03-22T17:00:46.681580Z",
46
- "iopub.status.idle": "2023-03-22T17:00:47.985872Z",
47
- "shell.execute_reply": "2023-03-22T17:00:47.985221Z"
48
- },
49
  "lines_to_next_cell": 1
50
  },
51
  "outputs": [],
@@ -55,36 +47,23 @@
55
  },
56
  {
57
  "cell_type": "code",
58
- "execution_count": 3,
59
- "id": "f517f9a5",
60
  "metadata": {
61
- "execution": {
62
- "iopub.execute_input": "2023-03-22T17:00:47.988564Z",
63
- "iopub.status.busy": "2023-03-22T17:00:47.988242Z",
64
- "iopub.status.idle": "2023-03-22T17:00:47.991610Z",
65
- "shell.execute_reply": "2023-03-22T17:00:47.991041Z"
66
- },
67
  "lines_to_next_cell": 1
68
  },
69
  "outputs": [],
70
  "source": [
71
  "@prompt(OpenAI(), template_file = \"ner.pmpt.tpl\", parser=\"json\")\n",
72
- "def ner_extract(model, **kwargs):\n",
73
  " return model(kwargs)"
74
  ]
75
  },
76
  {
77
  "cell_type": "code",
78
- "execution_count": 4,
79
- "id": "fe5ec878",
80
- "metadata": {
81
- "execution": {
82
- "iopub.execute_input": "2023-03-22T17:00:47.993777Z",
83
- "iopub.status.busy": "2023-03-22T17:00:47.993478Z",
84
- "iopub.status.idle": "2023-03-22T17:00:47.996957Z",
85
- "shell.execute_reply": "2023-03-22T17:00:47.996449Z"
86
- }
87
- },
88
  "outputs": [],
89
  "source": [
90
  "@prompt(OpenAI())\n",
@@ -96,16 +75,9 @@
96
  },
97
  {
98
  "cell_type": "code",
99
- "execution_count": 5,
100
- "id": "cf059e37",
101
- "metadata": {
102
- "execution": {
103
- "iopub.execute_input": "2023-03-22T17:00:47.999077Z",
104
- "iopub.status.busy": "2023-03-22T17:00:47.998839Z",
105
- "iopub.status.idle": "2023-03-22T17:00:48.002102Z",
106
- "shell.execute_reply": "2023-03-22T17:00:48.001662Z"
107
- }
108
- },
109
  "outputs": [],
110
  "source": [
111
  "def ner(text_input, labels, domain):\n",
@@ -114,137 +86,44 @@
114
  ]
115
  },
116
  {
117
- "cell_type": "markdown",
118
- "id": "fe198253",
 
119
  "metadata": {},
120
- "source": [
121
- "$"
122
- ]
123
  },
124
  {
125
  "cell_type": "code",
126
- "execution_count": 6,
127
- "id": "24ca918c",
128
- "metadata": {
129
- "execution": {
130
- "iopub.execute_input": "2023-03-22T17:00:48.004490Z",
131
- "iopub.status.busy": "2023-03-22T17:00:48.004142Z",
132
- "iopub.status.idle": "2023-03-22T17:00:48.254708Z",
133
- "shell.execute_reply": "2023-03-22T17:00:48.254069Z"
134
- }
135
- },
136
  "outputs": [],
137
  "source": [
138
  "gradio = show(ner,\n",
139
  " examples=[[\"An NBA playoff pairing a year ago, the 76ers (39-20) meet the Miami Heat (32-29) for the first time this season on Monday night at home.\", \"Team, Date\", \"Sports\"]],\n",
140
  " description=desc,\n",
141
  " subprompts=[ner_extract, team_describe],\n",
142
- " code=open(\"ner.py\", \"r\").read().split(\"$\")[1].strip().strip(\"#\").strip(),\n",
143
  " )"
144
  ]
145
  },
146
  {
147
  "cell_type": "code",
148
- "execution_count": 7,
149
- "id": "d5247097",
150
- "metadata": {
151
- "execution": {
152
- "iopub.execute_input": "2023-03-22T17:00:48.257352Z",
153
- "iopub.status.busy": "2023-03-22T17:00:48.257149Z",
154
- "iopub.status.idle": "2023-03-22T17:00:48.319870Z",
155
- "shell.execute_reply": "2023-03-22T17:00:48.319111Z"
156
- },
157
- "lines_to_next_cell": 2
158
- },
159
- "outputs": [
160
- {
161
- "name": "stdout",
162
- "output_type": "stream",
163
- "text": [
164
- "Running on local URL: http://127.0.0.1:7861\n",
165
- "\n",
166
- "To create a public link, set `share=True` in `launch()`.\n"
167
- ]
168
- },
169
- {
170
- "data": {
171
- "text/html": [
172
- "<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
173
- ],
174
- "text/plain": [
175
- "<IPython.core.display.HTML object>"
176
- ]
177
- },
178
- "metadata": {},
179
- "output_type": "display_data"
180
- }
181
- ],
182
- "source": [
183
- "if __name__ == \"__main__\":\n",
184
- " gradio.launch()"
185
- ]
186
- },
187
- {
188
- "cell_type": "markdown",
189
- "id": "7cbb9856",
190
  "metadata": {},
191
- "source": [
192
- "View prompt examples."
193
- ]
194
- },
195
- {
196
- "cell_type": "code",
197
- "execution_count": 8,
198
- "id": "83d3ad7a",
199
- "metadata": {
200
- "execution": {
201
- "iopub.execute_input": "2023-03-22T17:00:48.322776Z",
202
- "iopub.status.busy": "2023-03-22T17:00:48.322504Z",
203
- "iopub.status.idle": "2023-03-22T17:00:48.326248Z",
204
- "shell.execute_reply": "2023-03-22T17:00:48.325611Z"
205
- },
206
- "tags": [
207
- "hide_inp"
208
- ]
209
- },
210
  "outputs": [],
211
  "source": [
212
- "# NERPrompt().show(\n",
213
- "# {\n",
214
- "# \"input\": \"I went to New York\",\n",
215
- "# \"domain\": \"Travel\",\n",
216
- "# \"labels\": [\"City\"]\n",
217
- "# },\n",
218
- "# '[{\"T\": \"City\", \"E\": \"New York\"}]',\n",
219
- "# )\n",
220
- "# # -\n",
221
- "\n",
222
- "# # View log.\n",
223
- "\n",
224
- "# minichain.show_log(\"ner.log\")"
225
  ]
226
  }
227
  ],
228
  "metadata": {
229
  "jupytext": {
230
- "cell_metadata_filter": "tags,-all"
231
- },
232
- "kernelspec": {
233
- "display_name": "minichain",
234
- "language": "python",
235
- "name": "minichain"
236
- },
237
- "language_info": {
238
- "codemirror_mode": {
239
- "name": "ipython",
240
- "version": 3
241
- },
242
- "file_extension": ".py",
243
- "mimetype": "text/x-python",
244
- "name": "python",
245
- "nbconvert_exporter": "python",
246
- "pygments_lexer": "ipython3",
247
- "version": "3.10.6"
248
  }
249
  },
250
  "nbformat": 4,
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "609983df",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "1f7ee63a",
18
  "metadata": {
19
+ "lines_to_next_cell": 2,
 
 
 
 
 
20
  "tags": [
21
  "hide_inp"
22
  ]
 
27
  "desc = \"\"\"\n",
28
  "### Named Entity Recognition\n",
29
  "\n",
30
+ "Chain that does named entity recognition with arbitrary labels. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/ner.ipynb)\n",
31
  "\n",
32
  "(Adapted from [promptify](https://github.com/promptslab/Promptify/blob/main/promptify/prompts/nlp/templates/ner.jinja)).\n",
33
  "\"\"\""
34
  ]
35
  },
 
 
 
36
  {
37
  "cell_type": "code",
38
+ "execution_count": null,
39
+ "id": "bedc40eb",
40
  "metadata": {
 
 
41
  "lines_to_next_cell": 1
42
  },
43
  "outputs": [],
 
47
  },
48
  {
49
  "cell_type": "code",
50
+ "execution_count": null,
51
+ "id": "316a1cab",
52
  "metadata": {
 
 
53
  "lines_to_next_cell": 1
54
  },
55
  "outputs": [],
56
  "source": [
57
  "@prompt(OpenAI(), template_file = \"ner.pmpt.tpl\", parser=\"json\")\n",
58
+ "def ner_extract(model, kwargs):\n",
59
  " return model(kwargs)"
60
  ]
61
  },
62
  {
63
  "cell_type": "code",
64
+ "execution_count": null,
65
+ "id": "50d1c34a",
66
+ "metadata": {},
 
 
67
  "outputs": [],
68
  "source": [
69
  "@prompt(OpenAI())\n",
 
75
  },
76
  {
77
  "cell_type": "code",
78
+ "execution_count": null,
79
+ "id": "eaa2c029",
80
+ "metadata": {},
 
 
81
  "outputs": [],
82
  "source": [
83
  "def ner(text_input, labels, domain):\n",
 
86
  ]
87
  },
88
  {
89
+ "cell_type": "code",
90
+ "execution_count": null,
91
+ "id": "c4e8dfe8",
92
  "metadata": {},
93
+ "outputs": [],
94
+ "source": []
 
95
  },
96
  {
97
  "cell_type": "code",
98
+ "execution_count": null,
99
+ "id": "546df8e9",
100
+ "metadata": {},
 
 
101
  "outputs": [],
102
  "source": [
103
  "gradio = show(ner,\n",
104
  " examples=[[\"An NBA playoff pairing a year ago, the 76ers (39-20) meet the Miami Heat (32-29) for the first time this season on Monday night at home.\", \"Team, Date\", \"Sports\"]],\n",
105
  " description=desc,\n",
106
  " subprompts=[ner_extract, team_describe],\n",
 
107
  " )"
108
  ]
109
  },
110
  {
111
  "cell_type": "code",
112
+ "execution_count": null,
113
+ "id": "3a5993c3",
 
 
 
 
114
  "metadata": {},
 
 
 
 
115
  "outputs": [],
116
  "source": [
117
+ "if __name__ == \"__main__\":\n",
118
+ " gradio.launch()"
 
 
 
 
119
  ]
120
  }
121
  ],
122
  "metadata": {
123
  "jupytext": {
124
+ "cell_metadata_filter": "tags,-all",
125
+ "main_language": "python",
126
+ "notebook_metadata_filter": "-all"
 
 
 
 
127
  }
128
  },
129
  "nbformat": 4,
ner.py CHANGED
@@ -3,7 +3,7 @@
3
  desc = """
4
  ### Named Entity Recognition
5
 
6
- Chain that does named entity recognition with arbitrary labels. [[Code](https://github.com/srush/MiniChain/blob/main/examples/ner.py)]
7
 
8
  (Adapted from [promptify](https://github.com/promptslab/Promptify/blob/main/promptify/prompts/nlp/templates/ner.jinja)).
9
  """
@@ -40,21 +40,3 @@ gradio = show(ner,
40
 
41
  if __name__ == "__main__":
42
  gradio.launch()
43
-
44
-
45
- # View prompt examples.
46
-
47
- # + tags=["hide_inp"]
48
- # NERPrompt().show(
49
- # {
50
- # "input": "I went to New York",
51
- # "domain": "Travel",
52
- # "labels": ["City"]
53
- # },
54
- # '[{"T": "City", "E": "New York"}]',
55
- # )
56
- # # -
57
-
58
- # # View log.
59
-
60
- # minichain.show_log("ner.log")
 
3
  desc = """
4
  ### Named Entity Recognition
5
 
6
+ Chain that does named entity recognition with arbitrary labels. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/ner.ipynb)
7
 
8
  (Adapted from [promptify](https://github.com/promptslab/Promptify/blob/main/promptify/prompts/nlp/templates/ner.jinja)).
9
  """
 
40
 
41
  if __name__ == "__main__":
42
  gradio.launch()
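For reference, the JSON that ner_extract (declared with parser="json" above) is expected to hand back follows the shape of the example string removed in this diff, '[{"T": "City", "E": "New York"}]'. A small stand-alone sketch of consuming such a reply before team_describe is prompted (the literal below is made up):

import json

model_output = '[{"T": "Team", "E": "76ers"}, {"T": "Team", "E": "Miami Heat"}, {"T": "Date", "E": "Monday night"}]'
entities = json.loads(model_output)                      # roughly what parser="json" is expected to do with the raw reply
teams = [e["E"] for e in entities if e["T"] == "Team"]
print(teams)                                             # ['76ers', 'Miami Heat']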
 
 
 
 
pal.ipynb CHANGED
@@ -2,46 +2,38 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
- "id": "c38ce0f6",
 
 
 
 
7
  "metadata": {
8
- "execution": {
9
- "iopub.execute_input": "2023-03-22T17:00:50.962288Z",
10
- "iopub.status.busy": "2023-03-22T17:00:50.961690Z",
11
- "iopub.status.idle": "2023-03-22T17:00:50.980672Z",
12
- "shell.execute_reply": "2023-03-22T17:00:50.978710Z"
13
- }
14
  },
15
  "outputs": [],
16
  "source": [
17
  "desc = \"\"\"\n",
18
  "### Prompt-aided Language Models\n",
19
  "\n",
20
- "Chain for answering complex problems by code generation and execution. [[Code](https://github.com/srush/MiniChain/blob/main/examples/pal.py)]\n",
21
  "\n",
22
  "(Adapted from Prompt-aided Language Models [PAL](https://arxiv.org/pdf/2211.10435.pdf)).\n",
23
  "\"\"\""
24
  ]
25
  },
26
- {
27
- "cell_type": "markdown",
28
- "id": "feae8a53",
29
- "metadata": {},
30
- "source": [
31
- "$"
32
- ]
33
- },
34
  {
35
  "cell_type": "code",
36
- "execution_count": 2,
37
- "id": "50b61935",
38
  "metadata": {
39
- "execution": {
40
- "iopub.execute_input": "2023-03-22T17:00:50.988390Z",
41
- "iopub.status.busy": "2023-03-22T17:00:50.987754Z",
42
- "iopub.status.idle": "2023-03-22T17:00:52.281141Z",
43
- "shell.execute_reply": "2023-03-22T17:00:52.280472Z"
44
- },
45
  "lines_to_next_cell": 1
46
  },
47
  "outputs": [],
@@ -51,15 +43,9 @@
51
  },
52
  {
53
  "cell_type": "code",
54
- "execution_count": 3,
55
- "id": "a924b4d1",
56
  "metadata": {
57
- "execution": {
58
- "iopub.execute_input": "2023-03-22T17:00:52.283783Z",
59
- "iopub.status.busy": "2023-03-22T17:00:52.283453Z",
60
- "iopub.status.idle": "2023-03-22T17:00:52.286846Z",
61
- "shell.execute_reply": "2023-03-22T17:00:52.286410Z"
62
- },
63
  "lines_to_next_cell": 1
64
  },
65
  "outputs": [],
@@ -71,63 +57,34 @@
71
  },
72
  {
73
  "cell_type": "code",
74
- "execution_count": 4,
75
- "id": "0033b792",
76
  "metadata": {
77
- "execution": {
78
- "iopub.execute_input": "2023-03-22T17:00:52.288978Z",
79
- "iopub.status.busy": "2023-03-22T17:00:52.288707Z",
80
- "iopub.status.idle": "2023-03-22T17:00:52.291754Z",
81
- "shell.execute_reply": "2023-03-22T17:00:52.291315Z"
82
- },
83
  "lines_to_next_cell": 1
84
  },
85
  "outputs": [],
86
  "source": [
87
  "@prompt(Python())\n",
88
  "def python(model, inp):\n",
89
- " return int(model(inp + \"\\nprint(solution())\"))"
90
  ]
91
  },
92
  {
93
  "cell_type": "code",
94
- "execution_count": 5,
95
- "id": "60a5ddf7",
96
- "metadata": {
97
- "execution": {
98
- "iopub.execute_input": "2023-03-22T17:00:52.294112Z",
99
- "iopub.status.busy": "2023-03-22T17:00:52.293670Z",
100
- "iopub.status.idle": "2023-03-22T17:00:52.296744Z",
101
- "shell.execute_reply": "2023-03-22T17:00:52.296171Z"
102
- },
103
- "lines_to_next_cell": 1
104
- },
105
  "outputs": [],
106
  "source": [
107
  "def pal(question):\n",
108
  " return python(pal_prompt(question))"
109
  ]
110
  },
111
- {
112
- "cell_type": "markdown",
113
- "id": "c75fead5",
114
- "metadata": {},
115
- "source": [
116
- "$"
117
- ]
118
- },
119
  {
120
  "cell_type": "code",
121
- "execution_count": 6,
122
- "id": "b0d40bd2",
123
- "metadata": {
124
- "execution": {
125
- "iopub.execute_input": "2023-03-22T17:00:52.298984Z",
126
- "iopub.status.busy": "2023-03-22T17:00:52.298796Z",
127
- "iopub.status.idle": "2023-03-22T17:00:52.301777Z",
128
- "shell.execute_reply": "2023-03-22T17:00:52.301305Z"
129
- }
130
- },
131
  "outputs": [],
132
  "source": [
133
  "question = \"Melanie is a door-to-door saleswoman. She sold a third of her \" \\\n",
@@ -138,61 +95,25 @@
138
  },
139
  {
140
  "cell_type": "code",
141
- "execution_count": 7,
142
- "id": "3c4485a4",
143
- "metadata": {
144
- "execution": {
145
- "iopub.execute_input": "2023-03-22T17:00:52.304005Z",
146
- "iopub.status.busy": "2023-03-22T17:00:52.303713Z",
147
- "iopub.status.idle": "2023-03-22T17:00:52.573239Z",
148
- "shell.execute_reply": "2023-03-22T17:00:52.572557Z"
149
- }
150
- },
151
  "outputs": [],
152
  "source": [
153
  "gradio = show(pal,\n",
154
  " examples=[question],\n",
155
  " subprompts=[pal_prompt, python],\n",
156
  " description=desc,\n",
157
- " code=open(\"pal.py\", \"r\").read().split(\"$\")[1].strip().strip(\"#\").strip(),\n",
158
  " )"
159
  ]
160
  },
161
  {
162
  "cell_type": "code",
163
- "execution_count": 8,
164
- "id": "a5bebdcc",
165
- "metadata": {
166
- "execution": {
167
- "iopub.execute_input": "2023-03-22T17:00:52.575840Z",
168
- "iopub.status.busy": "2023-03-22T17:00:52.575635Z",
169
- "iopub.status.idle": "2023-03-22T17:00:52.646986Z",
170
- "shell.execute_reply": "2023-03-22T17:00:52.646403Z"
171
- }
172
- },
173
- "outputs": [
174
- {
175
- "name": "stdout",
176
- "output_type": "stream",
177
- "text": [
178
- "Running on local URL: http://127.0.0.1:7861\n",
179
- "\n",
180
- "To create a public link, set `share=True` in `launch()`.\n"
181
- ]
182
- },
183
- {
184
- "data": {
185
- "text/html": [
186
- "<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
187
- ],
188
- "text/plain": [
189
- "<IPython.core.display.HTML object>"
190
- ]
191
- },
192
- "metadata": {},
193
- "output_type": "display_data"
194
- }
195
- ],
196
  "source": [
197
  "if __name__ == \"__main__\":\n",
198
  " gradio.launch()"
@@ -201,24 +122,9 @@
201
  ],
202
  "metadata": {
203
  "jupytext": {
204
- "cell_metadata_filter": "-all"
205
- },
206
- "kernelspec": {
207
- "display_name": "minichain",
208
- "language": "python",
209
- "name": "minichain"
210
- },
211
- "language_info": {
212
- "codemirror_mode": {
213
- "name": "ipython",
214
- "version": 3
215
- },
216
- "file_extension": ".py",
217
- "mimetype": "text/x-python",
218
- "name": "python",
219
- "nbconvert_exporter": "python",
220
- "pygments_lexer": "ipython3",
221
- "version": "3.10.6"
222
  }
223
  },
224
  "nbformat": 4,
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "c2508e7c",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "8e61335c",
18
  "metadata": {
19
+ "lines_to_next_cell": 2
 
 
 
 
 
20
  },
21
  "outputs": [],
22
  "source": [
23
  "desc = \"\"\"\n",
24
  "### Prompt-aided Language Models\n",
25
  "\n",
26
+ "Chain for answering complex problems by code generation and execution. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/pal.ipynb)\n",
27
  "\n",
28
  "(Adapted from Prompt-aided Language Models [PAL](https://arxiv.org/pdf/2211.10435.pdf)).\n",
29
  "\"\"\""
30
  ]
31
  },
 
 
 
 
 
 
 
 
32
  {
33
  "cell_type": "code",
34
+ "execution_count": null,
35
+ "id": "ce0d9ca8",
36
  "metadata": {
 
 
 
 
 
 
37
  "lines_to_next_cell": 1
38
  },
39
  "outputs": [],
 
43
  },
44
  {
45
  "cell_type": "code",
46
+ "execution_count": null,
47
+ "id": "abce4228",
48
  "metadata": {
 
 
 
 
 
 
49
  "lines_to_next_cell": 1
50
  },
51
  "outputs": [],
 
57
  },
58
  {
59
  "cell_type": "code",
60
+ "execution_count": null,
61
+ "id": "f695e1ac",
62
  "metadata": {
 
 
 
 
 
 
63
  "lines_to_next_cell": 1
64
  },
65
  "outputs": [],
66
  "source": [
67
  "@prompt(Python())\n",
68
  "def python(model, inp):\n",
69
+ " return float(model(inp + \"\\nprint(solution())\"))"
70
  ]
71
  },
72
  {
73
  "cell_type": "code",
74
+ "execution_count": null,
75
+ "id": "91dead77",
76
+ "metadata": {},
 
 
 
 
 
 
 
 
77
  "outputs": [],
78
  "source": [
79
  "def pal(question):\n",
80
  " return python(pal_prompt(question))"
81
  ]
82
  },
 
 
 
 
 
 
 
 
83
  {
84
  "cell_type": "code",
85
+ "execution_count": null,
86
+ "id": "dc1be82b",
87
+ "metadata": {},
 
 
 
 
 
 
 
88
  "outputs": [],
89
  "source": [
90
  "question = \"Melanie is a door-to-door saleswoman. She sold a third of her \" \\\n",
 
95
  },
96
  {
97
  "cell_type": "code",
98
+ "execution_count": null,
99
+ "id": "782a0903",
100
+ "metadata": {},
 
 
 
 
 
 
 
101
  "outputs": [],
102
  "source": [
103
  "gradio = show(pal,\n",
104
  " examples=[question],\n",
105
  " subprompts=[pal_prompt, python],\n",
106
  " description=desc,\n",
107
+ " out_type=\"json\",\n",
108
  " )"
109
  ]
110
  },
111
  {
112
  "cell_type": "code",
113
+ "execution_count": null,
114
+ "id": "337152d1",
115
+ "metadata": {},
116
+ "outputs": [],
 
 
 
 
 
 
117
  "source": [
118
  "if __name__ == \"__main__\":\n",
119
  " gradio.launch()"
 
122
  ],
123
  "metadata": {
124
  "jupytext": {
125
+ "cell_metadata_filter": "-all",
126
+ "main_language": "python",
127
+ "notebook_metadata_filter": "-all"
 
 
 
 
 
128
  }
129
  },
130
  "nbformat": 4,
pal.py CHANGED
@@ -1,7 +1,7 @@
1
  desc = """
2
  ### Prompt-aided Language Models
3
 
4
- Chain for answering complex problems by code generation and execution. [[Code](https://github.com/srush/MiniChain/blob/main/examples/pal.py)]
5
 
6
  (Adapted from Prompt-aided Language Models [PAL](https://arxiv.org/pdf/2211.10435.pdf)).
7
  """
 
1
  desc = """
2
  ### Prompt-aided Language Models
3
 
4
+ Chain for answering complex problems by code generation and execution. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/pal.ipynb)
5
 
6
  (Adapted from Prompt-aided Language Models [PAL](https://arxiv.org/pdf/2211.10435.pdf)).
7
  """
parallel.ipynb ADDED
@@ -0,0 +1,26 @@
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "6b039013",
7
+ "metadata": {
8
+ "lines_to_next_cell": 2
9
+ },
10
+ "outputs": [],
11
+ "source": [
12
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
13
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
14
+ ]
15
+ }
16
+ ],
17
+ "metadata": {
18
+ "jupytext": {
19
+ "cell_metadata_filter": "-all",
20
+ "main_language": "python",
21
+ "notebook_metadata_filter": "-all"
22
+ }
23
+ },
24
+ "nbformat": 4,
25
+ "nbformat_minor": 5
26
+ }
process.ipynb ADDED
@@ -0,0 +1,62 @@
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "870a6ac7",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "2f24257e",
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "import sys"
22
+ ]
23
+ },
24
+ {
25
+ "cell_type": "code",
26
+ "execution_count": null,
27
+ "id": "a1d0bd19",
28
+ "metadata": {},
29
+ "outputs": [],
30
+ "source": [
31
+ "print(\"!pip install -q git+https://github.com/srush/MiniChain\")\n",
32
+ "print(\"!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . \")\n",
33
+ "print()"
34
+ ]
35
+ },
36
+ {
37
+ "cell_type": "code",
38
+ "execution_count": null,
39
+ "id": "dc1c0742",
40
+ "metadata": {},
41
+ "outputs": [],
42
+ "source": [
43
+ "for l in sys.stdin:\n",
44
+ "\n",
45
+ " if l.strip() == \"# $\":\n",
46
+ " continue\n",
47
+ " if l.strip().startswith(\"code=\"):\n",
48
+ " continue\n",
49
+ " print(l, end=\"\")"
50
+ ]
51
+ }
52
+ ],
53
+ "metadata": {
54
+ "jupytext": {
55
+ "cell_metadata_filter": "-all",
56
+ "main_language": "python",
57
+ "notebook_metadata_filter": "-all"
58
+ }
59
+ },
60
+ "nbformat": 4,
61
+ "nbformat_minor": 5
62
+ }
process.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
1
+ import sys
2
+
3
+ print("!pip install -q git+https://github.com/srush/MiniChain")
4
+ print("!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . ")
5
+ print()
6
+
7
+ for l in sys.stdin:
8
+
9
+ if l.strip() == "# $":
10
+ continue
11
+ if l.strip().startswith("code="):
12
+ continue
13
+ print(l, end="")
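process.py above is a small stdin-to-stdout filter: it prepends the two Colab install lines and drops the '# $' marker lines and 'code=' arguments, which the Colab copies presumably don't need. A hypothetical way to run it over one of the example scripts (the file names here are assumptions):

import pathlib, subprocess, sys

src = pathlib.Path("pal.py").read_text()
out = subprocess.run(
    [sys.executable, "process.py"],            # the filter shown above
    input=src, capture_output=True, text=True, check=True,
).stdout
pathlib.Path("pal_colab.py").write_text(out)   # e.g. for later conversion to a notebook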
process.py~ ADDED
@@ -0,0 +1,11 @@
 
 
 
 
1
+ import sys
2
+
3
+ print("!pip install -qqq git+https://github.com/srush/MiniChain@gradio")
4
+ print()
5
+
6
+ for l in sys.stdin:
7
+ print(l, end="")
8
+ if l.strip() == "# $":
9
+ continue
10
+ if l.strip().startswith("code="):
11
+ continue
qa.ipynb CHANGED
@@ -2,15 +2,21 @@
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
- "execution_count": 1,
6
- "id": "383d6fa9",
 
 
 
 
7
  "metadata": {
8
- "execution": {
9
- "iopub.execute_input": "2023-03-22T17:00:55.200350Z",
10
- "iopub.status.busy": "2023-03-22T17:00:55.199620Z",
11
- "iopub.status.idle": "2023-03-22T17:00:55.219766Z",
12
- "shell.execute_reply": "2023-03-22T17:00:55.217755Z"
13
- },
14
  "tags": [
15
  "hide_inp"
16
  ]
@@ -20,32 +26,17 @@
20
  "desc = \"\"\"\n",
21
  "### Question Answering with Retrieval\n",
22
  "\n",
23
- "Chain that answers questions with embeedding based retrieval. [[Code](https://github.com/srush/MiniChain/blob/main/examples/qa.py)]\n",
24
  "\n",
25
  "(Adapted from [OpenAI Notebook](https://github.com/openai/openai-cookbook/blob/main/examples/Question_answering_using_embeddings.ipynb).)\n",
26
  "\"\"\""
27
  ]
28
  },
29
- {
30
- "cell_type": "markdown",
31
- "id": "51bf7e4c",
32
- "metadata": {},
33
- "source": [
34
- "$"
35
- ]
36
- },
37
  {
38
  "cell_type": "code",
39
- "execution_count": 2,
40
- "id": "ab7925a4",
41
- "metadata": {
42
- "execution": {
43
- "iopub.execute_input": "2023-03-22T17:00:55.226549Z",
44
- "iopub.status.busy": "2023-03-22T17:00:55.226361Z",
45
- "iopub.status.idle": "2023-03-22T17:00:56.643976Z",
46
- "shell.execute_reply": "2023-03-22T17:00:56.643271Z"
47
- }
48
- },
49
  "outputs": [],
50
  "source": [
51
  "import datasets\n",
@@ -56,7 +47,7 @@
56
  },
57
  {
58
  "cell_type": "markdown",
59
- "id": "0725197d",
60
  "metadata": {},
61
  "source": [
62
  "We use Hugging Face Datasets as the database by assigning\n",
@@ -65,45 +56,10 @@
65
  },
66
  {
67
  "cell_type": "code",
68
- "execution_count": 3,
69
- "id": "d2c2312f",
70
- "metadata": {
71
- "execution": {
72
- "iopub.execute_input": "2023-03-22T17:00:56.646848Z",
73
- "iopub.status.busy": "2023-03-22T17:00:56.646502Z",
74
- "iopub.status.idle": "2023-03-22T17:00:56.714869Z",
75
- "shell.execute_reply": "2023-03-22T17:00:56.714309Z"
76
- }
77
- },
78
- "outputs": [
79
- {
80
- "data": {
81
- "application/vnd.jupyter.widget-view+json": {
82
- "model_id": "7ec1305895ad44a29163c6c51f641600",
83
- "version_major": 2,
84
- "version_minor": 0
85
- },
86
- "text/plain": [
87
- " 0%| | 0/4 [00:00<?, ?it/s]"
88
- ]
89
- },
90
- "metadata": {},
91
- "output_type": "display_data"
92
- },
93
- {
94
- "data": {
95
- "text/plain": [
96
- "Dataset({\n",
97
- " features: ['title', 'heading', 'content', 'tokens', 'embeddings'],\n",
98
- " num_rows: 3964\n",
99
- "})"
100
- ]
101
- },
102
- "execution_count": 3,
103
- "metadata": {},
104
- "output_type": "execute_result"
105
- }
106
- ],
107
  "source": [
108
  "olympics = datasets.load_from_disk(\"olympics.data\")\n",
109
  "olympics.add_faiss_index(\"embeddings\")"
@@ -111,7 +67,7 @@
111
  },
112
  {
113
  "cell_type": "markdown",
114
- "id": "1b2badf2",
115
  "metadata": {},
116
  "source": [
117
  "Fast KNN retrieval prompt"
@@ -119,15 +75,9 @@
119
  },
120
  {
121
  "cell_type": "code",
122
- "execution_count": 4,
123
- "id": "456c5610",
124
  "metadata": {
125
- "execution": {
126
- "iopub.execute_input": "2023-03-22T17:00:56.717227Z",
127
- "iopub.status.busy": "2023-03-22T17:00:56.717039Z",
128
- "iopub.status.idle": "2023-03-22T17:00:56.720303Z",
129
- "shell.execute_reply": "2023-03-22T17:00:56.719854Z"
130
- },
131
  "lines_to_next_cell": 1
132
  },
133
  "outputs": [],
@@ -141,15 +91,9 @@
141
  },
142
  {
143
  "cell_type": "code",
144
- "execution_count": 5,
145
- "id": "61ae797c",
146
  "metadata": {
147
- "execution": {
148
- "iopub.execute_input": "2023-03-22T17:00:56.722475Z",
149
- "iopub.status.busy": "2023-03-22T17:00:56.722183Z",
150
- "iopub.status.idle": "2023-03-22T17:00:56.725075Z",
151
- "shell.execute_reply": "2023-03-22T17:00:56.724640Z"
152
- },
153
  "lines_to_next_cell": 1
154
  },
155
  "outputs": [],
@@ -162,16 +106,9 @@
162
  },
163
  {
164
  "cell_type": "code",
165
- "execution_count": 6,
166
- "id": "0d7639e8",
167
- "metadata": {
168
- "execution": {
169
- "iopub.execute_input": "2023-03-22T17:00:56.727258Z",
170
- "iopub.status.busy": "2023-03-22T17:00:56.726952Z",
171
- "iopub.status.idle": "2023-03-22T17:00:56.729629Z",
172
- "shell.execute_reply": "2023-03-22T17:00:56.729172Z"
173
- }
174
- },
175
  "outputs": [],
176
  "source": [
177
  "def qa(query):\n",
@@ -180,27 +117,18 @@
180
  ]
181
  },
182
  {
183
- "cell_type": "markdown",
184
- "id": "fb5a255a",
185
- "metadata": {
186
- "lines_to_next_cell": 2
187
- },
188
- "source": [
189
- "$"
190
- ]
191
  },
192
  {
193
  "cell_type": "code",
194
- "execution_count": 7,
195
- "id": "399bbfeb",
196
- "metadata": {
197
- "execution": {
198
- "iopub.execute_input": "2023-03-22T17:00:56.731957Z",
199
- "iopub.status.busy": "2023-03-22T17:00:56.731562Z",
200
- "iopub.status.idle": "2023-03-22T17:00:56.734263Z",
201
- "shell.execute_reply": "2023-03-22T17:00:56.733828Z"
202
- }
203
- },
204
  "outputs": [],
205
  "source": [
206
  "questions = [\"Who won the 2020 Summer Olympics men's high jump?\",\n",
@@ -212,467 +140,28 @@
212
  },
213
  {
214
  "cell_type": "code",
215
- "execution_count": 8,
216
- "id": "ba3a7294",
217
  "metadata": {
218
- "execution": {
219
- "iopub.execute_input": "2023-03-22T17:00:56.736360Z",
220
- "iopub.status.busy": "2023-03-22T17:00:56.736081Z",
221
- "iopub.status.idle": "2023-03-22T17:00:57.115392Z",
222
- "shell.execute_reply": "2023-03-22T17:00:57.114736Z"
223
- }
224
  },
225
- "outputs": [
226
- {
227
- "name": "stdout",
228
- "output_type": "stream",
229
- "text": [
230
- "Running on local URL: http://127.0.0.1:7861\n",
231
- "\n",
232
- "To create a public link, set `share=True` in `launch()`.\n"
233
- ]
234
- },
235
- {
236
- "data": {
237
- "text/html": [
238
- "<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
239
- ],
240
- "text/plain": [
241
- "<IPython.core.display.HTML object>"
242
- ]
243
- },
244
- "metadata": {},
245
- "output_type": "display_data"
246
- }
247
- ],
248
  "source": [
249
  "gradio = show(qa,\n",
250
  " examples=questions,\n",
251
  " subprompts=[get_neighbors, get_result],\n",
252
  " description=desc,\n",
253
- " code=open(\"qa.py\", \"r\").read().split(\"$\")[1].strip().strip(\"#\").strip(),\n",
254
  " )\n",
255
  "if __name__ == \"__main__\":\n",
256
  " gradio.launch()"
257
  ]
258
- },
259
- {
260
- "cell_type": "code",
261
- "execution_count": null,
262
- "id": "524d2c29",
263
- "metadata": {},
264
- "outputs": [],
265
- "source": []
266
- },
267
- {
268
- "cell_type": "markdown",
269
- "id": "500edc1f",
270
- "metadata": {},
271
- "source": [
272
- "# + tags=[\"hide_inp\"]\n",
273
- "QAPrompt().show(\n",
274
- " {\"question\": \"Who won the race?\", \"docs\": [\"doc1\", \"doc2\", \"doc3\"]}, \"Joe Bob\"\n",
275
- ")\n",
276
- "# -"
277
- ]
278
- },
279
- {
280
- "cell_type": "markdown",
281
- "id": "599833ea",
282
- "metadata": {},
283
- "source": [
284
- "show_log(\"qa.log\")"
285
- ]
286
  }
287
  ],
288
  "metadata": {
289
  "jupytext": {
290
- "cell_metadata_filter": "tags,-all"
291
- },
292
- "kernelspec": {
293
- "display_name": "minichain",
294
- "language": "python",
295
- "name": "minichain"
296
- },
297
- "language_info": {
298
- "codemirror_mode": {
299
- "name": "ipython",
300
- "version": 3
301
- },
302
- "file_extension": ".py",
303
- "mimetype": "text/x-python",
304
- "name": "python",
305
- "nbconvert_exporter": "python",
306
- "pygments_lexer": "ipython3",
307
- "version": "3.10.6"
308
- },
309
- "widgets": {
310
- "application/vnd.jupyter.widget-state+json": {
311
- "state": {
312
- "0e04d7f51add4bc0a2d621f6fb5d59a8": {
313
- "model_module": "@jupyter-widgets/base",
314
- "model_module_version": "2.0.0",
315
- "model_name": "LayoutModel",
316
- "state": {
317
- "_model_module": "@jupyter-widgets/base",
318
- "_model_module_version": "2.0.0",
319
- "_model_name": "LayoutModel",
320
- "_view_count": null,
321
- "_view_module": "@jupyter-widgets/base",
322
- "_view_module_version": "2.0.0",
323
- "_view_name": "LayoutView",
324
- "align_content": null,
325
- "align_items": null,
326
- "align_self": null,
327
- "border_bottom": null,
328
- "border_left": null,
329
- "border_right": null,
330
- "border_top": null,
331
- "bottom": null,
332
- "display": null,
333
- "flex": null,
334
- "flex_flow": null,
335
- "grid_area": null,
336
- "grid_auto_columns": null,
337
- "grid_auto_flow": null,
338
- "grid_auto_rows": null,
339
- "grid_column": null,
340
- "grid_gap": null,
341
- "grid_row": null,
342
- "grid_template_areas": null,
343
- "grid_template_columns": null,
344
- "grid_template_rows": null,
345
- "height": null,
346
- "justify_content": null,
347
- "justify_items": null,
348
- "left": null,
349
- "margin": null,
350
- "max_height": null,
351
- "max_width": null,
352
- "min_height": null,
353
- "min_width": null,
354
- "object_fit": null,
355
- "object_position": null,
356
- "order": null,
357
- "overflow": null,
358
- "padding": null,
359
- "right": null,
360
- "top": null,
361
- "visibility": null,
362
- "width": null
363
- }
364
- },
365
- "292f111caba646bf9d542fb9b7cd91d7": {
366
- "model_module": "@jupyter-widgets/controls",
367
- "model_module_version": "2.0.0",
368
- "model_name": "HTMLModel",
369
- "state": {
370
- "_dom_classes": [],
371
- "_model_module": "@jupyter-widgets/controls",
372
- "_model_module_version": "2.0.0",
373
- "_model_name": "HTMLModel",
374
- "_view_count": null,
375
- "_view_module": "@jupyter-widgets/controls",
376
- "_view_module_version": "2.0.0",
377
- "_view_name": "HTMLView",
378
- "description": "",
379
- "description_allow_html": false,
380
- "layout": "IPY_MODEL_90473b32017648bb994c5818b85de6a8",
381
- "placeholder": "​",
382
- "style": "IPY_MODEL_daaac50111eb4f46a537b26b0a84acd9",
383
- "tabbable": null,
384
- "tooltip": null,
385
- "value": "100%"
386
- }
387
- },
388
- "2cf8dd3e135442fdadeb416bc1398f9a": {
389
- "model_module": "@jupyter-widgets/base",
390
- "model_module_version": "2.0.0",
391
- "model_name": "LayoutModel",
392
- "state": {
393
- "_model_module": "@jupyter-widgets/base",
394
- "_model_module_version": "2.0.0",
395
- "_model_name": "LayoutModel",
396
- "_view_count": null,
397
- "_view_module": "@jupyter-widgets/base",
398
- "_view_module_version": "2.0.0",
399
- "_view_name": "LayoutView",
400
- "align_content": null,
401
- "align_items": null,
402
- "align_self": null,
403
- "border_bottom": null,
404
- "border_left": null,
405
- "border_right": null,
406
- "border_top": null,
407
- "bottom": null,
408
- "display": null,
409
- "flex": null,
410
- "flex_flow": null,
411
- "grid_area": null,
412
- "grid_auto_columns": null,
413
- "grid_auto_flow": null,
414
- "grid_auto_rows": null,
415
- "grid_column": null,
416
- "grid_gap": null,
417
- "grid_row": null,
418
- "grid_template_areas": null,
419
- "grid_template_columns": null,
420
- "grid_template_rows": null,
421
- "height": null,
422
- "justify_content": null,
423
- "justify_items": null,
424
- "left": null,
425
- "margin": null,
426
- "max_height": null,
427
- "max_width": null,
428
- "min_height": null,
429
- "min_width": null,
430
- "object_fit": null,
431
- "object_position": null,
432
- "order": null,
433
- "overflow": null,
434
- "padding": null,
435
- "right": null,
436
- "top": null,
437
- "visibility": null,
438
- "width": null
439
- }
440
- },
441
- "59f2ff1fb7644aff98e9909b2b3d4f15": {
442
- "model_module": "@jupyter-widgets/base",
443
- "model_module_version": "2.0.0",
444
- "model_name": "LayoutModel",
445
- "state": {
446
- "_model_module": "@jupyter-widgets/base",
447
- "_model_module_version": "2.0.0",
448
- "_model_name": "LayoutModel",
449
- "_view_count": null,
450
- "_view_module": "@jupyter-widgets/base",
451
- "_view_module_version": "2.0.0",
452
- "_view_name": "LayoutView",
453
- "align_content": null,
454
- "align_items": null,
455
- "align_self": null,
456
- "border_bottom": null,
457
- "border_left": null,
458
- "border_right": null,
459
- "border_top": null,
460
- "bottom": null,
461
- "display": null,
462
- "flex": null,
463
- "flex_flow": null,
464
- "grid_area": null,
465
- "grid_auto_columns": null,
466
- "grid_auto_flow": null,
467
- "grid_auto_rows": null,
468
- "grid_column": null,
469
- "grid_gap": null,
470
- "grid_row": null,
471
- "grid_template_areas": null,
472
- "grid_template_columns": null,
473
- "grid_template_rows": null,
474
- "height": null,
475
- "justify_content": null,
476
- "justify_items": null,
477
- "left": null,
478
- "margin": null,
479
- "max_height": null,
480
- "max_width": null,
481
- "min_height": null,
482
- "min_width": null,
483
- "object_fit": null,
484
- "object_position": null,
485
- "order": null,
486
- "overflow": null,
487
- "padding": null,
488
- "right": null,
489
- "top": null,
490
- "visibility": null,
491
- "width": null
492
- }
493
- },
494
- "5c1f4b9bdf7c4e8f98d2e8dce11af726": {
495
- "model_module": "@jupyter-widgets/controls",
496
- "model_module_version": "2.0.0",
497
- "model_name": "ProgressStyleModel",
498
- "state": {
499
- "_model_module": "@jupyter-widgets/controls",
500
- "_model_module_version": "2.0.0",
501
- "_model_name": "ProgressStyleModel",
502
- "_view_count": null,
503
- "_view_module": "@jupyter-widgets/base",
504
- "_view_module_version": "2.0.0",
505
- "_view_name": "StyleView",
506
- "bar_color": null,
507
- "description_width": ""
508
- }
509
- },
510
- "68b1e61a345f48e8a25e229af275def1": {
511
- "model_module": "@jupyter-widgets/controls",
512
- "model_module_version": "2.0.0",
513
- "model_name": "FloatProgressModel",
514
- "state": {
515
- "_dom_classes": [],
516
- "_model_module": "@jupyter-widgets/controls",
517
- "_model_module_version": "2.0.0",
518
- "_model_name": "FloatProgressModel",
519
- "_view_count": null,
520
- "_view_module": "@jupyter-widgets/controls",
521
- "_view_module_version": "2.0.0",
522
- "_view_name": "ProgressView",
523
- "bar_style": "success",
524
- "description": "",
525
- "description_allow_html": false,
526
- "layout": "IPY_MODEL_0e04d7f51add4bc0a2d621f6fb5d59a8",
527
- "max": 4.0,
528
- "min": 0.0,
529
- "orientation": "horizontal",
530
- "style": "IPY_MODEL_5c1f4b9bdf7c4e8f98d2e8dce11af726",
531
- "tabbable": null,
532
- "tooltip": null,
533
- "value": 4.0
534
- }
535
- },
536
- "7ec1305895ad44a29163c6c51f641600": {
537
- "model_module": "@jupyter-widgets/controls",
538
- "model_module_version": "2.0.0",
539
- "model_name": "HBoxModel",
540
- "state": {
541
- "_dom_classes": [],
542
- "_model_module": "@jupyter-widgets/controls",
543
- "_model_module_version": "2.0.0",
544
- "_model_name": "HBoxModel",
545
- "_view_count": null,
546
- "_view_module": "@jupyter-widgets/controls",
547
- "_view_module_version": "2.0.0",
548
- "_view_name": "HBoxView",
549
- "box_style": "",
550
- "children": [
551
- "IPY_MODEL_292f111caba646bf9d542fb9b7cd91d7",
552
- "IPY_MODEL_68b1e61a345f48e8a25e229af275def1",
553
- "IPY_MODEL_fdaa96ec85664a7fb6343b81facc8b0a"
554
- ],
555
- "layout": "IPY_MODEL_59f2ff1fb7644aff98e9909b2b3d4f15",
556
- "tabbable": null,
557
- "tooltip": null
558
- }
559
- },
560
- "90473b32017648bb994c5818b85de6a8": {
561
- "model_module": "@jupyter-widgets/base",
562
- "model_module_version": "2.0.0",
563
- "model_name": "LayoutModel",
564
- "state": {
565
- "_model_module": "@jupyter-widgets/base",
566
- "_model_module_version": "2.0.0",
567
- "_model_name": "LayoutModel",
568
- "_view_count": null,
569
- "_view_module": "@jupyter-widgets/base",
570
- "_view_module_version": "2.0.0",
571
- "_view_name": "LayoutView",
572
- "align_content": null,
573
- "align_items": null,
574
- "align_self": null,
575
- "border_bottom": null,
576
- "border_left": null,
577
- "border_right": null,
578
- "border_top": null,
579
- "bottom": null,
580
- "display": null,
581
- "flex": null,
582
- "flex_flow": null,
583
- "grid_area": null,
584
- "grid_auto_columns": null,
585
- "grid_auto_flow": null,
586
- "grid_auto_rows": null,
587
- "grid_column": null,
588
- "grid_gap": null,
589
- "grid_row": null,
590
- "grid_template_areas": null,
591
- "grid_template_columns": null,
592
- "grid_template_rows": null,
593
- "height": null,
594
- "justify_content": null,
595
- "justify_items": null,
596
- "left": null,
597
- "margin": null,
598
- "max_height": null,
599
- "max_width": null,
600
- "min_height": null,
601
- "min_width": null,
602
- "object_fit": null,
603
- "object_position": null,
604
- "order": null,
605
- "overflow": null,
606
- "padding": null,
607
- "right": null,
608
- "top": null,
609
- "visibility": null,
610
- "width": null
611
- }
612
- },
613
- "d3792312355a49ab93e82971aa1ca4c4": {
614
- "model_module": "@jupyter-widgets/controls",
615
- "model_module_version": "2.0.0",
616
- "model_name": "HTMLStyleModel",
617
- "state": {
618
- "_model_module": "@jupyter-widgets/controls",
619
- "_model_module_version": "2.0.0",
620
- "_model_name": "HTMLStyleModel",
621
- "_view_count": null,
622
- "_view_module": "@jupyter-widgets/base",
623
- "_view_module_version": "2.0.0",
624
- "_view_name": "StyleView",
625
- "background": null,
626
- "description_width": "",
627
- "font_size": null,
628
- "text_color": null
629
- }
630
- },
631
- "daaac50111eb4f46a537b26b0a84acd9": {
632
- "model_module": "@jupyter-widgets/controls",
633
- "model_module_version": "2.0.0",
634
- "model_name": "HTMLStyleModel",
635
- "state": {
636
- "_model_module": "@jupyter-widgets/controls",
637
- "_model_module_version": "2.0.0",
638
- "_model_name": "HTMLStyleModel",
639
- "_view_count": null,
640
- "_view_module": "@jupyter-widgets/base",
641
- "_view_module_version": "2.0.0",
642
- "_view_name": "StyleView",
643
- "background": null,
644
- "description_width": "",
645
- "font_size": null,
646
- "text_color": null
647
- }
648
- },
649
- "fdaa96ec85664a7fb6343b81facc8b0a": {
650
- "model_module": "@jupyter-widgets/controls",
651
- "model_module_version": "2.0.0",
652
- "model_name": "HTMLModel",
653
- "state": {
654
- "_dom_classes": [],
655
- "_model_module": "@jupyter-widgets/controls",
656
- "_model_module_version": "2.0.0",
657
- "_model_name": "HTMLModel",
658
- "_view_count": null,
659
- "_view_module": "@jupyter-widgets/controls",
660
- "_view_module_version": "2.0.0",
661
- "_view_name": "HTMLView",
662
- "description": "",
663
- "description_allow_html": false,
664
- "layout": "IPY_MODEL_2cf8dd3e135442fdadeb416bc1398f9a",
665
- "placeholder": "​",
666
- "style": "IPY_MODEL_d3792312355a49ab93e82971aa1ca4c4",
667
- "tabbable": null,
668
- "tooltip": null,
669
- "value": " 4/4 [00:00&lt;00:00, 89.21it/s]"
670
- }
671
- }
672
- },
673
- "version_major": 2,
674
- "version_minor": 0
675
- }
676
  }
677
  },
678
  "nbformat": 4,
 
2
  "cells": [
3
  {
4
  "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "4425fe57",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "4068c493",
18
  "metadata": {
19
+ "lines_to_next_cell": 2,
20
  "tags": [
21
  "hide_inp"
22
  ]
 
26
  "desc = \"\"\"\n",
27
  "### Question Answering with Retrieval\n",
28
  "\n",
29
+ "Chain that answers questions with embeedding based retrieval. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/qa.ipynb)\n",
30
  "\n",
31
  "(Adapted from [OpenAI Notebook](https://github.com/openai/openai-cookbook/blob/main/examples/Question_answering_using_embeddings.ipynb).)\n",
32
  "\"\"\""
33
  ]
34
  },
35
  {
36
  "cell_type": "code",
37
+ "execution_count": null,
38
+ "id": "67a325ce",
39
+ "metadata": {},
40
  "outputs": [],
41
  "source": [
42
  "import datasets\n",
 
47
  },
48
  {
49
  "cell_type": "markdown",
50
+ "id": "b24d6ab8",
51
  "metadata": {},
52
  "source": [
53
  "We use Hugging Face Datasets as the database by assigning\n",
 
56
  },
57
  {
58
  "cell_type": "code",
59
+ "execution_count": null,
60
+ "id": "6a9774bb",
61
+ "metadata": {},
62
+ "outputs": [],
63
  "source": [
64
  "olympics = datasets.load_from_disk(\"olympics.data\")\n",
65
  "olympics.add_faiss_index(\"embeddings\")"
 
67
  },
68
  {
69
  "cell_type": "markdown",
70
+ "id": "4fa3c179",
71
  "metadata": {},
72
  "source": [
73
  "Fast KNN retieval prompt"
 
75
  },
76
  {
77
  "cell_type": "code",
78
+ "execution_count": null,
79
+ "id": "c3e5273c",
80
  "metadata": {
81
  "lines_to_next_cell": 1
82
  },
83
  "outputs": [],
 
91
  },
92
  {
93
  "cell_type": "code",
94
+ "execution_count": null,
95
+ "id": "12df8217",
96
  "metadata": {
97
  "lines_to_next_cell": 1
98
  },
99
  "outputs": [],
 
106
  },
107
  {
108
  "cell_type": "code",
109
+ "execution_count": null,
110
+ "id": "df36d640",
111
+ "metadata": {},
112
  "outputs": [],
113
  "source": [
114
  "def qa(query):\n",
 
117
  ]
118
  },
119
  {
120
+ "cell_type": "code",
121
+ "execution_count": null,
122
+ "id": "08dd0eeb",
123
+ "metadata": {},
124
+ "outputs": [],
125
+ "source": []
 
 
126
  },
127
  {
128
  "cell_type": "code",
129
+ "execution_count": null,
130
+ "id": "76e13672",
131
+ "metadata": {},
132
  "outputs": [],
133
  "source": [
134
  "questions = [\"Who won the 2020 Summer Olympics men's high jump?\",\n",
 
140
  },
141
  {
142
  "cell_type": "code",
143
+ "execution_count": null,
144
+ "id": "add9d044",
145
  "metadata": {
146
+ "lines_to_next_cell": 2
147
  },
148
+ "outputs": [],
149
  "source": [
150
  "gradio = show(qa,\n",
151
  " examples=questions,\n",
152
  " subprompts=[get_neighbors, get_result],\n",
153
  " description=desc,\n",
 
154
  " )\n",
155
  "if __name__ == \"__main__\":\n",
156
  " gradio.launch()"
157
  ]
158
  }
159
  ],
160
  "metadata": {
161
  "jupytext": {
162
+ "cell_metadata_filter": "tags,-all",
163
+ "main_language": "python",
164
+ "notebook_metadata_filter": "-all"
165
  }
166
  },
167
  "nbformat": 4,
qa.py CHANGED
@@ -2,7 +2,7 @@
2
  desc = """
3
  ### Question Answering with Retrieval
4
 
5
- Chain that answers questions with embeedding based retrieval. [[Code](https://github.com/srush/MiniChain/blob/main/examples/qa.py)]
6
 
7
  (Adapted from [OpenAI Notebook](https://github.com/openai/openai-cookbook/blob/main/examples/Question_answering_using_embeddings.ipynb).)
8
  """
@@ -57,12 +57,3 @@ gradio = show(qa,
57
  if __name__ == "__main__":
58
  gradio.launch()
59
 
60
-
61
-
62
- # # + tags=["hide_inp"]
63
- # QAPrompt().show(
64
- # {"question": "Who won the race?", "docs": ["doc1", "doc2", "doc3"]}, "Joe Bob"
65
- # )
66
- # # -
67
-
68
- # show_log("qa.log")
 
2
  desc = """
3
  ### Question Answering with Retrieval
4
 
5
+ Chain that answers questions with embedding-based retrieval. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/qa.ipynb)
6
 
7
  (Adapted from [OpenAI Notebook](https://github.com/openai/openai-cookbook/blob/main/examples/Question_answering_using_embeddings.ipynb).)
8
  """
 
57
  if __name__ == "__main__":
58
  gradio.launch()
59
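The qa example above builds its context by nearest-neighbor search over a FAISS-indexed `embeddings` column. A minimal sketch of just that retrieval step with Hugging Face Datasets, assuming the same `olympics.data` dataset and a precomputed query vector (the helper name and `k` default are illustrative, not MiniChain's API):

```python
import numpy as np
import datasets

# Load the dataset and index its "embeddings" column with FAISS,
# mirroring olympics.add_faiss_index("embeddings") in qa.ipynb.
olympics = datasets.load_from_disk("olympics.data")
olympics.add_faiss_index("embeddings")

def nearest_passages(query_embedding: np.ndarray, k: int = 3) -> dict:
    # Return the k rows whose embedding vectors are closest to the query;
    # producing query_embedding (e.g. with an embedding model) is out of scope here.
    scores, examples = olympics.get_nearest_examples(
        "embeddings", query_embedding.astype(np.float32), k=k
    )
    return examples
```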
 
requirements.txt CHANGED
@@ -1,4 +1,4 @@
1
  gradio==3.21.0
2
- git+https://github.com/srush/minichain@gradio
3
  manifest-ml
4
  faiss-cpu
 
1
  gradio==3.21.0
2
+ git+https://github.com/srush/minichain
3
  manifest-ml
4
  faiss-cpu
selfask.ipynb CHANGED
@@ -3,56 +3,39 @@
3
  {
4
  "cell_type": "code",
5
  "execution_count": null,
6
- "id": "12eeb7f2",
7
  "metadata": {
8
- "lines_to_next_cell": 0
9
  },
10
  "outputs": [],
11
- "source": []
 
 
 
12
  },
13
  {
14
  "cell_type": "code",
15
- "execution_count": 1,
16
- "id": "0290a7f2",
17
  "metadata": {
18
- "execution": {
19
- "iopub.execute_input": "2023-03-22T17:00:59.828303Z",
20
- "iopub.status.busy": "2023-03-22T17:00:59.827586Z",
21
- "iopub.status.idle": "2023-03-22T17:00:59.836907Z",
22
- "shell.execute_reply": "2023-03-22T17:00:59.836357Z"
23
- }
24
  },
25
  "outputs": [],
26
  "source": [
27
  "desc = \"\"\"\n",
28
  "### Self-Ask\n",
29
  "\n",
30
- " Notebook implementation of the self-ask + Google tool use prompt.\n",
31
  "\n",
32
  " (Adapted from [Self-Ask repo](https://github.com/ofirpress/self-ask))\n",
33
  "\"\"\""
34
  ]
35
  },
36
- {
37
- "cell_type": "markdown",
38
- "id": "29b0fe2a",
39
- "metadata": {},
40
- "source": [
41
- "$"
42
- ]
43
- },
44
  {
45
  "cell_type": "code",
46
- "execution_count": 2,
47
- "id": "f3544dba",
48
- "metadata": {
49
- "execution": {
50
- "iopub.execute_input": "2023-03-22T17:00:59.839279Z",
51
- "iopub.status.busy": "2023-03-22T17:00:59.839084Z",
52
- "iopub.status.idle": "2023-03-22T17:01:01.144851Z",
53
- "shell.execute_reply": "2023-03-22T17:01:01.144194Z"
54
- }
55
- },
56
  "outputs": [],
57
  "source": [
58
  "from dataclasses import dataclass, replace\n",
@@ -62,16 +45,9 @@
62
  },
63
  {
64
  "cell_type": "code",
65
- "execution_count": 3,
66
- "id": "a89961cf",
67
- "metadata": {
68
- "execution": {
69
- "iopub.execute_input": "2023-03-22T17:01:01.147570Z",
70
- "iopub.status.busy": "2023-03-22T17:01:01.147250Z",
71
- "iopub.status.idle": "2023-03-22T17:01:01.151110Z",
72
- "shell.execute_reply": "2023-03-22T17:01:01.150563Z"
73
- }
74
- },
75
  "outputs": [],
76
  "source": [
77
  "@dataclass\n",
@@ -84,15 +60,9 @@
84
  },
85
  {
86
  "cell_type": "code",
87
- "execution_count": 4,
88
- "id": "0ebdb069",
89
  "metadata": {
90
- "execution": {
91
- "iopub.execute_input": "2023-03-22T17:01:01.153276Z",
92
- "iopub.status.busy": "2023-03-22T17:01:01.152995Z",
93
- "iopub.status.idle": "2023-03-22T17:01:01.156865Z",
94
- "shell.execute_reply": "2023-03-22T17:01:01.156229Z"
95
- },
96
  "lines_to_next_cell": 1
97
  },
98
  "outputs": [],
@@ -111,15 +81,9 @@
111
  },
112
  {
113
  "cell_type": "code",
114
- "execution_count": 5,
115
- "id": "a8fdbdcf",
116
  "metadata": {
117
- "execution": {
118
- "iopub.execute_input": "2023-03-22T17:01:01.159342Z",
119
- "iopub.status.busy": "2023-03-22T17:01:01.159137Z",
120
- "iopub.status.idle": "2023-03-22T17:01:01.162472Z",
121
- "shell.execute_reply": "2023-03-22T17:01:01.162014Z"
122
- },
123
  "lines_to_next_cell": 1
124
  },
125
  "outputs": [],
@@ -128,7 +92,7 @@
128
  "def google(model, state):\n",
129
  " if state.next_query is None:\n",
130
  " return state\n",
131
- " \n",
132
  " result = model(state.next_query)\n",
133
  " return State(state.question,\n",
134
  " state.history + \"\\nIntermediate answer: \" + result + \"\\n\")"
@@ -136,17 +100,9 @@
136
  },
137
  {
138
  "cell_type": "code",
139
- "execution_count": 6,
140
- "id": "82883cd6",
141
- "metadata": {
142
- "execution": {
143
- "iopub.execute_input": "2023-03-22T17:01:01.164441Z",
144
- "iopub.status.busy": "2023-03-22T17:01:01.164266Z",
145
- "iopub.status.idle": "2023-03-22T17:01:01.167251Z",
146
- "shell.execute_reply": "2023-03-22T17:01:01.166820Z"
147
- },
148
- "lines_to_next_cell": 1
149
- },
150
  "outputs": [],
151
  "source": [
152
  "def selfask(question):\n",
@@ -157,55 +113,17 @@
157
  " return state"
158
  ]
159
  },
160
- {
161
- "cell_type": "markdown",
162
- "id": "4f9bf472",
163
- "metadata": {},
164
- "source": [
165
- "$"
166
- ]
167
- },
168
  {
169
  "cell_type": "code",
170
- "execution_count": 7,
171
- "id": "20e37090",
172
- "metadata": {
173
- "execution": {
174
- "iopub.execute_input": "2023-03-22T17:01:01.169337Z",
175
- "iopub.status.busy": "2023-03-22T17:01:01.169152Z",
176
- "iopub.status.idle": "2023-03-22T17:01:01.506567Z",
177
- "shell.execute_reply": "2023-03-22T17:01:01.505941Z"
178
- }
179
- },
180
- "outputs": [
181
- {
182
- "name": "stdout",
183
- "output_type": "stream",
184
- "text": [
185
- "Running on local URL: http://127.0.0.1:7861\n",
186
- "\n",
187
- "To create a public link, set `share=True` in `launch()`.\n"
188
- ]
189
- },
190
- {
191
- "data": {
192
- "text/html": [
193
- "<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
194
- ],
195
- "text/plain": [
196
- "<IPython.core.display.HTML object>"
197
- ]
198
- },
199
- "metadata": {},
200
- "output_type": "display_data"
201
- }
202
- ],
203
  "source": [
204
  "gradio = show(selfask,\n",
205
  " examples=[\"What is the zip code of the city where George Washington was born?\"],\n",
206
  " subprompts=[self_ask, google] * 3,\n",
207
  " description=desc,\n",
208
- " code=open(\"selfask.py\", \"r\").read().split(\"$\")[1].strip().strip(\"#\").strip(),\n",
209
  " out_type=\"json\"\n",
210
  " )\n",
211
  "if __name__ == \"__main__\":\n",
@@ -215,7 +133,7 @@
215
  {
216
  "cell_type": "code",
217
  "execution_count": null,
218
- "id": "fdf780fc",
219
  "metadata": {},
220
  "outputs": [],
221
  "source": []
@@ -223,24 +141,9 @@
223
  ],
224
  "metadata": {
225
  "jupytext": {
226
- "cell_metadata_filter": "-all"
227
- },
228
- "kernelspec": {
229
- "display_name": "minichain",
230
- "language": "python",
231
- "name": "minichain"
232
- },
233
- "language_info": {
234
- "codemirror_mode": {
235
- "name": "ipython",
236
- "version": 3
237
- },
238
- "file_extension": ".py",
239
- "mimetype": "text/x-python",
240
- "name": "python",
241
- "nbconvert_exporter": "python",
242
- "pygments_lexer": "ipython3",
243
- "version": "3.10.6"
244
  }
245
  },
246
  "nbformat": 4,
 
3
  {
4
  "cell_type": "code",
5
  "execution_count": null,
6
+ "id": "0ac58c93",
7
  "metadata": {
8
+ "lines_to_next_cell": 2
9
  },
10
  "outputs": [],
11
+ "source": [
12
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
13
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
14
+ ]
15
  },
16
  {
17
  "cell_type": "code",
18
+ "execution_count": null,
19
+ "id": "25776503",
20
  "metadata": {
21
+ "lines_to_next_cell": 2
22
  },
23
  "outputs": [],
24
  "source": [
25
  "desc = \"\"\"\n",
26
  "### Self-Ask\n",
27
  "\n",
28
+ " Notebook implementation of the self-ask + Google tool use prompt. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/selfask.ipynb)\n",
29
  "\n",
30
  " (Adapted from [Self-Ask repo](https://github.com/ofirpress/self-ask))\n",
31
  "\"\"\""
32
  ]
33
  },
34
  {
35
  "cell_type": "code",
36
+ "execution_count": null,
37
+ "id": "aca4e9fd",
38
+ "metadata": {},
39
  "outputs": [],
40
  "source": [
41
  "from dataclasses import dataclass, replace\n",
 
45
  },
46
  {
47
  "cell_type": "code",
48
+ "execution_count": null,
49
+ "id": "86bc5318",
50
+ "metadata": {},
51
  "outputs": [],
52
  "source": [
53
  "@dataclass\n",
 
60
  },
61
  {
62
  "cell_type": "code",
63
+ "execution_count": null,
64
+ "id": "671c9b48",
65
  "metadata": {
66
  "lines_to_next_cell": 1
67
  },
68
  "outputs": [],
 
81
  },
82
  {
83
  "cell_type": "code",
84
+ "execution_count": null,
85
+ "id": "eae30812",
86
  "metadata": {
87
  "lines_to_next_cell": 1
88
  },
89
  "outputs": [],
 
92
  "def google(model, state):\n",
93
  " if state.next_query is None:\n",
94
  " return state\n",
95
+ "\n",
96
  " result = model(state.next_query)\n",
97
  " return State(state.question,\n",
98
  " state.history + \"\\nIntermediate answer: \" + result + \"\\n\")"
 
100
  },
101
  {
102
  "cell_type": "code",
103
+ "execution_count": null,
104
+ "id": "7577534c",
105
+ "metadata": {},
106
  "outputs": [],
107
  "source": [
108
  "def selfask(question):\n",
 
113
  " return state"
114
  ]
115
  },
116
  {
117
  "cell_type": "code",
118
+ "execution_count": null,
119
+ "id": "6f2eabf1",
120
+ "metadata": {},
121
+ "outputs": [],
 
122
  "source": [
123
  "gradio = show(selfask,\n",
124
  " examples=[\"What is the zip code of the city where George Washington was born?\"],\n",
125
  " subprompts=[self_ask, google] * 3,\n",
126
  " description=desc,\n",
 
127
  " out_type=\"json\"\n",
128
  " )\n",
129
  "if __name__ == \"__main__\":\n",
 
133
  {
134
  "cell_type": "code",
135
  "execution_count": null,
136
+ "id": "182ccd80",
137
  "metadata": {},
138
  "outputs": [],
139
  "source": []
 
141
  ],
142
  "metadata": {
143
  "jupytext": {
144
+ "cell_metadata_filter": "-all",
145
+ "main_language": "python",
146
+ "notebook_metadata_filter": "-all"
147
  }
148
  },
149
  "nbformat": 4,
selfask.py CHANGED
@@ -2,7 +2,7 @@
2
  desc = """
3
  ### Self-Ask
4
 
5
- Notebook implementation of the self-ask + Google tool use prompt.
6
 
7
  (Adapted from [Self-Ask repo](https://github.com/ofirpress/self-ask))
8
  """
@@ -32,12 +32,12 @@ def self_ask(model, state):
32
  return replace(state, next_query=res)
33
  elif out.startswith("So the final answer is:"):
34
  return replace(state, final_answer=res)
35
-
36
  @prompt(Google())
37
  def google(model, state):
38
  if state.next_query is None:
39
  return state
40
-
41
  result = model(state.next_query)
42
  return State(state.question,
43
  state.history + "\nIntermediate answer: " + result + "\n")
 
2
  desc = """
3
  ### Self-Ask
4
 
5
+ Notebook implementation of the self-ask + Google tool use prompt. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/selfask.ipynb)
6
 
7
  (Adapted from [Self-Ask repo](https://github.com/ofirpress/self-ask))
8
  """
 
32
  return replace(state, next_query=res)
33
  elif out.startswith("So the final answer is:"):
34
  return replace(state, final_answer=res)
35
+
36
  @prompt(Google())
37
  def google(model, state):
38
  if state.next_query is None:
39
  return state
40
+
41
  result = model(state.next_query)
42
  return State(state.question,
43
  state.history + "\nIntermediate answer: " + result + "\n")
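The self-ask chain above alternates a self-ask step with a search step for a fixed number of rounds (`subprompts=[self_ask, google] * 3`). A plain-Python sketch of that control flow, with `ask` and `search` standing in for the LLM and Google calls (both callables are assumptions, not MiniChain APIs):

```python
from dataclasses import dataclass, replace
from typing import Optional

@dataclass
class State:
    question: str
    history: str = ""
    next_query: Optional[str] = None
    final_answer: Optional[str] = None

def selfask_sketch(question: str, ask, search, rounds: int = 3) -> State:
    state = State(question)
    for _ in range(rounds):
        state = ask(state)                 # sets next_query or final_answer
        if state.final_answer is not None:
            break
        if state.next_query is not None:   # mirror google(): skip if no query
            answer = search(state.next_query)
            state = replace(
                state,
                history=state.history + "\nIntermediate answer: " + answer + "\n",
                next_query=None,
            )
    return state
```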
show.ipynb ADDED
@@ -0,0 +1,38 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "cddd35ad",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "de75d893",
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "import json, sys\n",
22
+ "from eliottree import tasks_from_iterable, render_tasks\n",
23
+ "render_tasks(sys.stderr.write,\n",
24
+ " tasks_from_iterable([json.loads(l) for l in open(\"bash.log\")]),\n",
25
+ " colorize=True, human_readable=True)"
26
+ ]
27
+ }
28
+ ],
29
+ "metadata": {
30
+ "jupytext": {
31
+ "cell_metadata_filter": "-all",
32
+ "main_language": "python",
33
+ "notebook_metadata_filter": "-all"
34
+ }
35
+ },
36
+ "nbformat": 4,
37
+ "nbformat_minor": 5
38
+ }
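show.ipynb above renders a MiniChain run log with eliot-tree. A small end-to-end sketch of that pattern, first writing a toy eliot log and then rendering it the same way (the action fields and `demo.log` filename are made up for illustration):

```python
import json, sys
from eliot import start_action, to_file
from eliottree import tasks_from_iterable, render_tasks

# Write a tiny eliot log, standing in for the log a MiniChain run produces.
to_file(open("demo.log", "w"))
with start_action(action_type="prompt", command="echo hello"):
    pass

# Render the log as a tree, as show.ipynb does for bash.log.
render_tasks(sys.stderr.write,
             tasks_from_iterable([json.loads(l) for l in open("demo.log")]),
             colorize=True, human_readable=True)
```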
stats.ipynb CHANGED
@@ -3,55 +3,39 @@
3
  {
4
  "cell_type": "code",
5
  "execution_count": null,
6
- "id": "f34e932d",
7
  "metadata": {
8
- "lines_to_next_cell": 0
9
  },
10
  "outputs": [],
11
- "source": []
 
 
 
12
  },
13
  {
14
  "cell_type": "code",
15
- "execution_count": 1,
16
- "id": "12c38ff0",
17
  "metadata": {
18
- "execution": {
19
- "iopub.execute_input": "2023-03-22T17:01:04.041687Z",
20
- "iopub.status.busy": "2023-03-22T17:01:04.040967Z",
21
- "iopub.status.idle": "2023-03-22T17:01:04.062830Z",
22
- "shell.execute_reply": "2023-03-22T17:01:04.060675Z"
23
- }
24
  },
25
  "outputs": [],
26
  "source": [
27
  "desc = \"\"\"\n",
28
  "### Typed Extraction\n",
29
  "\n",
30
- "Information extraction that is automatically generated from a typed specification. [[Code](https://github.com/srush/MiniChain/blob/main/examples/stats.py)]\n",
31
  "\n",
32
  "(Novel to MiniChain)\n",
33
  "\"\"\""
34
  ]
35
  },
36
- {
37
- "cell_type": "markdown",
38
- "id": "7c3125d1",
39
- "metadata": {},
40
- "source": [
41
- "$"
42
- ]
43
- },
44
  {
45
  "cell_type": "code",
46
- "execution_count": 2,
47
- "id": "fafef296",
48
  "metadata": {
49
- "execution": {
50
- "iopub.execute_input": "2023-03-22T17:01:04.071923Z",
51
- "iopub.status.busy": "2023-03-22T17:01:04.071227Z",
52
- "iopub.status.idle": "2023-03-22T17:01:05.411226Z",
53
- "shell.execute_reply": "2023-03-22T17:01:05.410575Z"
54
- },
55
  "lines_to_next_cell": 1
56
  },
57
  "outputs": [],
@@ -64,7 +48,7 @@
64
  },
65
  {
66
  "cell_type": "markdown",
67
- "id": "defad07f",
68
  "metadata": {},
69
  "source": [
70
  "Data specification"
@@ -72,16 +56,9 @@
72
  },
73
  {
74
  "cell_type": "code",
75
- "execution_count": 3,
76
- "id": "b28d3bef",
77
- "metadata": {
78
- "execution": {
79
- "iopub.execute_input": "2023-03-22T17:01:05.414380Z",
80
- "iopub.status.busy": "2023-03-22T17:01:05.413863Z",
81
- "iopub.status.idle": "2023-03-22T17:01:05.418873Z",
82
- "shell.execute_reply": "2023-03-22T17:01:05.418413Z"
83
- }
84
- },
85
  "outputs": [],
86
  "source": [
87
  "class StatType(Enum):\n",
@@ -102,69 +79,25 @@
102
  },
103
  {
104
  "cell_type": "code",
105
- "execution_count": 4,
106
- "id": "7e79dbb5",
107
- "metadata": {
108
- "execution": {
109
- "iopub.execute_input": "2023-03-22T17:01:05.420896Z",
110
- "iopub.status.busy": "2023-03-22T17:01:05.420709Z",
111
- "iopub.status.idle": "2023-03-22T17:01:05.423985Z",
112
- "shell.execute_reply": "2023-03-22T17:01:05.423565Z"
113
- },
114
- "lines_to_next_cell": 1
115
- },
116
  "outputs": [],
117
  "source": [
118
  "@prompt(OpenAI(), template_file=\"stats.pmpt.tpl\", parser=\"json\")\n",
119
  "def stats(model, passage):\n",
120
  " out = model(dict(passage=passage, typ=type_to_prompt(Player)))\n",
121
- " return [Player(**j) for j in out] "
122
- ]
123
- },
124
- {
125
- "cell_type": "markdown",
126
- "id": "ecc90cb1",
127
- "metadata": {},
128
- "source": [
129
- "$"
130
  ]
131
  },
132
  {
133
  "cell_type": "code",
134
- "execution_count": 5,
135
- "id": "73cfa15f",
136
  "metadata": {
137
- "execution": {
138
- "iopub.execute_input": "2023-03-22T17:01:05.426096Z",
139
- "iopub.status.busy": "2023-03-22T17:01:05.425913Z",
140
- "iopub.status.idle": "2023-03-22T17:01:05.738646Z",
141
- "shell.execute_reply": "2023-03-22T17:01:05.737900Z"
142
- },
143
  "lines_to_next_cell": 2
144
  },
145
- "outputs": [
146
- {
147
- "name": "stdout",
148
- "output_type": "stream",
149
- "text": [
150
- "Running on local URL: http://127.0.0.1:7861\n",
151
- "\n",
152
- "To create a public link, set `share=True` in `launch()`.\n"
153
- ]
154
- },
155
- {
156
- "data": {
157
- "text/html": [
158
- "<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
159
- ],
160
- "text/plain": [
161
- "<IPython.core.display.HTML object>"
162
- ]
163
- },
164
- "metadata": {},
165
- "output_type": "display_data"
166
- }
167
- ],
168
  "source": [
169
  "article = open(\"sixers.txt\").read()\n",
170
  "gradio = show(lambda passage: stats(passage),\n",
@@ -172,7 +105,6 @@
172
  " subprompts=[stats],\n",
173
  " out_type=\"json\",\n",
174
  " description=desc,\n",
175
- " code=open(\"stats.py\", \"r\").read().split(\"$\")[1].strip().strip(\"#\").strip(),\n",
176
  ")\n",
177
  "if __name__ == \"__main__\":\n",
178
  " gradio.launch()"
@@ -180,7 +112,7 @@
180
  },
181
  {
182
  "cell_type": "markdown",
183
- "id": "c2c3c29a",
184
  "metadata": {},
185
  "source": [
186
  "ExtractionPrompt().show({\"passage\": \"Harden had 10 rebounds.\"},\n",
@@ -189,7 +121,7 @@
189
  },
190
  {
191
  "cell_type": "markdown",
192
- "id": "d6453878",
193
  "metadata": {},
194
  "source": [
195
  "# View the run log."
@@ -197,7 +129,7 @@
197
  },
198
  {
199
  "cell_type": "markdown",
200
- "id": "4439426d",
201
  "metadata": {},
202
  "source": [
203
  "minichain.show_log(\"bash.log\")"
@@ -206,24 +138,9 @@
206
  ],
207
  "metadata": {
208
  "jupytext": {
209
- "cell_metadata_filter": "-all"
210
- },
211
- "kernelspec": {
212
- "display_name": "minichain",
213
- "language": "python",
214
- "name": "minichain"
215
- },
216
- "language_info": {
217
- "codemirror_mode": {
218
- "name": "ipython",
219
- "version": 3
220
- },
221
- "file_extension": ".py",
222
- "mimetype": "text/x-python",
223
- "name": "python",
224
- "nbconvert_exporter": "python",
225
- "pygments_lexer": "ipython3",
226
- "version": "3.10.6"
227
  }
228
  },
229
  "nbformat": 4,
 
3
  {
4
  "cell_type": "code",
5
  "execution_count": null,
6
+ "id": "9acde9e6",
7
  "metadata": {
8
+ "lines_to_next_cell": 2
9
  },
10
  "outputs": [],
11
+ "source": [
12
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
13
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
14
+ ]
15
  },
16
  {
17
  "cell_type": "code",
18
+ "execution_count": null,
19
+ "id": "8f7a9c7a",
20
  "metadata": {
21
+ "lines_to_next_cell": 2
22
  },
23
  "outputs": [],
24
  "source": [
25
  "desc = \"\"\"\n",
26
  "### Typed Extraction\n",
27
  "\n",
28
+ "Information extraction that is automatically generated from a typed specification. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/pal.ipynb)\n",
29
  "\n",
30
  "(Novel to MiniChain)\n",
31
  "\"\"\""
32
  ]
33
  },
34
  {
35
  "cell_type": "code",
36
+ "execution_count": null,
37
+ "id": "59806374",
38
  "metadata": {
39
  "lines_to_next_cell": 1
40
  },
41
  "outputs": [],
 
48
  },
49
  {
50
  "cell_type": "markdown",
51
+ "id": "e1cb8aa5",
52
  "metadata": {},
53
  "source": [
54
  "Data specification"
 
56
  },
57
  {
58
  "cell_type": "code",
59
+ "execution_count": null,
60
+ "id": "79fc4394",
61
+ "metadata": {},
62
  "outputs": [],
63
  "source": [
64
  "class StatType(Enum):\n",
 
79
  },
80
  {
81
  "cell_type": "code",
82
+ "execution_count": null,
83
+ "id": "61aed166",
84
+ "metadata": {},
85
  "outputs": [],
86
  "source": [
87
  "@prompt(OpenAI(), template_file=\"stats.pmpt.tpl\", parser=\"json\")\n",
88
  "def stats(model, passage):\n",
89
  " out = model(dict(passage=passage, typ=type_to_prompt(Player)))\n",
90
+ " return [Player(**j) for j in out]"
91
  ]
92
  },
93
  {
94
  "cell_type": "code",
95
+ "execution_count": null,
96
+ "id": "b6949159",
97
  "metadata": {
98
  "lines_to_next_cell": 2
99
  },
100
+ "outputs": [],
101
  "source": [
102
  "article = open(\"sixers.txt\").read()\n",
103
  "gradio = show(lambda passage: stats(passage),\n",
 
105
  " subprompts=[stats],\n",
106
  " out_type=\"json\",\n",
107
  " description=desc,\n",
 
108
  ")\n",
109
  "if __name__ == \"__main__\":\n",
110
  " gradio.launch()"
 
112
  },
113
  {
114
  "cell_type": "markdown",
115
+ "id": "764eec48",
116
  "metadata": {},
117
  "source": [
118
  "ExtractionPrompt().show({\"passage\": \"Harden had 10 rebounds.\"},\n",
 
121
  },
122
  {
123
  "cell_type": "markdown",
124
+ "id": "fefa83c7",
125
  "metadata": {},
126
  "source": [
127
  "# View the run log."
 
129
  },
130
  {
131
  "cell_type": "markdown",
132
+ "id": "f84dac69",
133
  "metadata": {},
134
  "source": [
135
  "minichain.show_log(\"bash.log\")"
 
138
  ],
139
  "metadata": {
140
  "jupytext": {
141
+ "cell_metadata_filter": "-all",
142
+ "main_language": "python",
143
+ "notebook_metadata_filter": "-all"
144
  }
145
  },
146
  "nbformat": 4,
stats.py CHANGED
@@ -2,7 +2,7 @@
2
  desc = """
3
  ### Typed Extraction
4
 
5
- Information extraction that is automatically generated from a typed specification. [[Code](https://github.com/srush/MiniChain/blob/main/examples/stats.py)]
6
 
7
  (Novel to MiniChain)
8
  """
@@ -37,7 +37,7 @@ class Player:
37
  @prompt(OpenAI(), template_file="stats.pmpt.tpl", parser="json")
38
  def stats(model, passage):
39
  out = model(dict(passage=passage, typ=type_to_prompt(Player)))
40
- return [Player(**j) for j in out]
41
 
42
  # $
43
 
 
2
  desc = """
3
  ### Typed Extraction
4
 
5
+ Information extraction that is automatically generated from a typed specification. [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/srush/MiniChain/blob/master/examples/stats.ipynb)
6
 
7
  (Novel to MiniChain)
8
  """
 
37
  @prompt(OpenAI(), template_file="stats.pmpt.tpl", parser="json")
38
  def stats(model, passage):
39
  out = model(dict(passage=passage, typ=type_to_prompt(Player)))
40
+ return [Player(**j) for j in out]
41
 
42
  # $
43
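The stats example derives its extraction prompt from a typed `Player` specification and parses the model's JSON straight back into that type. A simplified sketch of the round trip (the `Player` fields and the `type_to_prompt_sketch` helper are illustrative stand-ins for the MiniChain versions):

```python
import json
from dataclasses import dataclass, fields

@dataclass
class Player:
    player: str
    stat: str
    amount: int

def type_to_prompt_sketch(cls) -> str:
    # Turn the dataclass fields into an instruction the model can follow.
    keys = ", ".join(f'"{f.name}"' for f in fields(cls))
    return f"Return a JSON list of objects with keys {keys}."

def parse_players(model_output: str) -> list:
    # Mirrors `[Player(**j) for j in out]` in stats.py.
    return [Player(**row) for row in json.loads(model_output)]

print(type_to_prompt_sketch(Player))
print(parse_players('[{"player": "Harden", "stat": "rebounds", "amount": 10}]'))
```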
 
story.ipynb ADDED
@@ -0,0 +1,61 @@
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "eff9255a",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "d4910f1d",
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "from asyncio import gather"
22
+ ]
23
+ },
24
+ {
25
+ "cell_type": "code",
26
+ "execution_count": null,
27
+ "id": "3265015f",
28
+ "metadata": {},
29
+ "outputs": [],
30
+ "source": [
31
+ "@simple\n",
32
+ "def outline():\n",
33
+ " return \"What is the outline?\""
34
+ ]
35
+ },
36
+ {
37
+ "cell_type": "code",
38
+ "execution_count": null,
39
+ "id": "fc23ad60",
40
+ "metadata": {
41
+ "lines_to_next_cell": 2
42
+ },
43
+ "outputs": [],
44
+ "source": [
45
+ "@composite\n",
46
+ "async def story(bot: Bot):\n",
47
+ " calls = [bot(outline()), bot(characters())]\n",
48
+ " outline, characters = gather([calls])"
49
+ ]
50
+ }
51
+ ],
52
+ "metadata": {
53
+ "jupytext": {
54
+ "cell_metadata_filter": "-all",
55
+ "main_language": "python",
56
+ "notebook_metadata_filter": "-all"
57
+ }
58
+ },
59
+ "nbformat": 4,
60
+ "nbformat_minor": 5
61
+ }
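story.ipynb above sketches fanning two prompts out concurrently and gathering the results. A self-contained plain-asyncio version of that pattern (the `characters()` prompt and the dummy `bot` are assumptions mirroring the notebook, not MiniChain APIs):

```python
import asyncio

def outline() -> str:
    return "What is the outline?"

def characters() -> str:
    return "Who are the characters?"

async def story(bot):
    # gather takes the coroutines as separate arguments and must be awaited.
    outline_text, characters_text = await asyncio.gather(bot(outline()), bot(characters()))
    return outline_text, characters_text

async def _demo():
    async def bot(prompt: str) -> str:
        return f"answer to: {prompt}"
    print(await story(bot))

if __name__ == "__main__":
    asyncio.run(_demo())
```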
summary.ipynb CHANGED
@@ -1,27 +1,31 @@
1
  {
2
  "cells": [
 
3
  {
4
  "cell_type": "markdown",
5
- "id": "9f2e2ab6",
6
  "metadata": {},
7
  "source": [
8
- "Summarize a long document by chunking and summarizing parts.\n",
9
- "Uses aynchronous calls to the API.\n",
10
- "Adapted from LangChain [Map-Reduce summary](https://langchain.readthedocs.io/en/stable/_modules/langchain/chains/mapreduce.html)."
11
  ]
12
  },
13
  {
14
  "cell_type": "code",
15
- "execution_count": 1,
16
- "id": "3bd927bd",
17
- "metadata": {
18
- "execution": {
19
- "iopub.execute_input": "2023-02-26T01:42:15.831020Z",
20
- "iopub.status.busy": "2023-02-26T01:42:15.830584Z",
21
- "iopub.status.idle": "2023-02-26T01:42:15.896842Z",
22
- "shell.execute_reply": "2023-02-26T01:42:15.896197Z"
23
- }
24
- },
25
  "outputs": [],
26
  "source": [
27
  "import trio"
@@ -29,16 +33,9 @@
29
  },
30
  {
31
  "cell_type": "code",
32
- "execution_count": 2,
33
- "id": "9cd3cb67",
34
- "metadata": {
35
- "execution": {
36
- "iopub.execute_input": "2023-02-26T01:42:15.899476Z",
37
- "iopub.status.busy": "2023-02-26T01:42:15.899291Z",
38
- "iopub.status.idle": "2023-02-26T01:42:15.950081Z",
39
- "shell.execute_reply": "2023-02-26T01:42:15.949433Z"
40
- }
41
- },
42
  "outputs": [],
43
  "source": [
44
  "from minichain import TemplatePrompt, show_log, start_chain"
@@ -46,7 +43,7 @@
46
  },
47
  {
48
  "cell_type": "markdown",
49
- "id": "a8d9d96b",
50
  "metadata": {
51
  "lines_to_next_cell": 2
52
  },
@@ -56,85 +53,39 @@
56
  },
57
  {
58
  "cell_type": "code",
59
- "execution_count": 3,
60
- "id": "bda5ebf7",
61
- "metadata": {
62
- "execution": {
63
- "iopub.execute_input": "2023-02-26T01:42:15.952712Z",
64
- "iopub.status.busy": "2023-02-26T01:42:15.952495Z",
65
- "iopub.status.idle": "2023-02-26T01:42:15.955601Z",
66
- "shell.execute_reply": "2023-02-26T01:42:15.955080Z"
67
- },
68
- "lines_to_next_cell": 1
69
- },
70
  "outputs": [],
71
  "source": [
72
- "class SummaryPrompt(TemplatePrompt[str]):\n",
73
  " template_file = \"summary.pmpt.tpl\""
74
  ]
75
  },
76
  {
77
  "cell_type": "code",
78
- "execution_count": 4,
79
- "id": "f451f936",
80
- "metadata": {
81
- "execution": {
82
- "iopub.execute_input": "2023-02-26T01:42:15.957639Z",
83
- "iopub.status.busy": "2023-02-26T01:42:15.957462Z",
84
- "iopub.status.idle": "2023-02-26T01:42:15.961518Z",
85
- "shell.execute_reply": "2023-02-26T01:42:15.960983Z"
86
- }
87
- },
88
  "outputs": [],
89
  "source": [
90
- "def chunk(f):\n",
91
  " \"Split a documents into 4800 character overlapping chunks\"\n",
92
  " text = open(f).read().replace(\"\\n\\n\", \"\\n\")\n",
93
  " chunks = []\n",
94
- " W = 4000\n",
95
- " O = 800\n",
96
  " for i in range(4):\n",
97
- " if i * W > len(text):\n",
98
  " break\n",
99
- " chunks.append({\"text\": text[i * W : (i + 1) * W + O]})\n",
100
  " return chunks"
101
  ]
102
  },
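The cell above splits the document into 4000-character windows with an 800-character overlap, capped at four chunks. A standalone sketch of that chunking plus the map-reduce step it feeds (the `summarize` callable is a placeholder for the SummaryPrompt call, not MiniChain's API):

```python
def chunk_text(text: str, window: int = 4000, overlap: int = 800, max_chunks: int = 4):
    # Mirrors chunk() in summary.ipynb: fixed windows with a trailing overlap.
    text = text.replace("\n\n", "\n")
    chunks = []
    for i in range(max_chunks):
        if i * window > len(text):
            break
        chunks.append(text[i * window : (i + 1) * window + overlap])
    return chunks

def map_reduce_summary(text: str, summarize) -> str:
    partial = [summarize(c) for c in chunk_text(text)]  # "map": summarize each chunk
    return summarize("\n".join(partial))                # "reduce": summarize the summaries
```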
103
  {
104
  "cell_type": "code",
105
- "execution_count": 5,
106
- "id": "bebcd7dd",
107
- "metadata": {
108
- "execution": {
109
- "iopub.execute_input": "2023-02-26T01:42:15.963729Z",
110
- "iopub.status.busy": "2023-02-26T01:42:15.963411Z",
111
- "iopub.status.idle": "2023-02-26T01:42:29.326051Z",
112
- "shell.execute_reply": "2023-02-26T01:42:29.325114Z"
113
- }
114
- },
115
- "outputs": [
116
- {
117
- "name": "stdout",
118
- "output_type": "stream",
119
- "text": [
120
- "\u001b[1mINFO \u001b[0m \u001b[32m2023-02-25 20:42:16.679\u001b[0m: \u001b[36masync_openai.client\u001b[0m:\u001b[36m__init__\u001b[0m: \u001b[1mOpenAI Client initialized: https://api.openai.com/v1\u001b[0m\n"
121
- ]
122
- },
123
- {
124
- "name": "stdout",
125
- "output_type": "stream",
126
- "text": [
127
- "\u001b[1mINFO \u001b[0m \u001b[32m2023-02-25 20:42:16.679\u001b[0m: \u001b[36masync_openai.client\u001b[0m:\u001b[36m__init__\u001b[0m: \u001b[1mOpenAI Client initialized: https://api.openai.com/v1\u001b[0m\n"
128
- ]
129
- },
130
- {
131
- "name": "stdout",
132
- "output_type": "stream",
133
- "text": [
134
- " In response to Russia's aggression in Ukraine, President Biden has taken action to provide military, economic, and humanitarian assistance to Ukraine, impose economic sanctions on Russia, and close off American airspace to Russian flights. He has also passed the American Rescue Plan to provide economic relief to Americans and the Bipartisan Innovation Act to level the playing field with China and other competitors. This bill will create 10,000 new jobs and increase Intel's investment to $100 billion. President Biden is also fighting inflation by capping the cost of insulin at $35 a month and allowing Medicare to negotiate lower prices for prescription drugs.\n"
135
- ]
136
- }
137
- ],
138
  "source": [
139
  "with start_chain(\"summary\") as backend:\n",
140
  " prompt = SummaryPrompt(backend.OpenAI())\n",
@@ -149,569 +100,27 @@
149
  },
150
  {
151
  "cell_type": "code",
152
- "execution_count": 6,
153
- "id": "ae8efff6",
154
  "metadata": {
155
- "execution": {
156
- "iopub.execute_input": "2023-02-26T01:42:29.329395Z",
157
- "iopub.status.busy": "2023-02-26T01:42:29.329095Z",
158
- "iopub.status.idle": "2023-02-26T01:42:29.387107Z",
159
- "shell.execute_reply": "2023-02-26T01:42:29.386548Z"
160
- },
161
  "tags": [
162
  "hide_inp"
163
  ]
164
  },
165
- "outputs": [
166
- {
167
- "data": {
168
- "text/html": [
169
- "\n",
170
- "<!-- <link rel=\"stylesheet\" href=\"https://cdn.rawgit.com/Chalarangelo/mini.css/v3.0.1/dist/mini-default.min.css\"> -->\n",
171
- " <main class=\"container\">\n",
172
- "\n",
173
- "<h3>SummaryPrompt</h3>\n",
174
- "\n",
175
- "<dl>\n",
176
- " <dt>Input:</dt>\n",
177
- " <dd>\n",
178
- "<div class=\"highlight\"><pre><span></span><span class=\"p\">{</span><span class=\"s1\">&#39;text&#39;</span><span class=\"p\">:</span> <span class=\"s1\">&#39;One way to fight inflation is to drive down wages and make Americans poorer.&#39;</span><span class=\"p\">}</span>\n",
179
- "</pre></div>\n",
180
- "\n",
181
- "\n",
182
- " </dd>\n",
183
- "\n",
184
- " <dt> Full Prompt: </dt>\n",
185
- " <dd>\n",
186
- " <details>\n",
187
- " <summary>Prompt</summary>\n",
188
- " <p>Write a concise summary of the following:<br><br><br>\"<div style='color:red'>One way to fight inflation is to drive down wages and make Americans poorer.</div>\"<br><br><br>CONCISE SUMMARY:</p>\n",
189
- " </details>\n",
190
- " </dd>\n",
191
- "\n",
192
- " <dt> Response: </dt>\n",
193
- " <dd>\n",
194
- " Make Americans poorer\n",
195
- " </dd>\n",
196
- "\n",
197
- " <dt>Value:</dt>\n",
198
- " <dd>\n",
199
- "<div class=\"highlight\"><pre><span></span><span class=\"n\">Make</span> <span class=\"n\">Americans</span> <span class=\"n\">poorer</span>\n",
200
- "</pre></div>\n",
201
- "\n",
202
- " </dd>\n",
203
- "</main>\n"
204
- ],
205
- "text/plain": [
206
- "HTML(html='\\n<!-- <link rel=\"stylesheet\" href=\"https://cdn.rawgit.com/Chalarangelo/mini.css/v3.0.1/dist/mini-default.min.css\"> -->\\n <main class=\"container\">\\n\\n<h3>SummaryPrompt</h3>\\n\\n<dl>\\n <dt>Input:</dt>\\n <dd>\\n<div class=\"highlight\"><pre><span></span><span class=\"p\">{</span><span class=\"s1\">&#39;text&#39;</span><span class=\"p\">:</span> <span class=\"s1\">&#39;One way to fight inflation is to drive down wages and make Americans poorer.&#39;</span><span class=\"p\">}</span>\\n</pre></div>\\n\\n\\n </dd>\\n\\n <dt> Full Prompt: </dt>\\n <dd>\\n <details>\\n <summary>Prompt</summary>\\n <p>Write a concise summary of the following:<br><br><br>\"<div style=\\'color:red\\'>One way to fight inflation is to drive down wages and make Americans poorer.</div>\"<br><br><br>CONCISE SUMMARY:</p>\\n </details>\\n </dd>\\n\\n <dt> Response: </dt>\\n <dd>\\n Make Americans poorer\\n </dd>\\n\\n <dt>Value:</dt>\\n <dd>\\n<div class=\"highlight\"><pre><span></span><span class=\"n\">Make</span> <span class=\"n\">Americans</span> <span class=\"n\">poorer</span>\\n</pre></div>\\n\\n </dd>\\n</main>\\n')"
207
- ]
208
- },
209
- "execution_count": 6,
210
- "metadata": {},
211
- "output_type": "execute_result"
212
- }
213
- ],
214
  "source": [
215
  "SummaryPrompt().show(\n",
216
- " {\n",
217
- " \"text\": \"One way to fight inflation is to drive down wages and make Americans poorer.\"\n",
218
- " },\n",
219
  " \"Make Americans poorer\",\n",
220
  ")"
221
  ]
222
  },
223
  {
224
  "cell_type": "code",
225
- "execution_count": 7,
226
- "id": "59f0b406",
227
- "metadata": {
228
- "execution": {
229
- "iopub.execute_input": "2023-02-26T01:42:29.389430Z",
230
- "iopub.status.busy": "2023-02-26T01:42:29.389170Z",
231
- "iopub.status.idle": "2023-02-26T01:42:29.435668Z",
232
- "shell.execute_reply": "2023-02-26T01:42:29.434963Z"
233
- }
234
- },
235
- "outputs": [
236
- {
237
- "name": "stderr",
238
- "output_type": "stream",
239
- "text": [
240
- "\u001b[38;5;15m6174bdb9-8e7d-4859-9c91-d36235b2ad41\u001b[1m\u001b[0m\n",
241
- "└── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m3.979s\u001b[2m\u001b[0m\n",
242
- " ├── \u001b[38;5;5mInput Function\u001b[0m/2/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.001s\u001b[2m\u001b[0m\n",
243
- " │ ├── \u001b[38;5;4minput\u001b[0m: \u001b[0m\n",
244
- " │ │ └── \u001b[38;5;4mtext\u001b[0m: r economy. The Ruble has lost 30% of its value. ⏎\n",
245
- " │ │ The Russian stock market has lost 40% of its value and trading remains suspended. Russia’s economy is reeling and Putin alone is to blame. ⏎\n",
246
- " │ │ Together with our allies we are providing support to the Ukrainians in their fight for freedom. Military assistance. Economic assistance. Humanitarian assistance. ⏎\n",
247
- " │ │ We are giving more than $1 Billion in direct assistance to Ukraine. ⏎\n",
248
- " │ │ And we will continue to aid the Ukrainian people as they defend their country and to help ease their suffering. ⏎\n",
249
- " │ │ Let me be clear, our forces are not engaged and will not engage in conflict with Russian forces in Ukraine. ⏎\n",
250
- " │ │ Our forces are not going to Europe to fight in Ukraine, but to defend our NATO Allies – in the event that Putin decides to keep moving west. ⏎\n",
251
- " │ │ For that purpose we’ve mobilized American ground forces, air squadrons, and ship deployments to protect NATO countries including Poland, Romania, Latvia, Lithuania, and Estonia. ⏎\n",
252
- " │ │ As I have made crystal clear the United States and our Allies will defend every inch of territory of NATO countries with the full force of our collective power. ⏎\n",
253
- " │ │ And we remain clear-eyed. The Ukrainians are fighting back with pure courage. But the next few days weeks, months, will be hard on them. ⏎\n",
254
- " │ │ Putin has unleashed violence and chaos. But while he may make gains on the battlefield – he will pay a continuing high price over the long run. ⏎\n",
255
- " │ │ And a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. ⏎\n",
256
- " │ │ To all Americans, I will be honest with you, as I’ve always promised. A Russian dictator, invading a foreign country, has costs around the world. ⏎\n",
257
- " │ │ And I’m taking robust action to make sure the pain of our sanctions is targeted at Russia’s economy. And I will use every tool at our disposal to protect American businesses and consumers. ⏎\n",
258
- " │ │ Tonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. ⏎\n",
259
- " │ │ America will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. ⏎\n",
260
- " │ │ These steps will help blunt gas prices here at home. And I know the news about what’s happening can seem alarming. ⏎\n",
261
- " │ │ But I want you to know that we are going to be okay. ⏎\n",
262
- " │ │ When the history of this era is written Putin’s war on Ukraine will have left Russia weaker and the rest of the world stronger. ⏎\n",
263
- " │ │ While it shouldn’t have taken something so terrible for people around the world to see what’s at stake now everyone sees it clearly. ⏎\n",
264
- " │ │ We see the unity among leaders of nations and a more unified Europe a more unified West. And we see unity among the people who are gathering in cities in large crowds around the world even in Russia to demonstrate their support for Ukraine. ⏎\n",
265
- " │ │ In the battle between democracy and autocracy, democracies are rising to the moment, and the world is clearly choosing the side of peace and security. ⏎\n",
266
- " │ │ This is a real test. It’s going to take time. So let us continue to draw inspiration from the iron will of the Ukrainian people. ⏎\n",
267
- " │ │ To our fellow Ukrainian Americans who forge a deep bond that connects our two nations we stand with you. ⏎\n",
268
- " │ │ Putin may circle Kyiv with tanks, but he will never gain the hearts and souls of the Ukrainian people. ⏎\n",
269
- " │ │ He will never extinguish their love of freedom. He will never weaken the resolve of the free world. ⏎\n",
270
- " │ │ We meet tonight in an America that has lived through two of the hardest years this nation has ever faced. ⏎\n",
271
- " │ │ The pandemic has been punishing. ⏎\n",
272
- " │ │ And so many families are living paycheck to paycheck, struggling to keep up with the rising cost of food, gas, housing, and so much more. ⏎\n",
273
- " │ │ I understand. ⏎\n",
274
- " │ │ I remember when my Dad had to leave our home in Scranton, Pennsylvania to find work. I grew up in a family where if the price of food went up, you felt it. ⏎\n",
275
- " │ │ That’s why one of the first things I did as President was fight to pass the American Rescue Plan. ⏎\n",
276
- " │ │ Because people were hurting. We needed to act, and we did. ⏎\n",
277
- " │ │ Few pieces of legislation have done more in a critical moment in our history to lift us out of crisis. ⏎\n",
278
- " │ │ It fueled our efforts to vaccinate the nation and combat COVID-19. It delivered immediate economic relief for tens of millions of Americans. ⏎\n",
279
- " │ │ Helped put food on their table, keep a roof over their heads, and cut the cost of health insurance. ⏎\n",
280
- " │ │ And as my Dad used to say, it gave people a little breathing room. ⏎\n",
281
- " │ │ And unlike the $2 Trillion tax cut passed in the previous administration that benefitted the top 1% of Americans, the American Rescue Plan helped working people—and left no one behind. ⏎\n",
282
- " │ │ And it worked. It created jobs. Lots of jobs. ⏎\n",
283
- " │ │ In fact—our econ\u001b[0m\n",
284
- " │ └── \u001b[38;5;5mInput Function\u001b[0m/2/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m\n",
285
- " ├── \u001b[38;5;5mPrompted\u001b[0m/3/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m3.978s\u001b[2m\u001b[0m\n",
286
- " │ ├── \u001b[38;5;4mprompt\u001b[0m: Write a concise summary of the following:⏎\n",
287
- " │ │ ⏎\n",
288
- " │ │ ⏎\n",
289
- " │ │ \"r economy. The Ruble has lost 30% of its value. ⏎\n",
290
- " │ │ The Russian stock market has lost 40% of its value and trading remains suspended. Russia’s economy is reeling and Putin alone is to blame. ⏎\n",
291
- " │ │ Together with our allies we are providing support to the Ukrainians in their fight for freedom. Military assistance. Economic assistance. Humanitarian assistance. ⏎\n",
292
- " │ │ We are giving more than $1 Billion in direct assistance to Ukraine. ⏎\n",
293
- " │ │ And we will continue to aid the Ukrainian people as they defend their country and to help ease their suffering. ⏎\n",
294
- " │ │ Let me be clear, our forces are not engaged and will not engage in conflict with Russian forces in Ukraine. ⏎\n",
295
- " │ │ Our forces are not going to Europe to fight in Ukraine, but to defend our NATO Allies – in the event that Putin decides to keep moving west. ⏎\n",
296
- " │ │ For that purpose we’ve mobilized American ground forces, air squadrons, and ship deployments to protect NATO countries including Poland, Romania, Latvia, Lithuania, and Estonia. ⏎\n",
297
- " │ │ As I have made crystal clear the United States and our Allies will defend every inch of territory of NATO countries with the full force of our collective power. ⏎\n",
298
- " │ │ And we remain clear-eyed. The Ukrainians are fighting back with pure courage. But the next few days weeks, months, will be hard on them. ⏎\n",
299
- " │ │ Putin has unleashed violence and chaos. But while he may make gains on the battlefield – he will pay a continuing high price over the long run. ⏎\n",
300
- " │ │ And a proud Ukrainian people, who have known 30 years of independence, have repeatedly shown that they will not tolerate anyone who tries to take their country backwards. ⏎\n",
301
- " │ │ To all Americans, I will be honest with you, as I’ve always promised. A Russian dictator, invading a foreign country, has costs around the world. ⏎\n",
302
- " │ │ And I’m taking robust action to make sure the pain of our sanctions is targeted at Russia’s economy. And I will use every tool at our disposal to protect American businesses and consumers. ⏎\n",
303
- " │ │ Tonight, I can announce that the United States has worked with 30 other countries to release 60 Million barrels of oil from reserves around the world. ⏎\n",
304
- " │ │ America will lead that effort, releasing 30 Million barrels from our own Strategic Petroleum Reserve. And we stand ready to do more if necessary, unified with our allies. ⏎\n",
305
- " │ │ These steps will help blunt gas prices here at home. And I know the news about what’s happening can seem alarming. ⏎\n",
306
- " │ │ But I want you to know that we are going to be okay. ⏎\n",
307
- " │ │ When the history of this era is written Putin’s war on Ukraine will have left Russia weaker and the rest of the world stronger. ⏎\n",
308
- " │ │ While it shouldn’t have taken something so terrible for people around the world to see what’s at stake now everyone sees it clearly. ⏎\n",
309
- " │ │ We see the unity among leaders of nations and a more unified Europe a more unified West. And we see unity among the people who are gathering in cities in large crowds around the world even in Russia to demonstrate their support for Ukraine. ⏎\n",
310
- " │ │ In the battle between democracy and autocracy, democracies are rising to the moment, and the world is clearly choosing the side of peace and security. ⏎\n",
311
- " │ │ This is a real test. It’s going to take time. So let us continue to draw inspiration from the iron will of the Ukrainian people. ⏎\n",
312
- " │ │ To our fellow Ukrainian Americans who forge a deep bond that connects our two nations we stand with you. ⏎\n",
313
- " │ │ Putin may circle Kyiv with tanks, but he will never gain the hearts and souls of the Ukrainian people. ⏎\n",
314
- " │ │ He will never extinguish their love of freedom. He will never weaken the resolve of the free world. ⏎\n",
315
- " │ │ We meet tonight in an America that has lived through two of the hardest years this nation has ever faced. ⏎\n",
316
- " │ │ The pandemic has been punishing. ⏎\n",
317
- " │ │ And so many families are living paycheck to paycheck, struggling to keep up with the rising cost of food, gas, housing, and so much more. ⏎\n",
318
- " │ │ I understand. ⏎\n",
319
- " │ │ I remember when my Dad had to leave our home in Scranton, Pennsylvania to find work. I grew up in a family where if the price of food went up, you felt it. ⏎\n",
320
- " │ │ That’s why one of the first things I did as President was fight to pass the American Rescue Plan. ⏎\n",
321
- " │ │ Because people were hurting. We needed to act, and we did. ⏎\n",
322
- " │ │ Few pieces of legislation have done more in a critical moment in our history to lift us out of crisis. ⏎\n",
323
- " │ │ It fueled our efforts to vaccinate the nation and combat COVID-19. It delivered immediate economic relief for tens of millions of Americans. ⏎\n",
324
- " │ │ Helped put food on their table, keep a roof over their heads, and cut the cost of health insurance. ⏎\n",
325
- " │ │ And as my Dad used to say, it gave people a little breathing room. ⏎\n",
326
- " │ │ And unlike the $2 Trillion tax cut passed in the previous administration that benefitted the top 1% of Americans, the American Rescue Plan helped working people—and left no one behind. ⏎\n",
327
- " │ │ And it worked. It created jobs. Lots of jobs. ⏎\n",
328
- " │ │ In fact—our econ\"⏎\n",
329
- " │ │ ⏎\n",
330
- " │ │ ⏎\n",
331
- " │ │ CONCISE SUMMARY:\u001b[0m\n",
332
- " │ └── \u001b[38;5;5mPrompted\u001b[0m/3/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:20Z\u001b[2m\u001b[0m\n",
333
- " ├── \u001b[38;5;5mResult\u001b[0m/4/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:20Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.000s\u001b[2m\u001b[0m\n",
334
- " │ ├── \u001b[38;5;4mresult\u001b[0m: In response to Russian President Putin's invasion of Ukraine, the US and its allies are providing military, economic, and humanitarian assistance to the Ukrainians. The US is also leading an effort to release 60 million barrels of oil from reserves around the world, including 30 million from the US Strategic Petroleum Reserve, to help blunt gas prices. The US President has also passed the American Rescue Plan to provide economic relief to tens of millions of Americans and create jobs.\u001b[0m\n",
335
- " │ └── \u001b[38;5;5mResult\u001b[0m/4/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:20Z\u001b[2m\u001b[0m\n",
336
- " └── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/5\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:20Z\u001b[2m\u001b[0m\n",
337
- "\n",
338
- "\u001b[38;5;15m2d2df820-8a1d-4db7-80ae-c54638c6a067\u001b[1m\u001b[0m\n",
339
- "└── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m4.513s\u001b[2m\u001b[0m\n",
340
- " ├── \u001b[38;5;5mInput Function\u001b[0m/2/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.001s\u001b[2m\u001b[0m\n",
341
- " │ ├── \u001b[38;5;4minput\u001b[0m: \u001b[0m\n",
342
- " │ │ └── \u001b[38;5;4mtext\u001b[0m: st things I did as President was fight to pass the American Rescue Plan. ⏎\n",
343
- " │ │ Because people were hurting. We needed to act, and we did. ⏎\n",
344
- " │ │ Few pieces of legislation have done more in a critical moment in our history to lift us out of crisis. ⏎\n",
345
- " │ │ It fueled our efforts to vaccinate the nation and combat COVID-19. It delivered immediate economic relief for tens of millions of Americans. ⏎\n",
346
- " │ │ Helped put food on their table, keep a roof over their heads, and cut the cost of health insurance. ⏎\n",
347
- " │ │ And as my Dad used to say, it gave people a little breathing room. ⏎\n",
348
- " │ │ And unlike the $2 Trillion tax cut passed in the previous administration that benefitted the top 1% of Americans, the American Rescue Plan helped working people—and left no one behind. ⏎\n",
349
- " │ │ And it worked. It created jobs. Lots of jobs. ⏎\n",
350
- " │ │ In fact—our economy created over 6.5 Million new jobs just last year, more jobs created in one year ⏎\n",
351
- " │ │ than ever before in the history of America. ⏎\n",
352
- " │ │ Our economy grew at a rate of 5.7% last year, the strongest growth in nearly 40 years, the first step in bringing fundamental change to an economy that hasn’t worked for the working people of this nation for too long. ⏎\n",
353
- " │ │ For the past 40 years we were told that if we gave tax breaks to those at the very top, the benefits would trickle down to everyone else. ⏎\n",
354
- " │ │ But that trickle-down theory led to weaker economic growth, lower wages, bigger deficits, and the widest gap between those at the top and everyone else in nearly a century. ⏎\n",
355
- " │ │ Vice President Harris and I ran for office with a new economic vision for America. ⏎\n",
356
- " │ │ Invest in America. Educate Americans. Grow the workforce. Build the economy from the bottom up ⏎\n",
357
- " │ │ and the middle out, not from the top down. ⏎\n",
358
- " │ │ Because we know that when the middle class grows, the poor have a ladder up and the wealthy do very well. ⏎\n",
359
- " │ │ America used to have the best roads, bridges, and airports on Earth. ⏎\n",
360
- " │ │ Now our infrastructure is ranked 13th in the world. ⏎\n",
361
- " │ │ We won’t be able to compete for the jobs of the 21st Century if we don’t fix that. ⏎\n",
362
- " │ │ That’s why it was so important to pass the Bipartisan Infrastructure Law—the most sweeping investment to rebuild America in history. ⏎\n",
363
- " │ │ This was a bipartisan effort, and I want to thank the members of both parties who worked to make it happen. ⏎\n",
364
- " │ │ We’re done talking about infrastructure weeks. ⏎\n",
365
- " │ │ We’re going to have an infrastructure decade. ⏎\n",
366
- " │ │ It is going to transform America and put us on a path to win the economic competition of the 21st Century that we face with the rest of the world—particularly with China. ⏎\n",
367
- " │ │ As I’ve told Xi Jinping, it is never a good bet to bet against the American people. ⏎\n",
368
- " │ │ We’ll create good jobs for millions of Americans, modernizing roads, airports, ports, and waterways all across America. ⏎\n",
369
- " │ │ And we’ll do it all to withstand the devastating effects of the climate crisis and promote environmental justice. ⏎\n",
370
- " │ │ We’ll build a national network of 500,000 electric vehicle charging stations, begin to replace poisonous lead pipes—so every child—and every American—has clean water to drink at home and at school, provide affordable high-speed internet for every American—urban, suburban, rural, and tribal communities. ⏎\n",
371
- " │ │ 4,000 projects have already been announced. ⏎\n",
372
- " │ │ And tonight, I’m announcing that this year we will start fixing over 65,000 miles of highway and 1,500 bridges in disrepair. ⏎\n",
373
- " │ │ When we use taxpayer dollars to rebuild America – we are going to Buy American: buy American products to support American jobs. ⏎\n",
374
- " │ │ The federal government spends about $600 Billion a year to keep the country safe and secure. ⏎\n",
375
- " │ │ There’s been a law on the books for almost a century ⏎\n",
376
- " │ │ to make sure taxpayers’ dollars support American jobs and businesses. ⏎\n",
377
- " │ │ Every Administration says they’ll do it, but we are actually doing it. ⏎\n",
378
- " │ │ We will buy American to make sure everything from the deck of an aircraft carrier to the steel on highway guardrails are made in America. ⏎\n",
379
- " │ │ But to compete for the best jobs of the future, we also need to level the playing field with China and other competitors. ⏎\n",
380
- " │ │ That’s why it is so important to pass the Bipartisan Innovation Act sitting in Congress that will make record investments in emerging technologies and American manufacturing. ⏎\n",
381
- " │ │ Let me give you one example of why it’s so important to pass it. ⏎\n",
382
- " │ │ If you travel 20 miles east of Columbus, Ohio, you’ll find 1,000 empty acres of land. ⏎\n",
383
- " │ │ It won’t look like much, but if you stop and look closely, you’ll see a “Field of dreams,” the ground on which America’s future will be built. ⏎\n",
384
- " │ │ This is where Intel, the American company that helped build Silicon Valley, is going to build its $20 billion semiconductor “mega site”. ⏎\n",
385
- " │ │ Up to eight state-of-the-art factories in one place. 10,000 new good-paying jobs. ⏎\n",
386
- " │ │ Some of the most sophisticated manufacturing in the world to make computer chips the size of a fingertip that po\u001b[0m\n",
387
- " │ └── \u001b[38;5;5mInput Function\u001b[0m/2/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m\n",
388
- " ├── \u001b[38;5;5mPrompted\u001b[0m/3/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m4.512s\u001b[2m\u001b[0m\n",
389
- " │ ├── \u001b[38;5;4mprompt\u001b[0m: Write a concise summary of the following:⏎\n",
390
- " │ │ ⏎\n",
391
- " │ │ ⏎\n",
392
- " │ │ \"st things I did as President was fight to pass the American Rescue Plan. ⏎\n",
393
- " │ │ Because people were hurting. We needed to act, and we did. ⏎\n",
394
- " │ │ Few pieces of legislation have done more in a critical moment in our history to lift us out of crisis. ⏎\n",
395
- " │ │ It fueled our efforts to vaccinate the nation and combat COVID-19. It delivered immediate economic relief for tens of millions of Americans. ⏎\n",
396
- " │ │ Helped put food on their table, keep a roof over their heads, and cut the cost of health insurance. ⏎\n",
397
- " │ │ And as my Dad used to say, it gave people a little breathing room. ⏎\n",
398
- " │ │ And unlike the $2 Trillion tax cut passed in the previous administration that benefitted the top 1% of Americans, the American Rescue Plan helped working people—and left no one behind. ⏎\n",
399
- " │ │ And it worked. It created jobs. Lots of jobs. ⏎\n",
400
- " │ │ In fact—our economy created over 6.5 Million new jobs just last year, more jobs created in one year ⏎\n",
401
- " │ │ than ever before in the history of America. ⏎\n",
402
- " │ │ Our economy grew at a rate of 5.7% last year, the strongest growth in nearly 40 years, the first step in bringing fundamental change to an economy that hasn’t worked for the working people of this nation for too long. ⏎\n",
403
- " │ │ For the past 40 years we were told that if we gave tax breaks to those at the very top, the benefits would trickle down to everyone else. ⏎\n",
404
- " │ │ But that trickle-down theory led to weaker economic growth, lower wages, bigger deficits, and the widest gap between those at the top and everyone else in nearly a century. ⏎\n",
405
- " │ │ Vice President Harris and I ran for office with a new economic vision for America. ⏎\n",
406
- " │ │ Invest in America. Educate Americans. Grow the workforce. Build the economy from the bottom up ⏎\n",
407
- " │ │ and the middle out, not from the top down. ⏎\n",
408
- " │ │ Because we know that when the middle class grows, the poor have a ladder up and the wealthy do very well. ⏎\n",
409
- " │ │ America used to have the best roads, bridges, and airports on Earth. ⏎\n",
410
- " │ │ Now our infrastructure is ranked 13th in the world. ⏎\n",
411
- " │ │ We won’t be able to compete for the jobs of the 21st Century if we don’t fix that. ⏎\n",
412
- " │ │ That’s why it was so important to pass the Bipartisan Infrastructure Law—the most sweeping investment to rebuild America in history. ⏎\n",
413
- " │ │ This was a bipartisan effort, and I want to thank the members of both parties who worked to make it happen. ⏎\n",
414
- " │ │ We’re done talking about infrastructure weeks. ⏎\n",
415
- " │ │ We’re going to have an infrastructure decade. ⏎\n",
416
- " │ │ It is going to transform America and put us on a path to win the economic competition of the 21st Century that we face with the rest of the world—particularly with China. ⏎\n",
417
- " │ │ As I’ve told Xi Jinping, it is never a good bet to bet against the American people. ⏎\n",
418
- " │ │ We’ll create good jobs for millions of Americans, modernizing roads, airports, ports, and waterways all across America. ⏎\n",
419
- " │ │ And we’ll do it all to withstand the devastating effects of the climate crisis and promote environmental justice. ⏎\n",
420
- " │ │ We’ll build a national network of 500,000 electric vehicle charging stations, begin to replace poisonous lead pipes—so every child—and every American—has clean water to drink at home and at school, provide affordable high-speed internet for every American—urban, suburban, rural, and tribal communities. ⏎\n",
421
- " │ │ 4,000 projects have already been announced. ⏎\n",
422
- " │ │ And tonight, I’m announcing that this year we will start fixing over 65,000 miles of highway and 1,500 bridges in disrepair. ⏎\n",
423
- " │ │ When we use taxpayer dollars to rebuild America – we are going to Buy American: buy American products to support American jobs. ⏎\n",
424
- " │ │ The federal government spends about $600 Billion a year to keep the country safe and secure. ⏎\n",
425
- " │ │ There’s been a law on the books for almost a century ⏎\n",
426
- " │ │ to make sure taxpayers’ dollars support American jobs and businesses. ⏎\n",
427
- " │ │ Every Administration says they’ll do it, but we are actually doing it. ⏎\n",
428
- " │ │ We will buy American to make sure everything from the deck of an aircraft carrier to the steel on highway guardrails are made in America. ⏎\n",
429
- " │ │ But to compete for the best jobs of the future, we also need to level the playing field with China and other competitors. ⏎\n",
430
- " │ │ That’s why it is so important to pass the Bipartisan Innovation Act sitting in Congress that will make record investments in emerging technologies and American manufacturing. ⏎\n",
431
- " │ │ Let me give you one example of why it’s so important to pass it. ⏎\n",
432
- " │ │ If you travel 20 miles east of Columbus, Ohio, you’ll find 1,000 empty acres of land. ⏎\n",
433
- " │ │ It won’t look like much, but if you stop and look closely, you’ll see a “Field of dreams,” the ground on which America’s future will be built. ⏎\n",
434
- " │ │ This is where Intel, the American company that helped build Silicon Valley, is going to build its $20 billion semiconductor “mega site”. ⏎\n",
435
- " │ │ Up to eight state-of-the-art factories in one place. 10,000 new good-paying jobs. ⏎\n",
436
- " │ │ Some of the most sophisticated manufacturing in the world to make computer chips the size of a fingertip that po\"⏎\n",
437
- " │ │ ⏎\n",
438
- " │ │ ⏎\n",
439
- " │ │ CONCISE SUMMARY:\u001b[0m\n",
440
- " │ └── \u001b[38;5;5mPrompted\u001b[0m/3/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:21Z\u001b[2m\u001b[0m\n",
441
- " ├── \u001b[38;5;5mResult\u001b[0m/4/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:21Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.000s\u001b[2m\u001b[0m\n",
442
- " │ ├── \u001b[38;5;4mresult\u001b[0m: President Biden fought to pass the American Rescue Plan to provide economic relief to tens of millions of Americans and create jobs. He also passed the Bipartisan Infrastructure Law to rebuild America and the Bipartisan Innovation Act to level the playing field with China and other competitors. An example of this is Intel's $20 billion semiconductor \"mega site\" in Ohio, which will create 10,000 new jobs.\u001b[0m\n",
443
- " │ └── \u001b[38;5;5mResult\u001b[0m/4/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:21Z\u001b[2m\u001b[0m\n",
444
- " └── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/5\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:21Z\u001b[2m\u001b[0m\n",
445
- "\n",
446
- "\u001b[38;5;15mdff81dcf-2073-4ba4-8856-d0f82c213311\u001b[1m\u001b[0m\n",
447
- "└── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m6.248s\u001b[2m\u001b[0m\n",
448
- " ├── \u001b[38;5;5mInput Function\u001b[0m/2/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.001s\u001b[2m\u001b[0m\n",
449
- " │ ├── \u001b[38;5;4minput\u001b[0m: \u001b[0m\n",
450
- " │ │ └── \u001b[38;5;4mtext\u001b[0m: Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. ⏎\n",
451
- " │ │ Last year COVID-19 kept us apart. This year we are finally together again. ⏎\n",
452
- " │ │ Tonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. ⏎\n",
453
- " │ │ With a duty to one another to the American people to the Constitution. ⏎\n",
454
- " │ │ And with an unwavering resolve that freedom will always triumph over tyranny. ⏎\n",
455
- " │ │ Six days ago, Russia’s Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. ⏎\n",
456
- " │ │ He thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. ⏎\n",
457
- " │ │ He met the Ukrainian people. ⏎\n",
458
- " │ │ From President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. ⏎\n",
459
- " │ │ Groups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland. ⏎\n",
460
- " │ │ In this struggle as President Zelenskyy said in his speech to the European Parliament “Light will win over darkness.” The Ukrainian Ambassador to the United States is here tonight. ⏎\n",
461
- " │ │ Let each of us here tonight in this Chamber send an unmistakable signal to Ukraine and to the world. ⏎\n",
462
- " │ │ Please rise if you are able and show that, Yes, we the United States of America stand with the Ukrainian people. ⏎\n",
463
- " │ │ Throughout our history we’ve learned this lesson when dictators do not pay a price for their aggression they cause more chaos. ⏎\n",
464
- " │ │ They keep moving. ⏎\n",
465
- " │ │ And the costs and the threats to America and the world keep rising. ⏎\n",
466
- " │ │ That’s why the NATO Alliance was created to secure peace and stability in Europe after World War 2. ⏎\n",
467
- " │ │ The United States is a member along with 29 other nations. ⏎\n",
468
- " │ │ It matters. American diplomacy matters. American resolve matters. ⏎\n",
469
- " │ │ Putin’s latest attack on Ukraine was premeditated and unprovoked. ⏎\n",
470
- " │ │ He rejected repeated efforts at diplomacy. ⏎\n",
471
- " │ │ He thought the West and NATO wouldn’t respond. And he thought he could divide us at home. Putin was wrong. We were ready. Here is what we did. ⏎\n",
472
- " │ │ We prepared extensively and carefully. ⏎\n",
473
- " │ │ We spent months building a coalition of other freedom-loving nations from Europe and the Americas to Asia and Africa to confront Putin. ⏎\n",
474
- " │ │ I spent countless hours unifying our European allies. We shared with the world in advance what we knew Putin was planning and precisely how he would try to falsely justify his aggression. ⏎\n",
475
- " │ │ We countered Russia’s lies with truth. ⏎\n",
476
- " │ │ And now that he has acted the free world is holding him accountable. ⏎\n",
477
- " │ │ Along with twenty-seven members of the European Union including France, Germany, Italy, as well as countries like the United Kingdom, Canada, Japan, Korea, Australia, New Zealand, and many others, even Switzerland. ⏎\n",
478
- " │ │ We are inflicting pain on Russia and supporting the people of Ukraine. Putin is now isolated from the world more than ever. ⏎\n",
479
- " │ │ Together with our allies –we are right now enforcing powerful economic sanctions. ⏎\n",
480
- " │ │ We are cutting off Russia’s largest banks from the international financial system. ⏎\n",
481
- " │ │ Preventing Russia’s central bank from defending the Russian Ruble making Putin’s $630 Billion “war fund” worthless. ⏎\n",
482
- " │ │ We are choking off Russia’s access to technology that will sap its economic strength and weaken its military for years to come. ⏎\n",
483
- " │ │ Tonight I say to the Russian oligarchs and corrupt leaders who have bilked billions of dollars off this violent regime no more. ⏎\n",
484
- " │ │ The U.S. Department of Justice is assembling a dedicated task force to go after the crimes of Russian oligarchs. ⏎\n",
485
- " │ │ We are joining with our European allies to find and seize your yachts your luxury apartments your private jets. We are coming for your ill-begotten gains. ⏎\n",
486
- " │ │ And tonight I am announcing that we will join our allies in closing off American air space to all Russian flights – further isolating Russia – and adding an additional squeeze –on their economy. The Ruble has lost 30% of its value. ⏎\n",
487
- " │ │ The Russian stock market has lost 40% of its value and trading remains suspended. Russia’s economy is reeling and Putin alone is to blame. ⏎\n",
488
- " │ │ Together with our allies we are providing support to the Ukrainians in their fight for freedom. Military assistance. Economic assistance. Humanitarian assistance. ⏎\n",
489
- " │ │ We are giving more than $1 Billion in direct assistance to Ukraine. ⏎\n",
490
- " │ │ And we will continue to aid the Ukrainian people as they defend their country and to help ease their suffering. ⏎\n",
491
- " │ │ Let me be clear, our forces are not engaged and will not engage in conflict with Russian forces in Ukraine. ⏎\n",
492
- " │ │ Our forces are not going to Europe to fight in Ukraine, but to defend our NATO Allies – in the event that Putin decides to keep moving west. ⏎\n",
493
- " │ │ For that pu\u001b[0m\n",
494
- " │ └── \u001b[38;5;5mInput Function\u001b[0m/2/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m\n",
495
- " ├── \u001b[38;5;5mPrompted\u001b[0m/3/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m6.247s\u001b[2m\u001b[0m\n",
496
- " │ ├── \u001b[38;5;4mprompt\u001b[0m: Write a concise summary of the following:⏎\n",
497
- " │ │ ⏎\n",
498
- " │ │ ⏎\n",
499
- " │ │ \"Madam Speaker, Madam Vice President, our First Lady and Second Gentleman. Members of Congress and the Cabinet. Justices of the Supreme Court. My fellow Americans. ⏎\n",
500
- " │ │ Last year COVID-19 kept us apart. This year we are finally together again. ⏎\n",
501
- " │ │ Tonight, we meet as Democrats Republicans and Independents. But most importantly as Americans. ⏎\n",
502
- " │ │ With a duty to one another to the American people to the Constitution. ⏎\n",
503
- " │ │ And with an unwavering resolve that freedom will always triumph over tyranny. ⏎\n",
504
- " │ │ Six days ago, Russia’s Vladimir Putin sought to shake the foundations of the free world thinking he could make it bend to his menacing ways. But he badly miscalculated. ⏎\n",
505
- " │ │ He thought he could roll into Ukraine and the world would roll over. Instead he met a wall of strength he never imagined. ⏎\n",
506
- " │ │ He met the Ukrainian people. ⏎\n",
507
- " │ │ From President Zelenskyy to every Ukrainian, their fearlessness, their courage, their determination, inspires the world. ⏎\n",
508
- " │ │ Groups of citizens blocking tanks with their bodies. Everyone from students to retirees teachers turned soldiers defending their homeland. ⏎\n",
509
- " │ │ In this struggle as President Zelenskyy said in his speech to the European Parliament “Light will win over darkness.” The Ukrainian Ambassador to the United States is here tonight. ⏎\n",
510
- " │ │ Let each of us here tonight in this Chamber send an unmistakable signal to Ukraine and to the world. ⏎\n",
511
- " │ │ Please rise if you are able and show that, Yes, we the United States of America stand with the Ukrainian people. ⏎\n",
512
- " │ │ Throughout our history we’ve learned this lesson when dictators do not pay a price for their aggression they cause more chaos. ⏎\n",
513
- " │ │ They keep moving. ⏎\n",
514
- " │ │ And the costs and the threats to America and the world keep rising. ⏎\n",
515
- " │ │ That’s why the NATO Alliance was created to secure peace and stability in Europe after World War 2. ⏎\n",
516
- " │ │ The United States is a member along with 29 other nations. ⏎\n",
517
- " │ │ It matters. American diplomacy matters. American resolve matters. ⏎\n",
518
- " │ │ Putin’s latest attack on Ukraine was premeditated and unprovoked. ⏎\n",
519
- " │ │ He rejected repeated efforts at diplomacy. ⏎\n",
520
- " │ │ He thought the West and NATO wouldn’t respond. And he thought he could divide us at home. Putin was wrong. We were ready. Here is what we did. ⏎\n",
521
- " │ │ We prepared extensively and carefully. ⏎\n",
522
- " │ │ We spent months building a coalition of other freedom-loving nations from Europe and the Americas to Asia and Africa to confront Putin. ⏎\n",
523
- " │ │ I spent countless hours unifying our European allies. We shared with the world in advance what we knew Putin was planning and precisely how he would try to falsely justify his aggression. ⏎\n",
524
- " │ │ We countered Russia’s lies with truth. ⏎\n",
525
- " │ │ And now that he has acted the free world is holding him accountable. ⏎\n",
526
- " │ │ Along with twenty-seven members of the European Union including France, Germany, Italy, as well as countries like the United Kingdom, Canada, Japan, Korea, Australia, New Zealand, and many others, even Switzerland. ⏎\n",
527
- " │ │ We are inflicting pain on Russia and supporting the people of Ukraine. Putin is now isolated from the world more than ever. ⏎\n",
528
- " │ │ Together with our allies –we are right now enforcing powerful economic sanctions. ⏎\n",
529
- " │ │ We are cutting off Russia’s largest banks from the international financial system. ⏎\n",
530
- " │ │ Preventing Russia’s central bank from defending the Russian Ruble making Putin’s $630 Billion “war fund” worthless. ⏎\n",
531
- " │ │ We are choking off Russia’s access to technology that will sap its economic strength and weaken its military for years to come. ⏎\n",
532
- " │ │ Tonight I say to the Russian oligarchs and corrupt leaders who have bilked billions of dollars off this violent regime no more. ⏎\n",
533
- " │ │ The U.S. Department of Justice is assembling a dedicated task force to go after the crimes of Russian oligarchs. ⏎\n",
534
- " │ │ We are joining with our European allies to find and seize your yachts your luxury apartments your private jets. We are coming for your ill-begotten gains. ⏎\n",
535
- " │ │ And tonight I am announcing that we will join our allies in closing off American air space to all Russian flights – further isolating Russia – and adding an additional squeeze –on their economy. The Ruble has lost 30% of its value. ⏎\n",
536
- " │ │ The Russian stock market has lost 40% of its value and trading remains suspended. Russia’s economy is reeling and Putin alone is to blame. ⏎\n",
537
- " │ │ Together with our allies we are providing support to the Ukrainians in their fight for freedom. Military assistance. Economic assistance. Humanitarian assistance. ⏎\n",
538
- " │ │ We are giving more than $1 Billion in direct assistance to Ukraine. ⏎\n",
539
- " │ │ And we will continue to aid the Ukrainian people as they defend their country and to help ease their suffering. ⏎\n",
540
- " │ │ Let me be clear, our forces are not engaged and will not engage in conflict with Russian forces in Ukraine. ⏎\n",
541
- " │ │ Our forces are not going to Europe to fight in Ukraine, but to defend our NATO Allies – in the event that Putin decides to keep moving west. ⏎\n",
542
- " │ │ For that pu\"⏎\n",
543
- " │ │ ⏎\n",
544
- " │ │ ⏎\n",
545
- " │ │ CONCISE SUMMARY:\u001b[0m\n",
546
- " │ └── \u001b[38;5;5mPrompted\u001b[0m/3/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:22Z\u001b[2m\u001b[0m\n",
547
- " ├── \u001b[38;5;5mResult\u001b[0m/4/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:22Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.000s\u001b[2m\u001b[0m\n",
548
- " │ ├── \u001b[38;5;4mresult\u001b[0m: In this speech, the speaker addresses the American people and the world, emphasizing the strength of the Ukrainian people in the face of Russia's aggression. The speaker also announces that the US and its allies are imposing economic sanctions on Russia, and providing military, economic, and humanitarian assistance to Ukraine. The US is also joining its allies in closing off American airspace to all Russian flights, and the Department of Justice is assembling a task force to go after the crimes of Russian oligarchs. The speaker also clarifies that US forces are not engaging in conflict with Russian forces in Ukraine, but are there to defend NATO allies.\u001b[0m\n",
549
- " │ └── \u001b[38;5;5mResult\u001b[0m/4/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:22Z\u001b[2m\u001b[0m\n",
550
- " └── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/5\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:22Z\u001b[2m\u001b[0m\n",
551
- "\n",
552
- "\u001b[38;5;15m2039cff8-944d-4daf-91e1-a78747fce7c0\u001b[1m\u001b[0m\n",
553
- "└── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m7.129s\u001b[2m\u001b[0m\n",
554
- " ├── \u001b[38;5;5mInput Function\u001b[0m/2/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.003s\u001b[2m\u001b[0m\n",
555
- " │ ├── \u001b[38;5;4minput\u001b[0m: \u001b[0m\n",
556
- " │ │ └── \u001b[38;5;4mtext\u001b[0m: why it is so important to pass the Bipartisan Innovation Act sitting in Congress that will make record investments in emerging technologies and American manufacturing. ⏎\n",
557
- " │ │ Let me give you one example of why it’s so important to pass it. ⏎\n",
558
- " │ │ If you travel 20 miles east of Columbus, Ohio, you’ll find 1,000 empty acres of land. ⏎\n",
559
- " │ │ It won’t look like much, but if you stop and look closely, you’ll see a “Field of dreams,” the ground on which America’s future will be built. ⏎\n",
560
- " │ │ This is where Intel, the American company that helped build Silicon Valley, is going to build its $20 billion semiconductor “mega site”. ⏎\n",
561
- " │ │ Up to eight state-of-the-art factories in one place. 10,000 new good-paying jobs. ⏎\n",
562
- " │ │ Some of the most sophisticated manufacturing in the world to make computer chips the size of a fingertip that power the world and our everyday lives. ⏎\n",
563
- " │ │ Smartphones. The Internet. Technology we have yet to invent. ⏎\n",
564
- " │ │ But that’s just the beginning. ⏎\n",
565
- " │ │ Intel’s CEO, Pat Gelsinger, who is here tonight, told me they are ready to increase their investment from ⏎\n",
566
- " │ │ $20 billion to $100 billion. ⏎\n",
567
- " │ │ That would be one of the biggest investments in manufacturing in American history. ⏎\n",
568
- " │ │ And all they’re waiting for is for you to pass this bill. ⏎\n",
569
- " │ │ So let’s not wait any longer. Send it to my desk. I’ll sign it. ⏎\n",
570
- " │ │ And we will really take off. ⏎\n",
571
- " │ │ And Intel is not alone. ⏎\n",
572
- " │ │ There’s something happening in America. ⏎\n",
573
- " │ │ Just look around and you’ll see an amazing story. ⏎\n",
574
- " │ │ The rebirth of the pride that comes from stamping products “Made In America.” The revitalization of American manufacturing. ⏎\n",
575
- " │ │ Companies are choosing to build new factories here, when just a few years ago, they would have built them overseas. ⏎\n",
576
- " │ │ That’s what is happening. Ford is investing $11 billion to build electric vehicles, creating 11,000 jobs across the country. ⏎\n",
577
- " │ │ GM is making the largest investment in its history—$7 billion to build electric vehicles, creating 4,000 jobs in Michigan. ⏎\n",
578
- " │ │ All told, we created 369,000 new manufacturing jobs in America just last year. ⏎\n",
579
- " │ │ Powered by people I’ve met like JoJo Burgess, from generations of union steelworkers from Pittsburgh, who’s here with us tonight. ⏎\n",
580
- " │ │ As Ohio Senator Sherrod Brown says, “It’s time to bury the label “Rust Belt.” ⏎\n",
581
- " │ │ It’s time. ⏎\n",
582
- " │ │ But with all the bright spots in our economy, record job growth and higher wages, too many families are struggling to keep up with the bills. ⏎\n",
583
- " │ │ Inflation is robbing them of the gains they might otherwise feel. ⏎\n",
584
- " │ │ I get it. That’s why my top priority is getting prices under control. ⏎\n",
585
- " │ │ Look, our economy roared back faster than most predicted, but the pandemic meant that businesses had a hard time hiring enough workers to keep up production in their factories. ⏎\n",
586
- " │ │ The pandemic also disrupted global supply chains. ⏎\n",
587
- " │ │ When factories close, it takes longer to make goods and get them from the warehouse to the store, and prices go up. ⏎\n",
588
- " │ │ Look at cars. ⏎\n",
589
- " │ │ Last year, there weren’t enough semiconductors to make all the cars that people wanted to buy. ⏎\n",
590
- " │ │ And guess what, prices of automobiles went up. ⏎\n",
591
- " │ │ So—we have a choice. ⏎\n",
592
- " │ │ One way to fight inflation is to drive down wages and make Americans poorer. ⏎\n",
593
- " │ │ I have a better plan to fight inflation. ⏎\n",
594
- " │ │ Lower your costs, not your wages. ⏎\n",
595
- " │ │ Make more cars and semiconductors in America. ⏎\n",
596
- " │ │ More infrastructure and innovation in America. ⏎\n",
597
- " │ │ More goods moving faster and cheaper in America. ⏎\n",
598
- " │ │ More jobs where you can earn a good living in America. ⏎\n",
599
- " │ │ And instead of relying on foreign supply chains, let’s make it in America. ⏎\n",
600
- " │ │ Economists call it “increasing the productive capacity of our economy.�� ⏎\n",
601
- " │ │ I call it building a better America. ⏎\n",
602
- " │ │ My plan to fight inflation will lower your costs and lower the deficit. ⏎\n",
603
- " │ │ 17 Nobel laureates in economics say my plan will ease long-term inflationary pressures. Top business leaders and most Americans support my plan. And here’s the plan: ⏎\n",
604
- " │ │ First – cut the cost of prescription drugs. Just look at insulin. One in ten Americans has diabetes. In Virginia, I met a 13-year-old boy named Joshua Davis. ⏎\n",
605
- " │ │ He and his Dad both have Type 1 diabetes, which means they need insulin every day. Insulin costs about $10 a vial to make. ⏎\n",
606
- " │ │ But drug companies charge families like Joshua and his Dad up to 30 times more. I spoke with Joshua’s mom. ⏎\n",
607
- " │ │ Imagine what it’s like to look at your child who needs insulin and have no idea how you’re going to pay for it. ⏎\n",
608
- " │ │ What it does to your dignity, your ability to look your child in the eye, to be the parent you expect to be. ⏎\n",
609
- " │ │ Joshua is here with us tonight. Yesterday was his birthday. Happy birthday, buddy. ⏎\n",
610
- " │ │ For Joshua, and for the 200,000 other young people with Type 1 diabetes, let’s cap the cost of insulin at $35 a month so everyone can afford it. ⏎\n",
611
- " │ │ Drug companies will still do very well. And while we’re at it let Medicare negotiate lower prices for prescription drugs, like the VA a\u001b[0m\n",
612
- " │ └── \u001b[38;5;5mInput Function\u001b[0m/2/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m\n",
613
- " ├── \u001b[38;5;5mPrompted\u001b[0m/3/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:16Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m7.125s\u001b[2m\u001b[0m\n",
614
- " │ ├── \u001b[38;5;4mprompt\u001b[0m: Write a concise summary of the following:⏎\n",
615
- " │ │ ⏎\n",
616
- " │ │ ⏎\n",
617
- " │ │ \" why it is so important to pass the Bipartisan Innovation Act sitting in Congress that will make record investments in emerging technologies and American manufacturing. ⏎\n",
618
- " │ │ Let me give you one example of why it’s so important to pass it. ⏎\n",
619
- " │ │ If you travel 20 miles east of Columbus, Ohio, you’ll find 1,000 empty acres of land. ⏎\n",
620
- " │ │ It won’t look like much, but if you stop and look closely, you’ll see a “Field of dreams,” the ground on which America’s future will be built. ⏎\n",
621
- " │ │ This is where Intel, the American company that helped build Silicon Valley, is going to build its $20 billion semiconductor “mega site”. ⏎\n",
622
- " │ │ Up to eight state-of-the-art factories in one place. 10,000 new good-paying jobs. ⏎\n",
623
- " │ │ Some of the most sophisticated manufacturing in the world to make computer chips the size of a fingertip that power the world and our everyday lives. ⏎\n",
624
- " │ │ Smartphones. The Internet. Technology we have yet to invent. ⏎\n",
625
- " │ │ But that’s just the beginning. ⏎\n",
626
- " │ │ Intel’s CEO, Pat Gelsinger, who is here tonight, told me they are ready to increase their investment from ⏎\n",
627
- " │ │ $20 billion to $100 billion. ⏎\n",
628
- " │ │ That would be one of the biggest investments in manufacturing in American history. ⏎\n",
629
- " │ │ And all they’re waiting for is for you to pass this bill. ⏎\n",
630
- " │ │ So let’s not wait any longer. Send it to my desk. I’ll sign it. ⏎\n",
631
- " │ │ And we will really take off. ⏎\n",
632
- " │ │ And Intel is not alone. ⏎\n",
633
- " │ │ There’s something happening in America. ⏎\n",
634
- " │ │ Just look around and you’ll see an amazing story. ⏎\n",
635
- " │ │ The rebirth of the pride that comes from stamping products “Made In America.” The revitalization of American manufacturing. ⏎\n",
636
- " │ │ Companies are choosing to build new factories here, when just a few years ago, they would have built them overseas. ⏎\n",
637
- " │ │ That’s what is happening. Ford is investing $11 billion to build electric vehicles, creating 11,000 jobs across the country. ⏎\n",
638
- " │ │ GM is making the largest investment in its history—$7 billion to build electric vehicles, creating 4,000 jobs in Michigan. ⏎\n",
639
- " │ │ All told, we created 369,000 new manufacturing jobs in America just last year. ⏎\n",
640
- " │ │ Powered by people I’ve met like JoJo Burgess, from generations of union steelworkers from Pittsburgh, who’s here with us tonight. ⏎\n",
641
- " │ │ As Ohio Senator Sherrod Brown says, “It’s time to bury the label “Rust Belt.” ⏎\n",
642
- " │ │ It’s time. ⏎\n",
643
- " │ │ But with all the bright spots in our economy, record job growth and higher wages, too many families are struggling to keep up with the bills. ⏎\n",
644
- " │ │ Inflation is robbing them of the gains they might otherwise feel. ⏎\n",
645
- " │ │ I get it. That’s why my top priority is getting prices under control. ⏎\n",
646
- " │ │ Look, our economy roared back faster than most predicted, but the pandemic meant that businesses had a hard time hiring enough workers to keep up production in their factories. ⏎\n",
647
- " │ │ The pandemic also disrupted global supply chains. ⏎\n",
648
- " │ │ When factories close, it takes longer to make goods and get them from the warehouse to the store, and prices go up. ⏎\n",
649
- " │ │ Look at cars. ⏎\n",
650
- " │ │ Last year, there weren’t enough semiconductors to make all the cars that people wanted to buy. ⏎\n",
651
- " │ │ And guess what, prices of automobiles went up. ⏎\n",
652
- " │ │ So—we have a choice. ⏎\n",
653
- " │ │ One way to fight inflation is to drive down wages and make Americans poorer. ⏎\n",
654
- " │ │ I have a better plan to fight inflation. ⏎\n",
655
- " │ │ Lower your costs, not your wages. ⏎\n",
656
- " │ │ Make more cars and semiconductors in America. ⏎\n",
657
- " │ │ More infrastructure and innovation in America. ⏎\n",
658
- " │ │ More goods moving faster and cheaper in America. ⏎\n",
659
- " │ │ More jobs where you can earn a good living in America. ⏎\n",
660
- " │ │ And instead of relying on foreign supply chains, let’s make it in America. ⏎\n",
661
- " │ │ Economists call it “increasing the productive capacity of our economy.” ⏎\n",
662
- " │ │ I call it building a better America. ⏎\n",
663
- " │ │ My plan to fight inflation will lower your costs and lower the deficit. ⏎\n",
664
- " │ │ 17 Nobel laureates in economics say my plan will ease long-term inflationary pressures. Top business leaders and most Americans support my plan. And here’s the plan: ⏎\n",
665
- " │ │ First – cut the cost of prescription drugs. Just look at insulin. One in ten Americans has diabetes. In Virginia, I met a 13-year-old boy named Joshua Davis. ⏎\n",
666
- " │ │ He and his Dad both have Type 1 diabetes, which means they need insulin every day. Insulin costs about $10 a vial to make. ⏎\n",
667
- " │ │ But drug companies charge families like Joshua and his Dad up to 30 times more. I spoke with Joshua’s mom. ⏎\n",
668
- " │ │ Imagine what it’s like to look at your child who needs insulin and have no idea how you’re going to pay for it. ⏎\n",
669
- " │ │ What it does to your dignity, your ability to look your child in the eye, to be the parent you expect to be. ⏎\n",
670
- " │ │ Joshua is here with us tonight. Yesterday was his birthday. Happy birthday, buddy. ⏎\n",
671
- " │ │ For Joshua, and for the 200,000 other young people with Type 1 diabetes, let’s cap the cost of insulin at $35 a month so everyone can afford it. ⏎\n",
672
- " │ │ Drug companies will still do very well. And while we’re at it let Medicare negotiate lower prices for prescription drugs, like the VA a\"⏎\n",
673
- " │ │ ⏎\n",
674
- " │ │ ⏎\n",
675
- " │ │ CONCISE SUMMARY:\u001b[0m\n",
676
- " │ └── \u001b[38;5;5mPrompted\u001b[0m/3/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:23Z\u001b[2m\u001b[0m\n",
677
- " ├── \u001b[38;5;5mResult\u001b[0m/4/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:23Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.000s\u001b[2m\u001b[0m\n",
678
- " │ ├── \u001b[38;5;4mresult\u001b[0m: The Bipartisan Innovation Act is an important bill that is currently sitting in Congress. It will make record investments in emerging technologies and American manufacturing, such as Intel's $20 billion semiconductor \"mega site\" in Ohio. This bill will create 10,000 new jobs and increase Intel's investment to $100 billion. Other companies, such as Ford and GM, are also investing in American manufacturing, creating 369,000 new jobs in the last year. President Biden's plan to fight inflation is to lower costs, not wages, by increasing the productive capacity of the economy. This includes capping the cost of insulin at $35 a month and allowing Medicare to negotiate lower prices for prescription drugs.\u001b[0m\n",
679
- " │ └── \u001b[38;5;5mResult\u001b[0m/4/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:23Z\u001b[2m\u001b[0m\n",
680
- " └── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/5\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:23Z\u001b[2m\u001b[0m\n",
681
- "\n",
682
- "\u001b[38;5;15ma2de8aab-500a-4df0-8764-3b3c091db0b9\u001b[1m\u001b[0m\n",
683
- "└── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:23Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m5.830s\u001b[2m\u001b[0m\n",
684
- " ├── \u001b[38;5;5mInput Function\u001b[0m/2/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:23Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.001s\u001b[2m\u001b[0m\n",
685
- " │ ├── \u001b[38;5;4minput\u001b[0m: \u001b[0m\n",
686
- " │ │ └── \u001b[38;5;4mtext\u001b[0m: In this speech, the speaker addresses the American people and the world, emphasizing the strength of the Ukrainian people in the face of Russia's aggression. The speaker also announces that the US and its allies are imposing economic sanctions on Russia, and providing military, economic, and humanitarian assistance to Ukraine. The US is also joining its allies in closing off American airspace to all Russian flights, and the Department of Justice is assembling a task force to go after the crimes of Russian oligarchs. The speaker also clarifies that US forces are not engaging in conflict with Russian forces in Ukraine, but are there to defend NATO allies.⏎\n",
687
- " │ │ In response to Russian President Putin's invasion of Ukraine, the US and its allies are providing military, economic, and humanitarian assistance to the Ukrainians. The US is also leading an effort to release 60 million barrels of oil from reserves around the world, including 30 million from the US Strategic Petroleum Reserve, to help blunt gas prices. The US President has also passed the American Rescue Plan to provide economic relief to tens of millions of Americans and create jobs.⏎\n",
688
- " │ │ President Biden fought to pass the American Rescue Plan to provide economic relief to tens of millions of Americans and create jobs. He also passed the Bipartisan Infrastructure Law to rebuild America and the Bipartisan Innovation Act to level the playing field with China and other competitors. An example of this is Intel's $20 billion semiconductor \"mega site\" in Ohio, which will create 10,000 new jobs.⏎\n",
689
- " │ │ The Bipartisan Innovation Act is an important bill that is currently sitting in Congress. It will make record investments in emerging technologies and American manufacturing, such as Intel's $20 billion semiconductor \"mega site\" in Ohio. This bill will create 10,000 new jobs and increase Intel's investment to $100 billion. Other companies, such as Ford and GM, are also investing in American manufacturing, creating 369,000 new jobs in the last year. President Biden's plan to fight inflation is to lower costs, not wages, by increasing the productive capacity of the economy. This includes capping the cost of insulin at $35 a month and allowing Medicare to negotiate lower prices for prescription drugs.\u001b[0m\n",
690
- " │ └── \u001b[38;5;5mInput Function\u001b[0m/2/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:23Z\u001b[2m\u001b[0m\n",
691
- " ├── \u001b[38;5;5mPrompted\u001b[0m/3/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:23Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m5.828s\u001b[2m\u001b[0m\n",
692
- " │ ├── \u001b[38;5;4mprompt\u001b[0m: Write a concise summary of the following:⏎\n",
693
- " │ │ ⏎\n",
694
- " │ │ ⏎\n",
695
- " │ │ \" In this speech, the speaker addresses the American people and the world, emphasizing the strength of the Ukrainian people in the face of Russia's aggression. The speaker also announces that the US and its allies are imposing economic sanctions on Russia, and providing military, economic, and humanitarian assistance to Ukraine. The US is also joining its allies in closing off American airspace to all Russian flights, and the Department of Justice is assembling a task force to go after the crimes of Russian oligarchs. The speaker also clarifies that US forces are not engaging in conflict with Russian forces in Ukraine, but are there to defend NATO allies.⏎\n",
696
- " │ │ In response to Russian President Putin's invasion of Ukraine, the US and its allies are providing military, economic, and humanitarian assistance to the Ukrainians. The US is also leading an effort to release 60 million barrels of oil from reserves around the world, including 30 million from the US Strategic Petroleum Reserve, to help blunt gas prices. The US President has also passed the American Rescue Plan to provide economic relief to tens of millions of Americans and create jobs.⏎\n",
697
- " │ │ President Biden fought to pass the American Rescue Plan to provide economic relief to tens of millions of Americans and create jobs. He also passed the Bipartisan Infrastructure Law to rebuild America and the Bipartisan Innovation Act to level the playing field with China and other competitors. An example of this is Intel's $20 billion semiconductor \"mega site\" in Ohio, which will create 10,000 new jobs.⏎\n",
698
- " │ │ The Bipartisan Innovation Act is an important bill that is currently sitting in Congress. It will make record investments in emerging technologies and American manufacturing, such as Intel's $20 billion semiconductor \"mega site\" in Ohio. This bill will create 10,000 new jobs and increase Intel's investment to $100 billion. Other companies, such as Ford and GM, are also investing in American manufacturing, creating 369,000 new jobs in the last year. President Biden's plan to fight inflation is to lower costs, not wages, by increasing the productive capacity of the economy. This includes capping the cost of insulin at $35 a month and allowing Medicare to negotiate lower prices for prescription drugs.\"⏎\n",
699
- " │ │ ⏎\n",
700
- " │ │ ⏎\n",
701
- " │ │ CONCISE SUMMARY:\u001b[0m\n",
702
- " │ └── \u001b[38;5;5mPrompted\u001b[0m/3/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:29Z\u001b[2m\u001b[0m\n",
703
- " ├── \u001b[38;5;5mResult\u001b[0m/4/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:29Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m0.000s\u001b[2m\u001b[0m\n",
704
- " │ ├── \u001b[38;5;4mresult\u001b[0m: In response to Russia's aggression in Ukraine, President Biden has taken action to provide military, economic, and humanitarian assistance to Ukraine, impose economic sanctions on Russia, and close off American airspace to Russian flights. He has also passed the American Rescue Plan to provide economic relief to Americans and the Bipartisan Innovation Act to level the playing field with China and other competitors. This bill will create 10,000 new jobs and increase Intel's investment to $100 billion. President Biden is also fighting inflation by capping the cost of insulin at $35 a month and allowing Medicare to negotiate lower prices for prescription drugs.\u001b[0m\n",
705
- " │ └── \u001b[38;5;5mResult\u001b[0m/4/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:29Z\u001b[2m\u001b[0m\n",
706
- " └── \u001b[38;5;5m<class '__main__.SummaryPrompt'>\u001b[0m/5\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:29Z\u001b[2m\u001b[0m\n",
707
- "\n",
708
- "\u001b[38;5;15mb77ccab5-cdad-4436-a3e1-230da294a528\u001b[1m\u001b[0m\n",
709
- "└── \u001b[38;5;5msummary\u001b[0m/1\u001b[0m ⇒ \u001b[38;5;2mstarted\u001b[0m \u001b[38;5;15m2023-02-26 01:42:15Z\u001b[2m\u001b[0m ⧖ \u001b[38;5;4m13.358s\u001b[2m\u001b[0m\n",
710
- " └── \u001b[38;5;5msummary\u001b[0m/2\u001b[0m ⇒ \u001b[38;5;2msucceeded\u001b[0m \u001b[38;5;15m2023-02-26 01:42:29Z\u001b[2m\u001b[0m\n",
711
- "\n"
712
- ]
713
- }
714
- ],
715
  "source": [
716
  "show_log(\"summary.log\")"
717
  ]
@@ -719,24 +128,9 @@
719
  ],
720
  "metadata": {
721
  "jupytext": {
722
- "cell_metadata_filter": "tags,-all"
723
- },
724
- "kernelspec": {
725
- "display_name": "minichain",
726
- "language": "python",
727
- "name": "minichain"
728
- },
729
- "language_info": {
730
- "codemirror_mode": {
731
- "name": "ipython",
732
- "version": 3
733
- },
734
- "file_extension": ".py",
735
- "mimetype": "text/x-python",
736
- "name": "python",
737
- "nbconvert_exporter": "python",
738
- "pygments_lexer": "ipython3",
739
- "version": "3.10.6"
740
  }
741
  },
742
  "nbformat": 4,
 
1
  {
2
  "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "806592e6",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
  {
15
  "cell_type": "markdown",
16
+ "id": "67d25b4b",
17
  "metadata": {},
18
  "source": [
19
+ "Summarize a long document by chunking and summarizing parts. Uses\n",
20
+ "aynchronous calls to the API. Adapted from LangChain [Map-Reduce\n",
21
+ "summary](https://langchain.readthedocs.io/en/stable/_modules/langchain/chains/mapreduce.html)."
22
  ]
23
  },
24
  {
25
  "cell_type": "code",
26
+ "execution_count": null,
27
+ "id": "264b8a01",
28
+ "metadata": {},
 
 
 
 
 
 
 
29
  "outputs": [],
30
  "source": [
31
  "import trio"
 
33
  },
34
  {
35
  "cell_type": "code",
36
+ "execution_count": null,
37
+ "id": "dba80a0d",
38
+ "metadata": {},
 
 
 
 
 
 
 
39
  "outputs": [],
40
  "source": [
41
  "from minichain import TemplatePrompt, show_log, start_chain"
 
43
  },
44
  {
45
  "cell_type": "markdown",
46
+ "id": "aeb6c55b",
47
  "metadata": {
48
  "lines_to_next_cell": 2
49
  },
 
53
  },
54
  {
55
  "cell_type": "code",
56
+ "execution_count": null,
57
+ "id": "083f994e",
58
+ "metadata": {},
 
 
 
 
 
 
 
 
59
  "outputs": [],
60
  "source": [
61
+ "class SummaryPrompt(TemplatePrompt):\n",
62
  " template_file = \"summary.pmpt.tpl\""
63
  ]
64
  },
65
  {
66
  "cell_type": "code",
67
+ "execution_count": null,
68
+ "id": "a3c3e226",
69
+ "metadata": {},
 
 
 
 
 
 
 
70
  "outputs": [],
71
  "source": [
72
+ "def chunk(f, width=4000, overlap=800):\n",
73
  " \"Split a documents into 4800 character overlapping chunks\"\n",
74
  " text = open(f).read().replace(\"\\n\\n\", \"\\n\")\n",
75
  " chunks = []\n",
 
 
76
  " for i in range(4):\n",
77
+ " if i * width > len(text):\n",
78
  " break\n",
79
+ " chunks.append({\"text\": text[i * width : (i + 1) * width + overlap]})\n",
80
  " return chunks"
81
  ]
82
  },
83
  {
84
  "cell_type": "code",
85
+ "execution_count": null,
86
+ "id": "73ac1c05",
87
+ "metadata": {},
88
+ "outputs": [],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
89
  "source": [
90
  "with start_chain(\"summary\") as backend:\n",
91
  " prompt = SummaryPrompt(backend.OpenAI())\n",
 
100
  },
101
  {
102
  "cell_type": "code",
103
+ "execution_count": null,
104
+ "id": "fd616c8e",
105
  "metadata": {
 
 
 
 
 
 
106
  "tags": [
107
  "hide_inp"
108
  ]
109
  },
110
+ "outputs": [],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
111
  "source": [
112
  "SummaryPrompt().show(\n",
113
+ " {\"text\": \"One way to fight is to drive down wages and make Americans poorer.\"},\n",
 
 
114
  " \"Make Americans poorer\",\n",
115
  ")"
116
  ]
117
  },
118
  {
119
  "cell_type": "code",
120
+ "execution_count": null,
121
+ "id": "2489bd19",
122
+ "metadata": {},
123
+ "outputs": [],
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
124
  "source": [
125
  "show_log(\"summary.log\")"
126
  ]
 
128
  ],
129
  "metadata": {
130
  "jupytext": {
131
+ "cell_metadata_filter": "tags,-all",
132
+ "main_language": "python",
133
+ "notebook_metadata_filter": "-all"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
134
  }
135
  },
136
  "nbformat": 4,
temp CHANGED
Binary files a/temp and b/temp differ
 
temp.ipynb ADDED
@@ -0,0 +1,186 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "2c01babb",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "1cdd0343",
18
+ "metadata": {
19
+ "lines_to_next_cell": 1
20
+ },
21
+ "outputs": [],
22
+ "source": [
23
+ "import minichain\n",
24
+ "from dataclasses import fields, dataclass, is_dataclass\n",
25
+ "from typing import List\n",
26
+ "from enum import Enum"
27
+ ]
28
+ },
29
+ {
30
+ "cell_type": "code",
31
+ "execution_count": null,
32
+ "id": "e2cbc112",
33
+ "metadata": {
34
+ "lines_to_next_cell": 1
35
+ },
36
+ "outputs": [],
37
+ "source": [
38
+ "class ColorType(Enum):\n",
39
+ " RED = 1\n",
40
+ " GREEN = 2\n",
41
+ " BLUE = 3"
42
+ ]
43
+ },
44
+ {
45
+ "cell_type": "code",
46
+ "execution_count": null,
47
+ "id": "4b939b84",
48
+ "metadata": {},
49
+ "outputs": [],
50
+ "source": [
51
+ "@dataclass\n",
52
+ "class Color:\n",
53
+ " color: ColorType\n",
54
+ " object: str\n",
55
+ " explanation: str"
56
+ ]
57
+ },
58
+ {
59
+ "cell_type": "code",
60
+ "execution_count": null,
61
+ "id": "17655157",
62
+ "metadata": {},
63
+ "outputs": [],
64
+ "source": []
65
+ },
66
+ {
67
+ "cell_type": "markdown",
68
+ "id": "1cf9ab63",
69
+ "metadata": {},
70
+ "source": [
71
+ "class StatType(Enum):\n",
72
+ " POINTS = 1\n",
73
+ " REBOUNDS = 2\n",
74
+ " ASSISTS = 3"
75
+ ]
76
+ },
77
+ {
78
+ "cell_type": "markdown",
79
+ "id": "43d8ada4",
80
+ "metadata": {},
81
+ "source": [
82
+ "@dataclass\n",
83
+ "class Stat:\n",
84
+ " value: int\n",
85
+ " stat: StatType"
86
+ ]
87
+ },
88
+ {
89
+ "cell_type": "markdown",
90
+ "id": "c5e29bce",
91
+ "metadata": {},
92
+ "source": [
93
+ "@dataclass\n",
94
+ "class Player:\n",
95
+ " player: str\n",
96
+ " stats: List[Stat]"
97
+ ]
98
+ },
99
+ {
100
+ "cell_type": "code",
101
+ "execution_count": null,
102
+ "id": "15f1c0ff",
103
+ "metadata": {
104
+ "lines_to_next_cell": 1
105
+ },
106
+ "outputs": [],
107
+ "source": [
108
+ "class T(minichain.TypedTemplatePrompt):\n",
109
+ " template_file = \"stats.pmpt.tpl\"\n",
110
+ " Out = Color"
111
+ ]
112
+ },
113
+ {
114
+ "cell_type": "markdown",
115
+ "id": "35f81370",
116
+ "metadata": {},
117
+ "source": [
118
+ "print(T().show({\"passage\": \"hello\"}, '[{\"player\": \"Harden\", \"stats\": {\"value\": 10, \"stat\": 2}}]'))"
119
+ ]
120
+ },
121
+ {
122
+ "cell_type": "code",
123
+ "execution_count": null,
124
+ "id": "07f012b6",
125
+ "metadata": {},
126
+ "outputs": [],
127
+ "source": [
128
+ "with minichain.start_chain(\"stats\") as backend:\n",
129
+ " p = T(backend.OpenAI(max_tokens=512))\n",
130
+ " print(p({\"passage\": open(\"sixers.txt\").read()}))"
131
+ ]
132
+ },
133
+ {
134
+ "cell_type": "markdown",
135
+ "id": "b4f0d711",
136
+ "metadata": {
137
+ "lines_to_next_cell": 2
138
+ },
139
+ "source": [
140
+ "def enum(x):\n",
141
+ " d = {e.name: e.value for e in x}\n",
142
+ " # d[\"__enum__\"] = True\n",
143
+ " return d"
144
+ ]
145
+ },
146
+ {
147
+ "cell_type": "markdown",
148
+ "id": "4aa96598",
149
+ "metadata": {},
150
+ "source": [
151
+ "def walk(x):\n",
152
+ " print(x)\n",
153
+ " if issubclass(x, Enum):\n",
154
+ " return enum(x)\n",
155
+ " if is_dataclass(x):\n",
156
+ " return {y.name: walk(y.type) for y in fields(x)}\n",
157
+ " return x.__name__\n",
158
+ " # return [x for x in fields(B)]\n",
159
+ " # print(x.name)\n",
160
+ " # print(x.type)\n",
161
+ " # if issubclass(x.type, Enum):\n",
162
+ " # for e in x.type:\n",
163
+ " # print(e.value)\n",
164
+ " # print(e.name)\n",
165
+ " # print(x)]"
166
+ ]
167
+ },
168
+ {
169
+ "cell_type": "markdown",
170
+ "id": "146e031e",
171
+ "metadata": {},
172
+ "source": [
173
+ "print(walk(B))"
174
+ ]
175
+ }
176
+ ],
177
+ "metadata": {
178
+ "jupytext": {
179
+ "cell_metadata_filter": "-all",
180
+ "main_language": "python",
181
+ "notebook_metadata_filter": "-all"
182
+ }
183
+ },
184
+ "nbformat": 4,
185
+ "nbformat_minor": 5
186
+ }
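temp.ipynb above is scratch work: the `walk` schema helper sits in markdown cells and its last cell calls `walk(B)` on a name that is never defined. A self-contained sketch of the same dataclass-to-dict schema walk, reusing the notebook's StatType/Stat/Player definitions, might look like this (the `__args__` handling for `List[...]` is an assumption, not in the original):

from dataclasses import dataclass, fields, is_dataclass
from enum import Enum
from typing import List

class StatType(Enum):
    POINTS = 1
    REBOUNDS = 2
    ASSISTS = 3

@dataclass
class Stat:
    value: int
    stat: StatType

@dataclass
class Player:
    player: str
    stats: List[Stat]

def walk(x):
    # Enums flatten to a {name: value} mapping.
    if isinstance(x, type) and issubclass(x, Enum):
        return {e.name: e.value for e in x}
    # Dataclasses flatten to a {field_name: walked_field_type} mapping.
    if is_dataclass(x):
        return {f.name: walk(f.type) for f in fields(x)}
    # Generic aliases such as List[Stat] expose their element types via __args__ (assumed handling).
    if hasattr(x, "__args__"):
        return [walk(arg) for arg in x.__args__]
    return x.__name__

print(walk(Player))
# -> {'player': 'str', 'stats': [{'value': 'int',
#     'stat': {'POINTS': 1, 'REBOUNDS': 2, 'ASSISTS': 3}}]}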
temp.log CHANGED
Binary files a/temp.log and b/temp.log differ
 
test.ipynb ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "id": "e38ce387",
7
+ "metadata": {},
8
+ "outputs": [],
9
+ "source": [
10
+ "!pip install -q git+https://github.com/srush/MiniChain\n",
11
+ "!git clone git+https://github.com/srush/MiniChain; cp -fr MiniChain/examples/* . "
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "819e17b6",
18
+ "metadata": {
19
+ "lines_to_next_cell": 1
20
+ },
21
+ "outputs": [],
22
+ "source": [
23
+ "from minichain import prompt, Mock, show\n",
24
+ "import minichain"
25
+ ]
26
+ },
27
+ {
28
+ "cell_type": "code",
29
+ "execution_count": null,
30
+ "id": "3cd78b45",
31
+ "metadata": {
32
+ "lines_to_next_cell": 1
33
+ },
34
+ "outputs": [],
35
+ "source": [
36
+ "@prompt(Mock([\"red\", \"blue\"]))\n",
37
+ "def prompt_function1(model, x):\n",
38
+ " return model(x)"
39
+ ]
40
+ },
41
+ {
42
+ "cell_type": "code",
43
+ "execution_count": null,
44
+ "id": "3be33c12",
45
+ "metadata": {
46
+ "lines_to_next_cell": 1
47
+ },
48
+ "outputs": [],
49
+ "source": [
50
+ "@prompt(Mock([\"b\"]), template_file=\"test.pmpt.tpl\")\n",
51
+ "def prompt_function2(model, x):\n",
52
+ " if x == \"red\":\n",
53
+ " return model.fail(1)\n",
54
+ " return model(dict(x=x))"
55
+ ]
56
+ },
57
+ {
58
+ "cell_type": "code",
59
+ "execution_count": null,
60
+ "id": "9bb94056",
61
+ "metadata": {},
62
+ "outputs": [],
63
+ "source": [
64
+ "def run(query):\n",
65
+ " x = prompt_function1(query)\n",
66
+ " return prompt_function2(prompt_function2(x))"
67
+ ]
68
+ },
69
+ {
70
+ "cell_type": "code",
71
+ "execution_count": null,
72
+ "id": "1f0d5d5a",
73
+ "metadata": {},
74
+ "outputs": [],
75
+ "source": [
76
+ "demo = show(run,\n",
77
+ " examples=[\"a\"],\n",
78
+ " subprompts=[prompt_function1, prompt_function2, prompt_function2])"
79
+ ]
80
+ },
81
+ {
82
+ "cell_type": "code",
83
+ "execution_count": null,
84
+ "id": "be9be168",
85
+ "metadata": {},
86
+ "outputs": [],
87
+ "source": [
88
+ "if __name__ == \"__main__\":\n",
89
+ " demo.launch()"
90
+ ]
91
+ }
92
+ ],
93
+ "metadata": {
94
+ "jupytext": {
95
+ "cell_metadata_filter": "-all",
96
+ "main_language": "python",
97
+ "notebook_metadata_filter": "-all"
98
+ }
99
+ },
100
+ "nbformat": 4,
101
+ "nbformat_minor": 5
102
+ }
test.py CHANGED
@@ -1,12 +1,14 @@
1
  from minichain import prompt, Mock, show
2
  import minichain
3
 
4
- @prompt(Mock(["hello"]))
5
  def prompt_function1(model, x):
6
  return model(x)
7
 
8
  @prompt(Mock(["b"]), template_file="test.pmpt.tpl")
9
  def prompt_function2(model, x):
 
 
10
  return model(dict(x=x))
11
 
12
  def run(query):
 
1
  from minichain import prompt, Mock, show
2
  import minichain
3
 
4
+ @prompt(Mock(["red", "blue"]))
5
  def prompt_function1(model, x):
6
  return model(x)
7
 
8
  @prompt(Mock(["b"]), template_file="test.pmpt.tpl")
9
  def prompt_function2(model, x):
10
+ if x == "red":
11
+ return model.fail(1)
12
  return model(dict(x=x))
13
 
14
  def run(query):