TokenBender committed on
Commit
03cfcb7
1 Parent(s): badeea0

Upload model inference jupyter notebook

Browse files
Files changed (1) hide show
  1. model_inference_example.ipynb +164 -0
model_inference_example.ipynb ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": 1,
6
+ "id": "0683c00e-bd2d-430f-a4f0-629457bfcc0c",
7
+ "metadata": {},
8
+ "outputs": [
9
+ {
10
+ "data": {
11
+ "application/vnd.jupyter.widget-view+json": {
12
+ "model_id": "fbcb8ef4a4754556991458d17072576a",
13
+ "version_major": 2,
14
+ "version_minor": 0
15
+ },
16
+ "text/plain": [
17
+ "VBox(children=(HTML(value='<center> <img\\nsrc=https://huggingface.co/front/assets/huggingface_logo-noborder.sv…"
18
+ ]
19
+ },
20
+ "metadata": {},
21
+ "output_type": "display_data"
22
+ }
23
+ ],
24
+ "source": [
25
+ "from huggingface_hub import notebook_login\n",
26
+ "notebook_login()"
27
+ ]
28
+ },
29
+ {
30
+ "cell_type": "code",
31
+ "execution_count": 8,
32
+ "id": "ad5d9515-4dee-4b05-abe1-0a60ebbddf64",
33
+ "metadata": {},
34
+ "outputs": [
35
+ {
36
+ "data": {
37
+ "application/vnd.jupyter.widget-view+json": {
38
+ "model_id": "5622f3d576d54360b81f6a35dbc4fc76",
39
+ "version_major": 2,
40
+ "version_minor": 0
41
+ },
42
+ "text/plain": [
43
+ "Loading checkpoint shards: 0%| | 0/2 [00:00<?, ?it/s]"
44
+ ]
45
+ },
46
+ "metadata": {},
47
+ "output_type": "display_data"
48
+ }
49
+ ],
50
+ "source": [
51
+ "from transformers import AutoTokenizer\n",
52
+ "import transformers\n",
53
+ "import torch\n",
54
+ "\n",
55
+ "model = \"TokenBender/llama2-7b-chat-hf-codeCherryPop-qLoRA-merged\"\n",
56
+ "\n",
57
+ "tokenizer = AutoTokenizer.from_pretrained(model)\n",
58
+ "pipeline = transformers.pipeline(\n",
59
+ " \"text-generation\",\n",
60
+ " model=model,\n",
61
+ " torch_dtype=torch.float16,\n",
62
+ " device_map=\"auto\",\n",
63
+ ")"
64
+ ]
65
+ },
66
+ {
67
+ "cell_type": "code",
68
+ "execution_count": 9,
69
+ "id": "855e2272-1e42-46ad-9cab-9bed6a7fc6fc",
70
+ "metadata": {},
71
+ "outputs": [
72
+ {
73
+ "name": "stdout",
74
+ "output_type": "stream",
75
+ "text": [
76
+ "Result: [INST]<<SYS>>\n",
77
+ " You are a helpful coding assistant that provides code based on the given query in context. \n",
78
+ "<</SYS>>\n",
79
+ "Write a python program to perform binary search in a given list.[/INST] Of course! Here is a basic implementation of a binary search algorithm in Python:\n",
80
+ "```\n",
81
+ "def binary_search(list, target):\n",
82
+ " # Initialize the indices of the left and right halves of the list\n",
83
+ " low = 0\n",
84
+ " high = len(list) - 1\n",
85
+ "\n",
86
+ " # While the list has not been searched thoroughly\n",
87
+ " while low < high:\n",
88
+ " # Calculate the index of the middle element of the list\n",
89
+ " mid = (low + high) // 2\n",
90
+ "\n",
91
+ " # Check whether the target is equal to the middle element\n",
92
+ " if list[mid] == target:\n",
93
+ " # Return the index of the target\n",
94
+ " return mid\n",
95
+ "\n",
96
+ " # If the target is less than the middle element, move to the right\n",
97
+ " elif list[mid] < target:\n",
98
+ " high = mid - 1\n",
99
+ " # If the target is greater than the middle element, move to the left\n",
100
+ " else:\n",
101
+ " low = mid + 1\n",
102
+ "\n",
103
+ " # If we reach this point, the target was not found in the list\n",
104
+ " return -1\n",
105
+ "\n",
106
+ "# Example usage\n",
107
+ "list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]\n",
108
+ "target = 5\n",
109
+ "print(binary_search(list, target))\n",
110
+ "```\n",
111
+ "This code uses a simple loop to perform the binary search. It iterates through the list, comparing each element to the target, and adjusting the position of the current element based on the result of the comparison. At the end of the loop, it returns the index of the target element if it is found, or -1 if it is not found.\n",
112
+ "You can test the code with your own list by replacing the `list` variable with a list of your own, and calling the `binary_search` function with the target element you want to search for. For example, if your list is `['apple', 'banana', 'orange', 'banana', 'orange', 'apple']`, and you want to search for the element 'banana', you can call the function like this: `print(binary_search(list, 'banana')`.\n"
113
+ ]
114
+ }
115
+ ],
116
+ "source": [
117
+ "conversation_text = '''[INST]<<SYS>>\n",
118
+ " You are a helpful coding assistant that provides code based on the given query in context. \n",
119
+ "<</SYS>>\n",
120
+ "Write a python program to perform binary search in a given list.[/INST]'''\n",
121
+ "\n",
122
+ "sequences = pipeline(\n",
123
+ " conversation_text,\n",
124
+ " do_sample=True,\n",
125
+ " top_k=10,\n",
126
+ " num_return_sequences=1,\n",
127
+ " eos_token_id=tokenizer.eos_token_id,\n",
128
+ " max_length=2048,\n",
129
+ ")\n",
130
+ "for seq in sequences:\n",
131
+ " print(f\"Result: {seq['generated_text']}\")"
132
+ ]
133
+ },
134
+ {
135
+ "cell_type": "code",
136
+ "execution_count": null,
137
+ "id": "e85dd028-e20d-496d-a5ac-6835ecb3e212",
138
+ "metadata": {},
139
+ "outputs": [],
140
+ "source": []
141
+ }
142
+ ],
143
+ "metadata": {
144
+ "kernelspec": {
145
+ "display_name": "Python 3 (ipykernel)",
146
+ "language": "python",
147
+ "name": "python3"
148
+ },
149
+ "language_info": {
150
+ "codemirror_mode": {
151
+ "name": "ipython",
152
+ "version": 3
153
+ },
154
+ "file_extension": ".py",
155
+ "mimetype": "text/x-python",
156
+ "name": "python",
157
+ "nbconvert_exporter": "python",
158
+ "pygments_lexer": "ipython3",
159
+ "version": "3.10.6"
160
+ }
161
+ },
162
+ "nbformat": 4,
163
+ "nbformat_minor": 5
164
+ }