File size: 3,764 Bytes
171b9e7 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 |
{
"cells": [
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"# Use a pipeline as a high-level helper\n",
"from transformers import pipeline"
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"\n",
"prompt = [\n",
" {\"role\": \"user\", \"content\": \"\"\" \"SELECT CITYalias0.CITY_NAME FROM CITY AS CITYalias0 WHERE CITYalias0.POPULATION = ( SELECT MAX( CITYalias1.POPULATION ) FROM CITY AS CITYalias1 WHERE CITYalias1.STATE_NAME = \\\"state_name0\\\" ) AND CITYalias0.STATE_NAME = \\\"state_name0\\\" ;\"\"\"},\n",
"]"
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"c:\\Users\\eclal\\OneDrive\\Desktop\\VSCode\\.conda\\Lib\\site-packages\\transformers\\models\\mistral\\modeling_mistral.py:674: UserWarning: 1Torch was not compiled with flash attention. (Triggered internally at ..\\aten\\src\\ATen\\native\\transformers\\cuda\\sdp_utils.cpp:455.)\n",
" attn_output = torch.nn.functional.scaled_dot_product_attention(\n"
]
},
{
"data": {
"text/plain": [
"[{'generated_text': [{'role': 'user',\n",
" 'content': ' \"SELECT CITYalias0.CITY_NAME FROM CITY AS CITYalias0 WHERE CITYalias0.POPULATION = ( SELECT MAX( CITYalias1.POPULATION ) FROM CITY AS CITYalias1 WHERE CITYalias1.STATE_NAME = \"state_name0\" ) AND CITYalias0.STATE_NAME = \"state_name0\" ;'},\n",
" {'role': 'assistant',\n",
" 'content': 'return me the city with the highest population in \" state_name0 \".\"'}]}]"
]
},
"execution_count": 11,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"generate_text = pipeline(\n",
" model=\"eclfe/sqlen-1-21\",\n",
" torch_dtype=\"auto\",\n",
" trust_remote_code=True,\n",
" device_map={\"\": \"cuda:0\"},\n",
" token=True,\n",
")\n",
"generate_text(prompt, renormalize_logits=True)"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [
{
"data": {
"text/plain": [
"[{'generated_text': [{'role': 'user',\n",
" 'content': ' \"SELECT CITYalias0.CITY_NAME FROM CITY AS CITYalias0 WHERE CITYalias0.POPULATION = ( SELECT MAX( CITYalias1.POPULATION ) FROM CITY AS CITYalias1 WHERE CITYalias1.STATE_NAME = \"state_name0\" ) AND CITYalias0.STATE_NAME = \"state_name0\" ;'},\n",
" {'role': 'assistant',\n",
" 'content': 'return me the city with the highest population in \" state_name0 \".\"'}]}]"
]
},
"execution_count": 12,
"metadata": {},
"output_type": "execute_result"
}
],
"source": [
"\n",
"\n",
"prompt = [\n",
" {\"role\": \"user\", \"content\": \"\"\" \"SELECT CITYalias0.CITY_NAME FROM CITY AS CITYalias0 WHERE CITYalias0.POPULATION = ( SELECT MAX( CITYalias1.POPULATION ) FROM CITY AS CITYalias1 WHERE CITYalias1.STATE_NAME = \\\"state_name0\\\" ) AND CITYalias0.STATE_NAME = \\\"state_name0\\\" ;\"\"\"},\n",
"]\n",
"pipe = pipeline(\"text-generation\", model=\"eclfe/sqlen-1-21\")\n",
"pipe(prompt)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.12.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
|