Commit

update height
dnth committed Oct 24, 2024
1 parent 54d1d48 commit c925a48
Showing 2 changed files with 15 additions and 11 deletions.
22 changes: 11 additions & 11 deletions nbs/quickstart.ipynb
@@ -302,7 +302,7 @@
{
"data": {
"text/html": [
"<table id=\"itables_a97359bf_88ed_49b6_a617_9c6d5e62e7d9\" class=\"display nowrap\" data-quarto-disable-processing=\"true\" style=\"table-layout:auto;width:auto;margin:auto;caption-side:bottom\">\n",
"<table id=\"itables_24ae6511_3ebd_4b0e_98ca_896cec34b2e3\" class=\"display nowrap\" data-quarto-disable-processing=\"true\" style=\"table-layout:auto;width:auto;margin:auto;caption-side:bottom\">\n",
"<thead>\n",
" <tr style=\"text-align: right;\">\n",
" \n",
@@ -414,7 +414,7 @@
"<script type=\"module\">\n",
" const { DataTable, jQuery: $ } = await import(window._datatables_src_for_itables_2_2_2);\n",
"\n",
" document.querySelectorAll(\"#itables_a97359bf_88ed_49b6_a617_9c6d5e62e7d9:not(.dataTable)\").forEach(table => {\n",
" document.querySelectorAll(\"#itables_24ae6511_3ebd_4b0e_98ca_896cec34b2e3:not(.dataTable)\").forEach(table => {\n",
" if (!(table instanceof HTMLTableElement))\n",
" return;\n",
"\n",
@@ -498,9 +498,9 @@
"name": "stderr",
"output_type": "stream",
"text": [
"\u001b[32m2024-10-24 16:07:14.739\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mxinfer.models\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m63\u001b[0m - \u001b[1mModel: vikhyatk/moondream2\u001b[0m\n",
"\u001b[32m2024-10-24 16:07:14.739\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mxinfer.models\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m64\u001b[0m - \u001b[1mDevice: cuda\u001b[0m\n",
"\u001b[32m2024-10-24 16:07:14.740\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mxinfer.models\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m65\u001b[0m - \u001b[1mDtype: float16\u001b[0m\n",
"\u001b[32m2024-10-24 16:12:52.545\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mxinfer.models\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m63\u001b[0m - \u001b[1mModel: vikhyatk/moondream2\u001b[0m\n",
"\u001b[32m2024-10-24 16:12:52.546\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mxinfer.models\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m64\u001b[0m - \u001b[1mDevice: cuda\u001b[0m\n",
"\u001b[32m2024-10-24 16:12:52.546\u001b[0m | \u001b[1mINFO \u001b[0m | \u001b[36mxinfer.models\u001b[0m:\u001b[36m__init__\u001b[0m:\u001b[36m65\u001b[0m - \u001b[1mDtype: float16\u001b[0m\n",
"PhiForCausalLM has generative capabilities, as `prepare_inputs_for_generation` is explicitly overwritten. However, it doesn't directly inherit from `GenerationMixin`. From 👉v4.50👈 onwards, `PreTrainedModel` will NOT inherit from `GenerationMixin`, and this model will lose the ability to call `generate` and other related functions.\n",
" - If you're using `trust_remote_code=True`, you can get rid of this warning by loading the model with an auto class. See https://huggingface.co/docs/transformers/en/model_doc/auto#auto-classes\n",
" - If you are the owner of the model architecture code, please modify your model class such that it inherits from `GenerationMixin` (after `PreTrainedModel`, otherwise you'll get an exception).\n",
@@ -635,8 +635,8 @@
"│<span style=\"color: #008080; text-decoration-color: #008080\"> Device </span>│<span style=\"color: #800080; text-decoration-color: #800080\"> cuda </span>│\n",
"│<span style=\"color: #008080; text-decoration-color: #008080\"> Dtype </span>│<span style=\"color: #800080; text-decoration-color: #800080\"> torch.float16 </span>│\n",
"│<span style=\"color: #008080; text-decoration-color: #008080\"> Number of Inferences </span>│<span style=\"color: #800080; text-decoration-color: #800080\"> 2 </span>│\n",
"│<span style=\"color: #008080; text-decoration-color: #008080\"> Total Inference Time (ms) </span>│<span style=\"color: #800080; text-decoration-color: #800080\"> 2029.0934 </span>│\n",
"│<span style=\"color: #008080; text-decoration-color: #008080\"> Average Latency (ms) </span>│<span style=\"color: #800080; text-decoration-color: #800080\"> 1014.5467 </span>│\n",
"│<span style=\"color: #008080; text-decoration-color: #008080\"> Total Inference Time (ms) </span>│<span style=\"color: #800080; text-decoration-color: #800080\"> 1970.4506 </span>│\n",
"│<span style=\"color: #008080; text-decoration-color: #008080\"> Average Latency (ms) </span>│<span style=\"color: #800080; text-decoration-color: #800080\"> 985.2253 </span>│\n",
"╰───────────────────────────┴─────────────────────╯\n",
"</pre>\n"
],
@@ -649,8 +649,8 @@
"│\u001b[36m \u001b[0m\u001b[36mDevice \u001b[0m\u001b[36m \u001b[0m│\u001b[35m \u001b[0m\u001b[35mcuda \u001b[0m\u001b[35m \u001b[0m│\n",
"│\u001b[36m \u001b[0m\u001b[36mDtype \u001b[0m\u001b[36m \u001b[0m│\u001b[35m \u001b[0m\u001b[35mtorch.float16 \u001b[0m\u001b[35m \u001b[0m│\n",
"│\u001b[36m \u001b[0m\u001b[36mNumber of Inferences \u001b[0m\u001b[36m \u001b[0m│\u001b[35m \u001b[0m\u001b[35m2 \u001b[0m\u001b[35m \u001b[0m│\n",
"│\u001b[36m \u001b[0m\u001b[36mTotal Inference Time (ms)\u001b[0m\u001b[36m \u001b[0m│\u001b[35m \u001b[0m\u001b[35m2029.0934 \u001b[0m\u001b[35m \u001b[0m│\n",
"│\u001b[36m \u001b[0m\u001b[36mAverage Latency (ms) \u001b[0m\u001b[36m \u001b[0m│\u001b[35m \u001b[0m\u001b[35m1014.5467 \u001b[0m\u001b[35m \u001b[0m│\n",
"│\u001b[36m \u001b[0m\u001b[36mTotal Inference Time (ms)\u001b[0m\u001b[36m \u001b[0m│\u001b[35m \u001b[0m\u001b[35m1970.4506 \u001b[0m\u001b[35m \u001b[0m│\n",
"│\u001b[36m \u001b[0m\u001b[36mAverage Latency (ms) \u001b[0m\u001b[36m \u001b[0m│\u001b[35m \u001b[0m\u001b[35m985.2253 \u001b[0m\u001b[35m \u001b[0m│\n",
"╰───────────────────────────┴─────────────────────╯\n"
]
},
@@ -708,15 +708,15 @@
"name": "stdout",
"output_type": "stream",
"text": [
"* Running on local URL: http://127.0.0.1:7862\n",
"* Running on local URL: http://127.0.0.1:7861\n",
"\n",
"To create a public link, set `share=True` in `launch()`.\n"
]
},
{
"data": {
"text/html": [
"<div><iframe src=\"http://127.0.0.1:7862/\" width=\"100%\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
"<div><iframe src=\"http://127.0.0.1:7861/\" width=\"100%\" height=\"1000\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
],
"text/plain": [
"<IPython.core.display.HTML object>"
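The quickstart.ipynb hunks above only touch re-executed cell outputs: a new itables table id, fresh loguru timestamps, updated inference-time stats, a different local Gradio port, and the embedded iframe height raised from 500 to 1000. For orientation, here is a minimal sketch of the kind of quickstart code that would produce those outputs. The call names (`create_model`, `infer`, `print_stats`, `launch_gradio`) follow xinfer's documented API but are assumptions here, not lines copied from the notebook.

```python
# Hedged sketch of the quickstart cells whose outputs changed in this diff.
# The function names below follow xinfer's documented API and are assumptions,
# not a verbatim copy of the notebook source.
import xinfer

# Matches the logged "Model: vikhyatk/moondream2", "Device: cuda", "Dtype: float16"
model = xinfer.create_model("vikhyatk/moondream2", device="cuda", dtype="float16")

# Two calls, matching "Number of Inferences: 2" in the stats table above
image = "path/to/an/example.jpg"  # placeholder path, not taken from the notebook
model.infer(image, prompt="Describe this image.")
model.infer(image, prompt="Caption this image.")

model.print_stats()  # prints the rich stats table shown in the diff

# Launches the Gradio demo that the notebook embeds in an iframe;
# this commit raises that iframe's default height from 500 to 1000 px
model.launch_gradio()
```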
4 changes: 4 additions & 0 deletions xinfer/viz.py
@@ -42,4 +42,8 @@ def infer(image, prompt=None):
description="Upload an image to classify.",
)

# The default height of Gradio is too small for view in jupyter notebooks
if "height" not in gradio_launch_kwargs:
gradio_launch_kwargs["height"] = 1000

iface.launch(**gradio_launch_kwargs)
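The new guard only supplies a default, so a caller-provided height still wins. A minimal usage sketch, assuming the enclosing helper exposes the launch via something like `model.launch_gradio(...)` and forwards its keyword arguments into `gradio_launch_kwargs`:

```python
# Sketch only: the launch_gradio entry point and its kwargs forwarding are assumptions.
model.launch_gradio()            # no height passed, so the notebook iframe defaults to 1000 px
model.launch_gradio(height=600)  # "height" already present, so the new default is skipped
```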
