diff --git a/site/en/tutorials/python_quickstart.ipynb b/site/en/tutorials/python_quickstart.ipynb
index 1aa4c72fb..41ee46d6e 100644
--- a/site/en/tutorials/python_quickstart.ipynb
+++ b/site/en/tutorials/python_quickstart.ipynb
@@ -141,7 +141,7 @@
     },
     {
       "cell_type": "code",
-      "execution_count": null,
+      "execution_count": 4,
       "metadata": {
         "id": "TS9l5igubpHO"
       },
@@ -152,9 +152,6 @@
         "\n",
         "import google.generativeai as genai\n",
         "\n",
-        "# Used to securely store your API key\n",
-        "from google.colab import userdata\n",
-        "\n",
         "from IPython.display import display\n",
         "from IPython.display import Markdown\n",
         "\n",
@@ -164,6 +161,18 @@
         "  return Markdown(textwrap.indent(text, '> ', predicate=lambda _: True))"
       ]
     },
+    {
+      "cell_type": "code",
+      "execution_count": null,
+      "metadata": {
+        "id": "d10c38a5c91f"
+      },
+      "outputs": [],
+      "source": [
+        "# Used to securely store your API key\n",
+        "from google.colab import userdata"
+      ]
+    },
     {
       "cell_type": "markdown",
       "metadata": {
@@ -200,7 +209,7 @@
     },
     {
       "cell_type": "code",
-      "execution_count": null,
+      "execution_count": 6,
       "metadata": {
         "id": "ab9ASynfcIZn"
       },
@@ -245,7 +254,7 @@
         "id": "FTl5NjtrhA0J"
       },
       "source": [
-        "Note: For detailed information about the available models, including their capabilities and rate limits, see [Gemini models](https://ai.google.dev/models/gemini). We offer options for requesting [rate limit increases](https://ai.google.dev/docs/increase_quota). The rate limit for Gemini-Pro models is 60 requests per minute (RPM).\n",
+        "Note: For detailed information about the available models, including their capabilities and rate limits, see [Gemini models](https://ai.google.dev/models/gemini). There are options for requesting [rate limit increases](https://ai.google.dev/docs/increase_quota). The rate limit for Gemini-Pro models is 60 requests per minute (RPM).\n",
        "\n",
        "The `genai` package also supports the PaLM family of models, but only the Gemini models support the generic, multimodal capabilities of the `generateContent` method."
       ]
@@ -1681,12 +1690,13 @@
     },
     {
       "cell_type": "code",
-      "execution_count": null,
+      "execution_count": 7,
       "metadata": {
         "id": "gE7I9Anl0ud7"
       },
       "outputs": [],
       "source": [
+        "model = genai.GenerativeModel('gemini-pro')\n",
         "response = model.generate_content(\n",
         "    'Tell me a story about a magic backpack.',\n",
         "    generation_config=genai.types.GenerationConfig(\n",
@@ -1698,6 +1708,36 @@
         ")"
       ]
     },
+    {
+      "cell_type": "code",
+      "execution_count": 16,
+      "metadata": {
+        "id": "0fbab01e8fcf"
+      },
+      "outputs": [
+        {
+          "data": {
+            "text/markdown": [
+              "> Once upon a time, in a small town nestled amidst lush green hills, lived a young girl named..."
+            ],
+            "text/plain": [
+              ""
+            ]
+          },
+          "execution_count": 16,
+          "metadata": {},
+          "output_type": "execute_result"
+        }
+      ],
+      "source": [
+        "text = response.text\n",
+        "\n",
+        "if response.candidates[0].finish_reason.name == \"MAX_TOKENS\":\n",
+        "  text += '...'\n",
+        "\n",
+        "to_markdown(text)"
+      ]
+    },
     {
       "cell_type": "markdown",
       "metadata": {