updated llm project
bamr87 committed Jun 17, 2024
1 parent 7b01447 commit 486a70b
Showing 2 changed files with 300 additions and 190 deletions.
300 changes: 300 additions & 0 deletions pages/_notebooks/JeykLLM-create.ipynb
@@ -0,0 +1,300 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": []
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"---\n",
"title: \"Chat GPT Text Generation\"\n",
"description: \"This notebook demonstrates how to use the Chat GPT model for text generation.\"\n",
"tags: [\"NLP\", \"GPT\", \"Text Generation\"]\n",
"---"
]
},
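{
"cell_type": "markdown",
"metadata": {},
"source": [
"The cells below expect a local `.env` file that provides the OpenAI credentials. A minimal sketch, using placeholder values and the variable names this notebook reads:\n",
"\n",
"```text\n",
"OPENAI_API_KEY=sk-your-key\n",
"PROJECT_ID=proj_your_project\n",
"ORG_ID=org-your-org\n",
"```"
]
},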
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Pre-requisites\n",
"!pip show openai || pip install openai\n",
"!pip show python-dotenv || pip install python-dotenv"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"try:\n",
" %load_ext dotenv\n",
" print(\"dotenv extension loaded successfully.\")\n",
"except Exception as e:\n",
" print(\"Failed to load dotenv extension.\")\n",
" print(\"Error: \", str(e))\n",
"\n",
"try:\n",
" %dotenv\n",
" print(\"Environment variables loaded successfully.\")\n",
"except Exception as e:\n",
" print(\"Failed to load environment variables.\")\n",
" print(\"Error: \", str(e))\n",
"\n",
"try:\n",
" %reload_ext dotenv\n",
" print(\"dotenv extension reloaded successfully.\")\n",
"except Exception as e:\n",
" print(\"Failed to reload dotenv extension.\")\n",
" print(\"Error: \", str(e))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from dotenv import load_dotenv\n",
"import os\n",
"\n",
"# Load environment variables from .env file\n",
"if load_dotenv():\n",
" print(\"Environment variables loaded successfully.\")\n",
"else:\n",
" print(\"Failed to load environment variables.\")\n",
"\n",
"# Get API key from environment variables\n",
"api_key = os.getenv('OPENAI_API_KEY')\n",
"if api_key:\n",
" print(\"OPENAI_API_KEY loaded successfully.\")\n",
"else:\n",
" print(\"Failed to load OPENAI_API_KEY.\")\n",
"\n",
"# Get project ID from environment variables\n",
"project_id = os.getenv('PROJECT_ID')\n",
"if project_id:\n",
" print(\"PROJECT_ID loaded successfully.\")\n",
"else:\n",
" print(\"Failed to load PROJECT_ID.\")\n",
"\n",
"# Get organization ID from environment variables\n",
"org_id = os.getenv('ORG_ID')\n",
"if org_id:\n",
" print(\"ORG_ID loaded successfully.\")\n",
"else:\n",
" print(\"Failed to load ORG_ID.\")\n",
"\n",
"# Print the loaded values\n",
"print(\"OPENAI_API_KEY: \", api_key)\n",
"print(\"PROJECT_ID: \", project_id)\n",
"print(\"ORG_ID: \", org_id)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from openai import OpenAI\n",
"import yaml\n",
"from datetime import datetime\n",
"\n",
"client = OpenAI(\n",
" organization='org-sKWn6Hj2qqyP19zHfpXM1j1N',\n",
" project=os.getenv('PROJECT_ID'),\n",
" api_key = os.getenv('OPENAI_API_KEY')\n",
")\n",
"\n",
"chat_completion = client.chat.completions.create(\n",
" messages=[\n",
" {\n",
" \"role\": \"user\",\n",
" \"content\": \"Say this is a test\",\n",
" }\n",
" ],\n",
" model=\"gpt-3.5-turbo\",\n",
")\n",
"\n",
"print(chat_completion.choices[0].message.content)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def generate_content(prompt):\n",
" response = client.chat.completions.create(\n",
" model=\"gpt-3.5-turbo\",\n",
" messages=[\n",
" {\n",
" \"role\": \"system\",\n",
" \"content\": prompt,\n",
" },\n",
" ],\n",
" )\n",
" # Get the content of the last message in the response\n",
" return response.choices[0].message.content.strip()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def create_assistant(client):\n",
" name = input(\"Enter the name of the assistant: \")\n",
" instructions = input(\"Enter the instructions for the assistant: \")\n",
" assistant = client.beta.assistants.create(\n",
" name=name,\n",
" instructions=instructions,\n",
" model=\"gpt-4o\",\n",
" )\n",
" return assistant\n",
"\n",
"# Call the function and store the result\n",
"assistant = create_assistant(client)\n",
"print(assistant.id)\n",
"# Your job is to take content and output a 4-word title that summarizes the content in a thought provoking manner. The title should be intriguing and attention getting for a reader. In other words, try to make it a worth while title for someone to be interested in."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def create_message(content):\n",
" thread = client.beta.threads.create()\n",
" message = client.beta.threads.messages.create(\n",
" thread_id=thread.id,\n",
" role=\"user\",\n",
" content=content\n",
" )\n",
" return message\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"prompt = input(\"Please enter your prompt for the GPT: \")\n",
"# Tell me a story about a cat named Nubi who smoked dubi's all day by the bay\n",
"# Tell me a story about a dog named Fido who loved to play fetch in the park\n",
"content = generate_content(prompt)\n",
"message = create_message(content)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"print(message.thread_id)\n",
"print(content)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def create_and_poll_run(thread_id, assistant_id):\n",
" run = client.beta.threads.runs.create_and_poll(\n",
" thread_id=thread_id,\n",
" assistant_id=assistant_id,\n",
" )\n",
" return run\n",
"\n",
"thread_id = message.thread_id\n",
"assistant_id = assistant.id\n",
"run = create_and_poll_run(thread_id, assistant_id)\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"if run.status == 'completed': \n",
" messages = client.beta.threads.messages.list(\n",
" thread_id=message.thread_id\n",
" )\n",
" text_message = messages.data[0].content[0].text.value\n",
" title = text_message\n",
" print(title)\n",
"else:\n",
" print(run.status)\n",
" "
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def create_jekyll_post(title, content):\n",
" front_matter = {\n",
" 'title': title,\n",
" 'layout': 'journals',\n",
" 'date': datetime.now().strftime('%Y-%m-%d %H:%M:%S'),\n",
" 'categories': 'gpt',\n",
" }\n",
" post_content = f\"---\\n{yaml.dump(front_matter)}---\\n{content}\"\n",
" filename = f\"../_posts/{datetime.now().strftime('%Y-%m-%d')}-{title.lower().replace(' ', '-')}.md\"\n",
" os.makedirs(os.path.dirname(filename), exist_ok=True)\n",
" with open(filename, 'w') as file:\n",
" file.write(post_content)\n",
" return filename"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"file_path = create_jekyll_post(title, content)\n",
"print(f\"The new file is created at: {file_path}\")"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.9.6"
}
},
"nbformat": 4,
"nbformat_minor": 2
}