# GPT-assisted data analysis script: generates a small dataset with the
# OpenAI ChatCompletion API, then analyzes SIVB stock prices via yfinance.
# Import the os package
import os

# Import the openai package
import openai

# Import yfinance as yf
import yfinance as yf

# From the IPython.display package, import display and Markdown
from IPython.display import display, Markdown

# Authenticate with the OpenAI API using the OPENAI environment variable.
# NOTE: raises KeyError at startup if the variable is unset.
openai.api_key = os.environ["OPENAI"]
# System message: sets the assistant's persona for the first request.
system_msg = 'You are a helpful assistant who understands data science.'

# User message: asks GPT for Python code plus a markdown table for a
# 5-row, 3-column people dataset (name, height_cm, eye_color).
user_msg = 'Create a small dataset of data about people. The format of the dataset should be a data frame with 5 rows and 3 columns. The columns should be called "name", "height_cm", and "eye_color". The "name" column should contain randomly chosen first names. The "height_cm" column should contain randomly chosen heights, given in centimeters. The "eye_color" column should contain randomly chosen eye colors, taken from a choice of "brown", "blue", and "green". Provide Python code to generate the dataset, then provide the output in the format of a markdown table.'

# Ask GPT to create the dataset described by `user_msg`.
response = openai.ChatCompletion.create(
    model="gpt-3.5-turbo",
    messages=[
        {"role": "system", "content": system_msg},
        {"role": "user", "content": user_msg},
    ],
)

# Check that the completion finished normally. The original code evaluated
# the finish_reason as a bare expression and discarded it, which checks
# nothing when run as a script — capture it and warn if the reply was
# truncated (e.g. finish_reason == "length").
finish_reason = response["choices"][0]["finish_reason"]
if finish_reason != "stop":
    print(f"Warning: finish_reason was {finish_reason}")

# Print the raw content generated by GPT.
print(response["choices"][0]["message"]["content"])

# Render the same content as Markdown (for notebook display).
display(Markdown(response["choices"][0]["message"]["content"]))
def chat(system, user_assistant):
    """Send a system prompt plus alternating user/assistant turns to GPT.

    Parameters
    ----------
    system : str
        The system message setting the assistant's behavior.
    user_assistant : list of str
        Conversation turns in order: even indexes are user messages,
        odd indexes are prior assistant replies.

    Returns
    -------
    str
        The content of the model's reply.

    Raises
    ------
    TypeError
        If `system` is not a str or `user_assistant` is not a list.
    RuntimeError
        If the completion did not finish with "stop" (e.g. it was
        truncated by the token limit).
    """
    # Validate with explicit raises rather than `assert`: asserts are
    # stripped when Python runs with the -O flag, silently disabling
    # these checks.
    if not isinstance(system, str):
        raise TypeError("`system` should be a string")
    if not isinstance(user_assistant, list):
        raise TypeError("`user_assistant` should be a list")

    # Build the message list: system first, then alternating user/assistant.
    msgs = [{"role": "system", "content": system}]
    for i, content in enumerate(user_assistant):
        role = "assistant" if i % 2 else "user"
        msgs.append({"role": role, "content": content})

    response = openai.ChatCompletion.create(
        model="gpt-3.5-turbo",
        messages=msgs,
    )

    # A finish_reason other than "stop" means the reply is incomplete.
    status_code = response["choices"][0]["finish_reason"]
    if status_code != "stop":
        raise RuntimeError(f"The status code was {status_code}.")
    return response["choices"][0]["message"]["content"]
        
# Smoke-test the chat() helper with a single user message (no prior
# assistant turns).
response_fn_test = chat(
    "You are a machine learning expert who writes tersely.", 
    ["Explain what a support vector machine model is."]
)
# Render the reply as Markdown.
display(Markdown(response_fn_test))
# Reuse the dataset-generation reply from the FIRST request (`response`,
# not `response_fn_test`) as the assistant turn in a follow-up conversation.
assistant_msg = response["choices"][0]["message"]["content"]

# Follow-up question about the dataset GPT just produced.
user_msg2 = 'Using the dataset you just created, write code to calculate the mean of the `height_cm` column. Also include the result of the calculation.'

# Turns in the order chat() expects: user, assistant, user.
user_assistant_msgs = [user_msg, assistant_msg, user_msg2]

# Get GPT to perform the request with the full conversation context.
response_calc = chat(system_msg, user_assistant_msgs)

# Display the generated content.
display(Markdown(response_calc))

# Create a yfinance Ticker object for Silicon Valley Bank (SIVB).
# NOTE(review): SIVB was delisted after SVB's March 2023 collapse — the
# history call may return an empty frame today; verify before relying on it.
sivb = yf.Ticker("SIVB")

# Fetch one month of daily price history (DataFrame indexed by date).
sivb_history = sivb.history(period="1mo")

# Keep only the Close column (double brackets preserve DataFrame shape)
# and round prices to two decimal places.
sivb_close = sivb_history[["Close"]].round(2)
# Persona for the financial-analysis request.
system_msg_sivb = 'You are a financial data expert who writes tersely.'

# Prompt asking GPT for analysis code and a short report; the closing-price
# table is appended as plain text via DataFrame.to_string().
user_msg_sivb = '''The closing prices for the Silicon Valley Bank stock (ticker SIVB) are provided below. Provide Python code to analyze the data including the following metrics:

- The date of the highest closing price.
- The date of the lowest closing price.
- The date with the largest change from the previous closing price.

Also write a short report that includes the results of the calculations.

Here is the dataset:

''' + sivb_close.to_string()

# Send the single-turn conversation through the chat() helper.
response_sivb = chat(system_msg_sivb, [user_msg_sivb])

# Render the response as Markdown.
display(Markdown(response_sivb))