Wednesday, April 17, 2024

Module 1 : Introduction and Data Sources

 

Stock Markets Analytics Zoomcamp

Reference : 

1. Youtube : 

2. Slides :

Module01_Colab_Introduction_and_Data_Sources.ipynb

1
2
# install main library YFinance
!pip install yfinance
1
2
3
4
5
6
7
8
9
10
11
12
13
14
# IMPORTS
import numpy as np
import pandas as pd
 
#Fin Data Sources
import yfinance as yf
import pandas_datareader as pdr
 
#Data viz
import plotly.graph_objs as go
import plotly.express as px
 
import time
from datetime import date
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
import pandas as pd

# Load the quarterly real GDP series (FRED: GDPC1) from a local CSV export
df = pd.read_csv("GDPC1.csv")

# Ensure DATE is a real datetime so we can filter by year below
df['DATE'] = pd.to_datetime(df['DATE'])

# Quarterly data: the row 4 positions back is the same quarter one year earlier
df['GDPC1_one_year_ago'] = df['GDPC1'].shift(4)

# Year-over-year growth rate for every quarter
df['YoY_growth'] = df['GDPC1'].div(df['GDPC1_one_year_ago']).sub(1)

# Keep only the 2023 observations
df_2023 = df.loc[df['DATE'].dt.year == 2023]

# Average YoY growth across the 2023 quarters
average_YoY_growth_2023 = df_2023['YoY_growth'].mean()

# Express as a percentage rounded to one decimal place
average_YoY_growth_2023_rounded = round(average_YoY_growth_2023 * 100, 1)

print("Average YoY growth rate in 2023: {}%".format(average_YoY_growth_2023_rounded))
Average YoY growth rate in 2023: 2.5%

1) Understanding Data-Driven Decisions data pulls

1
2
3
4
5
# Anchor the analysis window on today's date
end = date.today()
print(f'Year = {end.year}; month= {end.month}; day={end.day}')

# Same calendar day, 70 years earlier — long horizon for the index pulls below
start = end.replace(year=end.year - 70)
print(f'Period for indexes: {start} to {end} ')
Year = 2024; month= 4; day=17
Period for indexes: 1954-04-17 to 2024-04-17 
1.1) GDP
1
2
3
4
5
6
7
# Real Potential Gross Domestic Product (GDPPOT), Billions of Chained 2012 Dollars, QUARTERLY
gdppot = pdr.DataReader("GDPPOT", "fred", start=start)

# Quarterly series: 4 rows back = same quarter last year, 1 row back = previous quarter
gdppot['gdppot_us_yoy'] = gdppot.GDPPOT/gdppot.GDPPOT.shift(4)-1
# BUG FIX: QoQ growth must compare against the *previous quarter* (shift(1)).
# The original used shift(4) (copied from the YoY line), which made the two
# columns identical — visible in the pasted output where yoy == qoq on every row.
gdppot['gdppot_us_qoq'] = gdppot.GDPPOT/gdppot.GDPPOT.shift(1)-1
gdppot.tail()
GDPPOTgdppot_us_yoygdppot_us_qoq
DATE
2023-04-0122246.230.0213120.021312
2023-07-0122369.710.0217190.021719
2023-10-0122495.190.0221900.022190
2024-01-0122618.150.0222510.022251
2024-04-0122739.000.0221510.022151
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
# Visuals GDPPOT

import matplotlib.pyplot as plt
import matplotlib.ticker as mtick

fig, ax = plt.subplots(figsize=(20, 6))
plt.grid(True)

# Shaded area: absolute level of US potential GDP
ax.fill_between(gdppot.index, gdppot.GDPPOT, color="red", alpha=0.3, label="US Potential GDP")

# Secondary axis for the growth series, formatted as percentages
ax2 = ax.twinx()
ax2.yaxis.set_major_formatter(mtick.PercentFormatter(1.0))
ax2.plot(gdppot.gdppot_us_yoy, color="blue", marker="o", label="US Potential GDP Growth, % Y/Y")

# Axis labels, colour-matched to the series they describe
ax.set_xlabel("Date", fontsize=14)
ax.set_ylabel("US Potential GDP, $b", color="red", fontsize=14)
ax2.set_ylabel("US Potential GDP Growth, % Y/Y", color="blue", fontsize=14)

# Merge the legend entries from both axes into a single box
handles_a, labels_a = ax.get_legend_handles_labels()
handles_b, labels_b = ax2.get_legend_handles_labels()
ax2.legend(handles_a + handles_b, labels_a + labels_b, loc='upper left')

plt.show()
1.2) INFLATION – CPI CORE
1
2
3
4
5
6
7
8
9
10
11
# "Core CPI index", MONTHLY
# CPILFESL = Consumer Price Index for All Urban Consumers: All Items Less Food & Energy.
# Food and energy prices are very volatile, so economists widely use this
# "Core CPI" aggregate as the inflation gauge.
cpilfesl = pdr.DataReader("CPILFESL", "fred", start=start)

# Monthly series: 12 rows back = same month last year, 1 row back = previous month
cpilfesl['cpi_core_yoy'] = cpilfesl['CPILFESL'].div(cpilfesl['CPILFESL'].shift(12)).sub(1)
cpilfesl['cpi_core_mom'] = cpilfesl['CPILFESL'].div(cpilfesl['CPILFESL'].shift(1)).sub(1)

cpilfesl.tail(13)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
fig, ax = plt.subplots(figsize=(20, 6))
plt.grid(True)

# Shaded area: the Core CPI index level
ax.fill_between(cpilfesl.index, cpilfesl.CPILFESL, color="red", alpha=0.3, label="Core CPI index (monthly)")

# Secondary axis for YoY growth, formatted as percentages
ax2 = ax.twinx()
ax2.yaxis.set_major_formatter(mtick.PercentFormatter(1.0))
ax2.plot(cpilfesl.cpi_core_yoy, color="blue", marker="o", label="Core CPI index (monthly) Growth, % Y/Y")

# Axis labels, colour-matched to the series
ax.set_xlabel("Date", fontsize=14)
ax.set_ylabel("Core CPI index (monthly)", color="red", fontsize=14)
ax2.set_ylabel("Core CPI index Growth, % Y/Y", color="blue", fontsize=14)

# One combined legend for both axes
handles_a, labels_a = ax.get_legend_handles_labels()
handles_b, labels_b = ax2.get_legend_handles_labels()
ax2.legend(handles_a + handles_b, labels_a + labels_b, loc='upper left')

plt.show()
1.3 INTEREST RATES
1
2
3
# Effective Federal Funds Rate (FRED: FEDFUNDS), monthly
fedfunds = pdr.DataReader(name="FEDFUNDS", data_source="fred", start=start)
fedfunds.tail()
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
# Fed Funds rate over time
fig, ax = plt.subplots(figsize=(20, 6))
plt.grid(True)

# Single axis: the rate itself (FRED reports e.g. 5.5 meaning 5.5%,
# so divide by 100 before applying the percent formatter)
ax.yaxis.set_major_formatter(mtick.PercentFormatter(1.0))
ax.plot(fedfunds.index, fedfunds.FEDFUNDS/100, marker="o", label="Fed Funds Rate")

# Axis labels
ax.set_xlabel("Date", fontsize=14)
ax.set_ylabel("Fed Funds Rate", color="blue", fontsize=14)

# Legend
handles, legend_labels = ax.get_legend_handles_labels()
ax.legend(handles, legend_labels, loc='upper left')

plt.show()
1
2
3
# 1-Year Treasury constant-maturity rate (FRED: DGS1), daily
dgs1 = pdr.DataReader(name="DGS1", data_source="fred", start=start)
dgs1.tail()

Other rates for US Treasury: https://fred.stlouisfed.org/categories/115

1
2
3
# 5-Year Treasury constant-maturity rate (FRED: DGS5), daily
dgs5 = pdr.DataReader(name="DGS5", data_source="fred", start=start)
dgs5.tail()
1.4 SNP500
1
2
3
4
5
# Other indexes: https://stooq.com/t/

# ^SPX = S&P 500 index, daily history from Stooq
spx_index = pdr.get_data_stooq('^SPX', start=start, end=end)
spx_index.head()
1
2
3
4
5
# ~252 trading days in a year.
# NOTE: Stooq serves rows newest-first, so past values sit *below* the
# current row — hence the NEGATIVE shifts for day/quarter/year lookbacks.
for col, lag in (('spx_dod', -1), ('spx_qoq', -63), ('spx_yoy', -252)):
    spx_index[col] = spx_index.Close / spx_index.Close.shift(lag) - 1
1
spx_index.head()
1
# Keep only the history from 1990 onwards for a cleaner chart
recent_rows = spx_index.index >= '1990-01-01'
spx_truncated = spx_index[recent_rows]
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
# S&P500 abs. vs. relative growth
fig, ax = plt.subplots(figsize=(20, 6))
plt.grid(True)

# Shaded area: S&P 500 close price (absolute level)
ax.fill_between(spx_truncated.index, spx_truncated.Close, color="red", alpha=0.3, label="S&P 500 Absolute Value (Close price)")

# Secondary axis: YoY growth as a percentage
ax2 = ax.twinx()
ax2.yaxis.set_major_formatter(mtick.PercentFormatter(1.0))
ax2.plot(spx_truncated.spx_yoy,
         color="blue",
         label="Year-over-Year Growth (%)")

# Setting labels and title
ax.set_xlabel("Date", fontsize=14)
# FIX: y-axis label was missing its closing parenthesis ("(Close price")
ax.set_ylabel("S&P 500 Absolute Value (Close price)", color="red", fontsize=14)
ax2.set_ylabel("Year-over-Year Growth (%)", color="blue", fontsize=14)

# Merge both axes' legend entries into a single legend
lines, labels = ax.get_legend_handles_labels()
lines2, labels2 = ax2.get_legend_handles_labels()
ax2.legend(lines + lines2, labels + labels2, loc='upper left')

plt.show()

2) Data Sources for Stocks

2.1 OHLCV DATA DAILY – INDEXES
1
2
3
4
5
6
# INDEXES from Yahoo Finance
# ^GDAXI = DAX index (XETRA — delayed price, quoted in EUR)
dax_daily = yf.download("^GDAXI", period="max", interval="1d")
1
# most recent rows of the DAX daily history
dax_daily.tail()
1
2
3
# A calendar year normally has ~252 trading sessions
trading_days_per_year = 252
dax_daily['adj_close_last_year'] = dax_daily['Adj Close'].shift(trading_days_per_year)
dax_daily['yoy_growth'] = dax_daily['Adj Close'].div(dax_daily['adj_close_last_year']).sub(1)
1
# display the full DataFrame (notebook cell output)
dax_daily
1
# quick sanity-check line chart of the adjusted close series
dax_daily['Adj Close'].plot.line()
1
2
3
4
5
# ^SPX — S&P 500 index via Chicago Options (delayed price, quoted in USD)
snp500_daily = yf.download("^SPX", period="max", interval="1d")
1
# most recent rows of the (delayed) S&P 500 history
snp500_daily.tail()
1
2
3
4
5
# ^GSPC — S&P 500 real-time quote (SNP source, quoted in USD)
snp500_daily_non_delayed = yf.download("^GSPC", period="max", interval="1d")
1
# most recent rows of the real-time S&P 500 history
snp500_daily_non_delayed.tail()
1
2
3
4
# ^DJI index daily history from Yahoo Finance
dji_daily = yf.download("^DJI", period="max", interval="1d")
2.2 OHLCV DATA DAILY – ETFS
1
2
3
4
# VOO ETF daily history from Yahoo Finance
voo_etf = yf.download("VOO", period="max", interval="1d")
1
# most recent rows of the VOO history
voo_etf.tail()
1
2
3
4
5
6
7
# ETFs
# WisdomTree India Earnings Fund (EPI)
# NYSEArca — Nasdaq real-time price, quoted in USD
epi_etf_daily = yf.download("EPI", period="max", interval="1d")
1
2
# first rows, then the dataset dimensions as (rows, columns)
epi_etf_daily.head()
print(epi_etf_daily.shape)
(4063, 6)
1
2
# find dividends impact on Close vs. Adj.Close
# (rows around 2023-06-26, when a $0.06 dividend was paid — see get_actions() output below)
epi_etf_daily[(epi_etf_daily.index >='2023-06-23') & (epi_etf_daily.index <='2023-06-28')]
1
2
3
4
# find dividends - diff for Close vs. Adj Close
# Open/Close for 06-25 diff is close to divs = 1.845 (~1.58 for Open and 1.3 for Close)
# The large $1.845 payout on 2022-06-24 appears in the get_actions() output below.
# HELP: https://help.yahoo.com/kb/SLN28256.html#:~:text=Adjusted%20close%20is%20the%20closing,Security%20Prices%20(CRSP)%20standards.
epi_etf_daily[(epi_etf_daily.index >='2022-06-23') & (epi_etf_daily.index <='2022-06-28')]
1
2
3
 
# line chart of the raw (non-adjusted) close prices
epi_etf_daily['Close'].plot(title="EPI's etf stock price")
1
2
3
# Corporate actions (dividends, stock splits, capital gains) as a DataFrame
epi = yf.Ticker('EPI')
epi.get_actions()
DividendsStock SplitsCapital Gains
Date
2008-12-22 00:00:00-05:000.0910.00.0
2009-03-23 00:00:00-04:000.0070.00.0
2009-06-22 00:00:00-04:000.0020.00.0
2009-09-21 00:00:00-04:000.0450.00.0
2009-12-21 00:00:00-05:000.0060.00.0
2010-06-28 00:00:00-04:000.0650.00.0
2010-09-20 00:00:00-04:000.0650.00.0
2010-12-22 00:00:00-05:000.0130.00.0
2011-06-22 00:00:00-04:000.0620.00.0
2011-09-26 00:00:00-04:000.0980.00.0
2012-03-26 00:00:00-04:000.0100.00.0
2012-06-25 00:00:00-04:000.0450.00.0
2012-09-24 00:00:00-04:000.0830.00.0
2012-12-24 00:00:00-05:000.0240.00.0
2013-06-24 00:00:00-04:000.0680.00.0
2013-09-23 00:00:00-04:000.0630.00.0
2014-03-24 00:00:00-04:000.0990.00.0
2014-06-23 00:00:00-04:000.0590.00.0
2014-09-22 00:00:00-04:000.0680.00.0
2015-03-23 00:00:00-04:000.0310.00.0
2015-06-22 00:00:00-04:000.0670.00.0
2015-09-21 00:00:00-04:000.1160.00.0
2015-12-21 00:00:00-05:000.0240.00.0
2016-03-21 00:00:00-04:000.0100.00.0
2016-06-20 00:00:00-04:000.1250.00.0
2016-09-26 00:00:00-04:000.0700.00.0
2016-12-23 00:00:00-05:000.0060.00.0
2017-03-27 00:00:00-04:000.0750.00.0
2017-06-26 00:00:00-04:000.0550.00.0
2017-09-26 00:00:00-04:000.0800.00.0
2017-12-26 00:00:00-05:000.0260.00.0
2018-03-20 00:00:00-04:000.0610.00.0
2018-06-25 00:00:00-04:000.0850.00.0
2018-09-25 00:00:00-04:000.1110.00.0
2018-12-24 00:00:00-05:000.0080.00.0
2018-12-28 00:00:00-05:000.0280.00.0
2019-03-26 00:00:00-04:000.1200.00.0
2019-09-24 00:00:00-04:000.1300.00.0
2019-12-23 00:00:00-05:000.0410.00.0
2020-03-24 00:00:00-04:000.0170.00.0
2020-06-23 00:00:00-04:000.0940.00.0
2020-09-22 00:00:00-04:000.0350.00.0
2020-12-21 00:00:00-05:000.0810.00.0
2021-03-25 00:00:00-04:000.0430.00.0
2021-06-24 00:00:00-04:000.0850.00.0
2021-09-24 00:00:00-04:000.1100.00.0
2021-12-27 00:00:00-05:000.1920.00.0
2022-03-25 00:00:00-04:000.1150.00.0
2022-06-24 00:00:00-04:001.8450.00.0
2023-06-26 00:00:00-04:000.0600.00.0
1
2
# Dividend history only, returned as a pandas Series indexed by date
epi.get_dividends()
Date
2008-12-22 00:00:00-05:00    0.091
2009-03-23 00:00:00-04:00    0.007
2009-06-22 00:00:00-04:00    0.002
2009-09-21 00:00:00-04:00    0.045
2009-12-21 00:00:00-05:00    0.006
2010-06-28 00:00:00-04:00    0.065
2010-09-20 00:00:00-04:00    0.065
2010-12-22 00:00:00-05:00    0.013
2011-06-22 00:00:00-04:00    0.062
2011-09-26 00:00:00-04:00    0.098
2012-03-26 00:00:00-04:00    0.010
2012-06-25 00:00:00-04:00    0.045
2012-09-24 00:00:00-04:00    0.083
2012-12-24 00:00:00-05:00    0.024
2013-06-24 00:00:00-04:00    0.068
2013-09-23 00:00:00-04:00    0.063
2014-03-24 00:00:00-04:00    0.099
2014-06-23 00:00:00-04:00    0.059
2014-09-22 00:00:00-04:00    0.068
2015-03-23 00:00:00-04:00    0.031
2015-06-22 00:00:00-04:00    0.067
2015-09-21 00:00:00-04:00    0.116
2015-12-21 00:00:00-05:00    0.024
2016-03-21 00:00:00-04:00    0.010
2016-06-20 00:00:00-04:00    0.125
2016-09-26 00:00:00-04:00    0.070
2016-12-23 00:00:00-05:00    0.006
2017-03-27 00:00:00-04:00    0.075
2017-06-26 00:00:00-04:00    0.055
2017-09-26 00:00:00-04:00    0.080
2017-12-26 00:00:00-05:00    0.026
2018-03-20 00:00:00-04:00    0.061
2018-06-25 00:00:00-04:00    0.085
2018-09-25 00:00:00-04:00    0.111
2018-12-24 00:00:00-05:00    0.008
2018-12-28 00:00:00-05:00    0.028
2019-03-26 00:00:00-04:00    0.120
2019-09-24 00:00:00-04:00    0.130
2019-12-23 00:00:00-05:00    0.041
2020-03-24 00:00:00-04:00    0.017
2020-06-23 00:00:00-04:00    0.094
2020-09-22 00:00:00-04:00    0.035
2020-12-21 00:00:00-05:00    0.081
2021-03-25 00:00:00-04:00    0.043
2021-06-24 00:00:00-04:00    0.085
2021-09-24 00:00:00-04:00    0.110
2021-12-27 00:00:00-05:00    0.192
2022-03-25 00:00:00-04:00    0.115
2022-06-24 00:00:00-04:00    1.845
2023-06-26 00:00:00-04:00    0.060
Name: Dividends, dtype: float64
2.3 PAID DATA – POLYGON.IO (NEWS ENDPOINT)
2.4 MACROECONOMICS
  • add some indicator examples
1
2
3
4
5
6
# Gold reserves excl. gold for China (FRED series TRESEGCNM052N)
gold_reserves = pdr.DataReader(name="TRESEGCNM052N", data_source="fred", start=start)

gold_reserves.TRESEGCNM052N.plot.line()
1
2
3
4
5
# CBOE Gold ETF Volatility Index (FRED: GVZCLS)
gold_volatility = pdr.DataReader(name="GVZCLS", data_source="fred", start=start)

gold_volatility.GVZCLS.plot.line()
1
2
3
4
5
6
# Crude Oil Prices: West Texas Intermediate (WTI) - Cushing, Oklahoma (DCOILWTICO)
oil_wti = pdr.DataReader(name="DCOILWTICO", data_source="fred", start=start)

# NOTE(review): the series contains a negative price — verify against the
# April 2020 oil-futures episode before treating it as a data bug
oil_wti.DCOILWTICO.plot.line()
1
2
3
4
5
# Crude Oil Prices: Brent - Europe (FRED: DCOILBRENTEU)
oil_brent = pdr.DataReader(name="DCOILBRENTEU", data_source="fred", start=start)

oil_brent.DCOILBRENTEU.plot.line()
1
2
3
4
5
6
7
8
9
10
11
12
13
# Web Scraping for Macro
# can't call directly via pd.read_html() as it returns 403 (forbidden) --> need to
# fetch the page ourselves with a browser-like User-Agent; still no Selenium needed
import requests
from bs4 import BeautifulSoup

# FIX: `url` was never defined in this cell, so requests.get() below raised a
# NameError. The parsed output (US macro indicators table) matches the
# TradingEconomics page — TODO confirm this was the original source URL:
url = "https://tradingeconomics.com/united-states/indicators"

# Browser-like User-Agent so the server does not reject us with 403
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
}

response = requests.get(url, headers=headers)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
from io import StringIO  # read_html() requires a file-like object for literal HTML

# Check if the request was successful (status code 200)
if response.status_code == 200:
    # Parse the HTML content of the webpage
    soup = BeautifulSoup(response.content, "html.parser")

    # The indicators table lives inside a div with class "table-responsive"
    table_div = soup.find("div", class_="table-responsive")

    # Extract the table within the div
    table = table_div.find("table")

    # Use pandas to read the table into a DataFrame.
    # FIX: passing literal HTML to read_html() is deprecated (FutureWarning in
    # pandas >= 2.1) — wrap the string in StringIO instead.
    df = pd.read_html(StringIO(str(table)))[0]  # Assuming there's only one table, otherwise, loop through the list

    # Display the DataFrame
    print(df)
else:
    print("Failed to retrieve data from the webpage.")
          Unnamed: 0    Last  Previous  Highest    Lowest  \
0                   Currency   106.0     106.0   165.00     70.70   
1               Stock Market  5067.0    5051.0  5268.00      4.40   
2            GDP Growth Rate     3.4       4.9    34.80    -28.00   
3     GDP Annual Growth Rate     3.1       2.9    13.40     -7.50   
4          Unemployment Rate     3.8       3.9    14.90      2.50   
5          Non Farm Payrolls   303.0     270.0  4615.00 -20477.00   
6             Inflation Rate     3.5       3.2    23.70    -15.80   
7         Inflation Rate MoM     0.4       0.4     2.00     -1.80   
8              Interest Rate     5.5       5.5    20.00      0.25   
9           Balance of Trade   -68.9     -67.6     1.95   -103.00   
10           Current Account  -195.0    -196.0     9.96   -284.00   
11    Current Account to GDP    -3.0      -3.8     0.20     -6.00   
12    Government Debt to GDP   129.0     127.0   129.00     31.80   
13         Government Budget    -5.8     -12.4     4.50    -15.00   
14       Business Confidence    50.3      47.8    77.50     29.40   
15         Manufacturing PMI    51.9      52.2    63.40     36.10   
16     Non Manufacturing PMI    51.4      52.6    67.60     37.80   
17              Services PMI    51.7      52.3    70.40     26.70   
18       Consumer Confidence    77.9      79.4   111.00     50.00   
19          Retail Sales MoM     0.7       0.9    19.00    -14.60   
20          Building Permits  1458.0    1523.0  2419.00    513.00   
21        Corporate Tax Rate    21.0      21.0    52.80      1.00   
22  Personal Income Tax Rate    37.0      37.0    39.60     35.00   

        Unnamed: 5 Unnamed: 6  
0              NaN     Apr/24  
1           points     Apr/24  
2          percent     Dec/23  
3          percent     Dec/23  
4          percent     Mar/24  
5         Thousand     Mar/24  
6          percent     Mar/24  
7          percent     Mar/24  
8          percent     Mar/24  
9      USD Billion     Feb/24  
10     USD Billion     Dec/23  
11  percent of GDP     Dec/23  
12  percent of GDP     Dec/22  
13  percent of GDP     Dec/22  
14          points     Mar/24  
15          points     Mar/24  
16          points     Mar/24  
17          points     Mar/24  
18          points     Apr/24  
19         percent     Mar/24  
20        Thousand     Mar/24  
21         percent     Dec/24  
22         percent     Dec/23  
1
 
2.5) FINANCIAL REPORTING – EDGAR (IN YAHOO)
1
2
# let's check for NVDA — create a yfinance Ticker handle for the queries below
nvda =  yf.Ticker('NVDA')
1
2
# yearly financials for the last 4 years (as reported via Yahoo Finance)
nvda.financials
1
2
# balance sheet from the same reporting source
nvda.balance_sheet
1
2
# Basic info: a lazy-loading dict of market/quote attributes
# (keys listed in the cell output below, e.g. marketCap, dayHigh, yearLow)
nvda.basic_info
lazy-loading dict with keys = ['currency', 'dayHigh', 'dayLow', 'exchange', 'fiftyDayAverage', 'lastPrice', 'lastVolume', 'marketCap', 'open', 'previousClose', 'quoteType', 'regularMarketPreviousClose', 'shares', 'tenDayAverageVolume', 'threeMonthAverageVolume', 'timezone', 'twoHundredDayAverage', 'yearChange', 'yearHigh', 'yearLow']
1
2
# marketCap is quite useful, but don't know when it was updated? Daily?
# divided by 1e9 to express the value in billions
nvda.basic_info['marketCap']/1e9
2185.3750610351562
# read this article for full info: https://zoo.cs.yale.edu/classes/cs458/lectures/yfinance.html
2.6 WEB SCRAPING – COMPANY INFO FOR CLUSTERING
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
# ask chatGPT: emulate clicking the link and downloading the content
import requests
from bs4 import BeautifulSoup

# URL of the webpage.
# FIX: `url` was never defined in this cell, so requests.get() below raised a
# NameError. The CSV link found below is resolved against companiesmarketcap.com,
# so the site's listing page is the starting point:
url = "https://companiesmarketcap.com/"

# Define headers with a user-agent to mimic a web browser
headers = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
}

# Send a GET request to the URL with headers
response = requests.get(url, headers=headers)

# Check if the request was successful (status code 200)
if response.status_code == 200:
    # Parse the HTML content of the webpage
    soup = BeautifulSoup(response.content, "html.parser")

    # Find the "download csv" anchor within the webpage
    download_link = soup.find("a", {"rel": "nofollow", "href": "?download=csv"})

    # If the download link is found
    if download_link:
        # The href is relative ("?download=csv") — prefix the site root for a full URL
        download_url = 'https://companiesmarketcap.com/' + download_link["href"]

        # Download the CSV file using the obtained download URL
        download_response = requests.get(download_url, headers=headers)

        # Check if the download request was successful
        if download_response.status_code == 200:
            # Save the content of the response to a local file
            with open("global_stocks.csv", "wb") as f:
                f.write(download_response.content)
            print("CSV file downloaded successfully.")
        else:
            print("Failed to download the CSV file.")
    else:
        print("Download link not found on the webpage.")
else:
    print("Failed to retrieve data from the webpage.")
1
2
3
# Load the downloaded company listing into a DataFrame and peek at it
global_stocks = pd.read_csv("global_stocks.csv")
global_stocks.head()
1
# column dtypes, non-null counts and memory usage of the listing
global_stocks.info()

No comments:

Post a Comment

MLOps Zoomcamp 2024 - Module 2

Module 2 - Experiment-Tracking Source https://github.com/DataTalksClub/mlops-zoomcamp/tree/main/02-experiment-tracking Homework Q1. Install...