Pablo Andres Alvarado
Mar 28, 2020 · 5 min read


Portfolio Optimization with Python.

You might already know portfolio optimization by another name, such as ‘optimal asset allocation’ or ‘modern portfolio theory’. But no matter the name, the idea and objective are the same. You want to build your portfolio to yield the maximum possible return while maintaining the amount of risk you’re willing to carry.

In practice, this means building a balanced portfolio: you spread your investment capital across a variety of assets and then weight those assets to reach your desired risk-reward outcome.

Portfolio optimization should result in what investors call an ‘efficient portfolio’. This means it’s generating the highest possible return at your established risk tolerance.

I decided to write a Python script for this purpose. It runs several analytics on the portfolio: weight optimization, risk parity, and the Sharpe, Treynor, and Sortino ratios.

Let’s jump into the code!

First, we import the packages we are going to use for this project, mainly pandas and numpy for the analytics and plotly for the graphs.

# let's import everything we need
import pandas as pd
import numpy as np
from pandas_datareader.data import DataReader
import pandas_datareader.data as web
from datetime import date
import plotly.graph_objects as go
import statsmodels.api as sm
from scipy.stats import norm
import plotly.io as pio
import plotly.express as px
import time
from tqdm import tqdm
import winsound
from scipy.optimize import minimize
from plotly.subplots import make_subplots

Now we are going to pull the data from Yahoo Finance for our tickers. At the same time, we compute the daily returns of each asset, along with their mean returns and volatilities.

TOLERANCE = 1e-10
pio.renderers.default = "browser"
# progress bar shown before each step (purely cosmetic; it adds about 30 seconds per call)
def c():
    for i in tqdm(range(10)):
        time.sleep(3)
c()
#set the functions for returns, mean and volatility
tickers=['KLAC','CINF','TGT','PEP','ZTS','AIZ','LMT','SO','MDT','HD']
wts=[0.125,0.119,0.117,0.106,0.1,0.098,0.096,0.094,0.084,0.061]
start_date1=date(2019,1,1)
end_date1=date(2020,1,1)
initial_investment=1000000
def get_stock_quote(tickers):
    c()
    start=start_date1
    end=end_date1
    source='yahoo'
    price=DataReader(tickers,source,start,end)
    df=pd.DataFrame(price)
    return df
def get_returns():
    c()
    adj_close=get_stock_quote(tickers)['Adj Close']
    rets=adj_close.pct_change()
    rets=rets.dropna()
    rets=pd.DataFrame(rets)
    return rets
def get_mean():
    c()
    returns=get_returns()
    return returns.mean()
def get_std():
    c()
    returns=get_returns()
    return returns.std()
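
A quick sanity check of these helpers (this snippet is my own usage sketch, not part of the original script; it assumes the Yahoo Finance endpoint returns data for the chosen date range):

rets = get_returns()
print(rets.head())                           # first daily returns per ticker
print((rets.mean() * 252).round(4))          # rough annualized mean return per ticker
print((rets.std() * np.sqrt(252)).round(4))  # rough annualized volatility per ticker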

Then, we obtain the benchmark data and the daily risk-free rate (the 10-year Treasury yield, series DGS10 from FRED). This data will help us later when we optimize the portfolio.

#get the data for risk free rate and benchmark; we will use that later
def get_risk_free_rate_daily():
    c()
    start=start_date1
    end=end_date1
    source_two='fred'
    code='DGS10'
    rfr=DataReader(code,source_two,start,end)
    rfr=rfr.dropna()
    rfr=rfr.mean()/100
    d_rfr=rfr/252
    return np.float64(d_rfr)
#this function is for regression
def get_benchmark():
    c()
    start=start_date1
    end=end_date1
    source='yahoo'
    bench=['^GSPC']
    adj_close=DataReader(bench,source,start,end)['Adj Close']
    bench_returns=adj_close.pct_change()
    bench_returns=bench_returns.dropna()
    return bench_returns
#this function is for sortino
def get_benchmark_average_daily_return():
    c()
    rets=get_benchmark()
    mean=rets.mean()
    return mean
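
The rate conversion in get_risk_free_rate_daily is just arithmetic: DGS10 is an annualized percentage, so an average yield of roughly 2% for 2019 becomes about 0.02 / 252, or roughly 0.008% per trading day. The same calculation with a hard-coded illustrative yield (the real function averages the downloaded series instead):

annual_yield = 0.02           # assume an average 10-year yield of about 2% (illustrative only)
daily_rfr = annual_yield / 252
print(daily_rfr)              # ~7.9e-05, i.e. about 0.008% per trading day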

We finish the portfolio analysis by computing the correlation and covariance matrices of the asset returns.

To start the optimization, we simulate 100,000 portfolios with random weights.

#figure out what the correlation and covariance of the assets are
def get_correlation():
    c()
    returns=get_returns()
    correlation=returns.corr()
    return correlation
def get_covariance():
    c()
    returns=get_returns()
    covariance=returns.cov()
    return covariance
#let the fun begin
def simulate_portfolios():
    c()
    port_simulations=100000
    cov_matrix=get_covariance()
    mean_returns=get_mean()
    port_returns=[]
    port_volatility=[]
    stock_weights=[]
    sharpe_ratio=[]
    d_rfr=get_risk_free_rate_daily()
    num_assets=len(tickers)
    for single_port in range(port_simulations):
        weights=np.random.random(num_assets)
        weights/=np.sum(weights)
        returns=np.dot(weights,mean_returns)
        volatility=np.sqrt(np.dot(weights.T,np.dot(cov_matrix,weights)))
        sharpe=float((returns-d_rfr)/volatility)
        port_returns.append(returns)
        port_volatility.append(volatility)
        stock_weights.append(weights)
        sharpe_ratio.append(sharpe)
    portfolio={'Returns':port_returns,
               'Volatility':port_volatility,
               'Sharpe Ratio':sharpe_ratio}
    for counter,symbol in enumerate(tickers):
        portfolio[symbol+' Weight']=[Weight[counter] for Weight in stock_weights]
    df=pd.DataFrame(portfolio)
    return df
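
A quick way to eyeball the simulation output (my own usage sketch; the column names match the DataFrame built above):

ports = simulate_portfolios()
print(ports.shape)                        # (100000, 13): returns, volatility, Sharpe ratio + 10 weight columns
print(ports.nlargest(3, 'Sharpe Ratio'))  # the three best portfolios by Sharpe ratio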

Now it's time to find the weights that maximize the Sharpe ratio along our simulated efficient frontier. At the same time, we compute a risk-parity allocation, which sizes each position so that every asset contributes the same share of total portfolio risk rather than the same share of capital.

def get_best_sharpe_port():
    c()
    all_portfolios=pd.DataFrame(simulate_portfolios())
    max_sharpe_port = all_portfolios.iloc[all_portfolios['Sharpe Ratio'].idxmax()]
    return max_sharpe_port
#risk parity
TOLERANCE = 1e-10
def _allocation_risk(weights, covariances):
    portfolio_risk = np.sqrt((weights * covariances * weights.T))[0, 0]
    return portfolio_risk
def _assets_risk_contribution_to_allocation_risk(weights, covariances):
    portfolio_risk = _allocation_risk(weights, covariances)
    assets_risk_contribution = np.multiply(weights.T, covariances * weights.T) \
        / portfolio_risk
    return assets_risk_contribution
def _risk_budget_objective_error(weights, args):
    covariances = args[0]
    assets_risk_budget = args[1]
    weights = np.matrix(weights)
    portfolio_risk = _allocation_risk(weights, covariances)
    assets_risk_contribution = \
        _assets_risk_contribution_to_allocation_risk(weights, covariances)
    assets_risk_target = \
        np.asmatrix(np.multiply(portfolio_risk, assets_risk_budget))
    error = \
        sum(np.square(assets_risk_contribution - assets_risk_target.T))[0, 0]
    return error
def _get_risk_parity_weights(covariances, assets_risk_budget, initial_weights):
    constraints = ({'type': 'eq', 'fun': lambda x: np.sum(x) - 1.0},
                   {'type': 'ineq', 'fun': lambda x: x})
    optimize_result = minimize(fun=_risk_budget_objective_error,
                               x0=initial_weights,
                               args=[covariances, assets_risk_budget],
                               method='SLSQP',
                               constraints=constraints,
                               tol=TOLERANCE,
                               options={'disp': False})
    weights = optimize_result.x
    return weights
yahoo_tickers=tickers
def get_weights(yahoo_tickers,
                start_date=start_date1,
                end_date=end_date1):
    prices = pd.DataFrame([web.DataReader(t,
                                          'yahoo',
                                          start_date,
                                          end_date).loc[:, 'Adj Close']
                           for t in yahoo_tickers],
                          index=yahoo_tickers).T.asfreq('B').ffill()
    covariances = 52.0 * \
        prices.asfreq('W-FRI').pct_change().iloc[1:, :].cov().values
    assets_risk_budget = [1 / prices.shape[1]] * prices.shape[1]
    init_weights = [1 / prices.shape[1]] * prices.shape[1]
    weights = \
        _get_risk_parity_weights(covariances, assets_risk_budget, init_weights)
    weights = pd.Series(weights, index=prices.columns, name='weight')
    return weights
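
Calling the risk-parity helper is straightforward; a minimal usage sketch (mine, not from the original post) would be:

rp_weights = get_weights(yahoo_tickers)
print(rp_weights.round(4))    # one weight per ticker
print(rp_weights.sum())       # should be ~1.0 thanks to the equality constraint

Because of the inequality constraint, every weight stays at or above zero, so the risk-parity portfolio is long-only.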

Finally, we get the optimized returns and weights for our portfolio. In this part of the code, we calculate the Sharpe ratio, the Sortino ratio (using downside deviation), and the Treynor ratio.

def get_portfolio_returns():
    c()
    returns=get_returns()
    ap=pd.DataFrame(simulate_portfolios())
    ap=ap.sort_values(by='Sharpe Ratio',ascending=False)
    w=ap[:1]
    w=w.drop(columns=['Returns','Volatility','Sharpe Ratio'])
    w=np.array(w)
    weighted_returns=(w*returns)
    port_ret=weighted_returns.sum(axis=1)
    return port_ret
#summary stats for the optimized and original ("bank") portfolios
def get_mean_portfolio_returns():
    c()
    mu=get_portfolio_returns().mean()
    return mu
def get_std_of_portfolio_returns():
    c()
    sigma=get_portfolio_returns().std()
    return sigma
def get_returns_bp():
    c()
    ret_data=get_returns()
    weighted_returns=(wts*ret_data)
    port_ret=weighted_returns.sum(axis=1)
    return port_ret
def get_mean_bp_returns():
    c()
    mu=get_returns_bp().mean()
    return mu
def get_std_of_bp():
    c()
    std=get_returns_bp().std()
    return std
def get_sharpe_ratio_for_bp():
    c()
    d_rfr=get_risk_free_rate_daily()
    returns_bp=get_returns_bp()
    mean=returns_bp.mean()
    std=returns_bp.std()
    sharpe_ratio=((mean-d_rfr)/std)
    return sharpe_ratio
def get_treynor_ratio_for_bp():
    regression_tab=pd.DataFrame(get_benchmark())
    regression_tab['Portfolio Returns']=get_returns_bp()
    x=regression_tab['^GSPC']
    y=regression_tab['Portfolio Returns']
    # regress the portfolio returns (endog) on the benchmark returns (exog) to estimate beta
    model = sm.OLS(y,x).fit()
    beta=model.params[0]
    d_risk_free=get_risk_free_rate_daily()
    treynor_ratio=(y.mean()-d_risk_free)/beta
    return treynor_ratio
def get_sortino_for_bp():
    c()
    d_rfr=get_risk_free_rate_daily()
    tab=pd.DataFrame(get_benchmark())
    tab['Portfolio Returns']=get_returns_bp()
    # downside is measured relative to the benchmark's mean daily return
    tab['Portfolio Drawdown Risk']=np.where(tab['Portfolio Returns']<tab['^GSPC'].mean(),
                                            tab['Portfolio Returns'],
                                            tab['^GSPC'].mean())
    downside_risk=tab['Portfolio Drawdown Risk'].std()
    sortino_ratio=(((tab['Portfolio Returns'].mean())-d_rfr)/downside_risk)
    return sortino_ratio
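
With these helpers in place, you can print a quick comparison for the original ("bank") allocation. Note that the numbers are daily, not annualized, because everything above works with daily returns (again, a usage sketch of mine):

print('Sharpe :', round(get_sharpe_ratio_for_bp(), 4))
print('Treynor:', round(get_treynor_ratio_for_bp(), 6))
print('Sortino:', round(get_sortino_for_bp(), 4))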

For the dashboard, we are going to use the plotly package. I decided to use plotly because I love the 3D graphs!
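
One note before the code: the summary block below also calls several helpers that are not shown in this post (get_sharpe_ratio, get_treynor_ratio, get_sortino and their _rp/_rpp variants, the calculate_value_at_risk_in_15_days* functions, and the get_cumulative_return_of_* functions). They live in the full repository. To give an idea of their shape, here is a minimal sketch of a cumulative-return helper and a parametric 15-day value-at-risk projection; the function bodies, the 95% confidence level, and the normal-distribution assumption are mine and not necessarily what the repository implements, although the 'day' and 'value' columns match what the dashboard code expects.

# hypothetical sketches only; the real implementations live in the GitHub repo
def get_cumulative_return_of_portfolio_sketch():
    # compound the daily returns of the best-Sharpe portfolio into a growth curve
    return (1 + get_portfolio_returns()).cumprod() - 1

def calculate_value_at_risk_in_15_days_sketch(confidence=0.95):
    # parametric (normal) VaR of the initial investment, projected 1 to 15 days ahead
    mu = get_mean_portfolio_returns()
    sigma = get_std_of_portfolio_returns()
    z = norm.ppf(1 - confidence)
    days = list(range(1, 16))
    value = [initial_investment * (mu * d + z * sigma * np.sqrt(d)) for d in days]
    return pd.DataFrame({'day': days, 'value': value})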

#initial settings to get the summary (fair warning: this part is slow)
sharpe_ratio = round(get_sharpe_ratio(),4)
sharpe_ratio_rp=round(get_sharpe_ratio_rp(),4)
sharpe_ratio_bp=round(get_sharpe_ratio_for_bp(),4)
treynor_ratio=round(get_treynor_ratio(),4)
treynor_ratio_rp=round(get_treynor_for_rp_port(),4)
treynor_ratio_bp=round(get_treynor_ratio_for_bp(),4)
sortino_ratio=round(get_sortino(),4)
sortino_ratio_rp=round(get_sortino_for_rpp(),4)
sortino_ratio_bp=round(get_sortino_for_bp(),4)
var=pd.DataFrame(calculate_value_at_risk_in_15_days())
var_rpp=pd.DataFrame(calculate_value_at_risk_in_15_days_rpp())
var_bp=pd.DataFrame(calculate_var_bp_in_15_days())
eff_front_df=pd.DataFrame(simulate_portfolios())
#standalone summary table (overwritten by the full dashboard figure below)
fig=go.Figure(data=[go.Table(
    header=dict(values=['Index','Sharpe Ratio Portfolio','Risk Parity Portfolio']),
    cells=dict(values=[['sharpe ratio','treynor ratio','sortino ratio'],
                       [sharpe_ratio,treynor_ratio,sortino_ratio],
                       [sharpe_ratio_rp,treynor_ratio_rp,sortino_ratio_rp]]))])
#GRAND FINALEEEEE
#define variables for the dashboard
portfolio=pd.DataFrame(get_cumulative_return_of_portfolio())
portfolio['Risk Parity Portfolio']=pd.DataFrame(get_cumulative_return_of_risk_parity())
portfolio['Bank Portfolio']=pd.DataFrame(get_cumulative_return_of_bp())
portfolio['Market Returns']=pd.DataFrame(get_cumulative_return_of_benchmark())
eff_front_df=pd.DataFrame(simulate_portfolios())
#define the main frame (what the dash is going to look like)
fig = make_subplots(
    rows=2,cols=2,
    specs=[[{'type':'scatter'},{'type':'table'}],
           [{'type':'scatter'},{'type':'scatter3d'}]],
    subplot_titles=("Cumulative returns for portfolios","Summary table",
                    "Projected value at risk","3D Efficient frontier"))
#adding traces to the main frame
fig.add_trace(go.Scatter(x=portfolio.index,y=portfolio['cumulative return of portfolio'],
                         name='Highest Sharpe Portfolio Cumulative Return'),
              row=1,col=1)
fig.add_trace(go.Scatter(x=portfolio.index,y=portfolio['Risk Parity Portfolio'],
                         name='Risk Parity Portfolio Cumulative Return'),
              row=1,col=1)
fig.add_trace(go.Scatter(x=portfolio.index,y=portfolio['Market Returns'],
                         name='Market Cumulative Return'),
              row=1,col=1)
fig.add_trace(go.Scatter(x=portfolio.index,y=portfolio['Bank Portfolio'],
                         name='Bank Portfolio Cumulative Return'),
              row=1,col=1)
fig.add_trace(go.Table(
    header=dict(values=['Index','Sharpe Ratio Portfolio','Risk Parity Portfolio','Bank Portfolio']),
    cells=dict(values=[['sharpe ratio','treynor ratio','sortino ratio'],
                       [sharpe_ratio,treynor_ratio,sortino_ratio],
                       [sharpe_ratio_rp,treynor_ratio_rp,sortino_ratio_rp],
                       [sharpe_ratio_bp,treynor_ratio_bp,sortino_ratio_bp]])),
    row=1,col=2)
fig.add_trace(go.Scatter3d(x=eff_front_df['Volatility'],y=eff_front_df['Returns'],z=eff_front_df['Sharpe Ratio'],
                           name='3D Efficient Frontier',mode='markers'),
              row=2,col=2)
fig.add_trace(go.Scatter(x=var['day'],y=var['value'],
                         name='Value at risk 15 days projections for best sharpe ratio portfolio'),
              row=2,col=1)
fig.add_trace(go.Scatter(x=var_rpp['day'],y=var_rpp['value'],
                         name='Value at risk 15 days projections for risk parity portfolio'),
              row=2,col=1)
fig.add_trace(go.Scatter(x=var_bp['day'],y=var_bp['value'],
                         name='Value at risk 15 days projections for bank portfolio'),
              row=2,col=1)
#make the dash look better
fig.update_xaxes(title_text='Date',row=1,col=1)
fig.update_yaxes(title_text='Cumulative Return, %',row=1,col=1)
fig.update_xaxes(title_text='Day projected',row=2,col=1)
fig.update_yaxes(title_text='Value at risk, $',row=2,col=1)
fig.update_xaxes(title_text='Risk',row=2,col=2)
fig.update_yaxes(title_text='Return',row=2,col=2)
fig.update_layout(width=1500, height=1000, title_text="Portfolio Summary")
fig.show()
#it takes a long time to execute, so I made a sound to signal completion (winsound is Windows-only)
duration = 1000  # milliseconds
freq = 440  # Hz
winsound.Beep(freq, duration)
#that's it.
#that’s it.

Want to test your own portfolio? Optimize it with my robo-advisor. You can find the whole code in my GitHub repository.
