mirror of
https://github.com/bspeice/bspeice.github.io
synced 2024-12-13 10:08:10 -05:00
First static commit
This commit is contained in:
parent
fa5e72823e
commit
e78a2c1bf9
124
Makefile
124
Makefile
@ -1,124 +0,0 @@
|
||||
PY?=python
|
||||
PELICAN?=pelican
|
||||
PELICANOPTS=
|
||||
|
||||
BASEDIR=$(CURDIR)
|
||||
INPUTDIR=$(BASEDIR)/content
|
||||
OUTPUTDIR=$(BASEDIR)/output
|
||||
CONFFILE=$(BASEDIR)/pelicanconf.py
|
||||
PUBLISHCONF=$(BASEDIR)/publishconf.py
|
||||
|
||||
FTP_HOST=localhost
|
||||
FTP_USER=anonymous
|
||||
FTP_TARGET_DIR=/
|
||||
|
||||
SSH_HOST=localhost
|
||||
SSH_PORT=22
|
||||
SSH_USER=root
|
||||
SSH_TARGET_DIR=/var/www
|
||||
|
||||
S3_BUCKET=my_s3_bucket
|
||||
|
||||
CLOUDFILES_USERNAME=my_rackspace_username
|
||||
CLOUDFILES_API_KEY=my_rackspace_api_key
|
||||
CLOUDFILES_CONTAINER=my_cloudfiles_container
|
||||
|
||||
DROPBOX_DIR=~/Dropbox/Public/
|
||||
|
||||
GITHUB_PAGES_BRANCH=master
|
||||
|
||||
DEBUG ?= 0
|
||||
ifeq ($(DEBUG), 1)
|
||||
PELICANOPTS += -D
|
||||
endif
|
||||
|
||||
RELATIVE ?= 0
|
||||
ifeq ($(RELATIVE), 1)
|
||||
PELICANOPTS += --relative-urls
|
||||
endif
|
||||
|
||||
help:
|
||||
@echo 'Makefile for a pelican Web site '
|
||||
@echo ' '
|
||||
@echo 'Usage: '
|
||||
@echo ' make html (re)generate the web site '
|
||||
@echo ' make clean remove the generated files '
|
||||
@echo ' make regenerate regenerate files upon modification '
|
||||
@echo ' make publish generate using production settings '
|
||||
@echo ' make serve [PORT=8000] serve site at http://localhost:8000'
|
||||
@echo ' make serve-global [SERVER=0.0.0.0] serve (as root) to $(SERVER):80 '
|
||||
@echo ' make devserver [PORT=8000] start/restart develop_server.sh '
|
||||
@echo ' make stopserver stop local server '
|
||||
@echo ' make ssh_upload upload the web site via SSH '
|
||||
@echo ' make rsync_upload upload the web site via rsync+ssh '
|
||||
@echo ' make dropbox_upload upload the web site via Dropbox '
|
||||
@echo ' make ftp_upload upload the web site via FTP '
|
||||
@echo ' make s3_upload upload the web site via S3 '
|
||||
@echo ' make cf_upload upload the web site via Cloud Files'
|
||||
@echo ' make github upload the web site via gh-pages '
|
||||
@echo ' '
|
||||
@echo 'Set the DEBUG variable to 1 to enable debugging, e.g. make DEBUG=1 html '
|
||||
@echo 'Set the RELATIVE variable to 1 to enable relative urls '
|
||||
@echo ' '
|
||||
|
||||
html:
|
||||
$(PELICAN) $(INPUTDIR) -o $(OUTPUTDIR) -s $(CONFFILE) $(PELICANOPTS)
|
||||
|
||||
clean:
|
||||
[ ! -d $(OUTPUTDIR) ] || rm -rf $(OUTPUTDIR)
|
||||
|
||||
regenerate:
|
||||
$(PELICAN) -r $(INPUTDIR) -o $(OUTPUTDIR) -s $(CONFFILE) $(PELICANOPTS)
|
||||
|
||||
serve:
|
||||
ifdef PORT
|
||||
cd $(OUTPUTDIR) && $(PY) -m pelican.server $(PORT)
|
||||
else
|
||||
cd $(OUTPUTDIR) && $(PY) -m pelican.server
|
||||
endif
|
||||
|
||||
serve-global:
|
||||
ifdef SERVER
|
||||
cd $(OUTPUTDIR) && $(PY) -m pelican.server 80 $(SERVER)
|
||||
else
|
||||
cd $(OUTPUTDIR) && $(PY) -m pelican.server 80 0.0.0.0
|
||||
endif
|
||||
|
||||
|
||||
devserver:
|
||||
ifdef PORT
|
||||
$(BASEDIR)/develop_server.sh restart $(PORT)
|
||||
else
|
||||
$(BASEDIR)/develop_server.sh restart
|
||||
endif
|
||||
|
||||
stopserver:
|
||||
$(BASEDIR)/develop_server.sh stop
|
||||
@echo 'Stopped Pelican and SimpleHTTPServer processes running in background.'
|
||||
|
||||
publish:
|
||||
$(PELICAN) $(INPUTDIR) -o $(OUTPUTDIR) -s $(PUBLISHCONF) $(PELICANOPTS)
|
||||
|
||||
ssh_upload: publish
|
||||
scp -P $(SSH_PORT) -r $(OUTPUTDIR)/* $(SSH_USER)@$(SSH_HOST):$(SSH_TARGET_DIR)
|
||||
|
||||
rsync_upload: publish
|
||||
rsync -e "ssh -p $(SSH_PORT)" -P -rvzc --delete $(OUTPUTDIR)/ $(SSH_USER)@$(SSH_HOST):$(SSH_TARGET_DIR) --cvs-exclude
|
||||
|
||||
dropbox_upload: publish
|
||||
cp -r $(OUTPUTDIR)/* $(DROPBOX_DIR)
|
||||
|
||||
ftp_upload: publish
|
||||
lftp ftp://$(FTP_USER)@$(FTP_HOST) -e "mirror -R $(OUTPUTDIR) $(FTP_TARGET_DIR) ; quit"
|
||||
|
||||
s3_upload: publish
|
||||
s3cmd sync $(OUTPUTDIR)/ s3://$(S3_BUCKET) --acl-public --delete-removed --guess-mime-type
|
||||
|
||||
cf_upload: publish
|
||||
cd $(OUTPUTDIR) && swift -v -A https://auth.api.rackspacecloud.com/v1.0 -U $(CLOUDFILES_USERNAME) -K $(CLOUDFILES_API_KEY) upload -c $(CLOUDFILES_CONTAINER) .
|
||||
|
||||
github: publish
|
||||
ghp-import -m "Generate Pelican site" -b $(GITHUB_PAGES_BRANCH) $(OUTPUTDIR)
|
||||
git push origin $(GITHUB_PAGES_BRANCH)
|
||||
|
||||
.PHONY: html help clean regenerate serve serve-global devserver publish ssh_upload rsync_upload dropbox_upload ftp_upload s3_upload cf_upload github
|
199
_nb_header.html
199
_nb_header.html
File diff suppressed because one or more lines are too long
122
archives.html
Normal file
122
archives.html
Normal file
@ -0,0 +1,122 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="description" content="">
|
||||
<meta name="keywords" content="">
|
||||
<link rel="icon" href="/favicon.ico">
|
||||
|
||||
<title> - Bradlee Speice</title>
|
||||
|
||||
<!-- Stylesheets -->
|
||||
<link href="/theme/css/bootstrap.min.css" rel="stylesheet">
|
||||
<link href="/theme/css/fonts.css" rel="stylesheet">
|
||||
<link href="/theme/css/nest.css" rel="stylesheet">
|
||||
<link href="/theme/css/pygment.css" rel="stylesheet">
|
||||
<!-- /Stylesheets -->
|
||||
|
||||
<!-- RSS Feeds -->
|
||||
<!-- /RSS Feeds -->
|
||||
|
||||
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
|
||||
<!--[if lt IE 9]>
|
||||
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
|
||||
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
|
||||
<![endif]-->
|
||||
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<!-- Header -->
|
||||
<div class="header-container gradient">
|
||||
|
||||
<!-- Static navbar -->
|
||||
<div class="container">
|
||||
<div class="header-nav">
|
||||
<div class="header-logo">
|
||||
<a class="pull-left" href="/"><img class="mr20" src="images/logo.svg" alt="logo">Bradlee Speice</a>
|
||||
</div>
|
||||
<div class="nav pull-right">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Static navbar -->
|
||||
|
||||
<!-- Header -->
|
||||
<div class="container header-wrapper">
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<div class="header-content">
|
||||
<h1 class="header-title text-uppercase"></h1>
|
||||
<div class="header-underline"></div>
|
||||
<p class="header-subtitle header-subtitle-homepage"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
|
||||
<!-- Content -->
|
||||
<div class="archive-container">
|
||||
<div class="container content archive">
|
||||
<h2><a href="/archives.html"></a></h2>
|
||||
<dl class="dl-horizontal">
|
||||
<dt>Fri 26 February 2016</dt>
|
||||
<dd><a href="/profitability-using-the-investment-formula.html">Profitability using the Investment Formula</a></dd>
|
||||
<dt>Wed 03 February 2016</dt>
|
||||
<dd><a href="/guaranteed-money-maker.html">Guaranteed Money Maker</a></dd>
|
||||
<dt>Sat 23 January 2016</dt>
|
||||
<dd><a href="/cloudy-in-seattle.html">Cloudy in Seattle</a></dd>
|
||||
<dt>Fri 01 January 2016</dt>
|
||||
<dd><a href="/complaining-about-the-weather.html">Complaining about the Weather</a></dd>
|
||||
<dt>Sat 26 December 2015</dt>
|
||||
<dd><a href="/testing-cramer.html">Testing Cramer</a></dd>
|
||||
<dt>Fri 27 November 2015</dt>
|
||||
<dd><a href="/autocallable-bonds.html">Autocallable Bonds</a></dd>
|
||||
<dt>Thu 19 November 2015</dt>
|
||||
<dd><a href="/welcome-and-an-algorithm.html">Welcome, and an algorithm</a></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Content -->
|
||||
|
||||
<!-- Footer -->
|
||||
<div class="footer gradient-2">
|
||||
<div class="container footer-container ">
|
||||
<div class="row">
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
<li><a href="https://github.com/bspeice" target="_blank">Github</a></li>
|
||||
<li><a href="https://www.linkedin.com/in/bradleespeice" target="_blank">LinkedIn</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
</div>
|
||||
<div class="col-xs-12 col-sm-3 col-md-3 col-lg-3">
|
||||
<p class="pull-right text-right">
|
||||
<small><em>Proudly powered by <a href="http://docs.getpelican.com/" target="_blank">pelican</a></em></small><br/>
|
||||
<small><em>Theme and code by <a href="https://github.com/molivier" target="_blank">molivier</a></em></small><br/>
|
||||
<small></small>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Footer -->
|
||||
</body>
|
||||
</html>
|
122
author/bradlee-speice.html
Normal file
122
author/bradlee-speice.html
Normal file
@ -0,0 +1,122 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="description" content=" Bradlee Speice">
|
||||
<meta name="keywords" content="">
|
||||
<link rel="icon" href="/favicon.ico">
|
||||
|
||||
<title> Bradlee Speice - Bradlee Speice</title>
|
||||
|
||||
<!-- Stylesheets -->
|
||||
<link href="/theme/css/bootstrap.min.css" rel="stylesheet">
|
||||
<link href="/theme/css/fonts.css" rel="stylesheet">
|
||||
<link href="/theme/css/nest.css" rel="stylesheet">
|
||||
<link href="/theme/css/pygment.css" rel="stylesheet">
|
||||
<!-- /Stylesheets -->
|
||||
|
||||
<!-- RSS Feeds -->
|
||||
<!-- /RSS Feeds -->
|
||||
|
||||
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
|
||||
<!--[if lt IE 9]>
|
||||
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
|
||||
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
|
||||
<![endif]-->
|
||||
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<!-- Header -->
|
||||
<div class="header-container gradient">
|
||||
|
||||
<!-- Static navbar -->
|
||||
<div class="container">
|
||||
<div class="header-nav">
|
||||
<div class="header-logo">
|
||||
<a class="pull-left" href="/"><img class="mr20" src="images/logo.svg" alt="logo">Bradlee Speice</a>
|
||||
</div>
|
||||
<div class="nav pull-right">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Static navbar -->
|
||||
|
||||
<!-- Header -->
|
||||
<div class="container header-wrapper">
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<div class="header-content">
|
||||
<h1 class="header-title text-uppercase">Bradlee Speice</h1>
|
||||
<div class="header-underline"></div>
|
||||
<p class="header-subtitle header-subtitle-homepage"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
|
||||
<!-- Content -->
|
||||
<div class="archive-container">
|
||||
<div class="container content archive">
|
||||
<h2><a href="/author/bradlee-speice.html"></a></h2>
|
||||
<dl class="dl-horizontal">
|
||||
<dt>Fri 26 February 2016</dt>
|
||||
<dd><a href="/profitability-using-the-investment-formula.html">Profitability using the Investment Formula</a></dd>
|
||||
<dt>Wed 03 February 2016</dt>
|
||||
<dd><a href="/guaranteed-money-maker.html">Guaranteed Money Maker</a></dd>
|
||||
<dt>Sat 23 January 2016</dt>
|
||||
<dd><a href="/cloudy-in-seattle.html">Cloudy in Seattle</a></dd>
|
||||
<dt>Fri 01 January 2016</dt>
|
||||
<dd><a href="/complaining-about-the-weather.html">Complaining about the Weather</a></dd>
|
||||
<dt>Sat 26 December 2015</dt>
|
||||
<dd><a href="/testing-cramer.html">Testing Cramer</a></dd>
|
||||
<dt>Fri 27 November 2015</dt>
|
||||
<dd><a href="/autocallable-bonds.html">Autocallable Bonds</a></dd>
|
||||
<dt>Thu 19 November 2015</dt>
|
||||
<dd><a href="/welcome-and-an-algorithm.html">Welcome, and an algorithm</a></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Content -->
|
||||
|
||||
<!-- Footer -->
|
||||
<div class="footer gradient-2">
|
||||
<div class="container footer-container ">
|
||||
<div class="row">
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
<li><a href="https://github.com/bspeice" target="_blank">Github</a></li>
|
||||
<li><a href="https://www.linkedin.com/in/bradleespeice" target="_blank">LinkedIn</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
</div>
|
||||
<div class="col-xs-12 col-sm-3 col-md-3 col-lg-3">
|
||||
<p class="pull-right text-right">
|
||||
<small><em>Proudly powered by <a href="http://docs.getpelican.com/" target="_blank">pelican</a></em></small><br/>
|
||||
<small><em>Theme and code by <a href="https://github.com/molivier" target="_blank">molivier</a></em></small><br/>
|
||||
<small></small>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Footer -->
|
||||
</body>
|
||||
</html>
|
122
authors.html
Normal file
122
authors.html
Normal file
@ -0,0 +1,122 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="description" content="">
|
||||
<meta name="keywords" content="">
|
||||
<link rel="icon" href="/favicon.ico">
|
||||
|
||||
<title> - Bradlee Speice</title>
|
||||
|
||||
<!-- Stylesheets -->
|
||||
<link href="/theme/css/bootstrap.min.css" rel="stylesheet">
|
||||
<link href="/theme/css/fonts.css" rel="stylesheet">
|
||||
<link href="/theme/css/nest.css" rel="stylesheet">
|
||||
<link href="/theme/css/pygment.css" rel="stylesheet">
|
||||
<!-- /Stylesheets -->
|
||||
|
||||
<!-- RSS Feeds -->
|
||||
<!-- /RSS Feeds -->
|
||||
|
||||
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
|
||||
<!--[if lt IE 9]>
|
||||
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
|
||||
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
|
||||
<![endif]-->
|
||||
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<!-- Header -->
|
||||
<div class="header-container gradient">
|
||||
|
||||
<!-- Static navbar -->
|
||||
<div class="container">
|
||||
<div class="header-nav">
|
||||
<div class="header-logo">
|
||||
<a class="pull-left" href="/"><img class="mr20" src="images/logo.svg" alt="logo">Bradlee Speice</a>
|
||||
</div>
|
||||
<div class="nav pull-right">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Static navbar -->
|
||||
|
||||
<!-- Header -->
|
||||
<div class="container header-wrapper">
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<div class="header-content">
|
||||
<h1 class="header-title text-uppercase"></h1>
|
||||
<div class="header-underline"></div>
|
||||
<p class="header-subtitle header-subtitle-homepage"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
|
||||
<!-- Content -->
|
||||
<div class="archive-container">
|
||||
<div class="container content archive">
|
||||
<h2><a href="/author/bradlee-speice.html">Bradlee Speice</a></h2>
|
||||
<dl class="dl-horizontal">
|
||||
<dt>Fri 26 February 2016</dt>
|
||||
<dd><a href="/profitability-using-the-investment-formula.html">Profitability using the Investment Formula</a></dd>
|
||||
<dt>Wed 03 February 2016</dt>
|
||||
<dd><a href="/guaranteed-money-maker.html">Guaranteed Money Maker</a></dd>
|
||||
<dt>Sat 23 January 2016</dt>
|
||||
<dd><a href="/cloudy-in-seattle.html">Cloudy in Seattle</a></dd>
|
||||
<dt>Fri 01 January 2016</dt>
|
||||
<dd><a href="/complaining-about-the-weather.html">Complaining about the Weather</a></dd>
|
||||
<dt>Sat 26 December 2015</dt>
|
||||
<dd><a href="/testing-cramer.html">Testing Cramer</a></dd>
|
||||
<dt>Fri 27 November 2015</dt>
|
||||
<dd><a href="/autocallable-bonds.html">Autocallable Bonds</a></dd>
|
||||
<dt>Thu 19 November 2015</dt>
|
||||
<dd><a href="/welcome-and-an-algorithm.html">Welcome, and an algorithm</a></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Content -->
|
||||
|
||||
<!-- Footer -->
|
||||
<div class="footer gradient-2">
|
||||
<div class="container footer-container ">
|
||||
<div class="row">
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
<li><a href="https://github.com/bspeice" target="_blank">Github</a></li>
|
||||
<li><a href="https://www.linkedin.com/in/bradleespeice" target="_blank">LinkedIn</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
</div>
|
||||
<div class="col-xs-12 col-sm-3 col-md-3 col-lg-3">
|
||||
<p class="pull-right text-right">
|
||||
<small><em>Proudly powered by <a href="http://docs.getpelican.com/" target="_blank">pelican</a></em></small><br/>
|
||||
<small><em>Theme and code by <a href="https://github.com/molivier" target="_blank">molivier</a></em></small><br/>
|
||||
<small></small>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Footer -->
|
||||
</body>
|
||||
</html>
|
3893
autocallable-bonds.html
Normal file
3893
autocallable-bonds.html
Normal file
File diff suppressed because one or more lines are too long
122
categories.html
Normal file
122
categories.html
Normal file
@ -0,0 +1,122 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="description" content="">
|
||||
<meta name="keywords" content="">
|
||||
<link rel="icon" href="/favicon.ico">
|
||||
|
||||
<title> - Bradlee Speice</title>
|
||||
|
||||
<!-- Stylesheets -->
|
||||
<link href="/theme/css/bootstrap.min.css" rel="stylesheet">
|
||||
<link href="/theme/css/fonts.css" rel="stylesheet">
|
||||
<link href="/theme/css/nest.css" rel="stylesheet">
|
||||
<link href="/theme/css/pygment.css" rel="stylesheet">
|
||||
<!-- /Stylesheets -->
|
||||
|
||||
<!-- RSS Feeds -->
|
||||
<!-- /RSS Feeds -->
|
||||
|
||||
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
|
||||
<!--[if lt IE 9]>
|
||||
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
|
||||
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
|
||||
<![endif]-->
|
||||
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<!-- Header -->
|
||||
<div class="header-container gradient">
|
||||
|
||||
<!-- Static navbar -->
|
||||
<div class="container">
|
||||
<div class="header-nav">
|
||||
<div class="header-logo">
|
||||
<a class="pull-left" href="/"><img class="mr20" src="images/logo.svg" alt="logo">Bradlee Speice</a>
|
||||
</div>
|
||||
<div class="nav pull-right">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Static navbar -->
|
||||
|
||||
<!-- Header -->
|
||||
<div class="container header-wrapper">
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<div class="header-content">
|
||||
<h1 class="header-title text-uppercase"></h1>
|
||||
<div class="header-underline"></div>
|
||||
<p class="header-subtitle header-subtitle-homepage"></p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
|
||||
<!-- Content -->
|
||||
<div class="archive-container">
|
||||
<div class="container content archive">
|
||||
<h2><a href="/category/blog.html">Blog</a></h2>
|
||||
<dl class="dl-horizontal">
|
||||
<dt>Fri 26 February 2016</dt>
|
||||
<dd><a href="/profitability-using-the-investment-formula.html">Profitability using the Investment Formula</a></dd>
|
||||
<dt>Wed 03 February 2016</dt>
|
||||
<dd><a href="/guaranteed-money-maker.html">Guaranteed Money Maker</a></dd>
|
||||
<dt>Sat 23 January 2016</dt>
|
||||
<dd><a href="/cloudy-in-seattle.html">Cloudy in Seattle</a></dd>
|
||||
<dt>Fri 01 January 2016</dt>
|
||||
<dd><a href="/complaining-about-the-weather.html">Complaining about the Weather</a></dd>
|
||||
<dt>Sat 26 December 2015</dt>
|
||||
<dd><a href="/testing-cramer.html">Testing Cramer</a></dd>
|
||||
<dt>Fri 27 November 2015</dt>
|
||||
<dd><a href="/autocallable-bonds.html">Autocallable Bonds</a></dd>
|
||||
<dt>Thu 19 November 2015</dt>
|
||||
<dd><a href="/welcome-and-an-algorithm.html">Welcome, and an algorithm</a></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Content -->
|
||||
|
||||
<!-- Footer -->
|
||||
<div class="footer gradient-2">
|
||||
<div class="container footer-container ">
|
||||
<div class="row">
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
<li><a href="https://github.com/bspeice" target="_blank">Github</a></li>
|
||||
<li><a href="https://www.linkedin.com/in/bradleespeice" target="_blank">LinkedIn</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
</div>
|
||||
<div class="col-xs-12 col-sm-3 col-md-3 col-lg-3">
|
||||
<p class="pull-right text-right">
|
||||
<small><em>Proudly powered by <a href="http://docs.getpelican.com/" target="_blank">pelican</a></em></small><br/>
|
||||
<small><em>Theme and code by <a href="https://github.com/molivier" target="_blank">molivier</a></em></small><br/>
|
||||
<small></small>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Footer -->
|
||||
</body>
|
||||
</html>
|
122
category/blog.html
Normal file
122
category/blog.html
Normal file
@ -0,0 +1,122 @@
|
||||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="description" content=" Blog">
|
||||
<meta name="keywords" content="">
|
||||
<link rel="icon" href="/favicon.ico">
|
||||
|
||||
<title> Blog - Bradlee Speice</title>
|
||||
|
||||
<!-- Stylesheets -->
|
||||
<link href="/theme/css/bootstrap.min.css" rel="stylesheet">
|
||||
<link href="/theme/css/fonts.css" rel="stylesheet">
|
||||
<link href="/theme/css/nest.css" rel="stylesheet">
|
||||
<link href="/theme/css/pygment.css" rel="stylesheet">
|
||||
<!-- /Stylesheets -->
|
||||
|
||||
<!-- RSS Feeds -->
|
||||
<!-- /RSS Feeds -->
|
||||
|
||||
<!-- HTML5 shim and Respond.js for IE8 support of HTML5 elements and media queries -->
|
||||
<!--[if lt IE 9]>
|
||||
<script src="https://oss.maxcdn.com/html5shiv/3.7.2/html5shiv.min.js"></script>
|
||||
<script src="https://oss.maxcdn.com/respond/1.4.2/respond.min.js"></script>
|
||||
<![endif]-->
|
||||
|
||||
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
|
||||
<!-- Header -->
|
||||
<div class="header-container gradient">
|
||||
|
||||
<!-- Static navbar -->
|
||||
<div class="container">
|
||||
<div class="header-nav">
|
||||
<div class="header-logo">
|
||||
<a class="pull-left" href="/"><img class="mr20" src="images/logo.svg" alt="logo">Bradlee Speice</a>
|
||||
</div>
|
||||
<div class="nav pull-right">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Static navbar -->
|
||||
|
||||
<!-- Header -->
|
||||
<div class="container header-wrapper">
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<div class="header-content">
|
||||
<h1 class="header-title text-uppercase"> : Blog</h1>
|
||||
<div class="header-underline"></div>
|
||||
<p class="header-subtitle header-subtitle-homepage"> "Blog"</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
</div>
|
||||
<!-- /Header -->
|
||||
|
||||
|
||||
<!-- Content -->
|
||||
<div class="archive-container">
|
||||
<div class="container content archive">
|
||||
<h2><a href="/category/blog.html">Blog</a></h2>
|
||||
<dl class="dl-horizontal">
|
||||
<dt>Fri 26 February 2016</dt>
|
||||
<dd><a href="/profitability-using-the-investment-formula.html">Profitability using the Investment Formula</a></dd>
|
||||
<dt>Wed 03 February 2016</dt>
|
||||
<dd><a href="/guaranteed-money-maker.html">Guaranteed Money Maker</a></dd>
|
||||
<dt>Sat 23 January 2016</dt>
|
||||
<dd><a href="/cloudy-in-seattle.html">Cloudy in Seattle</a></dd>
|
||||
<dt>Fri 01 January 2016</dt>
|
||||
<dd><a href="/complaining-about-the-weather.html">Complaining about the Weather</a></dd>
|
||||
<dt>Sat 26 December 2015</dt>
|
||||
<dd><a href="/testing-cramer.html">Testing Cramer</a></dd>
|
||||
<dt>Fri 27 November 2015</dt>
|
||||
<dd><a href="/autocallable-bonds.html">Autocallable Bonds</a></dd>
|
||||
<dt>Thu 19 November 2015</dt>
|
||||
<dd><a href="/welcome-and-an-algorithm.html">Welcome, and an algorithm</a></dd>
|
||||
</dl>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Content -->
|
||||
|
||||
<!-- Footer -->
|
||||
<div class="footer gradient-2">
|
||||
<div class="container footer-container ">
|
||||
<div class="row">
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
<div class="footer-title"></div>
|
||||
<ul class="list-unstyled">
|
||||
<li><a href="https://github.com/bspeice" target="_blank">Github</a></li>
|
||||
<li><a href="https://www.linkedin.com/in/bradleespeice" target="_blank">LinkedIn</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
<div class="col-xs-4 col-sm-3 col-md-3 col-lg-3">
|
||||
</div>
|
||||
<div class="col-xs-12 col-sm-3 col-md-3 col-lg-3">
|
||||
<p class="pull-right text-right">
|
||||
<small><em>Proudly powered by <a href="http://docs.getpelican.com/" target="_blank">pelican</a></em></small><br/>
|
||||
<small><em>Theme and code by <a href="https://github.com/molivier" target="_blank">molivier</a></em></small><br/>
|
||||
<small></small>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<!-- /Footer -->
|
||||
</body>
|
||||
</html>
|
850
cloudy-in-seattle.html
Normal file
850
cloudy-in-seattle.html
Normal file
File diff suppressed because one or more lines are too long
870
complaining-about-the-weather.html
Normal file
870
complaining-about-the-weather.html
Normal file
File diff suppressed because one or more lines are too long
@ -1,58 +0,0 @@
|
||||
Title: Welcome, and an algorithm
|
||||
Date: 2015-11-19
|
||||
Tags: introduction, trading
|
||||
Modified: 2015-12-05
|
||||
Category: Blog
|
||||
|
||||
<script type="text/x-mathjax-config">
|
||||
MathJax.Hub.Config({tex2jax: {inlineMath: [['$','$'], ['\(','\)']]}});
|
||||
</script>
|
||||
<script async src='https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_CHTML'></script>
|
||||
|
||||
Hello! Glad to meet you. I'm currently a student at Columbia University
|
||||
studying Financial Engineering, and want to give an overview of the projects
|
||||
I'm working on!
|
||||
|
||||
To start things off, Columbia has been hosting a trading competition that
|
||||
myself and another partner are competing in. I'm including a notebook of the
|
||||
algorithm that we're using, just to give a simple overview of a miniature
|
||||
algorithm.
|
||||
|
||||
The competition is scored in 3 areas:
|
||||
|
||||
- Total return
|
||||
- [Sharpe ratio](1)
|
||||
- Maximum drawdown
|
||||
|
||||
Our algorithm uses a basic momentum strategy: in the given list of potential
|
||||
portfolios, pick the stocks that have been performing well in the past 30
|
||||
days. Then, optimize for return subject to the drawdown being below a specific
|
||||
level. We didn't include the Sharpe ratio as a constraint, mostly because
|
||||
we were a bit late entering the competition.
|
||||
|
||||
I'll be updating this post with the results of our algorithm as they come along!
|
||||
|
||||
---
|
||||
|
||||
**UPDATE 12/5/2015**: Now that the competition has ended, I wanted to update
|
||||
how the algorithm performed. Unfortunately, it didn't do very well. I'm planning
|
||||
to make some tweaks over the coming weeks, and do another forward test in January.
|
||||
|
||||
- After week 1: Down .1%
|
||||
- After week 2: Down 1.4%
|
||||
- After week 3: Flat
|
||||
|
||||
And some statistics for all teams participating in the competition:
|
||||
|
||||
| | |
|
||||
|--------------------|--------|
|
||||
| Max Return | 74.1% |
|
||||
| Min Return | -97.4% |
|
||||
| Average Return | -.1% |
|
||||
| Std Dev of Returns | 19.6% |
|
||||
|
||||
---
|
||||
|
||||
{% notebook 2015-11-14-welcome.ipynb %}
|
||||
|
||||
[1]: https://en.wikipedia.org/wiki/Sharpe_ratio
|
@ -1,20 +0,0 @@
|
||||
Title: Autocallable Bonds
|
||||
Date: 2015-11-27
|
||||
Category: Blog
|
||||
Tags: finance, simulation, monte carlo
|
||||
Authors: Bradlee Speice
|
||||
Summary: For a final project, my group was tasked with understanding three exotic derivatives: The Athena, Phoenix without memory, and Phoenix with memory autocallable products.
|
||||
[//]: <> "Modified:"
|
||||
|
||||
<script type="text/x-mathjax-config">
|
||||
MathJax.Hub.Config({tex2jax: {inlineMath: [['$','$'], ['\(','\)']]}});
|
||||
</script>
|
||||
<script async src='https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_CHTML'></script>
|
||||
|
||||
My only non-core class this semester has been in Structure Products. We've been surveying a wide variety of products, and the final project was to pick one to report on.
|
||||
Because these are all very similar, we decided to demonstrate all 3 products at once.
|
||||
|
||||
What follows below is a notebook demonstrating the usage of [Julia](http://julialang.com) for Monte-Carlo simulation of some exotic products.
|
||||
|
||||
{% notebook 2015-11-27-autocallable.ipynb language[julia] %}
|
||||
|
@ -1,14 +0,0 @@
|
||||
Title: Testing Cramer
|
||||
Date: 2015-12-26
|
||||
Category: Blog
|
||||
Tags: futures, data science
|
||||
Authors: Bradlee Speice
|
||||
Summary:
|
||||
[//]: <> "Modified: "
|
||||
|
||||
<script type="text/x-mathjax-config">
|
||||
MathJax.Hub.Config({tex2jax: {inlineMath: [['$','$'], ['\(','\)']]}});
|
||||
</script>
|
||||
<script async src='https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_CHTML'></script>
|
||||
|
||||
{% notebook 2015-12-26-testing_cramer.ipynb %}
|
@ -1,14 +0,0 @@
|
||||
Title: Complaining about the Weather
|
||||
Date: 2016-01-01
|
||||
Category: Blog
|
||||
Tags: weather
|
||||
Authors: Bradlee Speice
|
||||
Summary: Figuring out whether people should be complaining about the recent weather in NC.
|
||||
[//]: <> "Modified: "
|
||||
|
||||
<script type="text/x-mathjax-config">
|
||||
MathJax.Hub.Config({tex2jax: {inlineMath: [['$','$'], ['\(','\)']]}});
|
||||
</script>
|
||||
<script async src='https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_CHTML'></script>
|
||||
|
||||
{% notebook 2016-1-1-complaining-about-weather.ipynb %}
|
@ -1,14 +0,0 @@
|
||||
Title: Cloudy in Seattle
|
||||
Date: 2016-01-23
|
||||
Category: Blog
|
||||
Tags: weather, data science
|
||||
Authors: Bradlee Speice
|
||||
Summary: Building on prior analysis, is Seattle's reputation as a depressing city actually well-earned?
|
||||
[//]: <> "Modified: "
|
||||
|
||||
<script type="text/x-mathjax-config">
|
||||
MathJax.Hub.Config({tex2jax: {inlineMath: [['$','$'], ['\\(','\\)']]}});
|
||||
</script>
|
||||
<script async src='https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_CHTML'></script>
|
||||
|
||||
{% notebook 2016-1-23-cloudy-in-seattle.ipynb %}
|
@ -1,14 +0,0 @@
|
||||
Title: Profitability using the Investment Formula
|
||||
Date: 2016-02-26
|
||||
Category: Blog
|
||||
Tags: algorithmic-trading, python
|
||||
Authors: Bradlee Speice
|
||||
Summary: After developing a formula to guide our investing, how do we actually evaluate its performance in the real world?
|
||||
[//]: <> "Modified: "
|
||||
|
||||
<script type="text/x-mathjax-config">
|
||||
MathJax.Hub.Config({tex2jax: {inlineMath: [['$','$'], ['\\(','\\)']]}});
|
||||
</script>
|
||||
<script async src='https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_CHTML'></script>
|
||||
|
||||
{% notebook 2016-2-26-profitability-using-the-investment-formula.ipynb %}
|
@ -1,14 +0,0 @@
|
||||
Title: Guaranteed Money Maker
|
||||
Date: 2016-02-03
|
||||
Category: Blog
|
||||
Tags: martingale, strategy
|
||||
Authors: Bradlee Speice
|
||||
Summary: Developing an investment strategy based on the Martingale betting strategy
|
||||
[//]: <> "Modified: "
|
||||
|
||||
<script type="text/x-mathjax-config">
|
||||
MathJax.Hub.Config({tex2jax: {inlineMath: [['$','$'], ['\\(','\\)']]}});
|
||||
</script>
|
||||
<script async src='https://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS_CHTML'></script>
|
||||
|
||||
{% notebook 2016-2-3-guaranteed-money-maker.ipynb %}
|
@ -1,293 +0,0 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Trading Competition Optimization\n",
|
||||
"\n",
|
||||
"### Goal: Max return given maximum Sharpe and Drawdown"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"from IPython.display import display\n",
|
||||
"import Quandl\n",
|
||||
"from datetime import datetime, timedelta\n",
|
||||
"\n",
|
||||
"tickers = ['XOM', 'CVX', 'CLB', 'OXY', 'SLB']\n",
|
||||
"market_ticker = 'GOOG/NYSE_VOO'\n",
|
||||
"lookback = 30\n",
|
||||
"d_col = 'Close'\n",
|
||||
"\n",
|
||||
"data = {tick: Quandl.get('YAHOO/{}'.format(tick))[-lookback:] for tick in tickers}\n",
|
||||
"market = Quandl.get(market_ticker)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Calculating the Return\n",
|
||||
"We first want to know how much each ticker returned over the prior period."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'CLB': -0.0016320202164526894,\n",
|
||||
" 'CVX': 0.0010319531629488911,\n",
|
||||
" 'OXY': 0.00093418904454400551,\n",
|
||||
" 'SLB': 0.00098431254720448159,\n",
|
||||
" 'XOM': 0.00044165797556096868}"
|
||||
]
|
||||
},
|
||||
"metadata": {},
|
||||
"output_type": "display_data"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"returns = {tick: data[tick][d_col].pct_change() for tick in tickers}\n",
|
||||
"\n",
|
||||
"display({tick: returns[tick].mean() for tick in tickers})"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Calculating the Sharpe ratio\n",
|
||||
"Sharpe: ${R - R_M \\over \\sigma}$\n",
|
||||
"\n",
|
||||
"We use the average return over the lookback period, minus the market average return, over the ticker standard deviation to calculate the Sharpe. Shorting a stock turns a negative Sharpe positive."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'CLB': -0.10578734457846127,\n",
|
||||
" 'CVX': 0.027303529817677398,\n",
|
||||
" 'OXY': 0.022622210057414487,\n",
|
||||
" 'SLB': 0.026950946344858676,\n",
|
||||
" 'XOM': -0.0053519259698605499}"
|
||||
]
|
||||
},
|
||||
"metadata": {},
|
||||
"output_type": "display_data"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"market_returns = market.pct_change()\n",
|
||||
"\n",
|
||||
"sharpe = lambda ret: (ret.mean() - market_returns[d_col].mean()) / ret.std()\n",
|
||||
"sharpes = {tick: sharpe(returns[tick]) for tick in tickers}\n",
|
||||
"\n",
|
||||
"display(sharpes)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Calculating the drawdown\n",
|
||||
"This one is easy - what is the maximum daily change over the lookback period? That is, because we will allow short positions, we are not concerned strictly with maximum downturn, but in general, what is the largest 1-day change?"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"{'CLB': 0.043551495607375035,\n",
|
||||
" 'CVX': 0.044894389686214398,\n",
|
||||
" 'OXY': 0.051424517867144637,\n",
|
||||
" 'SLB': 0.034774627850375328,\n",
|
||||
" 'XOM': 0.035851524605672758}"
|
||||
]
|
||||
},
|
||||
"metadata": {},
|
||||
"output_type": "display_data"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"drawdown = lambda ret: ret.abs().max()\n",
|
||||
"drawdowns = {tick: drawdown(returns[tick]) for tick in tickers}\n",
|
||||
"\n",
|
||||
"display(drawdowns)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Performing the optimization\n",
|
||||
"\n",
|
||||
"$\\begin{align}\n",
|
||||
"max\\ \\ & \\mu \\cdot \\omega\\\\\n",
|
||||
"s.t.\\ \\ & \\vec{1} \\omega = 1\\\\\n",
|
||||
"& \\vec{S} \\omega \\ge s\\\\\n",
|
||||
"& \\vec{D} \\cdot | \\omega | \\le d\\\\\n",
|
||||
"& \\left|\\omega\\right| \\le l\\\\\n",
|
||||
"\\end{align}$\n",
|
||||
"\n",
|
||||
"We want to maximize average return subject to having a full portfolio, Sharpe above a specific level, drawdown below a level, and leverage not too high - that is, don't have huge long/short positions."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'Optimization terminated successfully.'"
|
||||
]
|
||||
},
|
||||
"metadata": {},
|
||||
"output_type": "display_data"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"\"Holdings: [('XOM', 5.8337945679814904), ('CVX', 42.935064321851307), ('CLB', -124.5), ('OXY', 36.790387773552119), ('SLB', 39.940753336615096)]\""
|
||||
]
|
||||
},
|
||||
"metadata": {},
|
||||
"output_type": "display_data"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'Expected Return: 32.375%'"
|
||||
]
|
||||
},
|
||||
"metadata": {},
|
||||
"output_type": "display_data"
|
||||
},
|
||||
{
|
||||
"data": {
|
||||
"text/plain": [
|
||||
"'Expected Max Drawdown: 4.34%'"
|
||||
]
|
||||
},
|
||||
"metadata": {},
|
||||
"output_type": "display_data"
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"import numpy as np\n",
|
||||
"from scipy.optimize import minimize\n",
|
||||
"\n",
|
||||
"#sharpe_limit = .1\n",
|
||||
"drawdown_limit = .05\n",
|
||||
"leverage = 250\n",
|
||||
"\n",
|
||||
"# Use the map so we can guarantee we maintain the correct order\n",
|
||||
"# sharpe_a = np.array(list(map(lambda tick: sharpes[tick], tickers))) * -1 # So we can write as upper-bound\n",
|
||||
"dd_a = np.array(list(map(lambda tick: drawdowns[tick], tickers)))\n",
|
||||
"returns_a = np.array(list(map(lambda tick: returns[tick].mean(), tickers))) # Because minimizing\n",
|
||||
"\n",
|
||||
"meets_sharpe = lambda x: sum(abs(x) * sharpe_a) - sharpe_limit\n",
|
||||
"def meets_dd(x):\n",
|
||||
" portfolio = sum(abs(x))\n",
|
||||
" if portfolio < .1:\n",
|
||||
" # If there are no stocks in the portfolio,\n",
|
||||
" # we can accidentally induce division by 0,\n",
|
||||
" # or division by something small enough to cause infinity\n",
|
||||
" return 0\n",
|
||||
" \n",
|
||||
" return drawdown_limit - sum(abs(x) * dd_a) / sum(abs(x))\n",
|
||||
"\n",
|
||||
"is_portfolio = lambda x: sum(x) - 1\n",
|
||||
"\n",
|
||||
"def within_leverage(x):\n",
|
||||
" return leverage - sum(abs(x))\n",
|
||||
"\n",
|
||||
"objective = lambda x: sum(x * returns_a) * -1 # Because we're minimizing\n",
|
||||
"bounds = ((None, None),) * len(tickers)\n",
|
||||
"x = np.zeros(len(tickers))\n",
|
||||
"\n",
|
||||
"constraints = [\n",
|
||||
" {\n",
|
||||
" 'type': 'eq',\n",
|
||||
" 'fun': is_portfolio\n",
|
||||
" }, {\n",
|
||||
" 'type': 'ineq',\n",
|
||||
" 'fun': within_leverage\n",
|
||||
" #}, {\n",
|
||||
" # 'type': 'ineq',\n",
|
||||
" # 'fun': meets_sharpe\n",
|
||||
" }, {\n",
|
||||
" 'type': 'ineq',\n",
|
||||
" 'fun': meets_dd\n",
|
||||
" }\n",
|
||||
"]\n",
|
||||
"\n",
|
||||
"optimal = minimize(objective, x, bounds=bounds, constraints=constraints,\n",
|
||||
" options={'maxiter': 500})\n",
|
||||
"\n",
|
||||
"# Optimization time!\n",
|
||||
"display(optimal.message)\n",
|
||||
"\n",
|
||||
"display(\"Holdings: {}\".format(list(zip(tickers, optimal.x))))\n",
|
||||
"\n",
|
||||
"expected_return = optimal.fun * -100 # multiply by -100 to scale, and compensate for minimizing\n",
|
||||
"display(\"Expected Return: {:.3f}%\".format(expected_return))\n",
|
||||
"\n",
|
||||
"expected_drawdown = sum(abs(optimal.x) * dd_a) / sum(abs(optimal.x)) * 100\n",
|
||||
"display(\"Expected Max Drawdown: {0:.2f}%\".format(expected_drawdown))\n",
|
||||
"\n",
|
||||
"# TODO: Calculate expected Sharpe"
|
||||
]
|
||||
}
|
||||
],
|
||||
"metadata": {
|
||||
"kernelspec": {
|
||||
"display_name": "Python 3",
|
||||
"language": "python",
|
||||
"name": "python3"
|
||||
},
|
||||
"language_info": {
|
||||
"codemirror_mode": {
|
||||
"name": "ipython",
|
||||
"version": 3
|
||||
},
|
||||
"file_extension": ".py",
|
||||
"mimetype": "text/x-python",
|
||||
"name": "python",
|
||||
"nbconvert_exporter": "python",
|
||||
"pygments_lexer": "ipython3",
|
||||
"version": "3.5.0"
|
||||
}
|
||||
},
|
||||
"nbformat": 4,
|
||||
"nbformat_minor": 0
|
||||
}
|
File diff suppressed because one or more lines are too long
@ -1,428 +0,0 @@
|
||||
{
|
||||
"cells": [
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 1,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"import requests\n",
|
||||
"import pandas as pd\n",
|
||||
"import numpy as np\n",
|
||||
"from dateutil import parser as dtparser\n",
|
||||
"from dateutil.relativedelta import relativedelta\n",
|
||||
"from datetime import datetime\n",
|
||||
"from html.parser import HTMLParser\n",
|
||||
"from copy import copy\n",
|
||||
"import Quandl"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Testing Cramer\n",
|
||||
"\n",
|
||||
"Pursuant to attending a graduate school studying Financial Engineering, I've been a fan of the [Mad Money][1] TV show featuring the bombastic Jim Cramer. One of the things that he's said is that you shouldn't use the futures to predict where the stock market is going to go. But he says it often enough, I've begun to wonder - who is he trying to convince?\n",
|
||||
"\n",
|
||||
"It makes sense that because futures on things like the S&P 500 are traded continuously, they would price in market information before the stock market opens. So is Cramer right to be convinced that strategies based on the futures are a poor idea? I wanted to test it out.\n",
|
||||
"\n",
|
||||
"The first question is where to get the futures data. I've been part of [Seeking Alpha][2] for a bit, and they publish the [Wall Street Breakfast][3] newsletter which contains daily futures returns as of 6:20 AM EST. I'd be interested in using that data to see if we can actually make some money.\n",
|
||||
"\n",
|
||||
"First though, let's get the data:\n",
|
||||
"\n",
|
||||
"# Downloading Futures data from Seeking Alpha\n",
|
||||
"\n",
|
||||
"We're going to define two HTML parsing classes - one to get the article URL's from a page, and one to get the actual data from each article.\n",
|
||||
"\n",
|
||||
"[1]: http://www.cnbc.com/mad-money/\n",
|
||||
"[2]: http://seekingalpha.com/\n",
|
||||
"[3]: http://seekingalpha.com/author/wall-street-breakfast?s=wall-street-breakfast"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 2,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"class ArticleListParser(HTMLParser):\n",
|
||||
" \"\"\"Given a web page with articles on it, parse out the article links\"\"\"\n",
|
||||
" \n",
|
||||
" articles = []\n",
|
||||
" \n",
|
||||
" def handle_starttag(self, tag, attrs):\n",
|
||||
" #if tag == 'div' and (\"id\", \"author_articles_wrapper\") in attrs:\n",
|
||||
" # self.fetch_links = True\n",
|
||||
" if tag == 'a' and ('class', 'dashboard_article_link') in attrs:\n",
|
||||
" href = list(filter(lambda x: x[0] == 'href', attrs))[0][1]\n",
|
||||
" self.articles.append(href)\n",
|
||||
" \n",
|
||||
"base_url = \"http://seekingalpha.com/author/wall-street-breakfast/articles\"\n",
|
||||
"article_page_urls = [base_url] + [base_url + '/{}'.format(i) for i in range(2, 20)]\n",
|
||||
"\n",
|
||||
"global_articles = []\n",
|
||||
"for page in article_page_urls:\n",
|
||||
" # We need to switch the user agent, as SA blocks the standard requests agent\n",
|
||||
" articles_html = requests.get(page,\n",
|
||||
" headers={\"User-Agent\": \"Wget/1.13.4\"})\n",
|
||||
" parser = ArticleListParser()\n",
|
||||
" parser.feed(articles_html.text)\n",
|
||||
" global_articles += (parser.articles)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 3,
|
||||
"metadata": {
|
||||
"collapsed": false,
|
||||
"scrolled": true
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"class ArticleReturnParser(HTMLParser):\n",
|
||||
" \"Given an article, parse out the futures returns in it\"\n",
|
||||
" \n",
|
||||
" record_font_tags = False\n",
|
||||
" in_font_tag = False\n",
|
||||
" counter = 0\n",
|
||||
" # data = {} # See __init__\n",
|
||||
" \n",
|
||||
" def __init__(self, *args, **kwargs):\n",
|
||||
" super().__init__(*args, **kwargs)\n",
|
||||
" self.data = {}\n",
|
||||
" \n",
|
||||
" def handle_starttag(self, tag, attrs):\n",
|
||||
" if tag == 'span' and ('itemprop', 'datePublished') in attrs:\n",
|
||||
" date_string = list(filter(lambda x: x[0] == 'content', attrs))[0][1]\n",
|
||||
" date = dtparser.parse(date_string)\n",
|
||||
" self.data['date'] = date\n",
|
||||
" \n",
|
||||
" self.in_font_tag = tag == 'font'\n",
|
||||
" \n",
|
||||
" def safe_float(self, string):\n",
|
||||
" try:\n",
|
||||
" return float(string[:-1]) / 100\n",
|
||||
" except ValueError:\n",
|
||||
" return np.NaN\n",
|
||||
" \n",
|
||||
" def handle_data(self, content):\n",
|
||||
" if not self.record_font_tags and \"Futures at 6\" in content:\n",
|
||||
" self.record_font_tags = True\n",
|
||||
" \n",
|
||||
" if self.record_font_tags and self.in_font_tag:\n",
|
||||
" if self.counter == 0:\n",
|
||||
" self.data['DOW'] = self.safe_float(content)\n",
|
||||
" elif self.counter == 1:\n",
|
||||
" self.data['S&P'] = self.safe_float(content)\n",
|
||||
" elif self.counter == 2:\n",
|
||||
" self.data['NASDAQ'] = self.safe_float(content)\n",
|
||||
" elif self.counter == 3:\n",
|
||||
" self.data['Crude'] = self.safe_float(content)\n",
|
||||
" elif self.counter == 4:\n",
|
||||
" self.data['Gold'] = self.safe_float(content)\n",
|
||||
" \n",
|
||||
" self.counter += 1\n",
|
||||
" \n",
|
||||
" def handle_endtag(self, tag):\n",
|
||||
" self.in_font_tag = False\n",
|
||||
"\n",
|
||||
"def retrieve_data(url):\n",
|
||||
" sa = \"http://seekingalpha.com\"\n",
|
||||
" article_html = requests.get(sa + url,\n",
|
||||
" headers={\"User-Agent\": \"Wget/1.13.4\"})\n",
|
||||
" parser = ArticleReturnParser()\n",
|
||||
" parser.feed(article_html.text)\n",
|
||||
" parser.data.update({\"url\": url})\n",
|
||||
" parser.data.update({\"text\": article_html.text})\n",
|
||||
" return parser.data\n",
|
||||
"\n",
|
||||
"# This copy **MUST** be in place. I'm not sure why,\n",
|
||||
"# as you'd think that the data being returned would already\n",
|
||||
"# represent a different memory location. Even so, it blows up\n",
|
||||
"# if you don't do this.\n",
|
||||
"article_list = list(set(global_articles))\n",
|
||||
"article_data = [copy(retrieve_data(url)) for url in article_list]\n",
|
||||
"# If there's an issue downloading the article, drop it.\n",
|
||||
"article_df = pd.DataFrame.from_dict(article_data).dropna()"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Fetching the Returns data\n",
|
||||
"\n",
|
||||
"Now that we have the futures data, we're going to compare across 4 different indices - the S&P 500 index, Dow Jones Industrial, Russell 2000, and NASDAQ 100. Let's get the data off of Quandl to make things easier!"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 4,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [],
|
||||
"source": [
|
||||
"# article_df is sorted by date, so we get the first row.\n",
|
||||
"start_date = article_df.sort_values(by='date').iloc[0]['date'] - relativedelta(days=1)\n",
|
||||
"SPY = Quandl.get(\"GOOG/NYSE_SPY\", trim_start=start_date)\n",
|
||||
"DJIA = Quandl.get(\"GOOG/AMS_DIA\", trim_start=start_date)\n",
|
||||
"RUSS = Quandl.get(\"GOOG/AMEX_IWM\", trim_start=start_date)\n",
|
||||
"NASDAQ = Quandl.get(\"GOOG/EPA_QQQ\", trim_start=start_date)"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"# Running the Comparison\n",
|
||||
"\n",
|
||||
"There are two types of tests I want to determine: How accurate each futures category is at predicting the index's opening change over the close before, and predicting the index's daily return.\n",
|
||||
"\n",
|
||||
"Let's first calculate how good each future is at predicting the opening return over the previous day. I expect that the futures will be more than 50% accurate, since the information is recorded 3 hours before the markets open."
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "code",
|
||||
"execution_count": 5,
|
||||
"metadata": {
|
||||
"collapsed": false
|
||||
},
|
||||
"outputs": [
|
||||
{
|
||||
"name": "stdout",
|
||||
"output_type": "stream",
|
||||
"text": [
|
||||
"Articles Checked: \n",
|
||||
" DJIA NASDAQ RUSS SPY\n",
|
||||
"Crude 268 268 271 271\n",
|
||||
"DOW 268 268 271 271\n",
|
||||
"Gold 268 268 271 271\n",
|
||||
"NASDAQ 268 268 271 271\n",
|
||||
"S&P 268 268 271 271\n",
|
||||
"\n",
|
||||
"Prediction Accuracy:\n",
|
||||
" DJIA NASDAQ RUSS SPY\n",
|
||||
"Crude 0.544776 0.522388 0.601476 0.590406\n",
|
||||
"DOW 0.611940 0.604478 0.804428 0.841328\n",
|
||||
"Gold 0.462687 0.455224 0.464945 0.476015\n",
|
||||
"NASDAQ 0.615672 0.608209 0.797048 0.830258\n",
|
||||
"S&P 0.604478 0.597015 0.811808 0.848708\n"
|
||||
]
|
||||
}
|
||||
],
|
||||
"source": [
|
||||
"def calculate_opening_ret(frame):\n",
|
||||
" # I'm not a huge fan of the appending for loop,\n",
|
||||
" # but it's a bit verbose for a comprehension\n",
|
||||
" data = {}\n",
|
||||
" for i in range(1, len(frame)):\n",
|
||||
" date = frame.iloc[i].name\n",
|
||||
" prior_close = frame.iloc[i-1]['Close']\n",
|
||||
" open_val = frame.iloc[i]['Open']\n",
|
||||
" data[date] = (open_val - prior_close) / prior_close\n",
|
||||
" \n",
|
||||
" return data\n",
|
||||
"\n",
|
||||
"SPY_open_ret = calculate_opening_ret(SPY)\n",
|
||||
"DJIA_open_ret = calculate_opening_ret(DJIA)\n",
|
||||
"RUSS_open_ret = calculate_opening_ret(RUSS)\n",
|
||||
"NASDAQ_open_ret = calculate_opening_ret(NASDAQ)\n",
|
||||
"\n",
|
||||
"def signs_match(list_1, list_2):\n",
|
||||
" # This is a surprisingly difficult task - we have to match\n",
|
||||
" # up the dates in order to check if opening returns actually match\n",
|
||||
" index_dict_dt = {key.to_datetime(): list_2[key] for key in list_2.keys()}\n",
|
||||
" \n",
|
||||
" matches = []\n",
|
||||
" for row in list_1.iterrows():\n",
|
||||
" row_dt = row[1][1]\n",
|
||||
" row_value = row[1][0]\n",
|
||||
" index_dt = datetime(row_dt.year, row_dt.month, row_dt.day)\n",
|
||||
" if index_dt in list_2:\n",
|
||||
" index_value = list_2[index_dt]\n",
|
||||
" if (row_value > 0 and index_value > 0) or \\\n",
|
||||
" (row_value < 0 and index_value < 0) or \\\n",
|
||||
" (row_value == 0 and index_value == 0):\n",
|
||||
" matches += [1]\n",
|
||||
" else:\n",
|
||||
" matches += [0]\n",
|
||||
" #print(\"{}\".format(list_2[index_dt]))\n",
|
||||
" return matches\n",
|
||||
" \n",
|
||||
" \n",
|
||||
"prediction_dict = {}\n",
|
||||
"matches_dict = {}\n",
|
||||
"count_dict = {}\n",
|
||||
"index_dict = {\"SPY\": SPY_open_ret, \"DJIA\": DJIA_open_ret, \"RUSS\": RUSS_open_ret, \"NASDAQ\": NASDAQ_open_ret}\n",
|
||||
"indices = [\"SPY\", \"DJIA\", \"RUSS\", \"NASDAQ\"]\n",
|
||||
"futures = [\"Crude\", \"Gold\", \"DOW\", \"NASDAQ\", \"S&P\"]\n",
|
||||
"for index in indices:\n",
|
||||
" matches_dict[index] = {future: signs_match(article_df[[future, 'date']],\n",
|
||||
" index_dict[index]) for future in futures}\n",
|
||||
" count_dict[index] = {future: len(matches_dict[index][future]) for future in futures}\n",
|
||||
" prediction_dict[index] = {future: np.mean(matches_dict[index][future])\n",
|
||||
" for future in futures}\n",
|
||||
"print(\"Articles Checked: \")\n",
|
||||
"print(pd.DataFrame.from_dict(count_dict))\n",
|
||||
"print()\n",
|
||||
"print(\"Prediction Accuracy:\")\n",
|
||||
"print(pd.DataFrame.from_dict(prediction_dict))"
|
||||
]
|
||||
},
|
||||
{
|
||||
"cell_type": "markdown",
|
||||
"metadata": {},
|
||||
"source": [
|
||||
"This data is very interesting. Some insights:\n",
|
||||
"\n",
|
||||
"- Both DOW and NASDAQ futures are pretty bad at predicting their actual market openings\n",
|
||||
"- NASDAQ and Dow are fairly unpredictable; Russell 2000 and S&P are very predictable\n",
|
||||
"- Gold is a poor predictor in general - intuitively Gold should move inverse to the market, but it appears to be about as accurate as a coin flip.\n",
|
||||
"\n",
|
||||
"All said though it appears that futures data is important for determining market direction for both the S&P 500 and Russell 2000. Cramer is half-right: futures data isn't very helpful for the Dow and NASDAQ indices, but is great for the S&P and Russell indices.\n",
|
||||
"\n",
|
||||
"# The next step - Predicting the close\n",
|
||||
"\n",
|
||||
"Given the code we currently have, I'd like to predict the close of the market as well. We can re-use most of the code, so let's see what happens:"
|
||||
]
|
||||