This repository has been archived by the owner on Apr 27, 2022. It is now read-only.

Use Student's T to totally overfit expected stock returns #10

Open · wants to merge 1 commit into master
1 change: 1 addition & 0 deletions — requirements.txt

@@ -2,3 +2,4 @@ Flask==0.12
 Flask-SocketIO==2.8.2
 gevent==1.2.1
 gevent-websocket==0.9.5
+scipy==0.17.1
15 changes: 8 additions & 7 deletions — src/app.py

@@ -45,8 +45,9 @@ def simstep(pf, params, prng):
     symbol.
     """
     def daily_return(sym):
-        mean, stddev = params[sym]
-        change = (prng.normalvariate(mean, stddev) + 100) / 100.0
+        from scipy.stats import t
+        df, loc, scale = params[sym]
+        change = (t.ppf(prng.uniform(0, 1), df=df, loc=loc, scale=scale) + 100) / 100.0
         return change
     return {s: daily_return(s) * v for s, v in pf.items()}

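The new daily_return draws by inverse-transform sampling: prng.uniform(0, 1) picks a quantile and scipy.stats.t.ppf (the inverse CDF) maps it to a percentage change under the fitted Student's t. A minimal standalone sketch of the same step, with made-up parameter values rather than fitted ones:

    import random
    from scipy.stats import t

    prng = random.Random(42)
    df, loc, scale = 3.5, 0.05, 1.2  # hypothetical values; the app gets these from t.fit

    # Inverse-transform sampling: uniform quantile -> t-distributed draw via the inverse CDF
    u = prng.uniform(0, 1)
    pct_change = t.ppf(u, df=df, loc=loc, scale=scale)

    # scipy could also draw directly:
    # pct_change = t.rvs(df=df, loc=loc, scale=scale)

Going through prng rather than t.rvs presumably keeps all randomness under the simulation's own seeded generator.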
@@ -93,6 +94,7 @@ def processing_loop(spark_master, input_queue, output_queue, wikieod_file):
     import pyspark
     from pyspark import sql as pysql
     from pyspark.sql import functions as pyfuncs
+    from scipy.stats import t

     spark = pysql.SparkSession.builder.master(spark_master).getOrCreate()
     sc = spark.sparkContext
@@ -104,11 +106,10 @@ def processing_loop(spark_master, input_queue, output_queue, wikieod_file):
         'change', (pyfuncs.col('close') / pyfuncs.lag('close', 1).over(
             pysql.Window.partitionBy('ticker').orderBy(
                 df['date'])) - 1.0) * 100)

-    mv = ddf.groupBy('ticker').agg(pyfuncs.avg('change').alias('mean'),
-                                   pyfuncs.sqrt(pyfuncs.variance('change')).alias('stddev'))
-
-    dist_map = mv.rdd.map(lambda r: (r[0], (r[1], r[2]))).collectAsMap()
+    changes = ddf.groupBy("ticker").agg(pyfuncs.collect_list("change").alias("changes"))
+
+    dist_map = changes.rdd.map(lambda r: (r[0], t.fit(r[1]))).collectAsMap()

     priceDF = ddf.orderBy('date', ascending=False).groupBy('ticker').agg(
         pyfuncs.first('close').alias('price'),
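Per ticker, collect_list gathers the full series of daily percentage changes onto the driver, and scipy.stats.t.fit estimates the Student's t parameters by maximum likelihood, returning the (df, loc, scale) triple that simstep later unpacks. A small sketch of that fit in isolation, on synthetic returns rather than real market data:

    from scipy.stats import t

    # Synthetic daily percentage changes for one ticker (illustration only)
    changes = [0.4, -1.2, 0.1, 2.5, -0.3, 0.8, -4.1, 0.2, 0.6, -0.9]

    # Maximum-likelihood fit; for t this yields (df, loc, scale)
    df_, loc, scale = t.fit(changes)

A fat-tailed t fitted per ticker captures extreme moves better than a normal with the same mean and stddev, though fitting each ticker's entire history this way is exactly the overfitting the PR title jokes about.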