Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Init snapshot #1

Open
wants to merge 16 commits into
base: as_arrays
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from 10 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -10,4 +10,7 @@ outTest/
*.iws
*.iml
tmp/
**/.DS_Store
**/.DS_Store
*.pyc
*.pyo
*.csv
4 changes: 3 additions & 1 deletion build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,9 @@ java {
dependencies {
implementation platform("io.deephaven:deephaven-bom:${dhcVersion}"),
"io.deephaven:deephaven-engine-table",
"it.unimi.dsi:fastutil:8.5.13"
"it.unimi.dsi:fastutil:8.5.13",
"io.deephaven:deephaven-engine-tuple",
"io.deephaven:deephaven-engine-tuplesource"
runtimeOnly "io.deephaven:deephaven-log-to-slf4j",
'ch.qos.logback:logback-classic:1.4.5'
}
Expand Down
Binary file added docker/data/Quotes.parquet
Binary file not shown.
11 changes: 5 additions & 6 deletions docker/data/storage/notebooks/Example.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,9 @@
from deephaven import TableReplayer
import bookbuilder

# NOTE(review): this span is diff residue — it contains BOTH the pre-change
# (unsuffixed) and post-change (underscore-suffixed) versions of the same
# replay pipeline. Only one of the two should survive in the final file.
# `read` is imported above this view (presumably deephaven.parquet.read) — TODO confirm.
static_data = read("/data/Quotes.parquet")
rp = TableReplayer("2017-08-25T09:30:00 ET", "2017-08-25T23:59:59 ET")
ticking_data = rp.add_table(static_data, "Timestamp")
rp.start()
# Post-change variant: same pipeline under new names.
static_data_ = read("/data/Quotes.parquet")
rp_ = TableReplayer("2017-08-25T09:30:00 ET", "2017-08-25T23:59:59 ET")
ticking_data_ = rp_.add_table(static_data_, "Timestamp")
rp_.start()

# Pre-change book: keyed by "Key"; references the pre-change ticking table.
book = bookbuilder.build_book(ticking_data) \
.last_by("Key")
# Post-change book: keyed by "Sym", renamed to "SYMB" for downstream consumers.
book_ = bookbuilder.build_book(ticking_data_).last_by("Sym").rename_columns("SYMB=Sym")
Binary file not shown.
26 changes: 26 additions & 0 deletions docker/data/storage/notebooks/arrays.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
from deephaven.parquet import read
from deephaven import TableReplayer, merge
import bookbuilder

# Event-type glossary:
# OAK: new order ack; CRAK: cancel replace ack; CC: order cancel;
# INF: internal fill; AWF: away market fill.

# Map event-type labels to the integer op codes expected by the book builder.
EVT_map = {"Order Ack": 1, "Cancel Replace Ack": 1}

# BUG FIX: the original chained .update(...) off an undefined name
# `order_sample` (NameError at import time) while the parquet read result
# sat unused in `static_data_`. The update chain must start from the table
# actually read from disk.
static_data = read("/data/Quotes.parquet") \
    .update("EPOCH_TS = Instant.ofEpochSecond((long) (EPOCH_TS/SECOND), EPOCH_TS % SECOND)") \
    .update_view("EVT_ID = EVT_map.containsKey(EVT_TYP) ? (int) EVT_map[EVT_TYP] : null")

# Replay the static table as a ticking table keyed on the event timestamp.
rp = TableReplayer("2024-10-10T02:30:00 ET", "2024-10-25T02:40:00 ET")
ticking_data = rp.add_table(static_data, "EPOCH_TS")
rp.start()

# Build a depth-1 book grouped by symbol from the replayed stream.
book = bookbuilder.build_book(source=ticking_data,
                              book_depth=1,
                              timestamp_col="EPOCH_TS",
                              size_col="QTY",
                              side_col="SIDE",
                              op_col="EVT_ID",
                              price_col="PRC",
                              group_cols=["SYMB"])
20 changes: 17 additions & 3 deletions docker/data/storage/notebooks/bookbuilder.py
Original file line number Diff line number Diff line change
@@ -1,17 +1,31 @@
from typing import Union, List

import jpy
from deephaven.table import Table

_J_BookBuilder = jpy.get_type("io.deephaven.book.PriceBook")

def build_book(source: Table,
               book_depth: int = 2,
               batch_timestamps: bool = False,
               timestamp_col: str = "Timestamp",
               size_col: str = "Size",
               side_col: str = "Side",
               op_col: str = "Op",
               price_col: str = "Price",
               group_cols: Union[str, List[str], None] = None) -> Table:
    """Build a ticking price book from a stream of order events.

    Delegates to the Java ``io.deephaven.book.PriceBook`` builder.

    Args:
        source: table of order events, one row per event.
        book_depth: number of price levels retained per side (default 2).
        batch_timestamps: passed through to the Java builder — presumably
            groups events sharing a timestamp; TODO confirm semantics.
        timestamp_col: name of the event-timestamp column.
        size_col: name of the order-size column.
        side_col: name of the buy/sell side column.
        op_col: name of the operation/event-id column.
        price_col: name of the price column.
        group_cols: column name(s) to group books by; defaults to ["Sym"].

    Returns:
        a deephaven Table wrapping the live book produced by the Java builder.
    """
    # Avoid a shared mutable default argument; resolve the conventional
    # default at call time instead.
    if group_cols is None:
        group_cols = ["Sym"]
    return Table(_J_BookBuilder.build(source.j_object, book_depth, batch_timestamps,
                                      timestamp_col, size_col, side_col,
                                      op_col, price_col, group_cols))

def build_book_with_snap(source: Table,
                         snapshot: Table,
                         book_depth: int = 2,
                         batch_timestamps: bool = False,
                         timestamp_col: str = "Timestamp",
                         size_col: str = "Size",
                         side_col: str = "Side",
                         op_col: str = "Op",
                         price_col: str = "Price",
                         group_cols: Union[str, List[str], None] = None) -> Table:
    """Build a ticking price book seeded from a previously captured snapshot.

    Same contract as :func:`build_book`, but the Java builder is initialized
    with ``snapshot`` so the new book starts from that prior state instead
    of empty.

    Args:
        source: table of order events, one row per event.
        snapshot: a static table holding the prior book state to seed from.
        book_depth: number of price levels retained per side (default 2).
        batch_timestamps: passed through to the Java builder.
        timestamp_col: name of the event-timestamp column.
        size_col: name of the order-size column.
        side_col: name of the buy/sell side column.
        op_col: name of the operation/event-id column.
        price_col: name of the price column.
        group_cols: column name(s) to group books by; defaults to ["Sym"].

    Returns:
        a deephaven Table wrapping the live, snapshot-seeded book.
    """
    # Avoid a shared mutable default argument.
    if group_cols is None:
        group_cols = ["Sym"]
    # BUG FIX: the original had two consecutive return statements; the first
    # omitted `snapshot` and made the snapshot-aware return unreachable.
    # Only the snapshot-aware call is kept.
    return Table(_J_BookBuilder.build(source.j_object, snapshot.j_object, book_depth,
                                      batch_timestamps, timestamp_col, size_col,
                                      side_col, op_col, price_col, group_cols))
72 changes: 72 additions & 0 deletions docker/data/storage/notebooks/client_book.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,72 @@
'''
Client order-book prototype (work in progress).

Open questions for review:
  - In fills, should the contract order limit price or the non-contract
    price be used?
  - Which order id is canonical: ORD_ID_PUB_PAR or ORD_ID_PUB?
  - Side encoding: does 1 == BUY and 2 == SELL?
  - How should the SNAPSHOT path be handled?
'''

from deephaven.parquet import read
from deephaven import TableReplayer, merge, read_csv
import bookbuilder

# Event-type glossary:
# OAK: new order ack;
# CRAK: cancel replace ack;
# CC: order cancel;
# INF: internal fill;
# AWF: away market fill.

# Map event-type labels to integer op codes for the book builder.
# NOTE(review): currently unused below — EVT_ID is hard-coded to 1 while
# debugging (see the commented-out containsKey expression further down).
EVT_map = {"Order Ack": 1, "Cancel Replace Ack" : 2, "Cancel Order": 3, "Internal Fill": 4, "Away Market Fill": 5}

# Consolidate everything into a single table
# NOTE(review): the *_raw tables are never referenced again; each CSV is
# re-read below with a .view() projection. Consider dropping these or
# reusing them to avoid reading each file twice.
order_sample_raw = read_csv("/data/order_sample.csv")
fill_sample_raw = read_csv("/data/fill_sample.csv")
cancel_sample_raw = read_csv("/data/cancel_sample.csv")

# Normalize the three event feeds to a common column schema
# (EVT_TYP, SYMB, EPOCH_TS, ORD_ID, ORD_QTY, EXEC_QTY, CXL_QTY, PRC, SIDE).
order_sample = read_csv("/data/order_sample.csv").view(["EVT_TYP", "SYMB", "EPOCH_TS", "ORD_ID=CLIENT_ORD_ID", "ORD_QTY=QTY", "EXEC_QTY=(int) null", "CXL_QTY=(int) null", "PRC", "SIDE"])
fill_sample = read_csv("/data/fill_sample.csv").view(["EVT_TYP", "SYMB", "EPOCH_TS", "ORD_ID=ORD_ID_PUB", "ORD_QTY", "EXEC_QTY", "CXL_QTY=(int) null", "PRC=ORD_LIMIT_PRC", "SIDE=ORD_SIDE"])
cancel_sample = read_csv("/data/cancel_sample.csv").view(["EVT_TYP", "SYMB", "EPOCH_TS", "ORD_ID=(long) ORD_CLIENT_ORD_ID", "ORD_QTY","EXEC_QTY=(int) null", "CXL_QTY", "PRC=ORD_LIMIT_PRC", "SIDE=ORD_SIDE"])

# Merge all feeds, convert the epoch timestamp to an Instant, and sort by
# time. EVT_ID is pinned to 1 for now; the intended mapping was:
# EVT_ID = EVT_map.containsKey(EVT_TYP) ? (int) EVT_map[EVT_TYP] : null
all_events = merge([order_sample, fill_sample, cancel_sample])\
.update_view(["EPOCH_TS = Instant.ofEpochSecond((long) (EPOCH_TS/SECOND), EPOCH_TS % SECOND)",
"EVT_ID = 1"])\
.sort("EPOCH_TS")

# Replay the merged feed as a ticking table.
rp = TableReplayer("2024-10-10T02:30:00 ET", "2024-10-25T02:40:00 ET")
ticking_data = rp.add_table(all_events, "EPOCH_TS")
rp.start()

# Try with just orders...
# NOTE(review): this rebinds rp/ticking_data, orphaning the merged-feed
# replayer above — presumably a debugging step; confirm which path is wanted.
order_sample = order_sample.update_view(["EPOCH_TS = Instant.ofEpochSecond((long) (EPOCH_TS/SECOND), EPOCH_TS % SECOND)",
"EVT_ID = 1"])

rp = TableReplayer("2024-10-10T02:30:00 ET", "2024-10-25T02:40:00 ET")
ticking_data = rp.add_table(order_sample, "EPOCH_TS")
rp.start()


# NOTE(review): the book is built from the STATIC order_sample, not the
# ticking_data replay created just above — looks like a leftover from
# debugging; verify before merging.
book = bookbuilder.build_book(source=order_sample,\
book_depth = 2,\
timestamp_col = "EPOCH_TS",\
size_col = "ORD_QTY",\
side_col = "SIDE",\
op_col = "EVT_ID",\
price_col = "PRC",\
group_cols = ["SYMB"]).last_by("SYMB")













53 changes: 53 additions & 0 deletions docker/data/storage/notebooks/test_snapshot.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# Exercise the snapshot-seeded book builder: build a static "old" book from
# events before a cutoff, then replay the remaining events into a new book
# seeded from that snapshot and compare behavior.
from deephaven.parquet import read
from deephaven import TableReplayer, merge, read_csv
import bookbuilder

# Event-type glossary:
# OAK: new order ack;
# CRAK: cancel replace ack;
# CC: order cancel;
# INF: internal fill;
# AWF: away market fill.


# NOTE(review): unused in this script — EVT_ID is hard-coded to 1 below.
EVT_map = {"Order Ack": 1, "Cancel Replace Ack" : 2, "Cancel Order": 3, "Internal Fill": 4, "Away Market Fill": 5}

# Project the order feed onto the common book-builder schema.
order_sample = read_csv("/data/order_sample.csv").view(["EVT_TYP", "SYMB", "EPOCH_TS", "ORD_ID=CLIENT_ORD_ID", "ORD_QTY=QTY", "EXEC_QTY=(int) null", "CXL_QTY=(int) null", "PRC", "SIDE"])

# Try with just orders... make everything a 1
order_sample = order_sample.update_view(["EPOCH_TS = Instant.ofEpochSecond((long) (EPOCH_TS/SECOND), EPOCH_TS % SECOND)",
"EVT_ID = 1"])


# Get some old book
# Split the feed at a cutoff instant: everything before it seeds the
# snapshot book; everything at/after it is replayed live.
old_data = order_sample.where("EPOCH_TS < '2024-10-10T02:30:01.007 ET'")
# .update_view("SYMB = `AMZN`")

# Static depth-3 book over the pre-cutoff events; last_by collapses to one
# row per symbol (the final book state).
old_book = bookbuilder.build_book(old_data,\
book_depth = 3,\
timestamp_col = "EPOCH_TS",\
size_col = "ORD_QTY",\
side_col = "SIDE",\
op_col = "EVT_ID",\
price_col = "PRC",\
group_cols = ["SYMB"]).last_by("SYMB")
# old_book = old_book.snapshot()


new_data = order_sample.where("EPOCH_TS >= '2024-10-10T02:30:01.007 ET'")

# Replay the post-cutoff events as a ticking table.
rp = TableReplayer("2024-10-10T02:29:55 ET", "2024-10-25T02:40:00 ET")
ticking_data = rp.add_table(new_data, "EPOCH_TS")
rp.start()

# Make new book starting with old one
book = bookbuilder.build_book_with_snap(source=ticking_data,\
snapshot = old_book,\
book_depth = 3,\
timestamp_col = "EPOCH_TS",\
size_col = "ORD_QTY",\
side_col = "SIDE",\
op_col = "EVT_ID",\
price_col = "PRC",\
group_cols = ["SYMB"]).last_by("SYMB")


Loading