Implement REST API, WebSocket, and Frontend #6

Open · wants to merge 18 commits into main
Changes from all commits
2 changes: 2 additions & 0 deletions .gitignore
@@ -11,6 +11,8 @@ db.sqlite3
/data/
/mseed/
*.mseed
*.zip
*.pick

# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.

4 changes: 4 additions & 0 deletions .prettierrc
@@ -0,0 +1,4 @@
{
"tabWidth": 2,
"useTabs": false
}
87 changes: 87 additions & 0 deletions docker-compose.kafka.yml
@@ -0,0 +1,87 @@
version: "3.3"

services:
zookeeper:
restart: always
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
image: confluentinc/cp-zookeeper:latest
ports:
- "2181:2181/tcp"
volumes:
- ./zookeeper/data:/data
- ./zookeeper/data/datalog:/datalog

kafka:
restart: always
environment:
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://34.128.127.171:9092
KAFKA_BROKER_ID: 1
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_JMX_HOSTNAME: 34.128.127.171
KAFKA_JMX_PORT: 9999
image: confluentinc/cp-kafka:latest
user: root
ports:
- "9092:9092"
- "9999:9999"
volumes:
- ./kafka/data/kafka-1:/var/lib/kafka/data
depends_on:
- zookeeper

kafka-2:
restart: always
environment:
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-2:29092,PLAINTEXT_HOST://34.128.127.171:9093
KAFKA_BROKER_ID: 2
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_JMX_HOSTNAME: 34.128.127.171
KAFKA_JMX_PORT: 9999
image: confluentinc/cp-kafka:latest
user: root
ports:
- "9093:9093"
- "9998:9999"
volumes:
- ./kafka/data/kafka-2:/var/lib/kafka/data
depends_on:
- zookeeper

kafka-3:
restart: always
environment:
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka-3:29092,PLAINTEXT_HOST://34.128.127.171:9094
KAFKA_BROKER_ID: 3
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_JMX_HOSTNAME: 34.128.127.171
KAFKA_JMX_PORT: 9999
image: confluentinc/cp-kafka:latest
user: root
ports:
- "9094:9094"
- "9997:9999"
volumes:
- ./kafka/data/kafka-3:/var/lib/kafka/data
depends_on:
- zookeeper
- ./cassandra-sink/kafka-connect/plugins:/data/connectors

# Utility
kafka-ui:
image: provectuslabs/kafka-ui
container_name: kafka-ui
ports:
- "9080:8080"
restart: always
environment:
KAFKA_CLUSTERS_0_NAME: local
KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: PLAINTEXT://kafka:29092,PLAINTEXT://kafka-2:29092,PLAINTEXT://kafka-3:29092
KAFKA_CLUSTERS_0_SCHEMAREGISTRY: http://kafka-schema-registry:8081/
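
Clients on the host reach these brokers through the advertised PLAINTEXT_HOST listeners. A hedged sketch of a producer, assuming the kafka-python package (the PR does not show which client library the backend uses); the broker IP is the one hardcoded in the compose file above, and the topic name is illustrative:

from kafka import KafkaProducer

# Sketch only: bootstrap servers mirror the PLAINTEXT_HOST listeners above.
producer = KafkaProducer(
    bootstrap_servers=[
        "34.128.127.171:9092",
        "34.128.127.171:9093",
        "34.128.127.171:9094",
    ]
)
producer.send("seismic-trace", b"payload")  # "seismic-trace" is a hypothetical topic
producer.flush()
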
41 changes: 40 additions & 1 deletion docker-compose.yml
@@ -1,11 +1,18 @@
version: '3'

services:
  nginx_lb:
    image: nginx
    ports:
      - 8080:8080
    volumes:
      - ./nginx/nginx.conf:/etc/nginx/nginx.conf

  mongodb:
    image: mongo
    restart: always
    ports:
      - 27018:27017
      - 27017:27017
    volumes:
      - ./data/mongodb:/data/db

@@ -35,6 +42,36 @@ services:
      - ./data/zookeeper/data:/data
      - ./data/zookeeper/data/datalog:/datalog

  spark-master:
    image: risw/spark-python:v-1
    ports:
      # NOTE: host port 8080 is also published by nginx_lb above; one of the
      # two mappings must be changed before both services can run together.
      - "8080:8080"
    environment:
      - SPARK_MODE=master
    volumes:
      - ./data/spark/master:/data

  spark-worker-1:
    image: risw/spark-python:v-1
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://spark-master:7077
    depends_on:
      - spark-master
    volumes:
      - ./data/spark/worker-1:/data

  spark-worker-2:
    image: risw/spark-python:v-1
    environment:
      - SPARK_MODE=worker
      - SPARK_MASTER_URL=spark://spark-master:7077
    depends_on:
      - spark-master
    volumes:
      - ./data/spark/worker-2:/data

  kafka:
    restart: always
    environment:
@@ -65,3 +102,5 @@ services:
    environment:
      KAFKA_CLUSTERS_0_NAME: local
      KAFKA_CLUSTERS_0_BOOTSTRAPSERVERS: kafka:29092
      KAFKA_CLUSTERS_1_NAME: cloud
      KAFKA_CLUSTERS_1_BOOTSTRAPSERVERS: 34.101.119.214:9092,34.101.119.214:9093,34.101.119.214:9094
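
The Spark services follow a standard master/worker layout, with both workers pointed at spark://spark-master:7077. A sketch of how a job on the compose network could attach to the master, assuming the risw/spark-python image ships PySpark (the image name suggests it, but the PR does not confirm):

from pyspark.sql import SparkSession

# "eews-job" is an illustrative app name; the master URL matches
# SPARK_MASTER_URL in the compose file above.
spark = (
    SparkSession.builder
    .master("spark://spark-master:7077")
    .appName("eews-job")
    .getOrCreate()
)
print(spark.range(10).count())  # trivial smoke job
spark.stop()
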
2 changes: 0 additions & 2 deletions eews_backend/database/__init__.py
@@ -1,2 +0,0 @@
from .mongodb import *
from .influxdb import *
10 changes: 4 additions & 6 deletions eews_backend/database/influxdb.py
@@ -1,5 +1,5 @@
from influxdb_client.client.write_api import SYNCHRONOUS
from influxdb_client import InfluxDBClient, WriteOptions
from influxdb_client import InfluxDBClient
from dotenv import load_dotenv
import os

@@ -12,8 +12,6 @@
INFLUXDB_URL = os.getenv("INFLUXDB_URL")
INFLUXDB_TOKEN = os.getenv("INFLUXDB_TOKEN")

client = InfluxDBClient(
    url=INFLUXDB_URL,
    org=INFLUXDB_ORG,
    token=INFLUXDB_TOKEN
)


def influx_client():
    return InfluxDBClient(url=INFLUXDB_URL, org=INFLUXDB_ORG, token=INFLUXDB_TOKEN)
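
The refactor above replaces a module-level client, created at import time even when the environment variables are unset, with an influx_client() factory whose lifetime each caller controls. A minimal usage sketch; the "seismic" bucket name is hypothetical:

from influxdb_client import Point
from influxdb_client.client.write_api import SYNCHRONOUS

# Sketch: uses only the factory shown above; "seismic" is an illustrative bucket.
client = influx_client()
write_api = client.write_api(write_options=SYNCHRONOUS)
write_api.write(bucket="seismic", record=Point("trace").field("amplitude", 0.5))
client.close()
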
12 changes: 7 additions & 5 deletions eews_backend/database/mongodb.py
@@ -7,12 +7,14 @@

DEFAULT_MONGO_DATABASE = "db"
MONGO_URL = os.getenv("MONGO_URL")
MONGO_DATABASE = os.getenv("MONGO_DATABASE") if os.getenv("MONGO_DATABASE") else DEFAULT_MONGO_DATABASE
MONGO_DATABASE = (
    os.getenv("MONGO_DATABASE")
    if os.getenv("MONGO_DATABASE")
    else DEFAULT_MONGO_DATABASE
)

client = motor_asyncio.AsyncIOMotorClient(MONGO_URL)
db = client[MONGO_DATABASE]

def new_client():


def mongo_client():
    mongo_url = MONGO_URL
    mongo_db = MONGO_DATABASE
    client = motor_asyncio.AsyncIOMotorClient(mongo_url)
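
The body of mongo_client() is truncated by the diff above; assuming it returns the client and database handle, like the module-level client and db it sits beside, usage would look like this sketch (the "station" collection name is illustrative):

# Assumption: mongo_client() returns (client, db); the diff truncates its body.
client, db = mongo_client()

async def list_stations():
    # Motor cursors are asynchronous; to_list bounds how many documents are fetched.
    return await db["station"].find().to_list(100)
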
85 changes: 85 additions & 0 deletions eews_backend/generator/main.py
@@ -0,0 +1,85 @@
from typing import BinaryIO, List
import requests
from obspy import read, Stream, UTCDateTime, Trace
import time
import os
import shutil
import threading
import multiprocessing

MSEED_FOLDER = "eews_backend/mseed/"
SOURCE_MSEED = "eews_backend/generator/20150920_151412.mseed"
REST_URL = "http://127.0.0.1:8000"
MSEED_RANGE_IN_SECONDS = 10

process_list = []


def main():
    split_mseed()
    global process_list
    print("Start sending file to", REST_URL)
    # One sender process per station folder produced by split_mseed().
    for station in os.listdir(MSEED_FOLDER):
        send_process = multiprocessing.Process(target=send, args=(station,))
        send_process.name = station
        process_list.append(send_process)
    for process in process_list:
        process.start()


def send(station: str):
    folder = f"{MSEED_FOLDER}{station}/"
    BHE: List[str] = os.listdir(f"{folder}BHE/")
    BHN: List[str] = os.listdir(f"{folder}BHN/")
    BHZ: List[str] = os.listdir(f"{folder}BHZ/")

    for index in range(len(BHE)):
        bhe_mseed: BinaryIO = open(f"{folder}BHE/{BHE[index]}", "rb")
        bhn_mseed: BinaryIO = open(f"{folder}BHN/{BHN[index]}", "rb")
        bhz_mseed: BinaryIO = open(f"{folder}BHZ/{BHZ[index]}", "rb")

        threading.Thread(
            target=post, args=(f"{REST_URL}/mseed", {"file": bhe_mseed})
        ).start()
        threading.Thread(
            target=post, args=(f"{REST_URL}/mseed", {"file": bhn_mseed})
        ).start()
        threading.Thread(
            target=post, args=(f"{REST_URL}/mseed", {"file": bhz_mseed})
        ).start()

        # Throttle to the chunk length so playback approximates real time.
        time.sleep(10)


def post(url, files):
    requests.post(url, files=files)


def split_mseed():
    print("Mseed will be saved to folder", MSEED_FOLDER)
    print("Splitting mseed")
    st: Stream = read(SOURCE_MSEED)
    first_starttime = min([trace.stats["starttime"] for trace in st])
    dt = UTCDateTime(first_starttime)
    last_endtime = max([trace.stats["endtime"] for trace in st])
    trace: Trace
    # Wipe output from any previous run; ignore_errors covers the first run.
    shutil.rmtree(MSEED_FOLDER, ignore_errors=True)
    while dt <= last_endtime:
        trimmed = st.slice(dt, dt + MSEED_RANGE_IN_SECONDS)
        for trace in trimmed:
            stats = trace.stats
            filename = f"{MSEED_FOLDER}{stats['station']}/{stats['channel']}/{dt.strftime('%Y%m%d')}_{stats['starttime'].strftime('%H%M%S')}.mseed"
            os.makedirs(os.path.dirname(filename), exist_ok=True)
            trace.write(filename=filename, format="MSEED")
        dt += MSEED_RANGE_IN_SECONDS
    print("Finished splitting mseed")


if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        for process in process_list:
            process.terminate()
            process.join()
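
The generator POSTs each trace to {REST_URL}/mseed as a multipart upload. The receiving endpoint is not part of this diff; a minimal sketch of what it could look like, assuming FastAPI (consistent with the Dockerfile below, which runs uvicorn rest.main:app):

import io

from fastapi import FastAPI, File, UploadFile
from obspy import read

app = FastAPI()  # hypothetical; the real rest.main:app is not shown in this diff

@app.post("/mseed")
async def receive_mseed(file: UploadFile = File(...)):
    # obspy can parse miniSEED from an in-memory file-like object.
    stream = read(io.BytesIO(await file.read()))
    return {"traces": len(stream)}
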
File renamed without changes.
72 changes: 0 additions & 72 deletions eews_backend/requirements2.txt

This file was deleted.

11 changes: 11 additions & 0 deletions eews_backend/rest/Dockerfile
@@ -0,0 +1,11 @@
FROM python:3.10-slim

ENV APP_HOME /app
WORKDIR $APP_HOME
COPY . ./

ENV PORT 8000
EXPOSE $PORT
RUN pip install --no-cache-dir -r requirements.txt

CMD exec uvicorn rest.main:app --host 0.0.0.0 --port ${PORT} --workers 8
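
One usage note: --workers 8 starts eight independent worker processes, so module-level state (for example the Mongo client created at import time in database/mongodb.py) exists once per worker. A quick host-side smoke test, assuming port 8000 is published and rest.main:app is a FastAPI app serving its auto-generated docs:

import requests

# Hypothetical smoke test; adjust host/port to match how the container is run.
resp = requests.get("http://127.0.0.1:8000/docs", timeout=5)
print(resp.status_code)  # expect 200 when the service is up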