Skip to content

Commit

Permalink
Update main.py
Browse files Browse the repository at this point in the history
  • Loading branch information
vshie authored Oct 4, 2023
1 parent 9a563df commit 3fd0b49
Showing 1 changed file with 104 additions and 58 deletions.
162 changes: 104 additions & 58 deletions app/main.py
Original file line number Diff line number Diff line change
@@ -1,60 +1,106 @@
#!/usr/bin/env python3

from flask import Flask, render_template, send_file
import requests
import logging.handlers
from pathlib import Path
from litestar import Litestar, get, MediaType
from litestar.controller import Controller
from litestar.datastructures import State
from litestar.logging import LoggingConfig
from litestar.static_files.config import StaticFilesConfig

class CountController(Controller):
    """Counter endpoints: an in-memory count and a service-persisted count."""

    # Key under which the persistent count is stored in the BlueOS
    # "Bag of Holding" key-value service.
    COUNT_VAR = 'quickstart_backend_perm_count'

    def __init__(self, *args, **kwargs):
        # Per-controller counter; resets whenever the backend restarts.
        self._temp_count = 0
        super().__init__(*args, **kwargs)

    @get("/temp_count", sync_to_thread=False)
    def increment_temp_count(self) -> dict[str, int]:
        """Bump the in-memory counter and return its new value."""
        bumped = self._temp_count + 1
        self._temp_count = bumped
        return {"value": bumped}

# Increment a counter that survives restarts by round-tripping it through
# the BlueOS "Bag of Holding" service (runs on a worker thread since it
# does blocking HTTP I/O).
# NOTE(review): this paste interleaves two versions of the file — the body
# of this handler resumes further below, after unrelated lines.
@get("/persistent_count", sync_to_thread=True)
def increment_persistent_count(self, state: State) -> dict[str, int]:
# read the existing persistent count value (from the BlueOS "Bag of Holding" service API)
# NOTE(review): the three imports below belong to the other (Flask)
# version of this file, not to this method.
import csv
import time
from datetime import datetime

# --- Flask application and module-level logging state ---
app = Flask(__name__)

# Global variable to control the logging thread
logging_active = False
# REST endpoints serving MAVLink DISTANCE_SENSOR and GLOBAL_POSITION_INT
# messages (presumably a mavlink2rest service on the vehicle — confirm).
distance_url = 'http://localhost:6040/mavlink/vehicles/1/components/194/messages/DISTANCE_SENSOR'
gps_url = 'http://localhost:6040/mavlink/vehicles/1/components/1/messages/GLOBAL_POSITION_INT'
# CSV output path, relative to the process working directory.
log_file = 'sensor_data.csv'
# Target samples per second for the logging loop (see time.sleep below).
log_rate = 2

# Define the feedback interval (in seconds)
feedback_interval = 5

# Initialize a counter for the number of rows added
# NOTE(review): main() shadows this with its own local row_counter, so this
# module-level value is never updated.
row_counter = 0
@app.route('/')
def home():
    """Serve the extension's index page."""
    # NOTE(review): render_template resolves against the templates/ folder,
    # so this expects templates/static/index.html — confirm the layout.
    page = render_template('static/index.html')
    return page

@app.route('/start')
def start_logging():
    """Begin sensor logging in a background thread.

    Returns:
        'Started' immediately, whether or not logging was already active.

    The original implementation called main() inline, which blocked this
    request handler for as long as the logging loop ran — the HTTP response
    was never delivered until logging stopped.
    """
    import threading  # local import keeps this fix self-contained

    global logging_active
    if not logging_active:
        logging_active = True
        # Daemon thread: the loop polls logging_active and exits after
        # /stop, and it will not keep the process alive on shutdown.
        threading.Thread(target=main, daemon=True).start()
    return 'Started'

@app.route('/stop')
def stop_logging():
    """Signal the logging loop to finish; it exits on its next iteration."""
    global logging_active
    logging_active = False
    return 'Stopped'

@app.route('/download')
def download_file():
    """Send the accumulated sensor CSV to the client as an attachment."""
    # Use the shared log_file constant instead of repeating the filename,
    # so the download path stays in sync with what main() writes.
    return send_file(log_file, as_attachment=True)

# Sensor-logging loop: polls the MAVLink REST endpoints and appends rows to
# the CSV until logging_active is cleared by the /stop route.
# NOTE(review): in this paste the function body is interleaved with lines
# from another version of the file; it resumes further below.
def main():
# Initialize a counter for the number of rows added
# (local; shadows the module-level row_counter)
row_counter = 0

# Main loop for logging data
while (logging_active == True):
try:
# Continuation of CountController.increment_persistent_count (the method
# header appears earlier in this interleaved paste): fetch the stored
# count from the Bag of Holding service.
response = requests.get(f'{state.bag_url}/get/{self.COUNT_VAR}')
response.raise_for_status()
value = response.json()['value']
except Exception as e: # TODO: specifically except HTTP error 400 (using response.status_code?)
# Any failure (key missing, service unreachable, bad JSON) restarts the
# count from zero — presumably intended only for the missing-key case.
value = 0
value += 1
# write the incremented value back out
# NOTE(review): the POST's status is not checked; a failed write is silent.
output = {'value': value}
requests.post(f'{state.bag_url}/set/{self.COUNT_VAR}', json=output)
return output

# --- Litestar application wiring ---
# Route this module's log records through litestar's queue listener handler.
logging_config = LoggingConfig(
loggers={
__name__: dict(
level='INFO',
handlers=['queue_listener'],
)
},
)

# Rotating file log: one 64 KiB (2**16 bytes) file plus one backup, stored
# under /root/.config/logs so it survives inside the container filesystem.
log_dir = Path('/root/.config/logs')
log_dir.mkdir(parents=True, exist_ok=True)
fh = logging.handlers.RotatingFileHandler(log_dir / 'lumber.log', maxBytes=2**16, backupCount=1)

app = Litestar(
route_handlers=[CountController],
# host.docker.internal — presumably the Bag of Holding service runs on the
# host, outside this extension's container; confirm deployment topology.
state=State({'bag_url':'http://host.docker.internal/bag/v1.0'}),
static_files_config=[
# Serve the frontend from app/static at the site root, with index.html
# fallback (html_mode).
StaticFilesConfig(directories=['app/static'], path='/', html_mode=True)
],
logging_config=logging_config,
)

# Attach the rotating file handler after app creation so file logging is
# added on top of the queue-listener configuration above.
app.logger.addHandler(fh)
# Continuation of main(): one iteration of the logging loop.
# Send GET requests to the REST APIs
distance_response = requests.get(distance_url)
gps_response = requests.get(gps_url)

# Check if the requests were successful
if distance_response.status_code == 200 and gps_response.status_code == 200:
# Extract the data from the responses
distance_data = distance_response.json()['message']
gps_data = gps_response.json()['message']

# Define the column labels for the log file
column_labels = ['Unix Timestamp', 'Year', 'Month', 'Day', 'Hour', 'Minute', 'Second', 'Distance (cm)', 'Latitude', 'Longitude']

# Extract the values for each column
timestamp = int(time.time() * 1000) # Convert current time to milliseconds
dt = datetime.fromtimestamp(timestamp / 1000) # Convert timestamp to datetime object
unix_timestamp = timestamp
year, month, day, hour, minute, second = dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second

# lat/lon arrive as integers scaled by 1e7 (MAVLink GLOBAL_POSITION_INT
# convention), hence the division to get degrees.
distance = distance_data['current_distance']
latitude = gps_data['lat'] / 1e7
longitude = gps_data['lon'] / 1e7

column_values = [unix_timestamp, year, month, day, hour, minute, second, distance, latitude, longitude]

# Create or append to the log file and write the data
with open(log_file, 'a', newline='') as csvfile:
writer = csv.writer(csvfile)

# Write the column labels as the header row (only for the first write)
# (tell() == 0 in append mode means the file is currently empty)
if csvfile.tell() == 0:
writer.writerow(column_labels)

# Write the data as a new row
writer.writerow(column_values)

# Increment the row counter
row_counter += 1

else:
# Print an error message if any of the requests were unsuccessful
print(f"Error: Distance - {distance_response.status_code} - {distance_response.reason}")
print(f"Error: GPS - {gps_response.status_code} - {gps_response.reason}")

# Provide feedback every 5 seconds
# NOTE(review): this check runs every iteration even when no row was added,
# so after a failed request (row_counter unchanged, still a multiple of
# log_rate * feedback_interval) the message repeats; it also fires at 0.
if row_counter % (log_rate * feedback_interval) == 0:
print(f"Rows added to CSV: {row_counter}")

# Wait for the specified log rate
# NOTE(review): sleeping a fixed 1/log_rate ignores the HTTP/file latency
# above, so the effective sample rate is somewhat below log_rate Hz.
time.sleep(1 / log_rate)

except Exception as e:
print(f"An error occurred: {e}")
# NOTE(review): breaking here leaves logging_active True, so /start will
# refuse to restart logging until /stop is called first — confirm intended.
break

if __name__ == '__main__':
# WARNING: debug=True enables the Werkzeug interactive debugger (arbitrary
# code execution if reachable) while 0.0.0.0 binds all interfaces — this
# combination must not be exposed beyond a trusted network.
app.run(host='0.0.0.0', debug=True)

0 comments on commit 3fd0b49

Please sign in to comment.