forked from goodrobots/vision_landing
-
Notifications
You must be signed in to change notification settings - Fork 7
/
Copy pathvision_landing
executable file
·1428 lines (1206 loc) · 56.3 KB
/
vision_landing
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
vision_landing
https://github.com/kripper/vision-landing-2
This python script connects to arducopter using dronekit, to control precision landing.
The control is purely visual using pose estimation from fiducial markers (eg. aruco or april tags), no additional rangefinder is needed.
It launches a separate program track_targets which does the actual cv work and captures the resulting vectors from non-blocking thread handler.
TODO:
- Controller:
- Detect velocity using last measurements after eg. 2 [s] (to remove the transient)
- Measure total time for different durations (eg. 1 [s] and 2 [s]) to calculate the transient at start and end of motion.
- We assume the transients are the same for different durations
- This way, we can model the total time and velocity-output required for a given distance and time
- Stop descending if marker is getting out of sight (drone should center before continue descending)
"""
from threading import Thread, Event
from Queue import LifoQueue, Queue, Empty
from subprocess import Popen, PIPE
from dronekit import connect, VehicleMode, LocationGlobal, LocationGlobalRelative
from pymavlink import mavutil
from time import sleep, time
from datetime import datetime, timedelta
from sys import exit, stdout, stderr
from os import path, makedirs, symlink, remove
from math import pi
from re import sub, search
from collections import deque
from math import sqrt
import signal
import logging
import ctypes
import math
import atexit
import numpy as np
import sys
# Feature switch: when True, the external SmartLanding controller computes the
# landing corrections; when False, raw landing_target messages are sent instead.
useSmartLanding = True
if useSmartLanding:
    import os
    # Make the sibling SmartLanding checkout importable.
    dir_path = os.path.dirname(os.path.realpath(__file__))
    sys.path.append(dir_path + '/../SmartLanding/')
    #from smart_landing.drone_controller import DroneController, Target
    from smart_landing.drone_controller import DroneController, Target, Simulator
    # TODO: Tune
    cameraDelay = 5
    actionDelay = 2
    drone = DroneController(False, cameraDelay, actionDelay)
    # Hard-disabled simulator hookup, kept for debugging.
    # NOTE(review): if useSmartLanding is ever set to False, `sim`, `drone` and
    # `os` are left undefined but are referenced elsewhere in this file - verify.
    if False:
        sim = Simulator(cameraDelay, False)
        sim.interactive = True
        sim.renderAbsolute = True
        drone.simulator = sim
    else:
        sim = None
### ================================
### Define Functions
### ================================
def printNoLF(text):
    """Print *text* without a trailing newline and flush immediately.

    Python 2/3 compatible replacement for print(text, end="").
    Renamed the parameter from `str`, which shadowed the builtin.
    """
    sys.stdout.write(text)
    sys.stdout.flush()
# Convert forward,right -> north,east
# r is clockwise in radians with 0 pointing north
# Use negative r to convert north,east -> forward,right
def rotate(forward, right, r):
    """Rotate the body-frame vector (forward, right) by yaw angle r.

    Returns the (north, east) components of the same vector.
    """
    cos_r = math.cos(r)
    sin_r = math.sin(r)
    north = forward * cos_r - right * sin_r
    east = forward * sin_r + right * cos_r
    return north, east
# Get shortest angular direction between two angles
def getRotationDirection(alpha, beta):
    """Return the signed shortest rotation (degrees) from beta to alpha.

    The magnitude is the shortest angular distance; the sign gives the
    direction of that rotation.
    """
    wrapped = abs(beta - alpha) % 360  # Either the distance or 360 - distance
    magnitude = 360 - wrapped if wrapped > 180 else wrapped
    diff = alpha - beta
    if (0 <= diff <= 180) or (-360 <= diff <= -180):
        return magnitude
    return -magnitude
# Return correctly typed config parser option
def get_config_value(parser, section, option):
    """Parses config value as python type.

    Tries int, then float, then boolean; falls back to the raw string
    when none of the typed getters accept the value.
    """
    for typed_getter in (parser.getint, parser.getfloat, parser.getboolean):
        try:
            return typed_getter(section, option)
        except ValueError:
            continue
    return parser.get(section, option)
# Setup monotonic clock
# clockid for clock_gettime(2): raw, NTP-unadjusted monotonic clock (Linux-specific)
CLOCK_MONOTONIC_RAW = 4
class timespec(ctypes.Structure):
    # Mirror of the C `struct timespec` filled in by clock_gettime(2).
    _fields_ = [
        ('tv_sec', ctypes.c_long),
        ('tv_nsec', ctypes.c_long)
    ]
librt = ctypes.CDLL('librt.so.1', use_errno=True)
clock_gettime = librt.clock_gettime
clock_gettime.argtypes = [ctypes.c_int, ctypes.POINTER(timespec)]
def monotonic_time():
    """Return the raw monotonic clock time in nanoseconds.

    Raises:
        OSError: if the underlying clock_gettime(2) call fails.
    """
    t = timespec()
    if clock_gettime(CLOCK_MONOTONIC_RAW, ctypes.pointer(t)) != 0:
        errno_ = ctypes.get_errno()
        # BUGFIX: `os` is only imported at module level when useSmartLanding
        # is enabled, so import it locally for the error path.
        import os
        raise OSError(errno_, os.strerror(errno_))
    return (t.tv_sec * 1e+9) + t.tv_nsec
### ================================
### End Define Functions
### ================================
### ================================
### Define Classes
### ================================
class Stats:
    """Per-frame target telemetry: position, velocity and yaw rate.

    compute() is fed one sample per vision frame; reset() clears the
    sample history (e.g. when the target is lost).
    """
    def __init__(self):
        self.yaw = 0
        # BUGFIX: initialise the sample-history attributes immediately so
        # compute() cannot hit an AttributeError before the first reset().
        self.reset()
    def reset(self):
        """Clear the sample history used for velocity/yaw-rate estimation."""
        self.lastX = None
        self.lastTime = None
        self.stillCounter = 0
        self.waitUntilAtRest = False
        self.lastTargetVisible = None
        self.printTelemetry = False
    def compute(self, nowTime, targetVisible, north, east):
        """Ingest one sample (nowTime in ns, position in metres NE).

        Updates self.x, self.dx, self.sec, self.v and self.yawRate from the
        previous sample; resets history when the target is not visible.
        """
        # Log visibility transitions (only after at least one visible sample).
        if self.lastTargetVisible is not None and targetVisible != self.lastTargetVisible:
            if targetVisible:
                print("Target visible")
            else:
                print("Target not visible")
        self.lastTargetVisible = targetVisible
        if not targetVisible:
            self.reset()
            return
        self.x = np.array([east, north])
        if self.lastX is not None:
            self.dx = self.x - self.lastX # Distance between samples
            self.sec = (nowTime - self.lastTime) / 1e+9 # Time between samples
            self.v = np.linalg.norm(self.dx) / self.sec # Velocity between samples
            if self.printTelemetry:
                print("\tv: " + str(self.v))
            self.yawRate = (self.yaw - self.lastYaw) / self.sec
        else:
            self.dx = np.array([0,0])
        self.lastX = self.x
        self.lastTime = nowTime
        self.lastYaw = self.yaw
# Threaded Reader class connects the output from the track_targets process to a non-blocking reader, and adds any messages to a queue.
# The queue is then read by the main loop and if anything is waiting it calls a method to process from the track_targets object.
class ThreadedReader:
    def __init__(self, stdout, stderr):
        # Pipes from the track_targets child process.
        self.stdout = stdout
        self.stderr = stderr
        # LIFO queue: the consumer always gets the most recent line first.
        self.queue = LifoQueue()
        self._stop = Event()
        # Reader loop shared by both pipe threads; closes over self.
        # Each line is tagged with the local monotonic receive time (ns).
        def insertQueue(pipe, queue, qtype="stdout"):
            while not self.stopped():
                self.clear() # Clear the queue before adding anything - we only want the latest data
                line = pipe.readline().rstrip() + ":{:.0f}".format(monotonic_time())
                if args.verbose:
                    log.debug("Adding line: {} : {}".format(qtype, line))
                if line and qtype == "stdout":
                    queue.put(line)
                elif line and qtype == "stderr":
                    # Prefix stderr lines so the consumer can route them to the error log.
                    queue.put("error:" + line)
        # One daemon thread per pipe, so a blocking readline() never stalls the main loop.
        self.threadout = Thread(target = insertQueue, args = (self.stdout, self.queue))
        self.threadout.daemon = True
        self.threadout.start()
        self.threaderr = Thread(target = insertQueue, args = (self.stderr, self.queue, "stderr"))
        self.threaderr.daemon = True
        self.threaderr.start()
    def size(self):
        # Number of unread lines currently queued.
        return self.queue.qsize()
    def readline(self, timeout = None):
        # Pop the newest line. Blocks up to `timeout` seconds when given;
        # otherwise returns None immediately if the queue is empty.
        try:
            line = self.queue.get(block = timeout is not None, timeout = timeout)
            self.queue.task_done()
            return line
        except Empty:
            log.debug("empty queue")
            return None
    def clear(self):
        # Drop backlog, keeping at most 100 queued lines.
        #with self.queue.mutex:
        # self.queue.clear()
        # TODO: BUG: Important line ("initcomp") is added and immediately removed without being processed, because of multiple OpenCV/Gstreamer initialization messages.
        #while self.size() > 10:
        while self.size() > 100:
            # self.readline()
            tmpline = self.queue.get(False)
            log.debug("clearing queue: {} : {}".format(self.size(), tmpline))
            self.queue.task_done()
        return
    def stop(self):
        # Signal both reader threads to exit their loops.
        self._stop.set()
    def stopped(self):
        return self._stop.isSet()
# TrackTime class is used to synchronise time between the companion computer and the flight controller.
# This is necessary to ensure that the vision frames are matched as closely as possible with the inertial frames.
# Note: This requires a Craft object to be passed, in order to perform the actual sync.
class TrackTime:
    def __init__(self, craft):
        self.fctime_nanos = 0    # Latest flight-controller time estimate (ns)
        self.mytime_nanos = 0    # Local send-time of the newest TIMESYNC processed (ns)
        self.local_tracker = {}  # Maps sent ts1 -> local time at send, to measure round trips
        self.delta = None        # Averaged clock delta once the buffer has filled
        self.difference = None   # Running average of (local - fc) clock difference
        self.delta_buffer = deque(maxlen=50) # Circular buffer to track timesync deltas
        self.difference_buffer = deque(maxlen=50) # Circular buffer to track difference between two systems
        self.debug = False
        self.vehicle = craft.vehicle
        # Setup time struct to hold local monotonic clock queries
        self.t = timespec()
        # Setup listener decorator
        @self.vehicle.on_message("TIMESYNC")
        def listener_timesync(subself, name, message):
            try:
                if message.ts1 < self.mytime_nanos:
                    #log.info("Newer timesync message already received, ignoring")
                    return False
                self.mytime_nanos = message.ts1 # Set the time that the message was sent
                _cts = self.current_timestamp()
                # Midpoint of send/receive approximates the moment the FC stamped tc1.
                _avg_ts = (self.local_tracker[message.ts1] + _cts)/2
                self._delta = _cts - _avg_ts # Calculate the time delta
                self.delta_buffer.append(self._delta) # Push the delta into the circular buffer
                # If the averaged delta has been reached, use that, and update the delta from the latest buffer
                if self.delta:
                    self.delta = sum(list(self.delta_buffer)) / len(list(self.delta_buffer))
                    self.fctime_nanos = message.tc1 - self.delta
                # Otherwise use the spot delta from the incoming message
                else:
                    self.fctime_nanos = message.tc1 - self._delta
                self.difference_buffer.append(self.mytime_nanos - self.fctime_nanos)
                self.difference = sum(list(self.difference_buffer)) / len(list(self.difference_buffer))
                if self.debug:
                    log.debug("Timesync message received:"+", ".join([str(message), str(self._delta), str(self.fctime_nanos)]))
            except:
                # NOTE(review): bare except also swallows KeyError from the
                # local_tracker lookup, not just unexpected timestamps.
                if self.debug:
                    log.debug("Error, unexpected timesync timestamp received:"+str(message.ts1))
    # Blocking method to call once during startup, to calculate delta
    def initial_sync(self):
        # Keep sending timesync requests until the delta buffer is full, or a timeout is reached
        start_timesync = self.current_timestamp()
        while len(list(self.delta_buffer)) < 50 and self.current_timestamp() - start_timesync < (15 * 1000000000):
            self.update()
            sleep(0.1) # Ardupilot can't cope with too many timesync messages being sent in a short time period, the results are nonsense
        # When full, average the delta buffer and then set that to use as the ongoing delta
        if len(self.delta_buffer) == 50:
            self.delta = sum(list(self.delta_buffer)) / len(list(self.delta_buffer))
        else:
            log.warning("Error creating timesync delta buffer - unpredictable results will follow")
    # Method to return monotonic clock time
    def current_timestamp(self):
        return monotonic_time()
    # Request a timesync update from the flight controller
    def update(self, ts=0, tc=0):
        if ts == 0:
            ts = self.current_timestamp()
        self.local_tracker[ts] = self.current_timestamp() # Create entry to correlate sending timestamp with actual localtime
        msg = self.vehicle.message_factory.timesync_encode(
            tc, # tc1
            ts # ts1
        )
        self.vehicle.send_mavlink(msg)
        self.vehicle.flush()
    # Note: This returns the 'actual' time of the FC with the delay compensated from the last sync event
    def actual(self):
        return self.fctime_nanos
    # Return actual + time elapsed since actual set
    def estimate(self):
        estmynanos = self.current_timestamp() - self.difference
        # NOTE(review): estfcnanos is computed but unused; estmynanos is what is returned.
        estfcnanos = self.actual() + (self.current_timestamp() - self.mytime_nanos)
        #log.debug("Estimate time: current-difference:"+str(estmynanos/1000000)+", fctime_nanos+elapsed:"+str(estfcnanos/1000000)+", difference:"+str((estmynanos - estfcnanos) / 1000000))
        return estmynanos
    def toFcTime(self, inTime):
        # Convert a local monotonic timestamp to the flight controller's clock.
        return inTime - self.difference
# TrackTargets class launches, controls and handles the separate track_targets process that does the actual vision processing.
class TrackTargets:
    def __init__(self, args, calibration):
        # Lifecycle: None -> "initialising" -> "initialised" -> "started"
        self.state = None
        self.shutdown = False
        # Setup timing containers
        self.frame_times = []
        self.start_t = time()
        # Work out starting parameters
        self.setup(args, calibration)
        # Launch tracking process and process output
        log.info("Launching track_targets with arguments:" + " ".join(self.track_arguments))
        self.launch()
    def setup(self, args, calibration):
        # Build the track_targets command line from the parsed script arguments.
        # Define the basic arguments to track_targets
        self.track_arguments = [cwd + '/track_targets']
        # If marker dictionary is set, pass it through
        if args.markerdict:
            self.track_arguments.extend(['-d', args.markerdict])
        # If output is set, pass it through
        if args.output:
            now = datetime.now()
            nowtime = now.strftime("%Y-%m-%d-%H-%M-%S")
            # Substitute the "-xxx" placeholder in the output path with a timestamp.
            args_output = sub("-xxx", "-"+str(nowtime), args.output)
            self.track_arguments.extend(['-o', args_output])
        # If marker id is set, pass it through
        if args.markerid:
            self.track_arguments.extend(['-i', args.markerid])
        # If width is set, pass it through
        if args.width:
            self.track_arguments.extend(['-w', str(args.width)])
        # If height is set, pass it through
        if args.height:
            self.track_arguments.extend(['-g', str(args.height)])
        # If fps is set, pass it through
        if args.fps:
            self.track_arguments.extend(['-f', str(args.fps)])
        # If verbose is set, pass it through
        if args.verbose:
            self.track_arguments.extend(['-v'])
        # If brightness is set, pass it through
        if args.brightness:
            self.track_arguments.extend(['-b', str(args.brightness)])
        # If fourcc codec is set, pass it through
        if args.fourcc:
            self.track_arguments.extend(['-c', args.fourcc])
        # If sizemapping is set, pass it through
        if args.sizemapping:
            self.track_arguments.extend(['-z', args.sizemapping])
        # If markerhistory is set, pass it through
        if args.markerhistory:
            self.track_arguments.extend(['--markerhistory', str(args.markerhistory)])
        # If markerthreshold is set, pass it through
        if args.markerthreshold:
            self.track_arguments.extend(['--markerthreshold', str(args.markerthreshold)])
        # Add positional arguments
        self.track_arguments.extend([args.input, calibration, str(args.markersize)])
    def launch(self):
        global cwd
        # bufsize=1 keeps the pipes line-buffered so the reader threads get whole lines promptly.
        self.process = Popen(self.track_arguments, stdin = PIPE, stdout = PIPE, stderr = PIPE, shell = False, bufsize=1, cwd = cwd)
        self.reader = ThreadedReader(self.process.stdout, self.process.stderr)
        self.state = "initialising"
    def processline(self, line):
        # Parse one colon-separated line from track_targets and act on it.
        #print("Processing line: " + line)
        # Unpack data from track_targets data
        trackdata = line.rstrip().split(":")
        # If not target data, log and skip to next data
        if trackdata[0] != "target":
            datatype = trackdata[0]
            if trackdata[0] == "error":
                log.error("track_targets: " + str(trackdata[1:]))
            elif trackdata[0] == "debug" and args.verbose:
                log.debug("track_targets: " + datatype[0].upper() + datatype[1:] + str(trackdata[1:]))
            elif trackdata[0] == "info":
                log.info("track_targets: " + str(trackdata[1:]))
                if trackdata[1] == "initcomp":
                    # Vision process has completed initialisation and may be started.
                    self.state = "initialised"
            return
        # Unpack and cast target data to correct types
        try:
            [id,x,y,z,processtime,sensorTimestamp,sendTimestamp,relMarkerOrientation,receiveTimestamp,imgYaw] = trackdata[1:]
            # NOTE(review): the last two casts appear swapped relative to the names -
            # receiveTimestamp is assigned float(imgYaw) and imgYaw is assigned
            # float(receiveTimestamp). Verify against the track_targets field order.
            [id,x,y,z,processtime,sensorTimestamp,sendTimestamp,relMarkerOrientation,receiveTimestamp,imgYaw] = [int(id),float(x),float(y),float(z),float(processtime),float(sensorTimestamp),float(sendTimestamp),float(relMarkerOrientation),float(imgYaw),float(receiveTimestamp)]
        except:
            log.debug("track_targets: error in data: " + str(line.rstrip()))
            return
        # If fake rangefinder distance is required, send it
        if args.fakerangefinder:
            craft.send_distance_message(int(z*100))
        # Send the landing_target message to arducopter
        # NOTE: This sends the current estimated fctime, NOT the time the frame was actually captured.
        # So this does not allow for the CV processing time. This should be sending when the frame is actually captured, before processing.
        nowTime = monotonic_time()
        # If data is less than 250ms since the image shutter, send the message.
        if nowTime - sensorTimestamp < 250 * 1e+6:
            if useSmartLanding:
                # --- IX ---
                # PROBLEM: The marker coords are in forward-right coordinate frame (relative to the drone's yaw at the time of the image).
                # If the drone rotates, the direction to the marker also rotates and is not valid anymore.
                # SOLUTION: We will express directions in a drone's yaw invariant coordinate frame (local NED).
                # We don't use a global coordinate frame, because the drone won't always know its global location and orientation.
                # Local NED is not global, but relative to the initial takeoff coordinates.
                if imgYaw == 1000:
                    # track_targets is not providing the yaw with the image => use current yaw
                    yawRad = craft.vehicle.attitude.yaw
                    craft.stats.yaw = math.degrees(yawRad)
                else:
                    yawRad = math.radians(imgYaw)
                    craft.stats.yaw = imgYaw
                # relMarkerOrientation: orientation of the marker (0° = top side of camera = "drone forward"; -90° = top side of marker is on the right side of the screen = "drone right")
                # eg. if relMarkerOrientation = 0 and imgYaw 90° (drone is heading east) => markerOrientation is -90° (west).
                # Camera frame -> body frame: forward = -y, right = x.
                forward,right = -y,x
                north,east = rotate(forward,right,yawRad)
                craft.processState(nowTime, True, sensorTimestamp, north, east, relMarkerOrientation, z)
                if sim is not None:
                    sim.obsDroneX = sim.obsDroneX + craft.outV
            else:
                craft.send_land_message(x, y, z, tracktime.toFcTime(sensorTimestamp) / 1000, id) # Send sensor timestamp adjusted to timesync, in micros
        else:
            print("Target timed out since: " + str((nowTime - sensorTimestamp) / 1e+9))
        # Track frame timing to calculate fps, running average over last 10 frames
        end_t = time()
        time_taken = end_t - self.start_t
        self.start_t = end_t
        self.frame_times.append(time_taken)
        self.frame_times = self.frame_times[-10:]
        fps = len(self.frame_times) / sum(self.frame_times)
        log.info("Fctime:{:.0f}:{:.0f}, Fps:{:.2f}, Alt:{}, Rng:{}, Marker:{}, Distance:{}, xOffset:{}, yOffset:{}, processTime:{:.0f}, sensorTime:{:.0f}, sendTime:{:.0f}, receiveTime:{:.0f}, currentTime:{:.0f}, sensorDiff:{}, sendDiff:{}, receiveDiff:{}".format(tracktime.actual(), tracktime.estimate(), fps, craft.vehicle.location.global_relative_frame.alt, craft.vehicle.rangefinder.distance, id, z, x, y, processtime, sensorTimestamp, sendTimestamp, receiveTimestamp, nowTime, nowTime - sensorTimestamp, nowTime - sendTimestamp, nowTime - receiveTimestamp))
    def start(self):
        # Ask track_targets (via SIGUSR1) to begin detecting and reporting targets.
        if self.state == "initialised":
            log.info("Requesting track_targets to start tracking:"+str(craft.vehicle.mode))
            self.process.send_signal(signal.SIGUSR1)
            self.state = "started"
        else:
            log.info("Request to start track_targets tracking failed, state is not 'initialised'")
    def stop(self):
        # Ask track_targets (via SIGUSR2) to pause tracking; remains initialised.
        log.info("Requesting track_targets to stop tracking:"+str(craft.vehicle.mode))
        self.state = "initialised"
        self.process.send_signal(signal.SIGUSR2)
    def end(self):
        log.info("Shutting down track_targets")
        # Stop threaded reader
        self.reader.stop()
        # track_targets should always be stopped by now, but be sure
        self.process.poll()
        if self.process.returncode == None:
            self.process.terminate()
        self.shutdown = True
    def waitImage(self):
        # Block until the vision process reports readiness ("initcomp"),
        # draining and logging any startup noise on the way.
        printNoLF("Waiting for video streamer...")
        # Clear the vision tracking queue, we don't want to blast mavlink messages for everything stacked up
        while True:
            if self.process.poll() is not None:
                print("\nERROR: track_targets is not running")
                exit(1)
            line = self.reader.readline()
            if not line:
                sleep(0.1)
            elif not search("^target:", line):
                if search("^error:", line):
                    log.error("track_targets: "+sub("^error:","",line))
                # Make sure to catch and process initcomp if it's in this initial queue
                elif search("initcomp", line):
                    self.processline(line)
                    print("ok")
                    return
                else:
                    log.info("track_targets: "+line)
# The Craft class connects to the flight controller and sends controlling messages
class Craft:
def __init__(self, connectstr):
    """Connect to the flight controller and cache the precision-loiter RC option."""
    # Reset state before attempting the connection.
    self.debug = False
    self.mode = None
    self.vehicle = None
    self.connected = None
    self.connect(connectstr)
    # Look up which RC channel (if any) toggles precision loiter.
    self.precloiter_opt = self.find_precloiter_opt()
    if self.precloiter_opt:
        log.info("Precision loiter switch found: Channel " + str(self.precloiter_opt))
def startActions(self):
    """Initialise the action/state machine.

    self.actions maps each action name to the ordered list of states it
    runs through. _nextAction()/nextState() walk this structure: states
    advance within an action, and exhausting an action's states moves on
    to the next action.
    """
    # "measure" repeats the yaw-rate test seven times in a row.
    self.actions = {
        "land": [
            "start"
        ],
        "measure": ["yaw-rate-start", "yaw-rate-run"] * 7,
    }
    # Disabled measurement states, kept here for reference:
    #   "measure": ["stability-start", "stability",
    #               "visual-lag-start", "visual-lag", "velocities"]
    self.visualLag = None  # Measured visual lag
    self.count = 0
    np.set_printoptions(suppress=True)
    self.actionIterator = iter(self.actions)
    self._nextAction()
    # Init vars
    self.stats.reset()
def processState(self, nowTime, targetVisible, sensorTimestamp = None, north = None, east = None, markerOrientation = None, z = None):
    """Feed one vision sample into the stats tracker and dispatch it to the active action.

    nowTime/sensorTimestamp are monotonic nanoseconds; north/east are metres in
    the local yaw-invariant frame; z is the camera-reported distance.
    """
    # BUGFIX: check self rather than the module-level `craft` singleton, so the
    # method works on any instance (and before the global has been assigned).
    if not self.connected:
        return
    self.lastDetection = nowTime
    self.stats.compute(nowTime, targetVisible, north, east)
    if self.action == "measure":
        if targetVisible:
            self.measure(nowTime, north, east, markerOrientation)
    elif self.action == "land":
        self.land(nowTime, targetVisible, sensorTimestamp, north, east, markerOrientation, z)
def waitAtRest(self):
    """Arm the at-rest detector: measure() will hold until the drone stops moving."""
    print("Waiting until at rest...")
    self.stillCounter = 0
    self.waitUntilAtRest = True
def waitTransient(self):
    """Placeholder: should wait until the motion transient has settled."""
    # TODO: Not implemented.
    print("Waiting until transient is over")
def _nextAction(self):
    """Advance to the next action, or exit cleanly when all actions are done."""
    self.action = next(self.actionIterator, None)
    if self.action is None:
        print("Ready")
        exit(0)
    else:
        # BUGFIX: parenthesized print works under both Python 2 and 3;
        # the original bare `print "..."` statement is Python-2-only syntax.
        print("Action: " + self.action)
        self.stateIterator = iter(self.actions[self.action])
        self.nextState()
def nextState(self):
    """Advance to the next state of the current action; roll over to the next action at the end."""
    self.state = next(self.stateIterator, None)
    if self.state is None:
        self._nextAction()
    else:
        # BUGFIX: parenthesized print works under both Python 2 and 3;
        # the original bare `print "..."` statement is Python-2-only syntax.
        print("\n--- State: " + self.state)
# --- Actions ---
# Here we implement a sequence of tests to measure the drone dynamics.
def measure(self, nowTime, north, east, markerOrientation):
    # State machine run once per vision frame while action == "measure".
    # NOTE(review): several branches below reference bare `x` and `v`, which
    # are unbound in this scope (they look like they should be stats.x and
    # stats.v). The affected states ("visual-lag*", "velocity*", "stability")
    # are currently disabled in startActions(), so the active yaw-rate states
    # never reach them - but they will raise NameError if re-enabled. Verify.
    stats = self.stats
    if self.waitUntilAtRest:
        # Require 10 consecutive near-still samples before declaring at-rest.
        if v < 0.05:
            self.stillCounter += 1
            if self.stillCounter >= 10:
                print("Drone is at rest")
                self.waitUntilAtRest = False
        else:
            self.stillCounter = 0
    elif self.state == "visual-lag-start":
        # Perform a sudden movement that can be easily detected to measure the visual latency.
        # TODO: Detect yaw rate?
        # TODO: Measure accumulated distances instead of velocities.
        print("\nMeasuring visual lag...")
        self.motionDirection = x / np.linalg.norm(x)
        self.startX = x
        self.testStartTime = nowTime
        self.testEndTime = nowTime + 5 * 1e+9
        self.maxVel = 0
        self.maxVelTime = None
        self.state = "visual-lag"
        # Sudden movement easy to detect
        destVel = self.motionDirection * 0.2
        print("\nKicking")
        self.move(destVel[1], destVel[0], 0, 0)
    elif self.state == "visual-lag":
        projVel = -np.dot(stats.dx, self.motionDirection) # Velocity in projected direction
        if projVel > self.maxVel:
            self.maxVel = projVel
            self.maxVelTime = nowTime
        if nowTime >= self.testEndTime:
            if self.maxVelTime is None:
                print("ERROR: Test failed => repeat")
                self.state = ""
            else:
                # Visual lag = time from the kick until peak velocity was observed.
                self.visualLag = self.maxVelTime - self.testStartTime
                drone.setCameraDelay(self.visualLag / 1e+9)
                dist = np.linalg.norm(x - self.startX)
                print("Visual lag: " + str(self.visualLag / 1e+9) + ", dist: " + str(dist))
                self.count += 1
                if self.count >= 10:
                    self.nextState()
                else:
                    # Repeat
                    self.state = "visual-lag-start"
    elif self.state == "velocities":
        self.count = 0
        self.state = "velocity-start"
    elif self.state == "velocity-start":
        # Each run commands a slightly larger velocity (0.05 m/s increments).
        self.vScalar = self.count * 0.05
        print("\nVelocity test: " + str(self.vScalar) + " [m/s]")
        if self.vScalar > 0.5:
            print("DANGER: Velocity to high?")
            exit(1)
        self.destVel = x / np.linalg.norm(x) * self.vScalar
        self.startX = x
        self.testStartTime = nowTime
        self.state = "velocity-rest"
        self.waitAtRest()
        print("TODO: Remove transient")
    elif self.state == "velocity-rest":
        self.state = "velocity-run"
        self.testEndTime = nowTime + 2 * 1e+9
        # TODO: Do waitTransient()
    elif self.state == "velocity-run":
        self.move(self.destVel[1], self.destVel[0], 0, 0)
        if nowTime >= self.testEndTime:
            dist = np.linalg.norm(x - self.startX)
            sec = (nowTime - self.testStartTime) / 1e+9
            avgVel = dist / sec
            print("Avg. velocity: " + str(avgVel))
            if self.vScalar:
                # NOTE(review): `v` unbound here - possibly avgVel or stats.v was intended.
                self.velFactor = v / self.vScalar
                # NOTE(review): under Python 2 this print renders a tuple.
                print("Velocity factor: ", self.velFactor)
            if self.count >= 4:
                self.nextState()
            else:
                self.count += 1
                self.state = "velocity-start"
    elif self.state == "yaw-rate-start":
        # Record the starting yaw, then run the yaw-rate command for 2 seconds.
        self.startYaw = math.degrees(self.vehicle.attitude.yaw)
        self.testStartTime = nowTime
        self.testEndTime = nowTime + 2 * 1e+9
        self.nextState()
    elif self.state == "yaw-rate-run":
        currentYaw = math.degrees(self.vehicle.attitude.yaw)
        deltaYaw = currentYaw - self.startYaw
        # print("deltaYaw: " + str(deltaYaw) + ";" + str(markerOrientation))
        # Command a constant yaw rate of 5 and log the observed yaw response.
        self.move(0, 0, 0, 5)
        print("yaw: " + str(currentYaw) + ";" + str(markerOrientation))
        if nowTime >= self.testEndTime:
            self.nextState()
    elif self.state == "stability-start":
        print("Measuring stability...")
        self.positions = []
        self.testStartTime = nowTime
        self.testEndTime = nowTime + 5 * 1e+9
        self.nextState()
    elif self.state == "stability":
        self.positions.append(x)
        if nowTime >= self.testEndTime:
            arr = np.array(self.positions)
            center = np.mean(self.positions, axis=0)
            #std_dev = np.std(self.positions)
            minX = np.amin(arr[:, 0])
            maxX = np.amax(arr[:, 0])
            minY = np.amin(arr[:, 1])
            maxY = np.amax(arr[:, 1])
            # Bounding-box size of the observed positions = stability error.
            self.stability = {
                'errX': maxX - minX,
                'errY': maxY - minY
            }
            print(self.stability)
            self.nextState()
def land(self, nowTime, targetVisible, sensorTimestamp, north, east, markerOrientation, z):
    # Landing controller, run once per vision frame while action == "land".
    # Centers above the marker, descends in stages, and triggers the final
    # autopilot landing once position and yaw are stable ("locked").
    MAX_SQR_DIST = 0.2 **2  # Squared horizontal distance [m^2] within which descending is allowed
    LOCKED_ALT_CENTER = 1 # Will only continue descending if we are centered
    LOCKED_ALT_STAY = 0.7 # Will stay here
    testFactor = 2 # TODO: TEMP:
    LOCKED_SQR_DIST = 0.2 **2  # Squared distance threshold for the final lockdown
    LOCKED_MAX_VEL = 0.1 * testFactor
    LOCKED_MAX_YAW_RATE = 0.05 * testFactor
    # TODO: alt and z differe too much! Calibrate
    alt = self.vehicle.location.global_relative_frame.alt
    if not self.gimbalIsDown:
        print("Moving gimbal down...")
        self.setServo(9, -90)
        self.gimbalIsDown = True
    if not targetVisible:
        # Lost the marker: move back toward a position where it can be re-acquired.
        craft.goBack()
        return
    if self.state == "start":
        self.state = "descend"
        self.lockdownCounter = 0
        self.setServo(9, -90)
    elif self.state == "descend":
        nortVel = 0
        eastVel = 0
        downVel = 0
        yawRate = 0
        currentYaw = math.degrees(self.vehicle.attitude.yaw)
        if False: # TODO: Use SmartLanding
            dv = drone.process(nowTime / 1e+9, sensorTimestamp / 1e+9, np.array([east, north]))
            self.outV = self.outV + dv
            nortVel = self.outV[1]
            eastVel = self.outV[0]
            #print("dv: " + str(dv) + ", v: " + str(self.outV))
        else:
            # Simple proportional controller on position and marker orientation.
            # TODO: CHECK: Should be equivalent, but has some error
            # TODO: Use getRotationDirection() ?
            #dstYaw = currentYaw - 2 * pi + (-markerOrientation)
            dstYaw = currentYaw - markerOrientation
            yawRate = (dstYaw - currentYaw) * 0.5
            factor = 0.15
            nortVel = north * factor
            eastVel = east * factor
        # Down motion
        dist = east **2 + north **2  # Squared horizontal distance to the marker
        # TODO: Also use z?
        # TODO: Smooth with pid or other function
        if dist < MAX_SQR_DIST:
            if alt < LOCKED_ALT_CENTER:
                downVel = 0 # Stay here unless dist < LOCKED_SQR_DIST
            elif alt < 2:
                downVel = 0.15
            else:
                downVel = alt / 5
        # Final landing
        locked = False
        if dist < LOCKED_SQR_DIST:
            if alt < LOCKED_ALT_STAY:
                downVel = 0
                # "Locked" = horizontally centered, low, slow and with a stable yaw.
                if self.stats.v < LOCKED_MAX_VEL and self.stats.yawRate < LOCKED_MAX_YAW_RATE:
                    locked = True
                    self.lockdownCounter += 1
                    # 20 consecutive locked frames -> commit to the final landing.
                    if self.lockdownCounter >= 20:
                        print("Drone stabilized. Proceed with final landing.")
                        self.send_nav_land_message()
                        self.gimbalIsDown = False # If landing is interrupted, the gimbal needs to rotate down again
                        sleep(10) # HACK: Avoid goBack() while landing
                        if self.vehicle.armed:
                            print("ERROR: Landing failed (drone is still armed)..retrying")
                        else:
                            global doExit
                            doExit = True
                            print("Vehicle disarmed")
            if not locked and self.lockdownCounter > 0:
                print("Drone moved: v: " + str(self.stats.v) + ", yawRate: " + str(self.stats.yawRate) + "\nLockdown counter reset")
        elif alt < LOCKED_ALT_CENTER:
            downVel = 0.05
        else:
            print("Drone not centered. distance: " + str(dist))
        if not locked:
            self.lockdownCounter = 0
        if nortVel != 0 or eastVel != 0 or downVel != 0 or yawRate != 0:
            print("alt: " + str(alt) + ", z: " + str(z) + ", D: " + str(downVel) + ", E: " + str(east) + ", N: " + str(north))
        # Note: move() takes down-velocity negated (positive vz = down in NED).
        self.move(nortVel, eastVel, -downVel, yawRate)
def connect(self, connectstr):
    """Connect to the flight controller via dronekit and set self.connected.

    Waits only for the attributes this script needs; a full wait_ready
    can stall (see the commented alternatives).
    """
    try:
        # self.vehicle = connect(connectstr, wait_ready=True, rate=1)
        # self.vehicle = connect(connectstr, wait_ready=True)
        # self.vehicle = connect(connectstr, wait_ready=['system_status','mode'], baud=921600)
        self.vehicle = connect(connectstr, wait_ready=['system_status','mode'])
        self.connected = True
    except Exception as e:
        # BUGFIX: `except Exception, e` is Python-2-only syntax; the `as` form
        # works on Python 2.6+ and Python 3.
        log.critical("Error connecting to vehicle:"+str(repr(e)))
        self.connected = False
# Define function to send distance_message mavlink message for mavlink based rangefinder, must be >10hz
# http://mavlink.org/messages/common#DISTANCE_SENSOR
def send_distance_message(self, dist):
    # Send a DISTANCE_SENSOR message reporting `dist` (integer centimetres)
    # as a downward-facing rangefinder reading.
    msg = self.vehicle.message_factory.distance_sensor_encode(
        0, # time since system boot, not used
        1, # min distance cm
        10000, # max distance cm
        dist, # current distance, must be int
        0, # type = laser?
        0, # onboard id, not used
        mavutil.mavlink.MAV_SENSOR_ROTATION_PITCH_270, # must be set to MAV_SENSOR_ROTATION_PITCH_270 for mavlink rangefinder, represents downward facing
        0 # covariance, not used
    )
    self.vehicle.send_mavlink(msg)
    self.vehicle.flush()
    if args.verbose:
        log.debug("Sending mavlink distance_message:" +str(dist))
# Return to initial altitude
# TODO: Before that, try to return to last position where we saw the marker. If that fails, return to initial altitude and position.
def goBack(self):
    """Recover after losing the marker.

    Descends when above the maximum landing altitude, climbs back toward
    the initial altitude when below it, otherwise just reports the loss.
    """
    alt = self.vehicle.location.global_relative_frame.alt
    if alt > self.maxLandingAltitude:
        print("Too high, going down")
        self.move(0, 0, -0.8, 0)
    elif self.initialAltitude < self.maxLandingAltitude and alt < self.initialAltitude:
        print("Lost marker, going up")
        self.move(0, 0, 0.2, 0)
    else:
        # TODO: Loiter around to find marker
        print("Lost marker")
def setServo(self, servo, val):
    # Drive an autopilot-controlled servo output via MAV_CMD_DO_SET_SERVO.
    # `val` looks like degrees mapped around the 1500us RC midpoint
    # (-90 -> 1005us, +90 -> 1995us) - TODO confirm the intended scaling.
    msg = self.vehicle.message_factory.command_long_encode(
        0, # target system (0 = broadcast/autopilot)
        0, # target component
        mavutil.mavlink.MAV_CMD_DO_SET_SERVO,
        0, # confirmation
        servo, # param1: servo/RC output number
        1500+(val*5.5), # param2: PWM value in microseconds
        0, 0, 0, 0, 0) # params 3-7 (not used)
    self.vehicle.send_mavlink(msg)
    self.vehicle.flush()
def move(self, vx, vy, vz, yawRate, time_usec=0):
    # Command a velocity + yaw-rate setpoint via SET_POSITION_TARGET_LOCAL_NED.
    # vx = north, vy = east, vz = down (MAV_FRAME_LOCAL_NED) - presumably m/s
    # and rad/s as per the MAVLink message definition; verify yawRate units
    # against callers (measure() passes 5).
    if args.verbose:
        log.debug("Sending mavlink set_position_target_local_ned - time_usec:{:.0f}, dx:{}, dy:{}".format(time_usec, str(vx), str(vy)))
    msg = self.vehicle.message_factory.set_position_target_local_ned_encode(
        0, # TODO: Crashes with time_usec
        0, # SysId
        0, # CompId
        mavutil.mavlink.MAV_FRAME_LOCAL_NED, # frame
        0b111, # mask (ignore x,y,z)
        0, # x
        0, # y
        0, # z
        vx, # vx
        vy, # vy
        vz, # vz
        0, # afx
        0, # afy
        0, # afz
        0, # yaw
        yawRate, # yaw rate
    )
    self.vehicle.send_mavlink(msg)
    self.vehicle.flush()
def send_nav_land_message(self):
msg = self.vehicle.message_factory.command_long_encode(
1, # system_id
1, # component_id,
mavutil.mavlink.MAV_CMD_NAV_LAND, # command ID
0, # confirmation
0, 0, 0, 0, # parameters 1-4 (not used)
0, # parameter 5: target altitude
0, 0 # parameters 6-8 (not used)
)
"""
param1=10, # descend time
param2=landing_target_msg.x,
param3=landing_target_msg.y,
param4=0, # desired yaw angle
param5=1, # target ID