As the camera I used the Maygion (already seen here, a rather old 0.3 MPx model), and the object to track was the box on the left.
OpenCV grabs the video stream from the IP camera, each frame is scanned for green pixels and their centroid is computed; the centroid's offset from the centre of the image is then calculated and the camera is driven to bring the centroid as close as possible to the centre of the frame.
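In a nutshell, the tracking step amounts to thresholding the frame for green, taking the centroid of the resulting mask from its image moments, and measuring how far that centroid is from the frame centre. Here is a minimal sketch of that idea (the `green_offset` helper name is my own; the HSV thresholds are the ones used in the full script further down):
---------------------------------------------
import cv2
import numpy as np

def green_offset(frame):
    """Return the (dx, dy) offset of the green blob's centroid from the
    frame centre, or None if no green area is found."""
    hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
    mask = cv2.inRange(hsv, np.array([40, 70, 70]), np.array([80, 200, 200]))
    m = cv2.moments(mask)
    if m['m00'] == 0:
        return None
    cx = int(m['m10'] / m['m00'])     # centroid x = M10 / M00
    cy = int(m['m01'] / m['m00'])     # centroid y = M01 / M00
    h, w = frame.shape[:2]
    return cx - w // 2, cy - h // 2   # positive dx: blob is right of centre
---------------------------------------------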
This is the video of the test, and below is the code used to grab and display the camera stream
---------------------------------------------
import numpy as np
import cv2

# Open the MJPEG stream of the IP camera
vcap = cv2.VideoCapture("http://192.168.43.207:81/videostream.cgi?stream=0&usr=admin&pwd=admin")

if vcap.isOpened():
    rval, frame = vcap.read()
    print("acquired")
else:
    rval = False
    print("not acquired")

while rval:
    cv2.imshow("preview", frame)
    rval, frame = vcap.read()
    key = cv2.waitKey(20)
    if key == 27:  # ESC quits
        break

vcap.release()
cv2.destroyWindow("preview")
---------------------------------------------
This, instead, is the code that drives the camera movement (in practice, commands are sent as HTTP query strings)
---------------------------------------------
import time
try:
    from urllib.request import urlopen   # Python 3
except ImportError:
    from urllib2 import urlopen          # Python 2

ur = "http://192.168.43.207:81/"
direzione = "up"

# Start moving in the chosen direction, then stop after a short delay
html = urlopen(ur + "moveptz.xml?dir=" + direzione + "&user=admin&password=admin")
time.sleep(0.1)
html = urlopen(ur + "moveptz.xml?dir=stop&user=admin&password=admin")
---------------------------------------------
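Since the same move-then-stop pattern is repeated for every direction, it can be convenient to wrap it in a small helper (a minimal sketch; the `move_camera` name and the default delay are my own choices, not part of the camera's documented API):
---------------------------------------------
import time
try:
    from urllib.request import urlopen   # Python 3
except ImportError:
    from urllib2 import urlopen          # Python 2

BASE_URL = "http://192.168.43.207:81/"   # same camera address as above

def move_camera(direction, duration=0.1, user="admin", password="admin"):
    """Nudge the PTZ camera in one direction for `duration` seconds, then stop."""
    query = "moveptz.xml?dir={}&user={}&password={}"
    urlopen(BASE_URL + query.format(direction, user, password))
    time.sleep(duration)
    urlopen(BASE_URL + query.format("stop", user, password))

# Example: nudge up, then right for a bit longer
move_camera("up")
move_camera("right", duration=0.2)
---------------------------------------------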
To track the movement of the green shape I reused, with small modifications, this example by Conan Zhao and Simon D. Levy
---------------------------------------------
import cv2
import numpy as np
import time
try:
    from urllib.request import urlopen   # Python 3
except ImportError:
    from urllib2 import urlopen          # Python 2

ur = "http://192.168.43.207:81/"

# For OpenCV2 image display
WINDOW_NAME = 'GreenBallTracker'

def track(image):

    # Blur the image to reduce noise
    blur = cv2.GaussianBlur(image, (5, 5), 0)

    # Convert BGR to HSV
    hsv = cv2.cvtColor(blur, cv2.COLOR_BGR2HSV)

    # Threshold the HSV image to get only green colors
    lower_green = np.array([40, 70, 70])
    upper_green = np.array([80, 200, 200])
    mask = cv2.inRange(hsv, lower_green, upper_green)

    # Blur the mask
    bmask = cv2.GaussianBlur(mask, (5, 5), 0)

    # Take the moments to get the centroid
    moments = cv2.moments(bmask)
    m00 = moments['m00']
    centroid_x, centroid_y = None, None
    if m00 != 0:
        centroid_x = int(moments['m10'] / m00)
        centroid_y = int(moments['m01'] / m00)

    # Assume no centroid
    ctr = (-1, -1)

    # Use centroid if it exists
    if centroid_x is not None and centroid_y is not None:
        ctr = (centroid_x, centroid_y)

        # Draw a blue circle at the centroid in the image
        cv2.circle(image, ctr, 4, (255, 0, 0))

        # Offset of the centroid from the image centre (640x480 frame)
        x_schermo = centroid_x - 320
        y_schermo = centroid_y - 240
        print("X=" + str(x_schermo) + " ; Y=" + str(y_schermo))

        # Nudge the camera towards the centroid (pan only)
        tempo = 0.02
        if x_schermo > 0:
            html = urlopen(ur + "moveptz.xml?dir=left&user=admin&password=admin")
            time.sleep(tempo)
            html = urlopen(ur + "moveptz.xml?dir=stop&user=admin&password=admin")
        else:
            html = urlopen(ur + "moveptz.xml?dir=right&user=admin&password=admin")
            time.sleep(tempo)
            html = urlopen(ur + "moveptz.xml?dir=stop&user=admin&password=admin")

    # Display full-color image
    cv2.imshow(WINDOW_NAME, image)

    # Force image display, setting centroid to None on ESC key input
    if cv2.waitKey(1) & 0xFF == 27:
        ctr = None

    # Return coordinates of centroid
    return ctr

# Test with input from camera
if __name__ == '__main__':

    capture = cv2.VideoCapture(ur + "videostream.cgi?stream=0&usr=admin&pwd=admin")

    while True:
        okay, image = capture.read()
        if okay:
            if not track(image):
                break
            if cv2.waitKey(1) & 0xFF == 27:
                break
        else:
            print('Capture failed')
            break
---------------------------------------------