diff --git a/.gitignore b/.gitignore
index a955754..687d221 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,4 +14,6 @@ packages
.vs/
*.csproj.user
*.suo
-*.pdb
\ No newline at end of file
+*.pdb
+
+*.o
diff --git a/BaseStation/EntryPoint/EntryPoint.csproj b/BaseStation/EntryPoint/EntryPoint.csproj
index 74c4fc3..59dfb0f 100644
--- a/BaseStation/EntryPoint/EntryPoint.csproj
+++ b/BaseStation/EntryPoint/EntryPoint.csproj
@@ -89,6 +89,9 @@
..\packages\HuskyRobotics.Scarlet.0.5.1\lib\BBBCSIO.dll
+
+ ..\packages\SharpDX.DirectInput.4.2.0\lib\net45\SharpDX.DirectInput.dll
+
..\packages\HuskyRobotics.Scarlet.0.5.1\lib\System.Half.dll
@@ -117,12 +120,15 @@
-
+
+
-
-
-
-
-
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/BaseStation/EntryPoint/packages.config b/BaseStation/EntryPoint/packages.config
index ffadfc3..e8f578a 100644
--- a/BaseStation/EntryPoint/packages.config
+++ b/BaseStation/EntryPoint/packages.config
@@ -2,6 +2,7 @@
+
\ No newline at end of file
diff --git a/BaseStation/UserInterface/Elements/MainWindow.xaml.cs b/BaseStation/UserInterface/Elements/MainWindow.xaml.cs
index 9aca23f..e983007 100644
--- a/BaseStation/UserInterface/Elements/MainWindow.xaml.cs
+++ b/BaseStation/UserInterface/Elements/MainWindow.xaml.cs
@@ -131,14 +131,14 @@ private void LaunchStream(object sender, RoutedEventArgs e)
private void ThreadStartingPoint(int Port, string Name, int BufferingMs)
{
VideoWindow tempWindow;
- if (Name != "192.168.0.42" && Name != "127.0.0.1")
- {
- tempWindow = new RTPVideoWindow(Port, Name, Settings.RecordingPath, BufferingMs);
- }
- else
- {
- tempWindow = new RTSPVideoWindow(Port, Name, Settings.RecordingPath, BufferingMs);
- }
+ //if (Name != "192.168.0.42" && Name != "127.0.0.1")
+ //{
+ // tempWindow = new RTPVideoWindow(Port, Name, Settings.RecordingPath, BufferingMs);
+ //}
+ //else
+ //{
+ tempWindow = new RTSPVideoWindow(Port, Name, Settings.RecordingPath, BufferingMs);
+ //}
VideoWindows.Add(tempWindow);
diff --git a/BaseStation/UserInterface/VideoStreamer/RTSPVideoWindow.xaml.cs b/BaseStation/UserInterface/VideoStreamer/RTSPVideoWindow.xaml.cs
index 7d8518c..b9ac900 100644
--- a/BaseStation/UserInterface/VideoStreamer/RTSPVideoWindow.xaml.cs
+++ b/BaseStation/UserInterface/VideoStreamer/RTSPVideoWindow.xaml.cs
@@ -3,19 +3,15 @@
using System;
using System.Collections.Generic;
using System.ComponentModel;
+using System.Diagnostics;
using System.Linq;
+using System.Net.NetworkInformation;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using System.Windows;
-using System.Windows.Controls;
-using System.Windows.Data;
-using System.Windows.Documents;
-using System.Windows.Input;
using System.Windows.Interop;
-using System.Windows.Media;
-using System.Windows.Media.Imaging;
-using System.Windows.Shapes;
+using System.Windows.Threading;
namespace HuskyRobotics.UI.VideoStreamer
{
@@ -26,12 +22,13 @@ namespace HuskyRobotics.UI.VideoStreamer
// gst-launch-1.0 rtspsrc -v location = rtsp://192.168.0.42 user-id=admin user-pw=1234 latency=0 ! rtpjpegdepay ! jpegdec ! d3dvideosink
public partial class RTSPVideoWindow : Window, VideoWindow
{
- private Pipeline Pipeline;
+ public PipelineElements runningPipeline;
+ public PipelineElements pendingPipeline;
- private Bin RTSP = (Bin) ElementFactory.Make("rtspsrc");
- private Element Depay = ElementFactory.Make("rtpjpegdepay");
- private Element Dec = ElementFactory.Make("jpegdec");
- private Element VideoSink = ElementFactory.Make("d3dvideosink");
+ NetworkInterface networkInterface;
+
+ double bytesRecvSpeed = 0;
+ long lastBytesRecv;
private int Port;
private int BufferSizeMs;
@@ -48,6 +45,15 @@ public string StreamName
}
}
+ public struct PipelineElements
+ {
+ public Pipeline pipeline;
+ public VideoOverlayAdapter overlay;
+ public Bin RTSP;
+ public Element Depay;
+ public Element VideoSink;
+ }
+
public RTSPVideoWindow(int Port, string StreamName, string RecordingPath, int BufferSizeMs = 200)
{
DataContext = this;
@@ -56,38 +62,64 @@ public RTSPVideoWindow(int Port, string StreamName, string RecordingPath, int Bu
this.BufferSizeMs = BufferSizeMs;
this.RecordingPath = RecordingPath;
- Pipeline = new Pipeline();
+ foreach (NetworkInterface currentNetworkInterface in NetworkInterface.GetAllNetworkInterfaces())
+ {
+ Console.WriteLine(currentNetworkInterface.Description);
+ if (currentNetworkInterface.Description == "Realtek USB GbE Family Controller") {
+ networkInterface = currentNetworkInterface;
+ }
+ }
+
+ IPv4InterfaceStatistics interfaceStatistic = networkInterface.GetIPv4Statistics();
+ lastBytesRecv = interfaceStatistic.BytesReceived;
+ DispatcherTimer dispatcherTimer = new DispatcherTimer();
+ dispatcherTimer.Tick += new EventHandler(GetNetworkInBackground);
+ dispatcherTimer.Interval = new TimeSpan(0, 0, 1);
+ dispatcherTimer.Start();
+
+ PipelineElements pipeline = CreatePipeline();
+ // Clean up running pipeline
+ runningPipeline = pipeline;
+ pendingPipeline = new PipelineElements();
+
+ Closing += OnCloseEvent;
+ InitializeComponent();
+ }
+
+ private PipelineElements CreatePipeline()
+ {
+ PipelineElements pipe = new PipelineElements();
+ pendingPipeline = pipe;
+ pipe.pipeline = new Pipeline();
+ pipe.RTSP = (Bin)ElementFactory.Make("rtspsrc");
+ pipe.Depay = ElementFactory.Make("rtpvrawdepay");
+ pipe.VideoSink = ElementFactory.Make("d3dvideosink");
Window window = GetWindow(this);
WindowInteropHelper wih = new WindowInteropHelper(this);
wih.EnsureHandle(); // Generate Window Handle if current HWND is NULL (always occurs because background window)
- VideoOverlayAdapter overlay = new VideoOverlayAdapter(VideoSink.Handle);
- overlay.WindowHandle = wih.Handle;
-
- Pipeline["message-forward"] = true;
- RTSP["location"] = "rtsp://" + StreamName;
- RTSP["user-id"] = "admin";
- RTSP["user-pw"] = "1234";
- RTSP["latency"] = BufferSizeMs;
-
+ pipe.overlay = new VideoOverlayAdapter(pipe.VideoSink.Handle);
+ pipe.overlay.WindowHandle = wih.Handle;
- Pipeline.Add(RTSP, Depay, Dec, VideoSink);
- RTSP.Link(Depay);
- RTSP.PadAdded += RTSPPadAdded;
- Depay.Link(Dec);
- Dec.Link(VideoSink);
+ pipe.pipeline["message-forward"] = true;
+ pipe.RTSP["location"] = "rtsp://" + StreamName;
+ pipe.RTSP["latency"] = BufferSizeMs;
+ pipe.pipeline.Add(pipe.RTSP, pipe.Depay, pipe.VideoSink);
+ pipe.RTSP.PadAdded += RTSPPadAdded;
+ pipe.RTSP.Link(pipe.Depay);
+ pipe.Depay.Link(pipe.VideoSink);
- Pipeline.SetState(State.Null);
+ pipe.pipeline.SetState(State.Null);
- Closing += OnCloseEvent;
- InitializeComponent();
+ return pipe;
}
+ // Called after pipeline state is set to playing
private void RTSPPadAdded(object o, PadAddedArgs args)
{
- Pad Sink = Depay.GetStaticPad("sink");
+ Pad Sink = runningPipeline.Depay.GetStaticPad("sink");
args.NewPad.Link(Sink);
}
@@ -96,7 +128,7 @@ private void RTSPPadAdded(object o, PadAddedArgs args)
///
public void StartStream()
{
- StateChangeReturn s = Pipeline.SetState(State.Playing);
+ StateChangeReturn s = runningPipeline.pipeline.SetState(State.Playing);
}
private string GetRecordingFilename()
@@ -110,12 +142,21 @@ private void OnCloseEvent(object sender, CancelEventArgs e)
// We have to wait for EOS to propogate through the pipeline
// Unclear how dependent delay is on machine's speed or other process usage
Console.WriteLine("Shutting down video");
- Pipeline.SendEvent(Event.NewEos());
+ runningPipeline.pipeline.SendEvent(Event.NewEos());
Thread.Sleep(1000);
// Cleanup the unmanaged class objects
- Pipeline.SetState(State.Null);
- Pipeline.Unref();
+ runningPipeline.pipeline.SetState(State.Null);
+ runningPipeline.pipeline.Unref();
+ }
+
+ private void GetNetworkInBackground(object sender, EventArgs e)
+ {
+ IPv4InterfaceStatistics interfaceStatistic = networkInterface.GetIPv4Statistics();
+ long bytesRecv = interfaceStatistic.BytesReceived;
+ bytesRecvSpeed = (bytesRecv - lastBytesRecv) * 8 / Math.Pow(1024, 2);
+ Console.WriteLine("Receiving data at: " + string.Format("{0:0.00}", bytesRecvSpeed) + " Mbps" + " over " + networkInterface.Description);
+ lastBytesRecv = bytesRecv;
}
}
}
diff --git a/Rover/GStreamer/.gitignore b/Rover/GStreamer/.gitignore
new file mode 100644
index 0000000..62535aa
--- /dev/null
+++ b/Rover/GStreamer/.gitignore
@@ -0,0 +1,3 @@
+device-scanner
+server
+server-wrapper
diff --git a/Rover/GStreamer/Makefile b/Rover/GStreamer/Makefile
new file mode 100644
index 0000000..a166b01
--- /dev/null
+++ b/Rover/GStreamer/Makefile
@@ -0,0 +1,30 @@
+CPP= g++
+CFLAGS= -g -Wall -Wpedantic -std=c++14
+GSFLAGS= `pkg-config --cflags --libs gstreamer-1.0` -lgstrtspserver-1.0
+HEADERS=
+DEV= device-scanner
+SERVER= server-wrapper
+
+all: $(DEV) $(SERVER)
+ rm *.o
+
+debug: $(DEV)_debug $(SERVER)_debug
+ rm *.o
+
+server: $(HEADERS) server.cpp
+ $(CPP) -O3 -c server.cpp $(GSFLAGS) $(CFLAGS)
+
+server-wrapper: $(HEADERS) $(SERVER).cpp server
+ $(CPP) -O3 $(SERVER).cpp -o $(SERVER) server.o $(GSFLAGS) $(CFLAGS)
+
+device-scanner: $(HEADERS) $(DEV).cpp
+ $(CPP) -O3 $(DEV).cpp -o $(DEV) $(GSFLAGS) $(CFLAGS)
+
+server-wrapper_debug: $(HEADERS) $(SERVER).cpp server
+ $(CPP) -ggdb -D DEBUG $(SERVER).cpp -o $(SERVER) $(GSFLAGS) $(CFLAGS)
+
+device-scanner_debug: $(HEADERS) $(DEV).cpp
+ $(CPP) -ggdb -D DEBUG $(DEV).cpp -o $(DEV) $(GSFLAGS) $(CFLAGS)
+
+clean:
+ rm $(DEV) $(SERVER)
diff --git a/Rover/GStreamer/README.md b/Rover/GStreamer/README.md
new file mode 100644
index 0000000..1b5fa80
--- /dev/null
+++ b/Rover/GStreamer/README.md
@@ -0,0 +1,2 @@
+Expects gstreamer and appropriate plugins to be installed based on pipeline.
+Expects gst-rtsp-server to be installed.
diff --git a/Rover/GStreamer/device-scanner.cpp b/Rover/GStreamer/device-scanner.cpp
new file mode 100644
index 0000000..2a21ec6
--- /dev/null
+++ b/Rover/GStreamer/device-scanner.cpp
@@ -0,0 +1,90 @@
+#include <gst/gst.h>
+#include <glib.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+
+#ifdef DEBUG
+void structure_fields(const GstStructure *device)
+{
+ gint n_fields = gst_structure_n_fields(device);
+ for (int i = 0; i < n_fields; i++)
+ {
+ const gchar *name;
+ name = gst_structure_nth_field_name(device, i);
+ g_print("%s:\n%s\n", name, gst_structure_get_string(device, name));
+ }
+}
+#endif
+
+GstDeviceMonitor *device_monitor(void)
+{
+ GstDeviceMonitor *monitor;
+ GstBus *bus;
+ GstCaps *caps;
+
+ // starts the monitor for the devices
+ monitor = gst_device_monitor_new ();
+
+ // starts the bus for the monitor
+ bus = gst_device_monitor_get_bus (monitor);
+ gst_object_unref (bus);
+
+ // adds a filter to scan for only video devices
+ caps = gst_caps_new_empty_simple ("video/x-raw");
+ gst_device_monitor_add_filter (monitor, "Video/Source", caps);
+ gst_caps_unref (caps);
+
+ gst_device_monitor_start (monitor);
+
+ return monitor;
+}
+
+int main(int argc, char *argv[])
+{
+ GstDeviceMonitor *monitor;
+ GList *dev;
+ GMainLoop *loop;
+ gst_init(&argc, &argv);
+
+ // create the monitor
+ monitor = device_monitor();
+ dev = gst_device_monitor_get_devices(monitor);
+
+ // loop for the lists
+ GList *cur = g_list_first(dev);
+ while(cur != NULL)
+ {
+ GstDevice * devi = (GstDevice *) cur->data;
+ GstStructure * type = gst_device_get_properties(devi);
+
+#ifdef DEBUG
+ structure_fields(type);
+#endif
+ const char * name = gst_device_get_display_name(devi);
+ const char * path = gst_structure_get_string(type, "device.path");
+ g_print("Device-Scanner > Starting process for %s camera at %s\n", name, path);
+ int mypid = fork();
+ if(mypid == 0)
+ {
+ int ret;
+ if (strcmp(name, "ZED") == 0)
+ {
+ ret = execl("../zed_depth/zed_depth", argv[0], NULL);
+ }
+ else
+ {
+ ret = execl("./server-wrapper", argv[0], name, path, NULL);
+ }
+ return ret; // Shouldn't reach this line
+ }
+ cur = g_list_next(cur);
+ }
+
+ loop = g_main_loop_new(NULL, FALSE);
+ g_main_loop_run(loop);
+ g_main_loop_unref(loop);
+
+ return 0;
+}
diff --git a/Rover/GStreamer/server-wrapper.cpp b/Rover/GStreamer/server-wrapper.cpp
new file mode 100644
index 0000000..8ce32fc
--- /dev/null
+++ b/Rover/GStreamer/server-wrapper.cpp
@@ -0,0 +1,7 @@
+#include "server.h"
+
+int
+main (int argc, char *argv[])
+{
+ return start_server(argc, argv);
+}
diff --git a/Rover/GStreamer/server.cpp b/Rover/GStreamer/server.cpp
new file mode 100644
index 0000000..89e8ef8
--- /dev/null
+++ b/Rover/GStreamer/server.cpp
@@ -0,0 +1,142 @@
+#include <gst/gst.h>
+#include <gst/rtsp-server/rtsp-server.h>
+#include <string>
+#include <tuple>
+#include <unordered_map>
+#include <vector>
+
+#include "server.h"
+
+using namespace std;
+
+unordered_map<string, tuple<const char*, int, int, vector<float>*, const char*>> table;
+
+char *
+construct_pipeline(const char * devpath, const char * input_type, int width, int height, float scale)
+{
+ char * output = (char *) malloc(512);
+
+ int scaled_width = width * scale;
+ int scaled_height = height * scale;
+
+ if (strcmp(input_type, "I420") != 0)
+ {
+ snprintf(output, 512, "intervideosrc ! video/x-raw, format=%s, width=%d, height=%d ! videoscale ! videoconvert ! video/x-raw, format=I420, width=%d, height=%d ! rtpvrawpay name=pay0 pt=96", input_type, width, height, scaled_width, scaled_height);
+ }
+ else
+ {
+ snprintf(output, 512, "intervideosrc ! video/x-raw, format=I420, width=%d, height=%d ! videoscale ! video/x-raw, format=I420, width=%d, height=%d ! rtpvrawpay name=pay0 pt=96", width, height, scaled_width, scaled_height);
+ }
+
+
+ return output;
+}
+
+void
+setup_map()
+{
+ vector<float> * zedlist = new vector<float>;
+ zedlist->push_back(0.25f);
+ zedlist->push_back(0.1f);
+ vector<float> * usb2list = new vector<float>;
+ usb2list->push_back(0.25f);
+ usb2list->push_back(0.1f);
+ table["ZED"] = make_tuple("5556", 4416, 1242, zedlist, "YUY2");
+ table["USB 2.0 Camera"] = make_tuple("5557", 1920, 1080, usb2list, "YUY2");
+}
+
+int
+start_server (int argc, char *argv[])
+{
+ GMainLoop *loop;
+ GstRTSPServer *server;
+ const char * devname;
+ const char * devpath;
+ const char * port;
+ vector<char *> pipelines;
+ setup_map();
+ gst_init(&argc, &argv);
+
+ if (argc == 3) // Want to run a predefined camera which is at defined path
+ {
+ devname = argv[1];
+ devpath = argv[2];
+ if (table.count(devname) == 0)
+ {
+ g_print("%s@%s > No pipeline found; provide pipeline argument\n", devname, devpath);
+ return -1;
+ }
+ else
+ {
+ string input = "v4l2src device=";
+ input += devpath;
+ input += " ! videorate ! video/x-raw, framerate=10/1 ! intervideosink";
+ GstElement * inputpipe = gst_parse_launch(input.c_str(), NULL);
+ int ret = gst_element_set_state(inputpipe, GST_STATE_PLAYING);
+
+ if (ret == GST_STATE_CHANGE_FAILURE)
+ {
+ exit(-1);
+ }
+ else
+ {
+ g_print("%s@%s > Opened camera successfully\n", devname, devpath);
+ }
+
+ tuple<const char*, int, int, vector<float>*, const char*> item = table.at(devname);
+ for (int i = 0; i < (int) get<3>(item)->size(); i++)
+ {
+ pipelines.push_back(construct_pipeline(devpath, get<4>(item), get<1>(item), get<2>(item), (*(get<3>(item)))[i]));
+#ifdef DEBUG
+ g_print("%s@%s > %s\n", devname, devpath, pipelines[i]);
+#endif
+ }
+
+ port = get<0>(item);
+ }
+ }
+ else if (argc == 5) // Likely from command line where everything is defined as an argument
+ {
+ devname = argv[1];
+ devpath = argv[2];
+ port = argv[3];
+ pipelines.push_back(argv[4]);
+ }
+ else
+ {
+ g_print("Usage ./serve <devname> <devpath> <port> <pipeline>\n");
+ return -1;
+ }
+
+ loop = g_main_loop_new (NULL, FALSE);
+
+ /* create a server instance */
+ server = gst_rtsp_server_new ();
+ g_object_set (server, "service", port, NULL);
+
+ /* get the mount points for this server, every server has a default object
+ * that be used to map uri mount points to media factories */
+ GstRTSPMountPoints * mounts = gst_rtsp_server_get_mount_points (server);
+ char attachment[32];
+
+ for (unsigned short i = 0; i < (int) pipelines.size(); i++)
+ {
+ GstRTSPMediaFactory * factory = gst_rtsp_media_factory_new ();
+ gst_rtsp_media_factory_set_launch (factory, pipelines[i]);
+ gst_rtsp_media_factory_set_shared (factory, TRUE);
+ snprintf(attachment, sizeof(attachment), "/feed%hu", i);
+ gst_rtsp_mount_points_add_factory (mounts, attachment, factory);
+ g_print ("%s@%s: stream ready at rtsp://127.0.0.1:%s/feed%d\n", devname, devpath, port, i);
+ }
+
+ /* don't need the ref to the mapper anymore */
+ g_object_unref (mounts);
+
+ /* attach the server to the default maincontext */
+ gst_rtsp_server_attach (server, NULL);
+
+ /* start serving */
+ g_main_loop_run (loop);
+ g_main_loop_unref(loop);
+ return 0;
+}
diff --git a/Rover/GStreamer/server.h b/Rover/GStreamer/server.h
new file mode 100644
index 0000000..29f9366
--- /dev/null
+++ b/Rover/GStreamer/server.h
@@ -0,0 +1,14 @@
+#ifndef SERVER_H_ /* Include guard */
+#define SERVER_H_
+
+#include
+
+struct g_server_data
+{
+ int argc;
+ const char *argv[8];
+};
+
+int start_server (int argc, char *argv[]);
+
+#endif // SERVER_H_
diff --git a/Rover/JetsonGStreamerServer/build b/Rover/JetsonGStreamerServer/build
deleted file mode 100755
index 0d6075a..0000000
--- a/Rover/JetsonGStreamerServer/build
+++ /dev/null
@@ -1 +0,0 @@
-g++ gserver.cpp -ggdb -std=c++14 -o gserver -lopencv_core -lopencv_video -lopencv_videoio -lopencv_imgproc -lopencv_highgui -pthread -I/usr/include/gstreamer-1.0 -I/usr/lib/aarch64-linux-gnu/gstreamer-1.0/include -I/usr/include/glib-2.0 -I/usr/lib/aarch64-linux-gnu/glib-2.0/include -lgstreamer-1.0 -lgobject-2.0 -lglib-2.0
diff --git a/Rover/Makefile b/Rover/Map/Makefile
similarity index 100%
rename from Rover/Makefile
rename to Rover/Map/Makefile
diff --git a/Rover/Map.cpp b/Rover/Map/Map.cpp
similarity index 100%
rename from Rover/Map.cpp
rename to Rover/Map/Map.cpp
diff --git a/Rover/Map.h b/Rover/Map/Map.h
similarity index 100%
rename from Rover/Map.h
rename to Rover/Map/Map.h
diff --git a/Rover/TestMap.cpp b/Rover/Map/TestMap.cpp
similarity index 100%
rename from Rover/TestMap.cpp
rename to Rover/Map/TestMap.cpp
diff --git a/Rover/README.md b/Rover/README.md
index 7490f6c..5cf8eb9 100644
--- a/Rover/README.md
+++ b/Rover/README.md
@@ -1 +1,3 @@
-# Default
+# Running GStreamer
+Run the device-scanner binary in the GStreamer folder.
+This binary will also start the zed-depth program.
diff --git a/Rover/JetsonGStreamerServer/Makefile b/Rover/RicohServer/Makefile
similarity index 100%
rename from Rover/JetsonGStreamerServer/Makefile
rename to Rover/RicohServer/Makefile
diff --git a/Rover/JetsonGStreamerServer/gserver.cpp b/Rover/RicohServer/gserver.cpp
similarity index 100%
rename from Rover/JetsonGStreamerServer/gserver.cpp
rename to Rover/RicohServer/gserver.cpp
diff --git a/Rover/JetsonGStreamerServer/gserver.h b/Rover/RicohServer/gserver.h
similarity index 100%
rename from Rover/JetsonGStreamerServer/gserver.h
rename to Rover/RicohServer/gserver.h
diff --git a/Rover/JetsonGStreamerServer/remap_x.txt b/Rover/RicohServer/remap_x.txt
similarity index 100%
rename from Rover/JetsonGStreamerServer/remap_x.txt
rename to Rover/RicohServer/remap_x.txt
diff --git a/Rover/JetsonGStreamerServer/remap_y.txt b/Rover/RicohServer/remap_y.txt
similarity index 100%
rename from Rover/JetsonGStreamerServer/remap_y.txt
rename to Rover/RicohServer/remap_y.txt
diff --git a/Rover/JetsonGStreamerServer/remapping.java b/Rover/RicohServer/remapping.java
similarity index 100%
rename from Rover/JetsonGStreamerServer/remapping.java
rename to Rover/RicohServer/remapping.java
diff --git a/Rover/JetsonGStreamerServer/ricoh.cpp b/Rover/RicohServer/ricoh.cpp
similarity index 100%
rename from Rover/JetsonGStreamerServer/ricoh.cpp
rename to Rover/RicohServer/ricoh.cpp
diff --git a/Rover/JetsonGStreamerServer/ricoh.h b/Rover/RicohServer/ricoh.h
similarity index 100%
rename from Rover/JetsonGStreamerServer/ricoh.h
rename to Rover/RicohServer/ricoh.h
diff --git a/Rover/ZedDepth/.gitignore b/Rover/ZedDepth/.gitignore
new file mode 100644
index 0000000..778879d
--- /dev/null
+++ b/Rover/ZedDepth/.gitignore
@@ -0,0 +1 @@
+zed_depth
diff --git a/Rover/ZedDepth/Makefile b/Rover/ZedDepth/Makefile
new file mode 100644
index 0000000..9dfe8f4
--- /dev/null
+++ b/Rover/ZedDepth/Makefile
@@ -0,0 +1,26 @@
+CPP= g++
+CFLAGS= -g -std=c++14
+GSFLAGS= `pkg-config --cflags --libs gstreamer-1.0` -lgstrtspserver-1.0
+CVFLAGS= `pkg-config --cflags --libs opencv4`
+INCLUDES= -I/usr/local/zed/include -I/usr/local/cuda/include -I../GStreamer
+LIBS= /usr/local/zed/lib/*.so -march=armv8-a+simd
+HEADERS=
+DEPTH= zed-depth
+
+all: $(DEPTH)
+ rm *.o
+
+debug: $(DEPTH)_debug
+ rm *.o
+
+server: $(HEADERS) ../GStreamer/server.cpp
+ $(CPP) -O3 -c ../GStreamer/server.cpp $(GSFLAGS) $(CFLAGS)
+
+zed-depth: $(HEADERS) $(DEPTH).cpp server
+ $(CPP) -O3 $(DEPTH).cpp -o $(DEPTH) server.o $(GSFLAGS) $(CVFLAGS) $(CFLAGS) $(INCLUDES) $(LIBS)
+
+zed-depth_debug: $(HEADERS) $(DEPTH).cpp server
+ $(CPP) -ggdb -D DEBUG $(DEPTH).cpp -o $(DEPTH) server.o $(GSFLAGS) $(CVFLAGS) $(CFLAGS) $(INCLUDES) $(LIBS)
+
+clean:
+ rm $(DEPTH)
diff --git a/Rover/ZedDepth/README.md b/Rover/ZedDepth/README.md
new file mode 100644
index 0000000..85f7bd1
--- /dev/null
+++ b/Rover/ZedDepth/README.md
@@ -0,0 +1 @@
+# DO NOT run zed-depth binary directly except for testing. Run device-scanner binary in /Rover/GStreamer/ to operate other cameras over the network
diff --git a/Rover/ZedDepth/build b/Rover/ZedDepth/build
new file mode 100755
index 0000000..8f58e6d
--- /dev/null
+++ b/Rover/ZedDepth/build
@@ -0,0 +1 @@
+make
diff --git a/Rover/ZedDepth/ground.png b/Rover/ZedDepth/ground.png
new file mode 100644
index 0000000..0c34efb
Binary files /dev/null and b/Rover/ZedDepth/ground.png differ
diff --git a/Rover/ZedDepth/run b/Rover/ZedDepth/run
new file mode 100755
index 0000000..4666cdb
--- /dev/null
+++ b/Rover/ZedDepth/run
@@ -0,0 +1 @@
+./zed-depth
diff --git a/Rover/ZedDepth/save-depth.cpp b/Rover/ZedDepth/save-depth.cpp
new file mode 100644
index 0000000..4601ebf
--- /dev/null
+++ b/Rover/ZedDepth/save-depth.cpp
@@ -0,0 +1,98 @@
+#include <sl/Camera.hpp>
+#include <opencv2/opencv.hpp>
+#include <opencv2/core.hpp>
+#include <opencv2/imgproc.hpp>
+#include <opencv2/highgui.hpp>
+#include <opencv2/imgcodecs.hpp>
+#include <iostream>
+#include <cstdint>
+
+cv::Mat slMat2cvMat(const sl::Mat &input)
+{
+ // Mapping between MAT_TYPE and CV_TYPE
+ int cv_type = -1;
+ switch (input.getDataType())
+ {
+ case sl::MAT_TYPE_32F_C1: cv_type = CV_32FC1; break;
+ case sl::MAT_TYPE_32F_C2: cv_type = CV_32FC2; break;
+ case sl::MAT_TYPE_32F_C3: cv_type = CV_32FC3; break;
+ case sl::MAT_TYPE_32F_C4: cv_type = CV_32FC4; break;
+ case sl::MAT_TYPE_8U_C1: cv_type = CV_8UC1; break;
+ case sl::MAT_TYPE_8U_C2: cv_type = CV_8UC2; break;
+ case sl::MAT_TYPE_8U_C3: cv_type = CV_8UC3; break;
+ case sl::MAT_TYPE_8U_C4: cv_type = CV_8UC4; break;
+ default: break;
+ }
+
+ // Since cv::Mat data requires a uchar* pointer, we get the uchar1 pointer from sl::Mat (getPtr())
+ // cv::Mat and sl::Mat will share a single memory structure
+ return cv::Mat(input.getHeight(), input.getWidth(), cv_type, input.getPtr<sl::uchar1>(sl::MEM_CPU));
+}
+
+int main(void)
+{
+ sl::InitParameters init_params;
+ init_params.camera_resolution = sl::RESOLUTION_HD1080;
+ init_params.depth_mode = sl::DEPTH_MODE_QUALITY;
+ init_params.coordinate_units = sl::UNIT_METER;
+
+ sl::Camera zed;
+ if(zed.open(init_params) != sl::SUCCESS)
+ {
+ std::cout << "Failed to open camera.\n";
+ return(1);
+ }
+
+ sl::RuntimeParameters runtime_params;
+ runtime_params.sensing_mode = sl::SENSING_MODE_STANDARD;
+ runtime_params.enable_point_cloud = false;
+ sl::Resolution image_size = zed.getResolution();
+ int32_t new_width = image_size.width / 2;
+ int32_t new_height = image_size.height / 2;
+
+ sl::Mat img_zed(new_width, new_height, sl::MAT_TYPE_8U_C4);
+ cv::Mat image_cv = slMat2cvMat(img_zed);
+
+ sl::Mat depth_img_zed(new_width, new_height, sl::MAT_TYPE_8U_C4);
+ cv::Mat depth_img_cv = slMat2cvMat(depth_img_zed);
+
+ sl::Mat sl_depth_f32;
+ cv::Mat cv_depth_f32;
+
+ constexpr int num_pics = 10;
+
+ if(zed.grab(runtime_params) == sl::SUCCESS)
+ {
+ zed.retrieveImage(img_zed, sl::VIEW_LEFT, sl::MEM_CPU, new_width, new_height);
+ zed.retrieveImage(depth_img_zed, sl::VIEW_DEPTH, sl::MEM_CPU, new_width, new_height);
+ zed.retrieveMeasure(sl_depth_f32, sl::MEASURE_DEPTH);
+ cv::Mat tmp = slMat2cvMat(sl_depth_f32);
+ cv_depth_f32 = tmp;
+ std::cout << "Frame 1" << std::endl;
+ }
+ else
+ {
+ std::cout << "Failed to grab frame" << std::endl;
+ }
+ for(int i = 0; i < num_pics - 1; i++)
+ {
+ if(zed.grab(runtime_params) == sl::SUCCESS)
+ {
+ zed.retrieveImage(img_zed, sl::VIEW_LEFT, sl::MEM_CPU, new_width, new_height);
+ zed.retrieveImage(depth_img_zed, sl::VIEW_DEPTH, sl::MEM_CPU, new_width, new_height);
+ zed.retrieveMeasure(sl_depth_f32, sl::MEASURE_DEPTH);
+ cv::Mat this_cv_depth_f32 = slMat2cvMat(sl_depth_f32);
+ cv_depth_f32 += this_cv_depth_f32;
+ std::cout << "Frame " << (i + 1) << std::endl;
+ }
+ else
+ {
+ std::cout << "Failed to grab frame.\n";
+ }
+ cv::waitKey(100);
+ }
+ cv_depth_f32 /= num_pics;
+ imwrite("ground.png", cv_depth_f32);
+ zed.close();
+ return(0);
+}
diff --git a/Rover/ZedDepth/zed-depth b/Rover/ZedDepth/zed-depth
new file mode 100755
index 0000000..348bfb8
Binary files /dev/null and b/Rover/ZedDepth/zed-depth differ
diff --git a/Rover/ZedDepth/zed-depth.cpp b/Rover/ZedDepth/zed-depth.cpp
new file mode 100644
index 0000000..7c3300a
--- /dev/null
+++ b/Rover/ZedDepth/zed-depth.cpp
@@ -0,0 +1,300 @@
+#include <sl/Camera.hpp>
+#include <opencv2/opencv.hpp>
+#include <opencv2/core.hpp>
+#include <opencv2/imgproc.hpp>
+#include <opencv2/highgui.hpp>
+#include <iostream>
+#include <sstream>
+#include <vector>
+#include <thread>
+#include <chrono>
+#include <climits>
+#include "server.h"
+#include <cmath>
+
+cv::Mat slMat2cvMat(const sl::Mat &input);
+sl::Camera zed;
+cv::VideoWriter writer;
+cv::VideoWriter writer_debug;
+int32_t new_width;
+int32_t new_height;
+
+void
+getSomeImages()
+{
+ sl::Mat img_zed(new_width, new_height, sl::MAT_TYPE_8U_C4);
+
+ for(;;)
+ {
+ zed.retrieveImage(img_zed, sl::VIEW_LEFT, sl::MEM_CPU, new_width, new_height);
+ cv::Mat three_channel_bgr;
+ cv::Mat img_cv = slMat2cvMat(img_zed);
+ cv::cvtColor(img_cv, three_channel_bgr, cv::COLOR_BGRA2BGR);
+ writer.write(three_channel_bgr);
+ std::this_thread::sleep_for(std::chrono::milliseconds(20));
+ }
+}
+
+cv::Mat slMat2cvMat(const sl::Mat &input)
+{
+ // Mapping between MAT_TYPE and CV_TYPE
+ int cv_type = -1;
+ switch (input.getDataType())
+ {
+ case sl::MAT_TYPE_32F_C1: cv_type = CV_32FC1; break;
+ case sl::MAT_TYPE_32F_C2: cv_type = CV_32FC2; break;
+ case sl::MAT_TYPE_32F_C3: cv_type = CV_32FC3; break;
+ case sl::MAT_TYPE_32F_C4: cv_type = CV_32FC4; break;
+ case sl::MAT_TYPE_8U_C1: cv_type = CV_8UC1; break;
+ case sl::MAT_TYPE_8U_C2: cv_type = CV_8UC2; break;
+ case sl::MAT_TYPE_8U_C3: cv_type = CV_8UC3; break;
+ case sl::MAT_TYPE_8U_C4: cv_type = CV_8UC4; break;
+ default: break;
+ }
+
+ // Since cv::Mat data requires a uchar* pointer, we get the uchar1 pointer from sl::Mat (getPtr())
+ // cv::Mat and sl::Mat will share a single memory structure
+ return cv::Mat(input.getHeight(), input.getWidth(), cv_type, input.getPtr<sl::uchar1>(sl::MEM_CPU));
+}
+
+int main(int argc, char *argv[])
+{
+ sl::InitParameters init_params;
+ init_params.camera_resolution = sl::RESOLUTION_VGA;
+ init_params.depth_mode = sl::DEPTH_MODE_QUALITY;
+ init_params.coordinate_units = sl::UNIT_METER;
+
+ if(zed.open(init_params) != sl::SUCCESS)
+ {
+ std::cout << "Failed to open camera.\n";
+ return(1);
+ }
+
+ sl::RuntimeParameters runtime_params;
+ runtime_params.sensing_mode = sl::SENSING_MODE_STANDARD;
+ runtime_params.enable_point_cloud = false;
+ sl::Resolution image_size = zed.getResolution();
+
+ // scale image ?
+ //new_width = image_size.width / 2;
+ //new_height = image_size.height / 2;
+ new_width = image_size.width;
+ new_height = image_size.height;
+
+ sl::Mat img_zed(new_width, new_height, sl::MAT_TYPE_8U_C4);
+
+ // depth images
+ sl::Mat depth_img_zed(new_width, new_height, sl::MAT_TYPE_8U_C4);
+ cv::Mat depth_img_cv = slMat2cvMat(depth_img_zed);
+
+ sl::Mat sl_depth_f32;
+
+ g_server_data data;
+ data.argc = 5;
+ data.argv[0] = argv[0];
+ data.argv[1] = "intervideosrc";
+ data.argv[2] = "zed_depth";
+ data.argv[3] = "5556";
+ data.argv[4] = "intervideosrc channel=rgb ! rtpvrawpay name=pay0 pt=96";
+
+ writer.open("appsrc ! video/x-raw,format=BGR ! videoconvert ! video/x-raw,format=I420 ! intervideosink channel=rgb", 0, 10, cv::Size(img_zed.getWidth(), img_zed.getHeight()), true);
+
+ std::thread t1(start_server, data.argc, (char **) data.argv);
+ std::thread t2(getSomeImages);
+
+#ifdef DEBUG
+ g_server_data data2;
+ data2.argc = 5;
+ data2.argv[0] = argv[0];
+ data2.argv[1] = "intervideosrc";
+ data2.argv[2] = "zed_depth_debug";
+ data2.argv[3] = "8888";
+ data2.argv[4] = "intervideosrc channel=wshed ! rtpvrawpay name=pay0 pt=96";
+
+ writer_debug.open("appsrc ! video/x-raw,format=BGR ! videoconvert ! video/x-raw,format=I420 ! intervideosink channel=wshed", 0, 10, cv::Size(img_zed.getWidth(), img_zed.getHeight()), true);
+
+ std::thread t3(start_server, data2.argc, (char **) data2.argv);
+#endif
+
+ for(char key = ' '; key != 'q'; key = cv::waitKey(10))
+ {
+ if(zed.grab(runtime_params) == sl::SUCCESS)
+ {
+ // get depth map form zed
+ zed.retrieveImage(img_zed, sl::VIEW_LEFT, sl::MEM_CPU, new_width, new_height);
+ zed.retrieveImage(depth_img_zed, sl::VIEW_DEPTH, sl::MEM_CPU, new_width, new_height);
+ zed.retrieveMeasure(sl_depth_f32, sl::MEASURE_DEPTH);
+
+ std::vector<cv::Vec4i> hierarchy;
+ cv::Mat edges, blur_kern;
+ std::vector<std::vector<cv::Point> > contours;
+
+ cv::Mat img_cv = slMat2cvMat(img_zed);
+#define TIME std::chrono::duration<double, std::milli>(end - start).count()
+#define NOW std::chrono::high_resolution_clock::now();
+ auto start = NOW;
+ // blur image
+ cv::cvtColor(img_cv, img_cv, CV_8U, 3);
+ cv::Mat img_cv_blur(img_cv.size(), CV_8UC3);
+ cv::bilateralFilter(img_cv, img_cv_blur, 9, 150.0, 150.0, cv::BORDER_DEFAULT);
+
+ // setup for contours for regular image
+ cv::resize(img_cv_blur, img_cv_blur, cv::Size(new_width, new_height));
+ cv::Canny(img_cv_blur, edges, 100, 200);
+
+ auto end = NOW;
+ auto ms = TIME;
+ std::cout << "Preprocessing Time: " << ms << " ms\n";
+
+ start = NOW;
+ // make contours for watershed with 8-bit single-channel image
+ cv::findContours(edges, contours, hierarchy, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
+ if(hierarchy.empty())
+ continue;
+ // make contours to cv mat
+ cv::Mat contour_img(edges.size(), CV_32S);
+ cv::Mat contour_img_visible(edges.size(), CV_32S);
+
+ // reset contours
+ contour_img = 0;
+ contour_img_visible = 0;
+ int idx = 0;
+ int comp_count = 0;
+ for(; idx >= 0; idx = hierarchy[idx][0], comp_count++)
+ {
+ cv::drawContours(contour_img,
+ contours,
+ idx,
+ cv::Scalar::all(idx),
+ -1,
+ 8,
+ hierarchy,
+ 1);
+
+ cv::drawContours(contour_img_visible,
+ contours,
+ idx,
+ cv::Scalar::all(INT_MAX),
+ -1,
+ 8,
+ hierarchy,
+ 1);
+ }
+
+ // convert to cv mat
+ cv::Mat cv_depth_f32 = slMat2cvMat(sl_depth_f32);
+ cv::Mat depth_f32;
+ cv::resize(cv_depth_f32, depth_f32, cv::Size(new_width, new_height));
+ cv::Mat three_channel_img;
+ cv::cvtColor(img_cv_blur, three_channel_img, cv::COLOR_BGRA2BGR);
+
+ cv::Mat bork;
+ contour_img_visible.convertTo(bork, CV_8U, 255);
+ //cv::imshow("contours", bork);
+
+ end = NOW;
+ ms = TIME;
+ std::cout << "Contours Time: " << ms << " ms\n";
+
+ start = NOW;
+ // watershed the image
+ cv::watershed(three_channel_img, contour_img);
+ cv::Mat wshed(contour_img.size(), CV_8UC3);
+
+ end = NOW;
+ ms = TIME;
+ std::cout << "Watershed Time: " << ms << " ms\n";
+
+ start = NOW;
+ // for each segment in watershed image make color
+ cv::Mat temp, temp_f32;
+ std::vector<cv::Vec3b> colors(comp_count);
+ std::vector<bool> obstacle(comp_count);
+ std::vector<std::pair<cv::Rect, float> > rects(comp_count);
+ for(int i = 0; i < comp_count; i++)
+ {
+ colors[i] = cv::Vec3b(0, rand() % 255, 0);
+ cv::inRange(contour_img, i, i, temp);
+
+ // convert this mat to f32
+ temp.convertTo(temp_f32, CV_32F, 1.0f / 255.0f);
+
+ // size in pixels of segments to filter out
+ double dist;
+ cv::minMaxLoc(depth_f32.mul(temp_f32), nullptr, &dist);
+ float z = static_cast<float>(dist);
+
+ rects[i] = std::make_pair(cv::boundingRect(temp), z);
+
+ int nonzero = cv::countNonZero(temp);
+ obstacle[i] = 5000 <= nonzero && nonzero <= 200000
+ && rects[i].first.y + rects[i].first.height < contour_img.rows - 1
+ && !std::isnan(z)
+ && z > 0
+ && z <= 6.0;
+ }
+ end = NOW;
+ ms = TIME;
+ std::cout << "Obstacles Time: " << ms << " ms\n";
+
+ start = NOW;
+ // put color in segment
+ cv::Mat wshed_mask(contour_img.size(), CV_8UC3);
+ for(int i = 0; i < contour_img.rows; i++)
+ for(int j = 0; j < contour_img.cols; j++)
+ {
+ int index = contour_img.at<int>(i, j);
+ if(index == -1)
+ wshed.at<cv::Vec3b>(i, j) = cv::Vec3b(255, 255, 255);
+ else if(index <= 0 || index > comp_count || !obstacle[index])
+ wshed.at<cv::Vec3b>(i, j) = cv::Vec3b(0, 0, 0);
+ else
+ wshed.at<cv::Vec3b>(i, j) = colors[index];
+ }
+ for(int i = 0; i < comp_count; i++)
+ {
+ constexpr int thickness = 6;
+ if(!obstacle[i])
+ continue;
+ auto rect = rects[i].first;
+ cv::rectangle(wshed, rect, cv::Scalar(0, 0, 255), thickness);
+ std::ostringstream ss;
+ ss << rects[i].second << " meters";
+ cv::putText(wshed,
+ ss.str(),
+ cv::Point(rect.x + thickness, rect.y + rect.height - thickness),
+ cv::FONT_HERSHEY_SIMPLEX,
+ 0.4,
+ cv::Scalar(0, 255, 0));
+
+ }
+ end = NOW;
+ ms = TIME;
+ std::cout << "Coloring Time: " << ms << " ms\n";
+ std::cout << std::endl;
+
+ //cv::imshow("watershed", wshed);
+#ifdef DEBUG
+ writer_debug.write(wshed);
+#endif
+#if 0
+ cv::Mat three_channel;
+ cv::cvtColor(depth_img_cv, three_channel, cv::COLOR_BGRA2BGR);
+ cv::applyColorMap(three_channel, bgr, cv::COLORMAP_JET);
+ cv::imshow("Original", image_cv);
+ cv::imshow("Depth", depth_img_cv);
+ cv::imshow("Depth", bgr);
+ cv::imshow("Depth2", cv_depth_u8);
+ cv::imshow("Point Cloud", point_cloud_cv);
+ cv::imshow("Depth Measure", cv_depth_f32);
+#endif
+ }
+ else
+ {
+ std::cout << "Failed to grab frame.\n";
+ }
+ }
+
+ zed.close();
+ return(0);
+}
diff --git a/Scripts/BUILD_OPENCV_4.0.1.sh b/Scripts/BUILD_OPENCV_4.0.1.sh
new file mode 100755
index 0000000..b5c3428
--- /dev/null
+++ b/Scripts/BUILD_OPENCV_4.0.1.sh
@@ -0,0 +1,62 @@
+OPENCV_VERSION=4.0.1 # Only works for OpenCV version 4+
+ARCH_BIN=6.2 # For Jetson TX2
+GCC_VERSION=6 # Version 6 required for CUDA 9
+GXX_VERSION=6 # Version 6 required for CUDA 9
+WORKING_DIR=$(pwd)
+
+# 1. KEEP UBUNTU OR DEBIAN UP TO DATE
+
+sudo apt-get -y update
+sudo apt-get -y upgrade
+sudo apt-get -y dist-upgrade
+sudo apt-get -y autoremove
+
+
+# 2. INSTALL THE DEPENDENCIES
+
+# Build tools:
+sudo apt-get install -y build-essential cmake
+
+# GUI (if you want to use GTK instead of Qt, replace 'qt5-default' with 'libgtkglext1-dev' and remove '-DWITH_QT=ON' option in CMake):
+sudo apt-get install -y qt5-default libvtk6-dev
+
+# Media I/O:
+sudo apt-get install -y zlib1g-dev libjpeg-dev libwebp-dev libpng-dev libtiff5-dev libjasper-dev libopenexr-dev libgdal-dev
+
+# Video I/O:
+sudo apt-get install -y libdc1394-22-dev libavcodec-dev libavformat-dev libswscale-dev libtheora-dev libvorbis-dev libxvidcore-dev libx264-dev yasm libopencore-amrnb-dev libopencore-amrwb-dev libv4l-dev libxine2-dev
+
+# Parallelism and linear algebra libraries:
+sudo apt-get install -y libtbb-dev libeigen3-dev
+
+# Python:
+sudo apt-get install -y python-dev python-tk python-numpy python3-dev python3-tk python3-numpy
+
+# Java:
+sudo apt-get install -y ant default-jdk
+
+# Documentation:
+sudo apt-get install -y doxygen
+
+
+# 3. INSTALL THE LIBRARY
+
+sudo apt-get install -y unzip wget
+
+wget http://github.com/opencv/opencv/archive/${OPENCV_VERSION}.zip
+unzip ${OPENCV_VERSION}.zip
+rm ${OPENCV_VERSION}.zip
+
+wget http://github.com/opencv/opencv_contrib/archive/${OPENCV_VERSION}.zip
+unzip ${OPENCV_VERSION}.zip
+rm ${OPENCV_VERSION}.zip
+
+cd opencv-${OPENCV_VERSION}
+mkdir build
+cd build
+
+cmake -DCMAKE_C_COMPILER=gcc-${GCC_VERSION} -DCMAKE_CXX_COMPILER=g++-${GXX_VERSION} -DCMAKE_BUILD_TYPE=RELEASE -DCMAKE_INSTALL_PREFIX=/usr/local -DWITH_CUDA=ON -DCUDA_ARCH_BIN=${ARCH_BIN} -DCUDA_ARCH_PTX="" -DWITH_CUBLAS=ON -DWITH_GSTREAMER=ON -DWITH_QT=ON -DWITH_OPENGL=ON -DBUILD_opencv_java=OFF -DOPENCV_GENERATE_PKGCONFIG=ON -DOPENCV_EXTRA_MODULES_PATH=${WORKING_DIR}/opencv_contrib-${OPENCV_VERSION}/modules -DENABLE_FAST_MATH=ON -DCUDA_FAST_MATH=ON -DBUILD_EXAMPLES=OFF -DBUILD_DOCS=OFF -DBUILD_PERF_TESTS=OFF -DBUILD_TESTS=OFF -DWITH_NVCUVID=ON ..
+
+make -j6
+sudo make install
+sudo ldconfig