Comparison between CNN-LSTM and Logistic Regression
Deep learning has shown strong performance for object detection and image classification in drone imagery, while logistic regression has shown strong predictive performance on drone telemetry data. Although the two serve different purposes, they can be compared on a common use case: predicting deviation from a planned trajectory, and the compensation for it, from past and current orientations. The CNN-LSTM uses a mean squared error cost function. It takes as input the vectorized output of edge-detected, Gaussian-smoothed frames captured sequentially from video and predicts the drone's next steering angle. A minimal sketch of such a network follows.
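The sketch below, assuming TensorFlow/Keras and OpenCV are available, illustrates the preprocessing and network described above; the kernel size, Canny thresholds, frame count, image dimensions, and layer widths are illustrative assumptions rather than the original configuration.

import numpy as np
import cv2                                    # assumption: OpenCV for preprocessing
from tensorflow.keras import layers, models  # assumption: Keras for the network

def preprocess(frame):
    # Gaussian smoothing followed by edge detection, as described above;
    # the kernel size and Canny thresholds are illustrative.
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    smoothed = cv2.GaussianBlur(gray, (5, 5), 0)
    edges = cv2.Canny(smoothed, 100, 200)
    return edges[..., np.newaxis] / 255.0    # normalized single-channel image

def build_cnn_lstm(frames=10, height=64, width=64, channels=1):
    model = models.Sequential([
        # The same small CNN is applied to every preprocessed frame in the sequence.
        layers.TimeDistributed(layers.Conv2D(16, 3, activation="relu"),
                               input_shape=(frames, height, width, channels)),
        layers.TimeDistributed(layers.MaxPooling2D()),
        layers.TimeDistributed(layers.Flatten()),
        # The LSTM consumes the per-frame feature vectors in temporal order.
        layers.LSTM(64),
        # Single regression output: the next steering angle.
        layers.Dense(1),
    ])
    # Mean squared error, matching the cost function described above.
    model.compile(optimizer="adam", loss="mse")
    return model

Alternatively, the same data can be emitted as telemetry, along with additional telemetry derived from the edge detections, the trajectory, and the squared errors, and the resulting feature vectors run through logistic regression. Sample usage of the telemetry pipeline follows; the script below illustrates it with rx_fast_trees from the microsoftml package, and rx_logistic_regression accepts the identical formula interface (see the variant at the end):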
#!/usr/bin/env python
import matplotlib.pyplot as plt
import pandas
import os
here = os.path.dirname(__file__) if "__file__" in locals() else "."
data_file = os.path.join(here, "data", "flight_errors", "data.csv")
data = pandas.read_csv(data_file, sep=",")
# y is the last column and the variable we want to predict. It has a boolean value.
data["y"] = data["y"].astype("category")
print(data.head(2))
print(data.shape)
data["y"] = data["y"].apply(lambda x: 1 if x == 1 else 0)
print(data[["y", "X1"]].groupby("y").count())
try:
    from sklearn.model_selection import train_test_split
except ImportError:
    # older scikit-learn releases kept train_test_split in cross_validation
    from sklearn.cross_validation import train_test_split
train, test = train_test_split(data)
import numpy as np
from microsoftml import rx_fast_trees, rx_predict
features = [c for c in train.columns if c.startswith("X")]
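# microsoftml estimators take an R-style formula: here, predict y from every X* column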
model = rx_fast_trees("y ~ " + "+".join(features), data=train)
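# rx_predict returns the predictions (including a PredictedLabel column);
# extra_vars_to_write keeps the true y alongside them for evaluation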
pred = rx_predict(model, test, extra_vars_to_write=["y"])
print(pred.head())
from sklearn.metrics import confusion_matrix
conf = confusion_matrix(pred["y"], pred["PredictedLabel"])
print(conf)
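# Hyperparameter search: split the training set again (trainA/trainB),
# train on trainA with each candidate num_leaves, and score accuracy on trainB.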
def train_test_hyperparameter(trainA, trainB, **hyper):
    # train a model on trainA with the given hyperparameters
    features = [c for c in trainA.columns if c.startswith("X")]
    model = rx_fast_trees("y ~ " + "+".join(features), data=trainA, verbose=0, **hyper)
    pred = rx_predict(model, trainB, extra_vars_to_write=["y"])
    conf = confusion_matrix(pred["y"], pred["PredictedLabel"])
    # accuracy: correctly classified examples over all examples
    return (conf[0, 0] + conf[1, 1]) / conf.sum()
trainA, trainB = train_test_split(train)
hyper_values = [5, 10, 15, 20, 25, 30, 35, 40, 50, 100, 200]
perfs = []
for val in hyper_values:
    acc = train_test_hyperparameter(trainA, trainB, num_leaves=val)
    perfs.append(acc)
    print("-- Training with hyper={0} performance={1}".format(val, acc))
fig, ax = plt.subplots(1, 1)
ax.plot(hyper_values, perfs, "o-")
ax.set_xlabel("num_leaves")
ax.set_ylabel("% correctly classified")
plt.show()
# pick the (accuracy, num_leaves) pair with the best validation accuracy
best = max(zip(perfs, hyper_values))
print("max={0}".format(best))
model = rx_fast_trees("y ~ " + "+".join(features), data=train, num_leaves=best[1])
pred = rx_predict(model, test, extra_vars_to_write=["y"])
conf = confusion_matrix(pred["y"], pred["PredictedLabel"])
print(conf)
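Since the section's premise is logistic regression, a sketch of the same pipeline with microsoftml's rx_logistic_regression follows; the regularization weights below are illustrative assumptions rather than tuned values, and there is no num_leaves analogue to search over.

# Sketch: the logistic-regression variant of the same pipeline.
# l1_weight/l2_weight values are illustrative, not tuned.
from microsoftml import rx_logistic_regression
lr_model = rx_logistic_regression("y ~ " + "+".join(features), data=train,
                                  l1_weight=0.1, l2_weight=0.1)
lr_pred = rx_predict(lr_model, test, extra_vars_to_write=["y"])
print(confusion_matrix(lr_pred["y"], lr_pred["PredictedLabel"]))

Comparing the two confusion matrices on the same held-out test set gives a like-for-like view of the boosted trees against the linear model.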