Python sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis() Examples

The following are 18 code examples of sklearn.discriminant_analysis.QuadraticDiscriminantAnalysis(), drawn from open source projects. You may also want to check out the other available functions and classes of the sklearn.discriminant_analysis module. A minimal usage sketch follows, before Example #1.
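Here is a minimal, self-contained sketch of the basic API the examples below exercise. The toy data is made up for illustration and does not come from any of the listed projects.

import numpy as np
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

# Toy two-class data (hypothetical, for illustration only).
X = np.array([[-2.0, -1.0], [-1.5, -1.2], [-1.0, -0.5],
              [1.0, 1.2], [1.5, 0.8], [2.0, 1.5]])
y = np.array([1, 1, 1, 2, 2, 2])

# Fit one Gaussian per class, then predict labels and posteriors.
clf = QuadraticDiscriminantAnalysis(reg_param=0.01, store_covariance=True)
clf.fit(X, y)
print(clf.predict(X))         # hard class labels
print(clf.predict_proba(X))   # per-class posterior probabilities
print(clf.covariance_[0])     # per-class covariance (requires store_covariance=True)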
Example #1
Source File: test_discriminant_analysis.py    From twitter-stock-recommendation with MIT License
def test_qda_regularization():
    # the default is reg_param=0. and will cause issues
    # when there is a constant variable
    clf = QuadraticDiscriminantAnalysis()
    with ignore_warnings():
        y_pred = clf.fit(X2, y6).predict(X2)
    assert_true(np.any(y_pred != y6))

    # adding a little regularization fixes the problem
    clf = QuadraticDiscriminantAnalysis(reg_param=0.01)
    with ignore_warnings():
        clf.fit(X2, y6)
    y_pred = clf.predict(X2)
    assert_array_equal(y_pred, y6)

    # Case n_samples_in_a_class < n_features
    clf = QuadraticDiscriminantAnalysis(reg_param=0.1)
    with ignore_warnings():
        clf.fit(X5, y5)
    y_pred5 = clf.predict(X5)
    assert_array_equal(y_pred5, y5) 
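The fixtures X2, X5, y5 and y6 are defined elsewhere in the test module. As a self-contained illustration of the failure mode this test guards against, here is a hypothetical dataset where one feature is constant within each class, which makes every per-class covariance matrix singular; a small reg_param repairs it.

import warnings
import numpy as np
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

# Made-up data: the second feature is constant within each class,
# so every per-class covariance matrix is singular.
X = np.array([[0.0, 1.0], [1.0, 1.0], [2.0, 1.0],
              [3.0, 2.0], [4.0, 2.0], [5.0, 2.0]])
y = np.array([1, 1, 1, 2, 2, 2])

with warnings.catch_warnings():
    warnings.simplefilter("ignore")  # QDA warns that variables are collinear
    y_unreg = QuadraticDiscriminantAnalysis().fit(X, y).predict(X)
y_reg = QuadraticDiscriminantAnalysis(reg_param=0.01).fit(X, y).predict(X)
print(y_unreg)  # may disagree with y: the singular covariance poisons the scores
print(y_reg)    # matches y once a little shrinkage is added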
Example #2
Source File: test_discriminant_analysis.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_qda_store_covariance():
    # The default is to not set the covariances_ attribute
    clf = QuadraticDiscriminantAnalysis().fit(X6, y6)
    assert not hasattr(clf, 'covariance_')

    # Test the actual attribute:
    clf = QuadraticDiscriminantAnalysis(store_covariance=True).fit(X6, y6)
    assert hasattr(clf, 'covariance_')

    assert_array_almost_equal(
        clf.covariance_[0],
        np.array([[0.7, 0.45], [0.45, 0.7]])
    )

    assert_array_almost_equal(
        clf.covariance_[1],
        np.array([[0.33333333, -0.33333333], [-0.33333333, 0.66666667]])
    ) 
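X6 and y6 are fixtures from the test module. A self-contained sketch of the same behaviour, on made-up data (so the printed covariance values will differ from the assertions above):

import numpy as np
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

# Hypothetical toy data, two classes in two dimensions.
X = np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 0.0],
              [5.0, 5.0], [6.0, 4.0], [7.0, 5.0]])
y = np.array([1, 1, 1, 2, 2, 2])

clf = QuadraticDiscriminantAnalysis().fit(X, y)
print(hasattr(clf, 'covariance_'))   # False: not stored by default

clf = QuadraticDiscriminantAnalysis(store_covariance=True).fit(X, y)
print(clf.covariance_[0])            # one covariance matrix per class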
Example #3
Source File: test_discriminant_analysis.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_qda_regularization():
    # the default is reg_param=0. and will cause issues
    # when there is a constant variable
    clf = QuadraticDiscriminantAnalysis()
    with ignore_warnings():
        y_pred = clf.fit(X2, y6).predict(X2)
    assert np.any(y_pred != y6)

    # adding a little regularization fixes the problem
    clf = QuadraticDiscriminantAnalysis(reg_param=0.01)
    with ignore_warnings():
        clf.fit(X2, y6)
    y_pred = clf.predict(X2)
    assert_array_equal(y_pred, y6)

    # Case n_samples_in_a_class < n_features
    clf = QuadraticDiscriminantAnalysis(reg_param=0.1)
    with ignore_warnings():
        clf.fit(X5, y5)
    y_pred5 = clf.predict(X5)
    assert_array_equal(y_pred5, y5) 
Example #4
Source File: classifier.py    From libfaceid with MIT License
def __init__(self, classifier=FaceClassifierModels.DEFAULT):
        self._clf = None
        if classifier == FaceClassifierModels.LINEAR_SVM:
            self._clf = SVC(C=1.0, kernel="linear", probability=True)
        elif classifier == FaceClassifierModels.NAIVE_BAYES:
            self._clf = GaussianNB()
        elif classifier == FaceClassifierModels.RBF_SVM:
            self._clf = SVC(C=1, kernel='rbf', probability=True, gamma=2)
        elif classifier == FaceClassifierModels.NEAREST_NEIGHBORS:
            self._clf = KNeighborsClassifier(1)
        elif classifier == FaceClassifierModels.DECISION_TREE:
            self._clf = DecisionTreeClassifier(max_depth=5)
        elif classifier == FaceClassifierModels.RANDOM_FOREST:
            self._clf = RandomForestClassifier(max_depth=5, n_estimators=10, max_features=1)
        elif classifier == FaceClassifierModels.NEURAL_NET:
            self._clf = MLPClassifier(alpha=1)
        elif classifier == FaceClassifierModels.ADABOOST:
            self._clf = AdaBoostClassifier()
        elif classifier == FaceClassifierModels.QDA:
            self._clf = QuadraticDiscriminantAnalysis()
        print("classifier={}".format(FaceClassifierModels(classifier))) 
Example #5
Source File: test_discriminant_analysis.py    From twitter-stock-recommendation with MIT License
def test_qda_store_covariance():
    # The default is to not set the covariances_ attribute
    clf = QuadraticDiscriminantAnalysis().fit(X6, y6)
    assert_false(hasattr(clf, 'covariance_'))

    # Test the actual attribute:
    clf = QuadraticDiscriminantAnalysis(store_covariance=True).fit(X6, y6)
    assert_true(hasattr(clf, 'covariance_'))

    assert_array_almost_equal(
        clf.covariance_[0],
        np.array([[0.7, 0.45], [0.45, 0.7]])
    )

    assert_array_almost_equal(
        clf.covariance_[1],
        np.array([[0.33333333, -0.33333333], [-0.33333333, 0.66666667]])
    ) 
Example #6
Source File: BaggedQDA.py    From Awesome-Scripts with MIT License
def main():
    # prepare data
    trainingSet = []
    testSet = []
    accuracy = 0.0
    split = 0.25
    loadDataset('../Dataset/combined.csv', split, trainingSet, testSet)
    print('Train set: ' + repr(len(trainingSet)))
    print('Test set: ' + repr(len(testSet)))
    # generate predictions: the last column is the label, the rest are features
    trainData = np.array(trainingSet)[:, 0:np.array(trainingSet).shape[1] - 1]
    columns = trainData.shape[1]
    X = np.array(trainData)
    y = np.array(trainingSet)[:, columns]
    clf = BaggingClassifier(QDA())
    clf.fit(X, y)
    testData = np.array(testSet)[:, 0:np.array(trainingSet).shape[1] - 1]
    X_test = np.array(testData)
    y_test = np.array(testSet)[:, columns]
    accuracy = clf.score(X_test, y_test)
    accuracy *= 100
    print("Accuracy %:", accuracy)
Example #7
Source File: test_discriminant_analysis.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_qda():
    # QDA classification.
    # This checks that QDA implements fit and predict and returns
    # correct values for a simple toy dataset.
    clf = QuadraticDiscriminantAnalysis()
    y_pred = clf.fit(X6, y6).predict(X6)
    assert_array_equal(y_pred, y6)

    # Assure that it works with 1D data
    y_pred1 = clf.fit(X7, y6).predict(X7)
    assert_array_equal(y_pred1, y6)

    # Test probas estimates
    y_proba_pred1 = clf.predict_proba(X7)
    assert_array_equal((y_proba_pred1[:, 1] > 0.5) + 1, y6)
    y_log_proba_pred1 = clf.predict_log_proba(X7)
    assert_array_almost_equal(np.exp(y_log_proba_pred1), y_proba_pred1, 8)

    y_pred3 = clf.fit(X6, y7).predict(X6)
    # QDA shouldn't be able to separate those
    assert np.any(y_pred3 != y7)

    # Classes should have at least 2 elements
    assert_raises(ValueError, clf.fit, X6, y4) 
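For the probability assertions above, here is a self-contained check (toy one-column data, made up here) that predict_log_proba agrees with predict_proba:

import numpy as np
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

# 1D data must still be shaped (n_samples, 1) for sklearn estimators.
X = np.array([[-3.0], [-2.0], [-1.0], [1.0], [2.0], [3.0]])
y = np.array([1, 1, 1, 2, 2, 2])

clf = QuadraticDiscriminantAnalysis().fit(X, y)
proba = clf.predict_proba(X)
assert np.allclose(np.exp(clf.predict_log_proba(X)), proba)
print((proba[:, 1] > 0.5) + 1)  # recovers the predicted labels 1 and 2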
Example #8
Source File: test_discriminant_analysis.py    From twitter-stock-recommendation with MIT License
def test_qda():
    # QDA classification.
    # This checks that QDA implements fit and predict and returns
    # correct values for a simple toy dataset.
    clf = QuadraticDiscriminantAnalysis()
    y_pred = clf.fit(X6, y6).predict(X6)
    assert_array_equal(y_pred, y6)

    # Assure that it works with 1D data
    y_pred1 = clf.fit(X7, y6).predict(X7)
    assert_array_equal(y_pred1, y6)

    # Test probas estimates
    y_proba_pred1 = clf.predict_proba(X7)
    assert_array_equal((y_proba_pred1[:, 1] > 0.5) + 1, y6)
    y_log_proba_pred1 = clf.predict_log_proba(X7)
    assert_array_almost_equal(np.exp(y_log_proba_pred1), y_proba_pred1, 8)

    y_pred3 = clf.fit(X6, y7).predict(X6)
    # QDA shouldn't be able to separate those
    assert_true(np.any(y_pred3 != y7))

    # Classes should have at least 2 elements
    assert_raises(ValueError, clf.fit, X6, y4) 
Example #9
Source File: common.py    From gumpy with MIT License
def __init__(self, **kwargs):
        super(QuadraticLDA, self).__init__()
        self.clf = _QuadraticDiscriminantAnalysis(**kwargs) 
Example #10
Source File: test_discriminant_analysis.py    From twitter-stock-recommendation with MIT License
def test_qda_deprecation():
    # Test the deprecation
    clf = QuadraticDiscriminantAnalysis(store_covariances=True)
    assert_warns_message(DeprecationWarning, "'store_covariances' was renamed"
                         " to store_covariance in version 0.19 and will be "
                         "removed in 0.21.", clf.fit, X, y)

    # check that covariance_ (and covariances_ with warning) is stored
    assert_warns_message(DeprecationWarning, "Attribute covariances_ was "
                         "deprecated in version 0.19 and will be removed "
                         "in 0.21. Use covariance_ instead", getattr, clf,
                         'covariances_') 
Example #11
Source File: test_discriminant_analysis.py    From twitter-stock-recommendation with MIT License
def test_qda_priors():
    clf = QuadraticDiscriminantAnalysis()
    y_pred = clf.fit(X6, y6).predict(X6)
    n_pos = np.sum(y_pred == 2)

    neg = 1e-10
    clf = QuadraticDiscriminantAnalysis(priors=np.array([neg, 1 - neg]))
    y_pred = clf.fit(X6, y6).predict(X6)
    n_pos2 = np.sum(y_pred == 2)

    assert_greater(n_pos2, n_pos) 
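A self-contained version of the effect being tested (toy data made up here): shrinking the prior on the first class pushes predictions toward the second class.

import numpy as np
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis

X = np.array([[0.0], [1.0], [2.0], [2.5], [3.5], [4.5]])
y = np.array([1, 1, 1, 2, 2, 2])

n_pos = np.sum(QuadraticDiscriminantAnalysis().fit(X, y).predict(X) == 2)

neg = 1e-10  # near-zero prior on class 1
clf = QuadraticDiscriminantAnalysis(priors=np.array([neg, 1 - neg]))
n_pos2 = np.sum(clf.fit(X, y).predict(X) == 2)

print(n_pos, n_pos2)  # n_pos2 > n_pos: the skewed prior favours class 2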
Example #12
Source File: test_discriminant_analysis.py    From pandas-ml with BSD 3-Clause "New" or "Revised" License
def test_objectmapper_deprecated(self):
        df = pdml.ModelFrame([])
        with tm.assert_produces_warning(FutureWarning):
            self.assertIs(df.lda.LinearDiscriminantAnalysis,
                          da.LinearDiscriminantAnalysis)
        with tm.assert_produces_warning(FutureWarning):
            self.assertIs(df.qda.QuadraticDiscriminantAnalysis,
                          da.QuadraticDiscriminantAnalysis) 
Example #13
Source File: test_discriminant_analysis.py    From pandas-ml with BSD 3-Clause "New" or "Revised" License
def test_objectmapper(self):
        df = pdml.ModelFrame([])
        self.assertIs(df.discriminant_analysis.LinearDiscriminantAnalysis,
                      da.LinearDiscriminantAnalysis)
        self.assertIs(df.discriminant_analysis.QuadraticDiscriminantAnalysis,
                      da.QuadraticDiscriminantAnalysis)

        self.assertIs(df.da.LinearDiscriminantAnalysis,
                      da.LinearDiscriminantAnalysis)
        self.assertIs(df.da.QuadraticDiscriminantAnalysis,
                      da.QuadraticDiscriminantAnalysis) 
Example #14
Source File: quadratic_discriminant_analysis.py    From lale with Apache License 2.0
def __init__(self, priors=None, reg_param=0.0, store_covariance=False, tol=0.0001, store_covariances=None):
        self._hyperparams = {
            'priors': priors,
            'reg_param': reg_param,
            'store_covariance': store_covariance,
            'tol': tol,
            'store_covariances': store_covariances}
        self._wrapped_model = Op(**self._hyperparams) 
Example #15
Source File: scikitlearn.py    From sia-cog with MIT License
def getModels():
    result = []
    result.append("LinearRegression")
    result.append("BayesianRidge")
    result.append("ARDRegression")
    result.append("ElasticNet")
    result.append("HuberRegressor")
    result.append("Lasso")
    result.append("LassoLars")
    result.append("Rigid")
    result.append("SGDRegressor")
    result.append("SVR")
    result.append("MLPClassifier")
    result.append("KNeighborsClassifier")
    result.append("SVC")
    result.append("GaussianProcessClassifier")
    result.append("DecisionTreeClassifier")
    result.append("RandomForestClassifier")
    result.append("AdaBoostClassifier")
    result.append("GaussianNB")
    result.append("LogisticRegression")
    result.append("QuadraticDiscriminantAnalysis")
    return result 
Example #16
Source File: test_discriminant_analysis.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_qda_priors():
    clf = QuadraticDiscriminantAnalysis()
    y_pred = clf.fit(X6, y6).predict(X6)
    n_pos = np.sum(y_pred == 2)

    neg = 1e-10
    clf = QuadraticDiscriminantAnalysis(priors=np.array([neg, 1 - neg]))
    y_pred = clf.fit(X6, y6).predict(X6)
    n_pos2 = np.sum(y_pred == 2)

    assert_greater(n_pos2, n_pos) 
Example #17
Source File: iq_discriminators.py    From qiskit-ignis with Apache License 2.0
def __init__(self, cal_results: Union[Result, List[Result]],
                 qubit_mask: List[int], expected_states: List[str] = None,
                 standardize: bool = False,
                 schedules: Union[List[str], List[Schedule]] = None,
                 discriminator_parameters: dict = None):
        """
        Args:
            cal_results (Union[Result, List[Result]]): calibration results,
                Result or list of Result used to fit the discriminator.
            qubit_mask (List[int]): determines which qubit's level 1 data to
                use in the discrimination process.
            expected_states (List[str]): a list with the same length as
                schedules. If schedules is None, all results in cal_results
                are used, and expected_states must match their number.
            standardize (bool): if true the discriminator will standardize the
                xdata using the internal method _scale_data.
            schedules (Union[List[str], List[Schedule]]): The schedules or a
                subset of schedules in cal_results used to train the
                discriminator. The user may also pass the name of the schedules
                instead of the schedules. If schedules is None, then all the
                schedules in cal_results are used.
            discriminator_parameters (dict): parameters for sklearn's QDA.
        """
        if not discriminator_parameters:
            discriminator_parameters = {}

        store_cov = discriminator_parameters.get('store_covariance', False)
        tol = discriminator_parameters.get('tol', 1.0e-4)

        self._qda = QuadraticDiscriminantAnalysis(store_covariance=store_cov,
                                                  tol=tol)

        # Also sets the x and y data.
        IQDiscriminationFitter.__init__(self, cal_results, qubit_mask,
                                        expected_states, standardize,
                                        schedules)

        self._description = 'Quadratic IQ discriminator for measurement ' \
                            'level 1.'

        self.fit() 
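As a hedged usage sketch: only the 'store_covariance' and 'tol' keys of discriminator_parameters are consumed by this constructor, so a call might look like the following. The class name QuadraticIQDiscriminator and the existence of a suitable cal_results object are assumptions here, not shown in the excerpt.

# Hypothetical sketch, not runnable on its own: `cal_results` is assumed
# to be a Result (or list of Result) from a measurement-level-1
# calibration run, and the enclosing class is assumed to be
# QuadraticIQDiscriminator.
discriminator = QuadraticIQDiscriminator(
    cal_results,
    qubit_mask=[0],
    expected_states=['0', '1'],
    discriminator_parameters={'store_covariance': True, 'tol': 1e-4})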
Example #18
Source File: scikitlearn.py    From sia-cog with MIT License
def getSKLearnModel(modelName):
    if modelName == 'LinearRegression':
        model = linear_model.LinearRegression()
    elif modelName == 'BayesianRidge':
        model = linear_model.BayesianRidge()
    elif modelName == 'ARDRegression':
        model = linear_model.ARDRegression()
    elif modelName == 'ElasticNet':
        model = linear_model.ElasticNet()
    elif modelName == 'HuberRegressor':
        model = linear_model.HuberRegressor()
    elif modelName == 'Lasso':
        model = linear_model.Lasso()
    elif modelName == 'LassoLars':
        model = linear_model.LassoLars()
    elif modelName == 'Rigid':
        # Note: the key is spelled 'Rigid' throughout this project,
        # but it constructs standard Ridge regression.
        model = linear_model.Ridge()
    elif modelName == 'SGDRegressor':
        model = linear_model.SGDRegressor()
    elif modelName == 'SVR':
        model = SVR()
    elif modelName == 'MLPClassifier':
        model = MLPClassifier()
    elif modelName == 'KNeighborsClassifier':
        model = KNeighborsClassifier()
    elif modelName == 'SVC':
        model = SVC()
    elif modelName == 'GaussianProcessClassifier':
        model = GaussianProcessClassifier()
    elif modelName == 'DecisionTreeClassifier':
        model = DecisionTreeClassifier()
    elif modelName == 'RandomForestClassifier':
        model = RandomForestClassifier()
    elif modelName == 'AdaBoostClassifier':
        model = AdaBoostClassifier()
    elif modelName == 'GaussianNB':
        model = GaussianNB()
    elif modelName == 'LogisticRegression':
        model = linear_model.LogisticRegression()
    elif modelName == 'QuadraticDiscriminantAnalysis':
        model = QuadraticDiscriminantAnalysis()
    else:
        # Fail fast instead of hitting an UnboundLocalError on `return model`.
        raise ValueError('Unknown model name: ' + modelName)

    return model