Python scipy.spatial.distance.euclidean() Examples

The following are 30 code examples of scipy.spatial.distance.euclidean(), drawn from open-source projects. Each example notes the project and source file it comes from, so you can look up the surrounding context there. You may also want to browse the other functions and classes available in the scipy.spatial.distance module.
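As a point of reference before the examples, here is a minimal standalone sketch of what euclidean() computes: the 2-norm of the difference between two 1-D vectors.

import numpy as np
from scipy.spatial.distance import euclidean

u = np.array([1.0, 0.0, 0.0])
v = np.array([0.0, 1.0, 0.0])

print(euclidean(u, v))        # sqrt(2) ~= 1.4142
print(np.linalg.norm(u - v))  # the same value computed with plain NumPy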
Example #1
Source File: distance_functions.py    From CorpusTools with BSD 3-Clause "New" or "Revised" License
def generate_distance_matrix(source,target):
    """Generates a local distance matrix for use in dynamic time warping.

    Parameters
    ----------
    source : 2D array
        Source matrix with features in the second dimension.
    target : 2D array
        Target matrix with features in the second dimension.

    Returns
    -------
    2D array
        Local distance matrix.

    """

    sLen = source.shape[0]
    tLen = target.shape[0]
    distMat = zeros((sLen,tLen))
    for i in range(sLen):
        for j in range(tLen):
            distMat[i,j] = euclidean(source[i,:],target[j,:])
    return distMat 
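A quick usage sketch for the function above, assuming the original module's imports (from numpy import zeros and from scipy.spatial.distance import euclidean) are in place:

import numpy as np

source = np.array([[0.0, 0.0], [1.0, 1.0], [2.0, 2.0]])  # 3 frames, 2 features each
target = np.array([[0.0, 1.0], [2.0, 1.0]])              # 2 frames, 2 features each
dist_mat = generate_distance_matrix(source, target)
print(dist_mat.shape)  # (3, 2): one local distance per (source frame, target frame) pair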
Example #2
Source File: density_estimator.py    From jMetalPy with MIT License
def compute_density_estimator(self, solutions: List[S]):
        solutions_size = len(solutions)
        if solutions_size <= self.k:
            return

        points = []
        for i in range(solutions_size):
            points.append(solutions[i].objectives)

        # Compute distance matrix
        self.distance_matrix = numpy.zeros(shape=(solutions_size, solutions_size))
        for i in range(solutions_size):
            for j in range(solutions_size):
                self.distance_matrix[i, j] = self.distance_matrix[j, i] = euclidean(solutions[i].objectives,
                                                                                    solutions[j].objectives)
        # Get the k-th nearest distance for each solution
        for i in range(solutions_size):
            distances = []
            for j in range(solutions_size):
                distances.append(self.distance_matrix[i, j])
            distances.sort()
            solutions[i].attributes['knn_density'] = distances[self.k] 
Example #3
Source File: wordmoverdist.py    From PyShortTextCategorization with MIT License
def word_mover_distance(first_sent_tokens, second_sent_tokens, wvmodel, distancefunc=euclidean, lpFile=None):
    """ Compute the Word Mover's distance (WMD) between the two given lists of tokens.

    Using methods of linear programming, supported by PuLP, calculate the WMD between two lists of words. A word-embedding
    model has to be provided. WMD is returned.

    Reference: Matt J. Kusner, Yu Sun, Nicholas I. Kolkin, Kilian Q. Weinberger, "From Word Embeddings to Document Distances," *ICML* (2015).

    :param first_sent_tokens: first list of tokens.
    :param second_sent_tokens: second list of tokens.
    :param wvmodel: word-embedding model.
    :param distancefunc: distance function that takes two numpy ndarrays.
    :param lpFile: log file to write out.
    :return: Word Mover's distance (WMD)
    :type first_sent_tokens: list
    :type second_sent_tokens: list
    :type wvmodel: gensim.models.keyedvectors.KeyedVectors
    :type distancefunc: function
    :type lpFile: str
    :rtype: float
    """
    prob = word_mover_distance_probspec(first_sent_tokens, second_sent_tokens, wvmodel,
                                        distancefunc=distancefunc, lpFile=lpFile)
    return pulp.value(prob.objective) 
Example #4
Source File: gso.py    From NiaPy with MIT License
def setParameters(self, n=25, l0=5, nt=5, rho=0.4, gamma=0.6, beta=0.08, s=0.03, Distance=euclidean, **ukwargs):
		r"""Set the arguments of an algorithm.

		Arguments:
			n (Optional[int]): Number of glowworms in population.
			l0 (Optional[float]): Initial luciferin quantity for each glowworm.
			nt (Optional[float]): --
			rs (Optional[float]): Maximum sensing range.
			rho (Optional[float]): Luciferin decay constant.
			gamma (Optional[float]): Luciferin enhancement constant.
			beta (Optional[float]): --
			s (Optional[float]): --
			Distance (Optional[Callable[[numpy.ndarray, numpy.ndarray], float]]): Function used to measure the distance between two individuals.
		"""
		ukwargs.pop('NP', None)
		Algorithm.setParameters(self, NP=n, **ukwargs)
		self.l0, self.nt, self.rho, self.gamma, self.beta, self.s, self.Distance = l0, nt, rho, gamma, beta, s, Distance 
Example #5
Source File: toolbox.py    From findit with MIT License
def point_list_filter(
    point_list: typing.Sequence, distance: float, point_limit: int = None
) -> typing.Sequence:
    """ remove some points which are too close """
    if not point_limit:
        point_limit = 20

    point_list = sorted(list(set(point_list)), key=lambda o: o[0])
    new_point_list = [point_list[0]]
    for cur_point in point_list[1:]:
        for each_confirmed_point in new_point_list:
            cur_distance = euclidean(cur_point, each_confirmed_point)
            # existed
            if cur_distance < distance:
                break
        else:
            new_point_list.append(cur_point)
            if len(new_point_list) >= point_limit:
                break
    return new_point_list 
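A short usage sketch, assuming the euclidean import used by the original toolbox module (from scipy.spatial.distance import euclidean):

points = [(0, 0), (1, 1), (50, 50), (51, 52), (200, 10)]
kept = point_list_filter(points, distance=10.0)
print(kept)  # [(0, 0), (50, 50), (200, 10)] -- points within 10.0 of an already-kept point are dropped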
Example #6
Source File: gso.py    From NiaPy with MIT License
def initPopulation(self, task):
		r"""Initialize population.

		Args:
			task (Task): Optimization task.

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray[float], Dict[str, Any]]:
				1. Initialized population of glowworms.
				2. Initialized population's function/fitness values.
				3. Additional arguments:
					* L (numpy.ndarray): TODO.
					* R (numpy.ndarray): TODO.
					* rs (numpy.ndarray): TODO.
		"""
		GS, GS_f, d = Algorithm.initPopulation(self, task)
		rs = euclidean(full(task.D, 0), task.bRange)
		L, R = full(self.NP, self.l0), full(self.NP, rs)
		d.update({'L': L, 'R': R, 'rs': rs})
		return GS, GS_f, d 
Example #7
Source File: test_dbscan.py    From Mastering-Elasticsearch-7.0 with MIT License
def test_dbscan_feature():
    # Tests the DBSCAN algorithm with a feature vector array.
    # Parameters chosen specifically for this task.
    # Different eps to other test, because distance is not normalised.
    eps = 0.8
    min_samples = 10
    metric = 'euclidean'
    # Compute DBSCAN
    # parameters chosen for task
    core_samples, labels = dbscan(X, metric=metric, eps=eps,
                                  min_samples=min_samples)

    # number of clusters, ignoring noise if present
    n_clusters_1 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_1, n_clusters)

    db = DBSCAN(metric=metric, eps=eps, min_samples=min_samples)
    labels = db.fit(X).labels_

    n_clusters_2 = len(set(labels)) - int(-1 in labels)
    assert_equal(n_clusters_2, n_clusters) 
Example #8
Source File: de.py    From NiaPy with MIT License
def selection(self, pop, npop, xb, fxb, task, **kwargs):
		r"""Operator for selection of individuals.

		Args:
			pop (numpy.ndarray): Current population.
			npop (numpy.ndarray): New population.
			xb (numpy.ndarray): Current global best solution.
			fxb (float): Current global best solution's fitness/objective value.
			task (Task): Optimization task.
			kwargs (Dict[str, Any]): Additional arguments.

		Returns:
			Tuple[numpy.ndarray, numpy.ndarray, float]:
				1. New population.
				2. New global best solution.
				3. New global best solution's fitness/objective value.
		"""
		P = []
		for e in npop:
			i = argmin([euclidean(e, f) for f in pop])
			P.append(pop[i] if pop[i].f < e.f else e)
		return asarray(P), xb, fxb 
Example #9
Source File: network.py    From sakmapper with MIT License
def gap(data, refs=None, nrefs=20, ks=range(1,11), method=None):
    shape = data.shape
    if refs is None:
        tops = data.max(axis=0)
        bots = data.min(axis=0)
        dists = scipy.matrix(scipy.diag(tops-bots))

        rands = scipy.random.random_sample(size=(shape[0], shape[1], nrefs))
        for i in range(nrefs):
            rands[:, :, i] = rands[:, :, i]*dists+bots
    else:
        rands = refs
    gaps = scipy.zeros((len(ks),))
    for (i, k) in enumerate(ks):
        g1 = method(n_clusters=k).fit(data)
        (kmc, kml) = (g1.cluster_centers_, g1.labels_)
        disp = sum([euclidean(data[m, :], kmc[kml[m], :]) for m in range(shape[0])])

        refdisps = scipy.zeros((rands.shape[2],))
        for j in range(rands.shape[2]):
            g2 = method(n_clusters=k).fit(rands[:, :, j])
            (kmc, kml) = (g2.cluster_centers_, g2.labels_)
            refdisps[j] = sum([euclidean(rands[m, :, j], kmc[kml[m],:]) for m in range(shape[0])])
        gaps[i] = scipy.log(scipy.mean(refdisps))-scipy.log(disp)
    return gaps 
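A hedged usage sketch: the method argument is assumed to be a scikit-learn-style clusterer class such as sklearn.cluster.KMeans (the function only relies on its cluster_centers_ and labels_ attributes). Note that the function uses the legacy scipy.* aliases of NumPy routines (scipy.zeros, scipy.log, ...), which are only available in older SciPy releases.

import numpy as np
from sklearn.cluster import KMeans

data = np.random.rand(100, 2)
ks = range(1, 6)
gaps = gap(data, nrefs=5, ks=ks, method=KMeans)
best_k = list(ks)[int(np.argmax(gaps))]  # k with the largest gap statistic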
Example #10
Source File: blink_detection.py    From face_recognition with MIT License
def get_ear(eye):

	# compute the euclidean distances between the two sets of
	# vertical eye landmarks (x, y)-coordinates
	A = dist.euclidean(eye[1], eye[5])
	B = dist.euclidean(eye[2], eye[4])
 
	# compute the euclidean distance between the horizontal
	# eye landmark (x, y)-coordinates
	C = dist.euclidean(eye[0], eye[3])
 
	# compute the eye aspect ratio
	ear = (A + B) / (2.0 * C)
 
	# return the eye aspect ratio
	return ear 
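A minimal usage sketch with hypothetical open-eye landmarks ordered p1..p6 as in dlib's 68-point model, assuming the module's from scipy.spatial import distance as dist import:

eye = [(0, 0), (1, -1), (2, -1), (3, 0), (2, 1), (1, 1)]  # hypothetical landmark coordinates
print(get_ear(eye))  # (2 + 2) / (2 * 3) ~= 0.67; the ratio drops toward 0 as the eye closes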
Example #11
Source File: clustering.py    From Load-Forecasting with MIT License
def kMeansClustering(x,k):

    # Convert list into numpy format
    conv = np.asarray(x)

    # Compute the centroids
    centroids = kmeans(conv,k,iter=10)[0]

    # Relabel the x's
    labels = []
    for y in range(len(x)):
        minDist = float('inf')
        minLabel = -1
        for z in range(len(centroids)):
            e = euclidean(conv[y],centroids[z])
            if (e < minDist):
                minDist = e
                minLabel = z
        labels.append(minLabel)

    # Return the list of centroids and labels
    return (centroids,labels)

# Performs a weighted clustering on the examples in xTest
# Returns a 1-d vector of predictions 
Example #12
Source File: decisionboundaryplot.py    From highdimensional-decision-boundary-plot with MIT License
def _get_sorted_db_keypoint_distances(self, N=None):
        """Use a minimum spanning tree heuristic to find the N largest gaps in the
        line constituted by the current decision boundary keypoints.
        """
        if N is None:
            N = self.n_interpolated_keypoints
        edges = minimum_spanning_tree(
            squareform(pdist(self.decision_boundary_points_2d))
        )
        edged = np.array(
            [
                euclidean(
                    self.decision_boundary_points_2d[u],
                    self.decision_boundary_points_2d[v],
                )
                for u, v in edges
            ]
        )
        gap_edge_idx = np.argsort(edged)[::-1][: int(N)]
        edges = edges[gap_edge_idx]
        gap_distances = np.square(edged[gap_edge_idx])
        gap_probability_scores = gap_distances / np.sum(gap_distances)
        return edges, gap_distances, gap_probability_scores 
Example #13
Source File: density.py    From modAL with MIT License
def information_density(X: modALinput, metric: Union[str, Callable] = 'euclidean') -> np.ndarray:
    """
    Calculates the information density metric of the given data using the given metric.

    Args:
        X: The data for which the information density is to be calculated.
        metric: The metric to be used. Should take two 1d numpy.ndarrays as arguments.

    Todo:
        Should work with all possible modALinput.
        Perhaps refactor the module to use some stuff from sklearn.metrics.pairwise

    Returns:
        The information density for each sample.
    """
    # inf_density = np.zeros(shape=(X.shape[0],))
    # for X_idx, X_inst in enumerate(X):
    #     inf_density[X_idx] = sum(similarity_measure(X_inst, X_j) for X_j in X)
    #
    # return inf_density/X.shape[0]

    similarity_mtx = 1/(1+pairwise_distances(X, X, metric=metric))

    return similarity_mtx.mean(axis=1) 
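A short usage sketch; X can be any 2-D array accepted by sklearn's pairwise_distances, which the function calls internally:

import numpy as np

X = np.random.rand(20, 4)
density = information_density(X, metric='euclidean')
print(density.shape)  # (20,): one score per sample; higher means the sample sits closer to the rest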
Example #14
Source File: vrcomplex.py    From MoguTDA with MIT License
def __init__(self,
                 points,
                 epsilon,
                 labels=None,
                 distfcn=distance.euclidean):
        self.pts = points
        self.labels = (range(len(self.pts))
                       if labels is None or len(labels) != len(self.pts)
                       else labels)
        self.epsilon = epsilon
        self.distfcn = distfcn
        self.network = self.construct_network(self.pts,
                                              self.labels,
                                              self.epsilon,
                                              self.distfcn)
        self.import_simplices(map(tuple, nx.find_cliques(self.network))) 
Example #15
Source File: color-sorter.py    From rubiks-color-resolver with MIT License
def travelling_salesman(colors):
    colors_length = len(colors)

    # Distance matrix: pairwise distance between every pair of colors
    A = np.zeros([colors_length, colors_length])
    for x in range(colors_length):
        for y in range(colors_length):
            A[x, y] = distance.euclidean(colors[x], colors[y])

    # Nearest neighbour algorithm
    path = NN(A, 0)

    # Final array
    colors_nn = []
    for i in path:
        colors_nn.append(colors[i])

    return colors_nn 
Example #16
Source File: image-search-engine.py    From py-image-search-engine with MIT License
def search(self, query):
        results = {}

        for name, feature in self.features.items():
            dist = euclidean(query, feature)
            results[name] = dist

        results = sorted([(d, n) for n, d in results.items()])
        return results

    # @staticmethod
    # def chi_squared(a, b, eps=1e-10):
    #     # compute the chi-squared distance
    #     dist = 0.5 * np.sum([pow(a - b, 2) / (a + b + eps)
    #                          for (a, b) in zip(a, b)])
    #     # return the chi-squared distance
    #     return dist 
Example #17
Source File: detect_blinks.py    From Eye-blink-detection with MIT License
def eye_aspect_ratio(eye):
	# compute the euclidean distances between the two sets of
	# vertical eye landmarks (x, y)-coordinates
	A = dist.euclidean(eye[1], eye[5])
	B = dist.euclidean(eye[2], eye[4])

	# compute the euclidean distance between the horizontal
	# eye landmark (x, y)-coordinates
	C = dist.euclidean(eye[0], eye[3])

	# compute the eye aspect ratio
	ear = (A + B) / (2.0 * C)

	# return the eye aspect ratio
	return ear
 
# construct the argument parse and parse the arguments 
Example #18
Source File: learning.py    From science_rcn with MIT License
def adjust_edge_perturb_radii(frcs,
                              graph,
                              perturb_factor=2):
    """Returns a new graph where the 'perturb_radius' has been adjusted to account for 
    rounding errors. See train_image for parameters and returns.
    """
    graph = graph.copy()

    total_rounding_error = 0
    for n1, n2 in nx.edge_dfs(graph):
        desired_radius = distance.euclidean(frcs[n1, 1:], frcs[n2, 1:]) / perturb_factor

        upper = int(np.ceil(desired_radius))
        lower = int(np.floor(desired_radius))
        round_up_error = total_rounding_error + upper - desired_radius
        round_down_error = total_rounding_error + lower - desired_radius
        if abs(round_up_error) < abs(round_down_error):
            graph.edge[n1][n2]['perturb_radius'] = upper
            total_rounding_error = round_up_error
        else:
            graph.edge[n1][n2]['perturb_radius'] = lower
            total_rounding_error = round_down_error
    return graph 
Example #19
Source File: DBCV.py    From DBCV with MIT License
def DBCV(X, labels, dist_function=euclidean):
    """
    Density Based clustering validation

    Args:
        X (np.ndarray): ndarray with dimensions [n_samples, n_features]
            data to check validity of clustering
        labels (np.array): clustering assignments for data X
        dist_function (func): function to determine distance between objects
            func args must be [np.array, np.array] where each array is a point

    Returns: cluster_validity (float)
        score in range[-1, 1] indicating validity of clustering assignments
    """
    graph = _mutual_reach_dist_graph(X, labels, dist_function)
    mst = _mutual_reach_dist_MST(graph)
    cluster_validity = _clustering_validity_index(mst, labels)
    return cluster_validity 
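A hedged usage sketch, assuming the module's private helpers (_mutual_reach_dist_graph and friends) are available alongside DBCV and that scikit-learn is installed for generating toy clusters:

import numpy as np
from sklearn.datasets import make_blobs

X, labels = make_blobs(n_samples=100, centers=3, random_state=0)
score = DBCV(X, labels)  # scores near 1 indicate dense, well-separated clusters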
Example #20
Source File: process.py    From edusense with BSD 3-Clause "New" or "Revised" License
def get_unit_vec_armpose(x,y):
    feature = []
    for i in range(0,len(x)):
        if (x[i] == 0 or y[i] == 0):
            return -1
    shoulder_width = dist.euclidean([x[2],y[2]],[x[5],y[5]])
    neck_height = dist.euclidean([x[0],y[0]],[x[1],y[1]])

    for i in range(0,len(x)):
        for j in range(i,len(x)):
            vec = (x[i]-x[j],y[i]-y[j])
            p1 = [x[i],y[i]]
            p2 = [x[j],y[j]]
            if neck_height == 0:
                return -1
            A = dist.euclidean(p1, p2)/float(neck_height)
            feature.append(A)
            vec = unit_vector(vec)
            feature.append(vec[0])
            feature.append(vec[1])
    return feature 
Example #21
Source File: process.py    From edusense with BSD 3-Clause "New" or "Revised" License
def get_unit_vec_sit_stand(x,y,c):
    feature = []
    for i in range(0,len(x)):
        if (x[i] == 0 or y[i] == 0):
            return -1
    for i in range(1,len(x)):
        for j in range(i+1,len(x)):
            vec = (x[j]-x[i],y[j]-y[i])
            vec_u = unit_vector(vec)
            feature.append(vec_u[0])
            feature.append(vec_u[1])
    top_point = [x[0],y[0]]
    left_point = [x[2],y[2]]
    right_point = [x[5],y[5]]
    down_point_left = [x[3],y[3]]
    down_point_right = [x[6],y[6]]
    v_dist1 = dist.euclidean(top_point, down_point_left)/dist.euclidean(top_point, left_point)
    v_dist2 = dist.euclidean(top_point, down_point_right)/dist.euclidean(top_point, right_point)
    return feature + [v_dist1,v_dist2] 
Example #22
Source File: maze_environment.py    From Hands-on-Neuroevolution-with-Python with MIT License
def maze_novelty_metric_euclidean(first_item, second_item):
    """
    The function to calculate the novelty metric score as a distance between two
    data vectors in provided NoveltyItems
    Arguments:
        first_item:     The first NoveltyItem
        second_item:    The second NoveltyItem
    Returns:
        The novelty metric as a distance between two
        data vectors in provided NoveltyItems
    """
    if not (hasattr(first_item, "data") and hasattr(second_item, "data")):
        return NotImplemented

    if len(first_item.data) != len(second_item.data):
        # can not be compared
        return 0.0

    return distance.euclidean(first_item.data, second_item.data) 
Example #23
Source File: detect_drowsiness.py    From PyImageSearch-CV-DL-CrashCourse with MIT License
def eye_aspect_ratio(eye):
	# compute the euclidean distances between the two sets of
	# vertical eye landmarks (x, y)-coordinates
	A = dist.euclidean(eye[1], eye[5])
	B = dist.euclidean(eye[2], eye[4])

	# compute the euclidean distance between the horizontal
	# eye landmark (x, y)-coordinates
	C = dist.euclidean(eye[0], eye[3])

	# compute the eye aspect ratio
	ear = (A + B) / (2.0 * C)

	# return the eye aspect ratio
	return ear

# import the necessary packages 
Example #24
Source File: detect_drowsiness.py    From PyImageSearch-CV-DL-CrashCourse with MIT License
def eye_aspect_ratio(eye):
	# compute the euclidean distances between the two sets of
	# vertical eye landmarks (x, y)-coordinates
	A = dist.euclidean(eye[1], eye[5])
	B = dist.euclidean(eye[2], eye[4])

	# compute the euclidean distance between the horizontal
	# eye landmark (x, y)-coordinates
	C = dist.euclidean(eye[0], eye[3])

	# compute the eye aspect ratio
	ear = (A + B) / (2.0 * C)

	# return the eye aspect ratio
	return ear
 
# construct the argument parse and parse the arguments 
Example #25
Source File: detect_drowsiness.py    From PyImageSearch-CV-DL-CrashCourse with MIT License
def eye_aspect_ratio(eye):
	# compute the euclidean distances between the two sets of
	# vertical eye landmarks (x, y)-coordinates
	A = dist.euclidean(eye[1], eye[5])
	B = dist.euclidean(eye[2], eye[4])

	# compute the euclidean distance between the horizontal
	# eye landmark (x, y)-coordinates
	C = dist.euclidean(eye[0], eye[3])

	# compute the eye aspect ratio
	ear = (A + B) / (2.0 * C)

	# return the eye aspect ratio
	return ear
 
# construct the argument parse and parse the arguments 
Example #26
Source File: detect_drowsiness.py    From PyImageSearch-CV-DL-CrashCourse with MIT License
def eye_aspect_ratio(eye):
	# compute the euclidean distances between the two sets of
	# vertical eye landmarks (x, y)-coordinates
	A = dist.euclidean(eye[1], eye[5])
	B = dist.euclidean(eye[2], eye[4])

	# compute the euclidean distance between the horizontal
	# eye landmark (x, y)-coordinates
	C = dist.euclidean(eye[0], eye[3])

	# compute the eye aspect ratio
	ear = (A + B) / (2.0 * C)

	# return the eye aspect ratio
	return ear
 
# construct the argument parse and parse the arguments 
Example #27
Source File: detect_drowsiness.py    From PyImageSearch-CV-DL-CrashCourse with MIT License
def eye_aspect_ratio(eye):
	# compute the euclidean distances between the two sets of
	# vertical eye landmarks (x, y)-coordinates
	A = dist.euclidean(eye[1], eye[5])
	B = dist.euclidean(eye[2], eye[4])

	# compute the euclidean distance between the horizontal
	# eye landmark (x, y)-coordinates
	C = dist.euclidean(eye[0], eye[3])

	# compute the eye aspect ratio
	ear = (A + B) / (2.0 * C)

	# return the eye aspect ratio
	return ear
 
# construct the argument parse and parse the arguments 
Example #28
Source File: Compute_similarity_euclidean_test.py    From RecSys2019_DeepLearning_Evaluation with GNU Affero General Public License v3.0
def test_euclidean_similarity_integer(self):

        from Base.Similarity.Compute_Similarity_Euclidean import Compute_Similarity_Euclidean
        from scipy.spatial.distance import euclidean

        data_matrix = np.array([[1,1,0,1],[0,1,1,1],[1,0,1,0]])

        n_items = data_matrix.shape[0]

        similarity_object = Compute_Similarity_Euclidean(sps.csr_matrix(data_matrix).T, topK=100, normalize=False, similarity_from_distance_mode="lin")
        W_local = similarity_object.compute_similarity()

        for vector1 in range(n_items):
            for vector2 in range(n_items):

                scipy_distance = euclidean(data_matrix[vector1,:], data_matrix[vector2,:])

                if vector1 == vector2:
                    assert W_local[vector1, vector2] == 0.0, "W_local[{},{}] not matching control".format(vector1, vector2)

                else:
                    local_similarity = 1/W_local[vector1, vector2]

                    assert np.allclose(local_similarity, scipy_distance, atol=1e-4), "W_local[{},{}] not matching control".format(vector1, vector2) 
Example #29
Source File: detect_videofile_mouth.py    From mouth-open with MIT License
def mouth_aspect_ratio(mouth):
	# compute the euclidean distances between the two sets of
	# vertical mouth landmarks (x, y)-coordinates
	A = dist.euclidean(mouth[2], mouth[9]) # 51, 59
	B = dist.euclidean(mouth[4], mouth[7]) # 53, 57

	# compute the euclidean distance between the horizontal
	# mouth landmark (x, y)-coordinates
	C = dist.euclidean(mouth[0], mouth[6]) # 49, 55

	# compute the mouth aspect ratio
	mar = (A + B) / (2.0 * C)

	# return the mouth aspect ratio
	return mar

# construct the argument parse and parse the arguments 
Example #30
Source File: detect_open_mouth.py    From mouth-open with MIT License
def mouth_aspect_ratio(mouth):
	# compute the euclidean distances between the two sets of
	# vertical mouth landmarks (x, y)-coordinates
	A = dist.euclidean(mouth[2], mouth[10]) # 51, 59
	B = dist.euclidean(mouth[4], mouth[8]) # 53, 57

	# compute the euclidean distance between the horizontal
	# mouth landmark (x, y)-coordinates
	C = dist.euclidean(mouth[0], mouth[6]) # 49, 55

	# compute the mouth aspect ratio
	mar = (A + B) / (2.0 * C)

	# return the mouth aspect ratio
	return mar

# construct the argument parse and parse the arguments