Python util.normalize() Examples

The following are 11 code examples of util.normalize(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module util, or try the search function.
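Note that these snippets come from several unrelated projects, so util.normalize() is not a single function: the terrain-erosion-3-ways examples use it to rescale a NumPy array into the [0, 1] range, the xy examples use it to rescale a 3-component vector to unit length, and the oadoi / impactstory-tng examples use it to clean up strings before comparison. None of those helpers are reproduced on this page; the sketch below shows plausible minimal versions of the first two (the names and exact behavior are assumptions, not the projects' code).

import numpy as np

def normalize_array(a):
    # Assumed behavior of util.normalize() in the terrain examples:
    # rescale an array into the [0, 1] range.
    a = np.asarray(a, dtype=float)
    lo, hi = a.min(), a.max()
    return (a - lo) / (hi - lo) if hi > lo else np.zeros_like(a)

def normalize_vector(v):
    # Assumed behavior of normalize() in the xy examples:
    # rescale a 3-component vector to unit length.
    x, y, z = v
    d = (x * x + y * y + z * z) ** 0.5
    return (x / d, y / d, z / d)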
Example #1
Source File: river_network.py    From terrain-erosion-3-ways with MIT License
def compute_height(points, neighbors, deltas, get_delta_fn=None):
  if get_delta_fn is None:
    get_delta_fn = lambda src, dst: deltas[dst]

  dim = len(points)
  result = [None] * dim
  seed_idx = min_index([sum(p) for p in points])
  q = [(0.0, seed_idx)]

  while len(q) > 0:
    (height, idx) = heapq.heappop(q)
    if result[idx] is not None: continue
    result[idx] = height
    for n in neighbors[idx]:
      if result[n] is not None: continue
      heapq.heappush(q, (get_delta_fn(idx, n) + height, n))
  return util.normalize(np.array(result))


# Same as above, but computes height taking into account river downcutting.
# `max_delta` determines the maximum difference in neighboring points (to
# give the effect of talus slippage). `river_downcutting_constant` affects how
# deeply rivers cut into terrain (higher means more downcutting). 
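The downcutting variant itself is not included on this page. As a rough illustration only, a delta function like the one sketched below could be passed to compute_height() through its get_delta_fn parameter; the volume lookup and the exact downcutting formula here are assumptions, not the project's code.

def downcutting_delta_fn(deltas, volume, max_delta, river_downcutting_constant):
    # Hypothetical helper: shrink the height step into points that carry a lot
    # of river volume (deeper downcutting) and clamp every step to max_delta
    # (talus slippage).
    def get_delta(src, dst):
        downcut = 1.0 / (1.0 + volume[dst] ** river_downcutting_constant)
        return min(deltas[dst] * downcut, max_delta)
    return get_delta

# e.g. compute_height(points, neighbors, deltas,
#                     get_delta_fn=downcutting_delta_fn(deltas, volume, 0.05, 1.3))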
Example #2
Source File: matrix.py    From xy with MIT License
def rotate(self, vector, angle):
        x, y, z = normalize(vector)
        s = sin(angle)
        c = cos(angle)
        m = 1 - c
        matrix = Matrix([
            m * x * x + c,
            m * x * y - z * s,
            m * z * x + y * s,
            0,
            m * x * y + z * s,
            m * y * y + c,
            m * y * z - x * s,
            0,
            m * z * x - y * s,
            m * y * z + x * s,
            m * z * z + c,
            0,
            0,
            0,
            0,
            1,
        ])
        return matrix * self 
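The entries above spell out Rodrigues' axis-angle rotation formula. A standalone NumPy sketch of the same 3x3 math (independent of the Matrix class used here, whose layout and multiplication order are not shown on this page):

import numpy as np

def axis_angle_matrix(axis, angle):
    # 3x3 rotation matrix about a unit axis, per Rodrigues' formula.
    x, y, z = np.asarray(axis, dtype=float) / np.linalg.norm(axis)
    s, c = np.sin(angle), np.cos(angle)
    m = 1 - c
    return np.array([
        [m * x * x + c,     m * x * y - z * s, m * z * x + y * s],
        [m * x * y + z * s, m * y * y + c,     m * y * z - x * s],
        [m * z * x - y * s, m * y * z + x * s, m * z * z + c],
    ])

# Rotating (1, 0, 0) by 90 degrees about the z axis gives (0, 1, 0),
# up to floating-point error.
print(axis_angle_matrix((0, 0, 1), np.pi / 2) @ np.array([1.0, 0.0, 0.0]))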
Example #3
Source File: shapes.py    From xy with MIT License
def _triangle_paths(self, detail, vertices):
        paths = []
        a, b, c = vertices
        r = self.radius
        p = self.center
        if detail == 0:
            v1 = tuple(r * a[i] + p[i] for i in xrange(3))
            v2 = tuple(r * b[i] + p[i] for i in xrange(3))
            v3 = tuple(r * c[i] + p[i] for i in xrange(3))
            paths.append((v1, v2))
            paths.append((v2, v3))
            paths.append((v3, v1))
        else:
            ab = util.normalize([(a[i] + b[i]) / 2.0 for i in xrange(3)])
            ac = util.normalize([(a[i] + c[i]) / 2.0 for i in xrange(3)])
            bc = util.normalize([(b[i] + c[i]) / 2.0 for i in xrange(3)])
            paths.extend(self._triangle_paths(detail - 1, (a, ab, ac)))
            paths.extend(self._triangle_paths(detail - 1, (b, bc, ab)))
            paths.extend(self._triangle_paths(detail - 1, (c, ac, bc)))
            paths.extend(self._triangle_paths(detail - 1, (ab, bc, ac)))
        return paths 
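Here util.normalize() pushes each edge midpoint back out to the unit sphere, so every extra level of detail approximates the sphere more closely before the vertices are scaled by self.radius and offset by self.center. A tiny self-contained illustration of that midpoint projection (assuming normalize() returns a unit-length vector):

import math

def normalize(v):
    # Assumed behavior: scale a 3-vector to unit length.
    d = math.sqrt(sum(c * c for c in v))
    return tuple(c / d for c in v)

a = (1.0, 0.0, 0.0)
b = (0.0, 1.0, 0.0)
midpoint = [(a[i] + b[i]) / 2.0 for i in range(3)]  # length ~0.707, inside the sphere
print(normalize(midpoint))                          # back on the unit sphere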
Example #4
Source File: pub.py    From oadoi with MIT License
def page_matches_by_title_filtered(self):

        my_pages = []

        if not self.normalized_title:
            return my_pages

        for my_page in self.page_new_matches_by_title:
            # don't do this right now.  not sure if it helps or hurts.
            # don't check title match if we already know it belongs to a different doi
            # if my_page.doi and my_page.doi != self.doi:
            #     continue

            # double check author match
            match_type = "title"
            if self.first_author_lastname or self.last_author_lastname:
                if my_page.authors:
                    try:
                        pmh_author_string = normalize(u", ".join(my_page.authors))
                        if self.first_author_lastname and normalize(self.first_author_lastname) in pmh_author_string:
                            match_type = "title and first author"
                        elif self.last_author_lastname and normalize(self.last_author_lastname) in pmh_author_string:
                            match_type = "title and last author"
                        else:
                            # logger.info(
                            #    u"author check fails, so skipping this record. Looked for {} and {} in {}".format(
                            #       self.first_author_lastname, self.last_author_lastname, pmh_author_string))
                            # logger.info(self.authors)
                            # don't match if bad author match
                            continue
                    except TypeError:
                        pass  # couldn't make author string
            my_page.match_evidence = u"oa repository (via OAI-PMH {} match)".format(match_type)
            my_pages.append(my_page)
        return my_pages 
Example #5
Source File: pub.py    From oadoi with MIT License
def is_same_publisher(self, publisher):
        if self.publisher:
            return normalize(self.publisher) == normalize(publisher)
        return False 
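In these two oadoi examples, normalize() is a text-cleanup helper: it makes the substring checks on author last names and the publisher equality check insensitive to case, accents, and punctuation. The real helper is not reproduced here; the sketch below shows the kind of normalization those comparisons rely on (the name normalize_text and the exact rules are assumptions):

import re
import unicodedata

def normalize_text(s):
    # Hypothetical stand-in for oadoi's normalize(): strip accents, lowercase,
    # and drop punctuation so e.g. "Elsevier B.V." compares equal to "elsevier bv".
    s = unicodedata.normalize("NFKD", s).encode("ascii", "ignore").decode("ascii")
    s = re.sub(r"[^a-z0-9 ]", "", s.lower())
    return re.sub(r"\s+", " ", s).strip()

print(normalize_text(u"Elsevier B.V.") == normalize_text(u"ELSEVIER BV"))  # True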
Example #6
Source File: ridge_noise.py    From terrain-erosion-3-ways with MIT License
def main(argv):
  shape = (512,) * 2

  values = np.zeros(shape)
  for p in range(1, 10):
    a = 2 ** p
    values += np.abs(noise_octave(shape, a) - 0.5) / a
  result = (1.0 - util.normalize(values)) ** 2

  np.save('ridge', result) 
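This is ridged fractal noise: each octave's distance from 0.5 is folded with abs() and summed with amplitude 1/a, then the normalized sum is inverted and squared so the lowest values of the folded noise become sharp, bright ridge lines. The project's noise_octave() is not shown on this page; the sketch below substitutes a crude smoothed-random octave just to illustrate the same shaping step (the stand-in octave is an assumption, not the original implementation):

import numpy as np
from scipy.ndimage import gaussian_filter

def noise_octave(shape, frequency):
    # Crude stand-in: smoothed uniform noise whose feature size shrinks as the
    # frequency grows. Not the project's implementation.
    return gaussian_filter(np.random.rand(*shape), sigma=max(shape) / (2.0 * frequency))

def normalize(a):
    # Rescale to [0, 1] (assumed behavior of util.normalize here).
    return (a - a.min()) / (a.max() - a.min())

shape = (256,) * 2
values = np.zeros(shape)
for p in range(1, 9):
    a = 2 ** p
    values += np.abs(noise_octave(shape, a) - 0.5) / a
ridges = (1.0 - normalize(values)) ** 2  # bright, sharp ridge lines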
Example #7
Source File: generate_training_images.py    From terrain-erosion-3-ways with MIT License
def clean_sample(sample):
  # Get rid of "out-of-bounds" magic values.
  sample[sample == np.finfo('float32').min] = 0.0

  # Ignore any samples with NaNs, for one reason or another.
  if np.isnan(sample).any(): return None

  # Only accept values that span a given range. This is to capture more
  # mountainous samples.
  if (sample.max() - sample.min()) < 40: return None
  
  # Filter out samples for which a significant portion is within a small 
  # threshold from the minimum value. This helps filter out samples that
  # contain a lot of water.
  near_min_fraction = (sample < (sample.min() + 8)).sum() / sample.size
  if near_min_fraction > 0.2: return None

  # Low entropy samples likely have some file corruption or some other artifact
  # that would make it unsuitable as a training sample.
  entropy = skimage.measure.shannon_entropy(sample)
  if entropy < 10.0: return None

  return util.normalize(sample)


# This function returns rotated and flipped variants of the provided array. This
# increases the number of training samples by a factor of 8. 
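That rotate-and-flip augmentation is simple with NumPy; one way to produce the eight variants (the project's own helper is not reproduced here):

import numpy as np

def eight_variants(a):
    # The four 90-degree rotations of the array plus the four rotations of its
    # mirror image: eight training samples from one, as described above.
    rotations = [np.rot90(a, k) for k in range(4)]
    return rotations + [np.fliplr(r) for r in rotations]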
Example #8
Source File: matrix.py    From xy with MIT License
def look_at(self, eye, center, up):
        up = normalize(up)
        f = normalize(sub(center, eye))
        s = cross(f, up)
        u = cross(s, f)
        matrix = Matrix([
            s[0], s[1], s[2], 0,
            u[0], u[1], u[2], 0,
            -f[0], -f[1], -f[2], 0,
            eye[0], eye[1], eye[2], 1,
        ]).inverse()
        return matrix * self 
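look_at() builds the usual camera basis: f points from the eye toward the target, s is the side vector, and u is the recomputed up vector; the matrix assembled from them is then inverted to get the view transform. A standalone NumPy sketch of the basis construction (independent of the Matrix class, whose inverse() is not shown on this page):

import numpy as np

def camera_basis(eye, center, up):
    # Forward, side, and recomputed up vectors of a look-at camera.
    f = center - eye
    f = f / np.linalg.norm(f)
    up = up / np.linalg.norm(up)
    s = np.cross(f, up)
    u = np.cross(s, f)
    return f, s, u

# Looking at the origin from (0, 0, 5) with +y up gives f = -z, s = +x, u = +y.
f, s, u = camera_basis(np.array([0.0, 0.0, 5.0]), np.zeros(3), np.array([0.0, 1.0, 0.0]))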
Example #9
Source File: scene.py    From xy with MIT License
def visible(self, eye, point):
        v = util.sub(eye, point)
        o = point
        d = util.normalize(v)
        t = self.intersect(o, d, 0, util.length(v))
        return t is None 
Example #10
Source File: product.py    From impactstory-tng with MIT License
def normalized_title(self):
        return normalize(self.display_title) 
Example #11
Source File: river_network.py    From terrain-erosion-3-ways with MIT License
def main(argv):
  dim = 512
  shape = (dim,) * 2
  disc_radius = 1.0
  max_delta = 0.05
  river_downcutting_constant = 1.3
  directional_inertia = 0.4
  default_water_level = 1.0
  evaporation_rate = 0.2

  print('Generating...')

  print('  ...initial terrain shape')
  land_mask = remove_lakes(
      (util.fbm(shape, -2, lower=2.0) + bump(shape, 0.2 * dim) - 1.1) > 0)
  coastal_dropoff = np.tanh(util.dist_to_mask(land_mask) / 80.0) * land_mask
  mountain_shapes = util.fbm(shape, -2, lower=2.0, upper=np.inf)
  initial_height = ( 
      (util.gaussian_blur(np.maximum(mountain_shapes - 0.40, 0.0), sigma=5.0) 
        + 0.1) * coastal_dropoff)
  deltas = util.normalize(np.abs(util.gaussian_gradient(initial_height))) 

  print('  ...sampling points')
  points = util.poisson_disc_sampling(shape, disc_radius)
  coords = np.floor(points).astype(int)


  print('  ...delaunay triangulation')
  tri = sp.spatial.Delaunay(points)
  (indices, indptr) = tri.vertex_neighbor_vertices
  neighbors = [indptr[indices[k]:indices[k + 1]] for k in range(len(points))]
  points_land = land_mask[coords[:, 0], coords[:, 1]]
  points_deltas = deltas[coords[:, 0], coords[:, 1]]

  print('  ...initial height map')
  points_height = compute_height(points, neighbors, points_deltas)

  print('  ...river network')
  (upstream, downstream, volume) = compute_river_network(
      points, neighbors, points_height, points_land,
      directional_inertia, default_water_level, evaporation_rate)

  print('  ...final terrain height')
  new_height = compute_final_height(
      points, neighbors, points_deltas, volume, upstream, 
      max_delta, river_downcutting_constant)
  terrain_height = render_triangulation(shape, tri, new_height)

  np.savez('river_network', height=terrain_height, land_mask=land_mask)
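
main() writes the rendered height map and the land mask into a NumPy .npz archive; np.savez appends the .npz extension, so the results can be read back like this:

import numpy as np

data = np.load('river_network.npz')
height, land_mask = data['height'], data['land_mask']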