id (int32, 0–252k) | repo (stringlengths 7–55) | path (stringlengths 4–127) | func_name (stringlengths 1–88) | original_string (stringlengths 75–19.8k) | language (stringclasses 1) | code (stringlengths 75–19.8k) | code_tokens (sequence) | docstring (stringlengths 3–17.3k) | docstring_tokens (sequence) | sha (stringlengths 40) | url (stringlengths 87–242)

600 | gem/oq-engine | openquake/hazardlib/geo/surface/multi.py | MultiSurface.get_bounding_box | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L291-L307

```python
def get_bounding_box(self):
    """
    Compute bounding box for each surface element, and then return
    the bounding box of all surface elements' bounding boxes.

    :return:
        A tuple of four items. These items represent western, eastern,
        northern and southern borders of the bounding box respectively.
        Values are floats in decimal degrees.
    """
    lons = []
    lats = []
    for surf in self.surfaces:
        west, east, north, south = surf.get_bounding_box()
        lons.extend([west, east])
        lats.extend([north, south])
    return utils.get_spherical_bounding_box(lons, lats)
```

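The aggregation pattern above only requires that each element expose `get_bounding_box()`. A minimal standalone sketch with a hypothetical `StubSurface` standing in for real surface objects; the closing min/max line only mimics `utils.get_spherical_bounding_box` for points away from the date line and poles:

```python
class StubSurface:
    """Hypothetical stand-in for an openquake surface element."""
    def __init__(self, west, east, north, south):
        self.bbox = (west, east, north, south)

    def get_bounding_box(self):
        return self.bbox


surfaces = [StubSurface(10.0, 10.5, 45.5, 45.0),
            StubSurface(10.4, 11.0, 45.8, 45.3)]
lons, lats = [], []
for surf in surfaces:
    west, east, north, south = surf.get_bounding_box()
    lons.extend([west, east])
    lats.extend([north, south])
# far from the date line, the spherical bounding box reduces to min/max
print(min(lons), max(lons), max(lats), min(lats))  # 10.0 11.0 45.8 45.0
```
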
601 | gem/oq-engine | openquake/hazardlib/geo/surface/multi.py | MultiSurface._get_areas | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L350-L360

```python
def _get_areas(self):
    """
    Return surface elements area values in a numpy array.
    """
    if self.areas is None:
        self.areas = []
        for surf in self.surfaces:
            self.areas.append(surf.get_area())
        self.areas = numpy.array(self.areas)
    return self.areas
```

602 | gem/oq-engine | openquake/hazardlib/geo/surface/multi.py | MultiSurface._get_cartesian_edge_set | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L362-L392

```python
def _get_cartesian_edge_set(self):
    """
    For the GC2 calculations a set of cartesian representations of the
    fault edges are needed. In this present case we use a common cartesian
    framework for all edges, as opposed to defining a separate orthographic
    projection per edge
    """
    # Get projection space for cartesian projection
    edge_sets = numpy.vstack(self.edge_set)
    west, east, north, south = utils.get_spherical_bounding_box(
        edge_sets[:, 0],
        edge_sets[:, 1])
    self.proj = utils.OrthographicProjection(west, east, north, south)
    for edges in self.edge_set:
        # Project edges into cartesian space
        px, py = self.proj(edges[:, 0], edges[:, 1])
        # Store the two end-points of the trace
        self.cartesian_endpoints.append(
            numpy.array([[px[0], py[0], edges[0, 2]],
                         [px[-1], py[-1], edges[-1, 2]]]))
        self.cartesian_edges.append(numpy.column_stack([px, py,
                                                        edges[:, 2]]))
        # Get surface length vector for the trace - easier in cartesian
        lengths = numpy.sqrt((px[:-1] - px[1:]) ** 2. +
                             (py[:-1] - py[1:]) ** 2.)
        self.length_set.append(lengths)
        # Get cumulative surface length vector
        self.cum_length_set.append(
            numpy.hstack([0., numpy.cumsum(lengths)]))
    return edge_sets
```

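The per-trace length bookkeeping in `_get_cartesian_edge_set` is plain vector arithmetic; a self-contained rerun on hypothetical projected coordinates, where `px` and `py` stand in for the output of `utils.OrthographicProjection`:

```python
import numpy

# hypothetical projected trace vertices, in km
px = numpy.array([0.0, 3.0, 3.0, 7.0])
py = numpy.array([0.0, 4.0, 8.0, 8.0])
# per-segment lengths, exactly as computed in the method above
lengths = numpy.sqrt((px[:-1] - px[1:]) ** 2. + (py[:-1] - py[1:]) ** 2.)
# cumulative length along the trace, anchored at 0 for the first vertex
cum_lengths = numpy.hstack([0., numpy.cumsum(lengths)])
print(lengths)      # [5. 4. 4.]
print(cum_lengths)  # [ 0.  5.  9. 13.]
```
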
603 | gem/oq-engine | openquake/hazardlib/geo/surface/multi.py | MultiSurface._get_gc2_coordinates_for_rupture | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L454-L464

```python
def _get_gc2_coordinates_for_rupture(self, edge_sets):
    """
    Calculates the GC2 coordinates for the nodes of the upper edge of the
    fault
    """
    # Establish GC2 length - for use with Ry0
    rup_gc2t, rup_gc2u = self.get_generalised_coordinates(
        edge_sets[:, 0], edge_sets[:, 1])
    # GC2 length should be the largest positive GC2 value of the edges
    self.gc_length = numpy.max(rup_gc2u)
```

604 | gem/oq-engine | openquake/hazardlib/geo/surface/multi.py | MultiSurface._get_ut_i | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L466-L489

```python
def _get_ut_i(self, seg, sx, sy):
    """
    Returns the U and T coordinate for a specific trace segment

    :param seg:
        End points of the segment edge
    :param sx:
        Sites longitudes rendered into coordinate system
    :param sy:
        Sites latitudes rendered into coordinate system
    """
    p0x, p0y, p1x, p1y = seg[0, 0], seg[0, 1], seg[1, 0], seg[1, 1]
    # Unit vector normal to strike
    t_i_vec = [p1y - p0y, -(p1x - p0x), 0.0]
    t_i_hat = t_i_vec / numpy.linalg.norm(t_i_vec)
    # Unit vector along strike
    u_i_vec = [p1x - p0x, p1y - p0y, 0.0]
    u_i_hat = u_i_vec / numpy.linalg.norm(u_i_vec)
    # Vectors from P0 to sites
    rsite = numpy.column_stack([sx - p0x, sy - p0y])
    return numpy.sum(u_i_hat[:-1] * rsite, axis=1),\
        numpy.sum(t_i_hat[:-1] * rsite, axis=1)
```

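A self-contained rerun of the same U/T arithmetic on a toy segment lying along the x-axis, with hypothetical site coordinates; U is the along-strike offset from P0 and T the offset normal to strike:

```python
import numpy

seg = numpy.array([[0.0, 0.0],    # P0
                   [10.0, 0.0]])  # P1, so strike points along +x
sx = numpy.array([2.0, 5.0, 12.0])  # hypothetical site x-coordinates
sy = numpy.array([1.0, -3.0, 0.0])  # hypothetical site y-coordinates

p0x, p0y, p1x, p1y = seg[0, 0], seg[0, 1], seg[1, 0], seg[1, 1]
t_i_hat = numpy.array([p1y - p0y, -(p1x - p0x), 0.0])
t_i_hat /= numpy.linalg.norm(t_i_hat)  # unit vector normal to strike
u_i_hat = numpy.array([p1x - p0x, p1y - p0y, 0.0])
u_i_hat /= numpy.linalg.norm(u_i_hat)  # unit vector along strike
rsite = numpy.column_stack([sx - p0x, sy - p0y])
u = numpy.sum(u_i_hat[:-1] * rsite, axis=1)
t = numpy.sum(t_i_hat[:-1] * rsite, axis=1)
print(u)  # [ 2.  5. 12.]
print(t)  # [-1.  3. -0.]
```
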
605 | gem/oq-engine | openquake/hazardlib/geo/surface/multi.py | MultiSurface.get_rx_distance | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L570-L588

```python
def get_rx_distance(self, mesh):
    """
    For each point determine the corresponding rx distance using the GC2
    configuration.

    See :meth:`superclass method
    <.base.BaseSurface.get_rx_distance>`
    for spec of input and result values.
    """
    # If the GC2 calculations have already been computed (by invoking Ry0
    # first) and the mesh is identical then class has GC2 attributes
    # already pre-calculated
    if not self.tmp_mesh or (self.tmp_mesh == mesh):
        self.gc2t, self.gc2u = self.get_generalised_coordinates(mesh.lons,
                                                                mesh.lats)
        # Update mesh
        self.tmp_mesh = deepcopy(mesh)
    # Rx coordinate is taken directly from gc2t
    return self.gc2t
```

606 | gem/oq-engine | openquake/hazardlib/geo/surface/multi.py | MultiSurface.get_ry0_distance | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/geo/surface/multi.py#L590-L622

```python
def get_ry0_distance(self, mesh):
    """
    For each point determine the corresponding Ry0 distance using the GC2
    configuration.

    See :meth:`superclass method
    <.base.BaseSurface.get_ry0_distance>`
    for spec of input and result values.
    """
    # If the GC2 calculations have already been computed (by invoking Ry0
    # first) and the mesh is identical then class has GC2 attributes
    # already pre-calculated
    if not self.tmp_mesh or (self.tmp_mesh == mesh):
        # If that's not the case, or the mesh is different then
        # re-compute GC2 configuration
        self.gc2t, self.gc2u = self.get_generalised_coordinates(mesh.lons,
                                                                mesh.lats)
        # Update mesh
        self.tmp_mesh = deepcopy(mesh)
    # Default value ry0 (for sites within fault length) is 0.0
    ry0 = numpy.zeros_like(self.gc2u, dtype=float)
    # For sites with negative gc2u (off the initial point of the fault)
    # take the absolute value of gc2u
    neg_gc2u = self.gc2u < 0.0
    ry0[neg_gc2u] = numpy.fabs(self.gc2u[neg_gc2u])
    # Sites off the end of the fault have values shifted by the
    # GC2 length of the fault
    pos_gc2u = self.gc2u >= self.gc_length
    ry0[pos_gc2u] = self.gc2u[pos_gc2u] - self.gc_length
    return ry0
```

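The masking logic maps the GC2 U coordinate to Ry0: zero for sites within the fault's along-strike extent, and the distance past the nearer end otherwise. A standalone rerun with an assumed 20 km `gc_length`:

```python
import numpy

# hypothetical GC2 U coordinates (km) for five sites
gc2u = numpy.array([-5.0, 0.0, 12.0, 20.0, 27.5])
gc_length = 20.0  # assumed along-strike fault length

ry0 = numpy.zeros_like(gc2u, dtype=float)
neg = gc2u < 0.0              # sites behind the starting edge
ry0[neg] = numpy.fabs(gc2u[neg])
pos = gc2u >= gc_length       # sites beyond the far edge
ry0[pos] = gc2u[pos] - gc_length
print(ry0)  # [5.  0.  0.  0.  7.5]
```
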
607 | gem/oq-engine | openquake/hmtk/comparison/rate_grids.py | RateGrid.from_model_files | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L114-L144

```python
def from_model_files(cls, limits, input_model, investigation_time=1.0,
                     simple_mesh_spacing=1.0, complex_mesh_spacing=5.0,
                     mfd_width=0.1, area_discretisation=10.0):
    """
    Reads the hazard model from a file

    :param list limits:
        Grid configuration [west, east, xspc, south, north, yspc,
        upper, lower, zspc]
    :param str input_model:
        Path to input source model
    :param float investigation_time:
        Investigation time of Poisson model
    :param float simple_mesh_spacing:
        Rupture mesh spacing of simple fault (km)
    :param float complex_mesh_spacing:
        Rupture mesh spacing of complex fault (km)
    :param float mfd_width:
        Spacing (in magnitude units) of MFD
    :param float area_discretisation:
        Spacing of discretisation of area source (km)
    """
    converter = SourceConverter(investigation_time,
                                simple_mesh_spacing,
                                complex_mesh_spacing,
                                mfd_width,
                                area_discretisation)
    sources = []
    for grp in nrml.to_python(input_model, converter):
        sources.extend(grp.sources)
    return cls(limits, sources, area_discretisation)
```

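A hedged usage sketch, assuming `from_model_files` is exposed as a classmethod on `RateGrid` and that the source-model path exists; the `limits` list follows the ordering documented above:

```python
limits = [10.0, 12.0, 0.1,   # west, east, xspc
          44.0, 46.0, 0.1,   # south, north, yspc
          0.0, 30.0, 10.0]   # upper, lower, zspc
rate_grid = RateGrid.from_model_files(limits, '/path/to/source_model.xml')
rate_grid.get_rates(5.0)     # accumulate rates above Mw 5.0
```
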
608 | gem/oq-engine | openquake/hmtk/comparison/rate_grids.py | RateGrid.get_rates | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L152-L178

```python
def get_rates(self, mmin, mmax=np.inf):
    """
    Returns the cumulative rates greater than Mmin

    :param float mmin:
        Minimum magnitude
    """
    nsrcs = self.number_sources()
    for iloc, source in enumerate(self.source_model):
        print("Source Number %s of %s, Name = %s, Typology = %s" % (
            iloc + 1,
            nsrcs,
            source.name,
            source.__class__.__name__))
        if isinstance(source, CharacteristicFaultSource):
            self._get_fault_rates(source, mmin, mmax)
        elif isinstance(source, ComplexFaultSource):
            self._get_fault_rates(source, mmin, mmax)
        elif isinstance(source, SimpleFaultSource):
            self._get_fault_rates(source, mmin, mmax)
        elif isinstance(source, AreaSource):
            self._get_area_rates(source, mmin, mmax)
        elif isinstance(source, PointSource):
            self._get_point_rates(source, mmin, mmax)
        else:
            print("Source type %s not recognised - skipping!" % source)
            continue
```

609 | gem/oq-engine | openquake/hmtk/comparison/rate_grids.py | RateGrid._get_point_location | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L180-L200

```python
def _get_point_location(self, location):
    """
    Returns the location in the output grid corresponding to the cell in
    which the epicentre lies

    :param location:
        Source hypocentre as instance of :class:
        openquake.hazardlib.geo.point.Point
    :returns:
        xloc - Location of longitude cell
        yloc - Location of latitude cell
    """
    if (location.longitude < self.xlim[0]) or\
            (location.longitude > self.xlim[-1]):
        return None, None
    xloc = int(((location.longitude - self.xlim[0]) / self.xspc) + 1E-7)
    if (location.latitude < self.ylim[0]) or\
            (location.latitude > self.ylim[-1]):
        return None, None
    yloc = int(((location.latitude - self.ylim[0]) / self.yspc) + 1E-7)
    return xloc, yloc
```

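The 1E-7 offset deserves a note: without it, a floating-point division such as (11.0 - 10.0) / 0.5 can evaluate fractionally below the exact value and truncate into the wrong cell. A standalone rerun of the lookup arithmetic on an assumed grid:

```python
xlim = [10.0, 10.5, 11.0, 11.5, 12.0]  # assumed cell edges (degrees)
xspc = 0.5
lon = 11.3
if xlim[0] <= lon <= xlim[-1]:
    # same formula as the method above
    xloc = int(((lon - xlim[0]) / xspc) + 1E-7)
    print(xloc)  # 2, i.e. the cell starting at 11.0 degrees
```
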
610 | gem/oq-engine | openquake/hmtk/comparison/rate_grids.py | RateGrid._get_area_rates | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hmtk/comparison/rate_grids.py#L231-L242

```python
def _get_area_rates(self, source, mmin, mmax=np.inf):
    """
    Adds the rates from the area source by discretising the source
    to a set of point sources

    :param source:
        Area source as instance of :class:
        openquake.hazardlib.source.area.AreaSource
    """
    points = list(source)
    for point in points:
        self._get_point_rates(point, mmin, mmax)
```

611 | gem/oq-engine | openquake/hazardlib/gsim/megawati_pan_2010.py | MegawatiPan2010._get_distance_scaling | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/megawati_pan_2010.py#L98-L102

```python
def _get_distance_scaling(self, C, mag, rhypo):
    """
    Returns the distance scaling term
    """
    return (C["a3"] * np.log(rhypo)) + (C["a4"] + C["a5"] * mag) * rhypo
```

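A worked evaluation of the distance term with assumed coefficient values; the real a3, a4 and a5 come from the GMPE's coefficient table:

```python
import numpy as np

C = {"a3": -1.0, "a4": -0.005, "a5": 0.0005}  # assumed, for illustration
mag = 6.0
rhypo = np.array([20.0, 50.0, 100.0])  # hypocentral distances, km
# geometric spreading plus a magnitude-dependent linear-in-distance term
term = (C["a3"] * np.log(rhypo)) + (C["a4"] + C["a5"] * mag) * rhypo
print(term)
```
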
612 | gem/oq-engine | openquake/hazardlib/gsim/rietbrock_2013.py | RietbrockEtAl2013SelfSimilar._get_distance_scaling_term | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/rietbrock_2013.py#L109-L120

```python
def _get_distance_scaling_term(self, C, rjb, mag):
    """
    Returns the distance scaling component of the model
    Equation 10, Page 63
    """
    # Depth adjusted distance, equation 11 (Page 63)
    rval = np.sqrt(rjb ** 2.0 + C["c11"] ** 2.0)
    f_0, f_1, f_2 = self._get_distance_segment_coefficients(rval)
    return ((C["c4"] + C["c5"] * mag) * f_0 +
            (C["c6"] + C["c7"] * mag) * f_1 +
            (C["c8"] + C["c9"] * mag) * f_2 +
            (C["c10"] * rval))
```

613 | gem/oq-engine | openquake/hazardlib/gsim/rietbrock_2013.py | RietbrockEtAl2013SelfSimilar._get_distance_segment_coefficients | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/hazardlib/gsim/rietbrock_2013.py#L122-L139

```python
def _get_distance_segment_coefficients(self, rval):
    """
    Returns the coefficients describing the distance attenuation shape
    for three different distance bins, equations 12a - 12c
    """
    # Equation 12a
    f_0 = np.log10(self.CONSTS["r0"] / rval)
    f_0[rval > self.CONSTS["r0"]] = 0.0
    # Equation 12b
    f_1 = np.log10(rval)
    f_1[rval > self.CONSTS["r1"]] = np.log10(self.CONSTS["r1"])
    # Equation 12c
    f_2 = np.log10(rval / self.CONSTS["r2"])
    f_2[rval <= self.CONSTS["r2"]] = 0.0
    return f_0, f_1, f_2
```

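The three coefficients implement a tri-linear shape in log distance: f_0 is active only inside r0, f_1 saturates beyond r1, and f_2 switches on past r2. A standalone rerun with assumed segment ends (the real values live in `self.CONSTS`):

```python
import numpy as np

r0, r1, r2 = 10.0, 50.0, 100.0  # assumed segment ends, km
rval = np.array([5.0, 30.0, 80.0, 200.0])

f_0 = np.log10(r0 / rval)
f_0[rval > r0] = 0.0             # zero beyond r0
f_1 = np.log10(rval)
f_1[rval > r1] = np.log10(r1)    # capped at log10(r1)
f_2 = np.log10(rval / r2)
f_2[rval <= r2] = 0.0            # zero inside r2
print(f_0)  # ~[0.301, 0, 0, 0]
print(f_1)  # ~[0.699, 1.477, 1.699, 1.699]
print(f_2)  # ~[0, 0, 0, 0.301]
```
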
614 | gem/oq-engine | openquake/commonlib/readinput.py | collect_files | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L69-L84

```python
def collect_files(dirpath, cond=lambda fullname: True):
    """
    Recursively collect the files contained inside dirpath.

    :param dirpath: path to a readable directory
    :param cond: condition on the path to collect the file
    """
    files = []
    for fname in os.listdir(dirpath):
        fullname = os.path.join(dirpath, fname)
        if os.path.isdir(fullname):  # navigate inside
            # propagate the filtering condition into subdirectories
            files.extend(collect_files(fullname, cond))
        else:  # collect files
            if cond(fullname):
                files.append(fullname)
    return files
```

"""
Given a zip archive and a function to detect the presence of a given
filename, unzip the archive into a temporary directory and return the
full path of the file. Raise an IOError if the file cannot be found
within the archive.
:param path: pathname of the archive
:param candidates: list of names to search for
"""
temp_dir = tempfile.mkdtemp()
with zipfile.ZipFile(path) as archive:
archive.extractall(temp_dir)
return [f for f in collect_files(temp_dir)
if os.path.basename(f) in candidates] | python | def extract_from_zip(path, candidates):
"""
Given a zip archive and a function to detect the presence of a given
filename, unzip the archive into a temporary directory and return the
full path of the file. Raise an IOError if the file cannot be found
within the archive.
:param path: pathname of the archive
:param candidates: list of names to search for
"""
temp_dir = tempfile.mkdtemp()
with zipfile.ZipFile(path) as archive:
archive.extractall(temp_dir)
return [f for f in collect_files(temp_dir)
if os.path.basename(f) in candidates] | [
"def",
"extract_from_zip",
"(",
"path",
",",
"candidates",
")",
":",
"temp_dir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"with",
"zipfile",
".",
"ZipFile",
"(",
"path",
")",
"as",
"archive",
":",
"archive",
".",
"extractall",
"(",
"temp_dir",
")",
"return",
"[",
"f",
"for",
"f",
"in",
"collect_files",
"(",
"temp_dir",
")",
"if",
"os",
".",
"path",
".",
"basename",
"(",
"f",
")",
"in",
"candidates",
"]"
] | Given a zip archive and a function to detect the presence of a given
filename, unzip the archive into a temporary directory and return the
full path of the file. Raise an IOError if the file cannot be found
within the archive.
:param path: pathname of the archive
:param candidates: list of names to search for | [
"Given",
"a",
"zip",
"archive",
"and",
"a",
"function",
"to",
"detect",
"the",
"presence",
"of",
"a",
"given",
"filename",
"unzip",
"the",
"archive",
"into",
"a",
"temporary",
"directory",
"and",
"return",
"the",
"full",
"path",
"of",
"the",
"file",
".",
"Raise",
"an",
"IOError",
"if",
"the",
"file",
"cannot",
"be",
"found",
"within",
"the",
"archive",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L87-L101 |
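A usage sketch for `extract_from_zip`; the archive path is hypothetical, and the candidate list mirrors the one `get_params` passes below:

```python
inis = extract_from_zip('/tmp/calculation.zip',
                        ['job_hazard.ini', 'job_haz.ini',
                         'job.ini', 'job_risk.ini'])
if not inis:
    raise IOError('No job configuration found in the archive')
job_ini = inis[0]
```
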
616 | gem/oq-engine | openquake/commonlib/readinput.py | get_params | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L152-L191

```python
def get_params(job_inis, **kw):
    """
    Parse one or more INI-style config files.

    :param job_inis:
        List of configuration files (or list containing a single zip archive)
    :param kw:
        Optionally override some parameters
    :returns:
        A dictionary of parameters
    """
    input_zip = None
    if len(job_inis) == 1 and job_inis[0].endswith('.zip'):
        input_zip = job_inis[0]
        job_inis = extract_from_zip(
            job_inis[0], ['job_hazard.ini', 'job_haz.ini',
                          'job.ini', 'job_risk.ini'])
    not_found = [ini for ini in job_inis if not os.path.exists(ini)]
    if not_found:  # something was not found
        raise IOError('File not found: %s' % not_found[0])
    cp = configparser.ConfigParser()
    cp.read(job_inis)
    # directory containing the config files we're parsing
    job_ini = os.path.abspath(job_inis[0])
    base_path = decode(os.path.dirname(job_ini))
    params = dict(base_path=base_path, inputs={'job_ini': job_ini})
    if input_zip:
        params['inputs']['input_zip'] = os.path.abspath(input_zip)
    for sect in cp.sections():
        _update(params, cp.items(sect), base_path)
    _update(params, kw.items(), base_path)  # override on demand
    if params['inputs'].get('reqv'):
        # using pointsource_distance=0 because of the reqv approximation
        params['pointsource_distance'] = '0'
    return params
```

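A usage sketch for `get_params` on a hypothetical job file; keyword arguments, when given, override the file's parameters:

```python
params = get_params(['/path/to/job.ini'])
print(params['base_path'])       # directory containing job.ini
print(sorted(params['inputs']))  # at least ['job_ini'], plus any file inputs
```
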
617 | gem/oq-engine | openquake/commonlib/readinput.py | get_oqparam | python | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L194-L233

```python
def get_oqparam(job_ini, pkg=None, calculators=None, hc_id=None, validate=1,
                **kw):
    """
    Parse a dictionary of parameters from an INI-style config file.

    :param job_ini:
        Path to configuration file/archive or dictionary of parameters
    :param pkg:
        Python package where to find the configuration file (optional)
    :param calculators:
        Sequence of calculator names (optional) used to restrict the
        valid choices for `calculation_mode`
    :param hc_id:
        Not None only when called from a post calculation
    :param validate:
        Flag. By default it is true and the parameters are validated
    :param kw:
        String-valued keyword arguments used to override the job.ini
        parameters
    :returns:
        An :class:`openquake.commonlib.oqvalidation.OqParam` instance
        containing the validated and cast parameters/values parsed from
        the job.ini file as well as a subdictionary 'inputs' containing
        absolute paths to all of the files referenced in the job.ini, keyed
        by the parameter name.
    """
    # UGLY: this is here to avoid circular imports
    from openquake.calculators import base
    OqParam.calculation_mode.validator.choices = tuple(
        calculators or base.calculators)
    if not isinstance(job_ini, dict):
        basedir = os.path.dirname(pkg.__file__) if pkg else ''
        job_ini = get_params([os.path.join(basedir, job_ini)])
    if hc_id:
        job_ini.update(hazard_calculation_id=str(hc_id))
    job_ini.update(kw)
    oqparam = OqParam(**job_ini)
    if validate:
        oqparam.validate()
    return oqparam
```

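A hedged usage sketch, assuming a working oq-engine installation and a valid job file at the hypothetical path; `calculation_mode` and the `inputs` subdictionary are the attributes the docstring and code above guarantee:

```python
oqparam = get_oqparam('/path/to/job.ini')  # hypothetical path
print(oqparam.calculation_mode)
print(oqparam.inputs['job_ini'])
```
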
618 | gem/oq-engine | openquake/commonlib/readinput.py | get_site_model | def get_site_model(oqparam):
"""
Convert the NRML file into an array of site parameters.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
an array with fields lon, lat, vs30, ...
"""
req_site_params = get_gsim_lt(oqparam).req_site_params
arrays = []
for fname in oqparam.inputs['site_model']:
if isinstance(fname, str) and fname.endswith('.csv'):
sm = read_csv(fname)
if 'site_id' in sm.dtype.names:
raise InvalidFile('%s: you passed a sites.csv file instead of '
'a site_model.csv file!' % fname)
z = numpy.zeros(len(sm), sorted(sm.dtype.descr))
for name in z.dtype.names: # reorder the fields
z[name] = sm[name]
arrays.append(z)
continue
nodes = nrml.read(fname).siteModel
params = [valid.site_param(node.attrib) for node in nodes]
missing = req_site_params - set(params[0])
if 'vs30measured' in missing: # use a default of False
missing -= {'vs30measured'}
for param in params:
param['vs30measured'] = False
if 'backarc' in missing: # use a default of False
missing -= {'backarc'}
for param in params:
param['backarc'] = False
if missing:
raise InvalidFile('%s: missing parameter %s' %
(oqparam.inputs['site_model'],
', '.join(missing)))
# NB: the sorted in sorted(params[0]) is essential, otherwise there is
# an heisenbug in scenario/test_case_4
site_model_dt = numpy.dtype([(p, site.site_param_dt[p])
for p in sorted(params[0])])
sm = numpy.array([tuple(param[name] for name in site_model_dt.names)
for param in params], site_model_dt)
arrays.append(sm)
return numpy.concatenate(arrays) | python | def get_site_model(oqparam):
"""
Convert the NRML file into an array of site parameters.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
an array with fields lon, lat, vs30, ...
"""
req_site_params = get_gsim_lt(oqparam).req_site_params
arrays = []
for fname in oqparam.inputs['site_model']:
if isinstance(fname, str) and fname.endswith('.csv'):
sm = read_csv(fname)
if 'site_id' in sm.dtype.names:
raise InvalidFile('%s: you passed a sites.csv file instead of '
'a site_model.csv file!' % fname)
z = numpy.zeros(len(sm), sorted(sm.dtype.descr))
for name in z.dtype.names: # reorder the fields
z[name] = sm[name]
arrays.append(z)
continue
nodes = nrml.read(fname).siteModel
params = [valid.site_param(node.attrib) for node in nodes]
missing = req_site_params - set(params[0])
if 'vs30measured' in missing: # use a default of False
missing -= {'vs30measured'}
for param in params:
param['vs30measured'] = False
if 'backarc' in missing: # use a default of False
missing -= {'backarc'}
for param in params:
param['backarc'] = False
if missing:
raise InvalidFile('%s: missing parameter %s' %
(oqparam.inputs['site_model'],
', '.join(missing)))
# NB: the sorted in sorted(params[0]) is essential, otherwise there is
# an heisenbug in scenario/test_case_4
site_model_dt = numpy.dtype([(p, site.site_param_dt[p])
for p in sorted(params[0])])
sm = numpy.array([tuple(param[name] for name in site_model_dt.names)
for param in params], site_model_dt)
arrays.append(sm)
return numpy.concatenate(arrays) | [
"def",
"get_site_model",
"(",
"oqparam",
")",
":",
"req_site_params",
"=",
"get_gsim_lt",
"(",
"oqparam",
")",
".",
"req_site_params",
"arrays",
"=",
"[",
"]",
"for",
"fname",
"in",
"oqparam",
".",
"inputs",
"[",
"'site_model'",
"]",
":",
"if",
"isinstance",
"(",
"fname",
",",
"str",
")",
"and",
"fname",
".",
"endswith",
"(",
"'.csv'",
")",
":",
"sm",
"=",
"read_csv",
"(",
"fname",
")",
"if",
"'site_id'",
"in",
"sm",
".",
"dtype",
".",
"names",
":",
"raise",
"InvalidFile",
"(",
"'%s: you passed a sites.csv file instead of '",
"'a site_model.csv file!'",
"%",
"fname",
")",
"z",
"=",
"numpy",
".",
"zeros",
"(",
"len",
"(",
"sm",
")",
",",
"sorted",
"(",
"sm",
".",
"dtype",
".",
"descr",
")",
")",
"for",
"name",
"in",
"z",
".",
"dtype",
".",
"names",
":",
"# reorder the fields",
"z",
"[",
"name",
"]",
"=",
"sm",
"[",
"name",
"]",
"arrays",
".",
"append",
"(",
"z",
")",
"continue",
"nodes",
"=",
"nrml",
".",
"read",
"(",
"fname",
")",
".",
"siteModel",
"params",
"=",
"[",
"valid",
".",
"site_param",
"(",
"node",
".",
"attrib",
")",
"for",
"node",
"in",
"nodes",
"]",
"missing",
"=",
"req_site_params",
"-",
"set",
"(",
"params",
"[",
"0",
"]",
")",
"if",
"'vs30measured'",
"in",
"missing",
":",
"# use a default of False",
"missing",
"-=",
"{",
"'vs30measured'",
"}",
"for",
"param",
"in",
"params",
":",
"param",
"[",
"'vs30measured'",
"]",
"=",
"False",
"if",
"'backarc'",
"in",
"missing",
":",
"# use a default of False",
"missing",
"-=",
"{",
"'backarc'",
"}",
"for",
"param",
"in",
"params",
":",
"param",
"[",
"'backarc'",
"]",
"=",
"False",
"if",
"missing",
":",
"raise",
"InvalidFile",
"(",
"'%s: missing parameter %s'",
"%",
"(",
"oqparam",
".",
"inputs",
"[",
"'site_model'",
"]",
",",
"', '",
".",
"join",
"(",
"missing",
")",
")",
")",
"# NB: the sorted in sorted(params[0]) is essential, otherwise there is",
"# an heisenbug in scenario/test_case_4",
"site_model_dt",
"=",
"numpy",
".",
"dtype",
"(",
"[",
"(",
"p",
",",
"site",
".",
"site_param_dt",
"[",
"p",
"]",
")",
"for",
"p",
"in",
"sorted",
"(",
"params",
"[",
"0",
"]",
")",
"]",
")",
"sm",
"=",
"numpy",
".",
"array",
"(",
"[",
"tuple",
"(",
"param",
"[",
"name",
"]",
"for",
"name",
"in",
"site_model_dt",
".",
"names",
")",
"for",
"param",
"in",
"params",
"]",
",",
"site_model_dt",
")",
"arrays",
".",
"append",
"(",
"sm",
")",
"return",
"numpy",
".",
"concatenate",
"(",
"arrays",
")"
] | Convert the NRML file into an array of site parameters.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
an array with fields lon, lat, vs30, ... | [
"Convert",
"the",
"NRML",
"file",
"into",
"an",
"array",
"of",
"site",
"parameters",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L342-L386 |
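The field-reordering idiom in the CSV branch above (zero a new array with the dtype fields sorted by name, then copy column by column) is easy to miss. A minimal standalone sketch of it, using only numpy and invented site data (the field names are illustrative, not engine API):

    import numpy

    # Toy structured array with fields in arbitrary order
    sm = numpy.array([(10.0, 760.0, 45.0)],
                     dtype=[('lon', float), ('vs30', float), ('lat', float)])
    z = numpy.zeros(len(sm), sorted(sm.dtype.descr))  # fields sorted by name
    for name in z.dtype.names:  # reorder the fields, as in get_site_model
        z[name] = sm[name]
    print(z.dtype.names)  # ('lat', 'lon', 'vs30')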
619 | gem/oq-engine | openquake/commonlib/readinput.py | get_site_collection | def get_site_collection(oqparam):
"""
Returns a SiteCollection instance by looking at the points and the
site model defined by the configuration parameters.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
"""
mesh = get_mesh(oqparam)
req_site_params = get_gsim_lt(oqparam).req_site_params
if oqparam.inputs.get('site_model'):
sm = get_site_model(oqparam)
try:
# in the future we could have elevation in the site model
depth = sm['depth']
except ValueError:
# this is the normal case
depth = None
sitecol = site.SiteCollection.from_points(
sm['lon'], sm['lat'], depth, sm, req_site_params)
if oqparam.region_grid_spacing:
logging.info('Reducing the grid sites to the site '
'parameters within the grid spacing')
sitecol, params, _ = geo.utils.assoc(
sm, sitecol, oqparam.region_grid_spacing * 1.414, 'filter')
sitecol.make_complete()
else:
params = sm
for name in req_site_params:
if name in ('vs30measured', 'backarc') \
and name not in params.dtype.names:
sitecol._set(name, 0) # the default
else:
sitecol._set(name, params[name])
elif mesh is None and oqparam.ground_motion_fields:
raise InvalidFile('You are missing sites.csv or site_model.csv in %s'
% oqparam.inputs['job_ini'])
elif mesh is None:
# a None sitecol is okay when computing the ruptures only
return
else: # use the default site params
sitecol = site.SiteCollection.from_points(
mesh.lons, mesh.lats, mesh.depths, oqparam, req_site_params)
ss = os.environ.get('OQ_SAMPLE_SITES')
if ss:
# debugging tip to reduce the size of a calculation
# OQ_SAMPLE_SITES=.1 oq engine --run job.ini
# will run a computation with 10 times less sites
sitecol.array = numpy.array(random_filter(sitecol.array, float(ss)))
sitecol.make_complete()
return sitecol | python | def get_site_collection(oqparam):
"""
Returns a SiteCollection instance by looking at the points and the
site model defined by the configuration parameters.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
"""
mesh = get_mesh(oqparam)
req_site_params = get_gsim_lt(oqparam).req_site_params
if oqparam.inputs.get('site_model'):
sm = get_site_model(oqparam)
try:
# in the future we could have elevation in the site model
depth = sm['depth']
except ValueError:
# this is the normal case
depth = None
sitecol = site.SiteCollection.from_points(
sm['lon'], sm['lat'], depth, sm, req_site_params)
if oqparam.region_grid_spacing:
logging.info('Reducing the grid sites to the site '
'parameters within the grid spacing')
sitecol, params, _ = geo.utils.assoc(
sm, sitecol, oqparam.region_grid_spacing * 1.414, 'filter')
sitecol.make_complete()
else:
params = sm
for name in req_site_params:
if name in ('vs30measured', 'backarc') \
and name not in params.dtype.names:
sitecol._set(name, 0) # the default
else:
sitecol._set(name, params[name])
elif mesh is None and oqparam.ground_motion_fields:
raise InvalidFile('You are missing sites.csv or site_model.csv in %s'
% oqparam.inputs['job_ini'])
elif mesh is None:
# a None sitecol is okay when computing the ruptures only
return
else: # use the default site params
sitecol = site.SiteCollection.from_points(
mesh.lons, mesh.lats, mesh.depths, oqparam, req_site_params)
ss = os.environ.get('OQ_SAMPLE_SITES')
if ss:
# debugging tip to reduce the size of a calculation
# OQ_SAMPLE_SITES=.1 oq engine --run job.ini
# will run a computation with 10 times less sites
sitecol.array = numpy.array(random_filter(sitecol.array, float(ss)))
sitecol.make_complete()
return sitecol | [
"def",
"get_site_collection",
"(",
"oqparam",
")",
":",
"mesh",
"=",
"get_mesh",
"(",
"oqparam",
")",
"req_site_params",
"=",
"get_gsim_lt",
"(",
"oqparam",
")",
".",
"req_site_params",
"if",
"oqparam",
".",
"inputs",
".",
"get",
"(",
"'site_model'",
")",
":",
"sm",
"=",
"get_site_model",
"(",
"oqparam",
")",
"try",
":",
"# in the future we could have elevation in the site model",
"depth",
"=",
"sm",
"[",
"'depth'",
"]",
"except",
"ValueError",
":",
"# this is the normal case",
"depth",
"=",
"None",
"sitecol",
"=",
"site",
".",
"SiteCollection",
".",
"from_points",
"(",
"sm",
"[",
"'lon'",
"]",
",",
"sm",
"[",
"'lat'",
"]",
",",
"depth",
",",
"sm",
",",
"req_site_params",
")",
"if",
"oqparam",
".",
"region_grid_spacing",
":",
"logging",
".",
"info",
"(",
"'Reducing the grid sites to the site '",
"'parameters within the grid spacing'",
")",
"sitecol",
",",
"params",
",",
"_",
"=",
"geo",
".",
"utils",
".",
"assoc",
"(",
"sm",
",",
"sitecol",
",",
"oqparam",
".",
"region_grid_spacing",
"*",
"1.414",
",",
"'filter'",
")",
"sitecol",
".",
"make_complete",
"(",
")",
"else",
":",
"params",
"=",
"sm",
"for",
"name",
"in",
"req_site_params",
":",
"if",
"name",
"in",
"(",
"'vs30measured'",
",",
"'backarc'",
")",
"and",
"name",
"not",
"in",
"params",
".",
"dtype",
".",
"names",
":",
"sitecol",
".",
"_set",
"(",
"name",
",",
"0",
")",
"# the default",
"else",
":",
"sitecol",
".",
"_set",
"(",
"name",
",",
"params",
"[",
"name",
"]",
")",
"elif",
"mesh",
"is",
"None",
"and",
"oqparam",
".",
"ground_motion_fields",
":",
"raise",
"InvalidFile",
"(",
"'You are missing sites.csv or site_model.csv in %s'",
"%",
"oqparam",
".",
"inputs",
"[",
"'job_ini'",
"]",
")",
"elif",
"mesh",
"is",
"None",
":",
"# a None sitecol is okay when computing the ruptures only",
"return",
"else",
":",
"# use the default site params",
"sitecol",
"=",
"site",
".",
"SiteCollection",
".",
"from_points",
"(",
"mesh",
".",
"lons",
",",
"mesh",
".",
"lats",
",",
"mesh",
".",
"depths",
",",
"oqparam",
",",
"req_site_params",
")",
"ss",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'OQ_SAMPLE_SITES'",
")",
"if",
"ss",
":",
"# debugging tip to reduce the size of a calculation",
"# OQ_SAMPLE_SITES=.1 oq engine --run job.ini",
"# will run a computation with 10 times less sites",
"sitecol",
".",
"array",
"=",
"numpy",
".",
"array",
"(",
"random_filter",
"(",
"sitecol",
".",
"array",
",",
"float",
"(",
"ss",
")",
")",
")",
"sitecol",
".",
"make_complete",
"(",
")",
"return",
"sitecol"
] | Returns a SiteCollection instance by looking at the points and the
site model defined by the configuration parameters.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance | [
"Returns",
"a",
"SiteCollection",
"instance",
"by",
"looking",
"at",
"the",
"points",
"and",
"the",
"site",
"model",
"defined",
"by",
"the",
"configuration",
"parameters",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L389-L439 |
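The OQ_SAMPLE_SITES branch relies on a random_filter helper imported elsewhere in the engine; a plausible standalone stand-in (the signature and seed are assumptions, not the engine's actual implementation) shows the sampling idea:

    import random

    def random_filter(objects, fraction, seed=42):
        # keep roughly `fraction` of the objects, deterministically
        rnd = random.Random(seed)
        return [obj for obj in objects if rnd.random() <= fraction]

    # e.g. OQ_SAMPLE_SITES=.1 oq engine --run job.ini -> fraction 0.1
    print(len(random_filter(list(range(1000)), 0.1)))  # roughly 100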
620 | gem/oq-engine | openquake/commonlib/readinput.py | get_rupture | def get_rupture(oqparam):
"""
Read the `rupture_model` file and filter the site collection
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
a hazardlib rupture
"""
rup_model = oqparam.inputs['rupture_model']
[rup_node] = nrml.read(rup_model)
conv = sourceconverter.RuptureConverter(
oqparam.rupture_mesh_spacing, oqparam.complex_fault_mesh_spacing)
rup = conv.convert_node(rup_node)
rup.tectonic_region_type = '*' # there is not TRT for scenario ruptures
rup.serial = oqparam.random_seed
return rup | python | def get_rupture(oqparam):
"""
Read the `rupture_model` file and filter the site collection
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
a hazardlib rupture
"""
rup_model = oqparam.inputs['rupture_model']
[rup_node] = nrml.read(rup_model)
conv = sourceconverter.RuptureConverter(
oqparam.rupture_mesh_spacing, oqparam.complex_fault_mesh_spacing)
rup = conv.convert_node(rup_node)
rup.tectonic_region_type = '*' # there is not TRT for scenario ruptures
rup.serial = oqparam.random_seed
return rup | [
"def",
"get_rupture",
"(",
"oqparam",
")",
":",
"rup_model",
"=",
"oqparam",
".",
"inputs",
"[",
"'rupture_model'",
"]",
"[",
"rup_node",
"]",
"=",
"nrml",
".",
"read",
"(",
"rup_model",
")",
"conv",
"=",
"sourceconverter",
".",
"RuptureConverter",
"(",
"oqparam",
".",
"rupture_mesh_spacing",
",",
"oqparam",
".",
"complex_fault_mesh_spacing",
")",
"rup",
"=",
"conv",
".",
"convert_node",
"(",
"rup_node",
")",
"rup",
".",
"tectonic_region_type",
"=",
"'*'",
"# there is not TRT for scenario ruptures",
"rup",
".",
"serial",
"=",
"oqparam",
".",
"random_seed",
"return",
"rup"
] | Read the `rupture_model` file and filter the site collection
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
a hazardlib rupture | [
"Read",
"the",
"rupture_model",
"file",
"and",
"by",
"filter",
"the",
"site",
"collection"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L498-L514 |
621 | gem/oq-engine | openquake/commonlib/readinput.py | get_composite_source_model | def get_composite_source_model(oqparam, monitor=None, in_memory=True,
srcfilter=SourceFilter(None, {})):
"""
Parse the XML and build a complete composite source model in memory.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:param monitor:
a `openquake.baselib.performance.Monitor` instance
:param in_memory:
if False, just parse the XML without instantiating the sources
:param srcfilter:
if not None, use it to prefilter the sources
"""
ucerf = oqparam.calculation_mode.startswith('ucerf')
source_model_lt = get_source_model_lt(oqparam, validate=not ucerf)
trts = source_model_lt.tectonic_region_types
trts_lower = {trt.lower() for trt in trts}
reqv = oqparam.inputs.get('reqv', {})
for trt in reqv: # these are lowercase because they come from the job.ini
if trt not in trts_lower:
raise ValueError('Unknown TRT=%s in %s [reqv]' %
(trt, oqparam.inputs['job_ini']))
gsim_lt = get_gsim_lt(oqparam, trts or ['*'])
p = source_model_lt.num_paths * gsim_lt.get_num_paths()
if oqparam.number_of_logic_tree_samples:
logging.info('Considering {:,d} logic tree paths out of {:,d}'.format(
oqparam.number_of_logic_tree_samples, p))
else: # full enumeration
if oqparam.is_event_based() and p > oqparam.max_potential_paths:
raise ValueError(
'There are too many potential logic tree paths (%d) '
'use sampling instead of full enumeration' % p)
logging.info('Potential number of logic tree paths = {:,d}'.format(p))
if source_model_lt.on_each_source:
logging.info('There is a logic tree on each source')
if monitor is None:
monitor = performance.Monitor()
smodels = []
for source_model in get_source_models(
oqparam, gsim_lt, source_model_lt, monitor, in_memory, srcfilter):
for src_group in source_model.src_groups:
src_group.sources = sorted(src_group, key=getid)
for src in src_group:
# there are two cases depending on the flag in_memory:
# 1) src is a hazardlib source and has a src_group_id
# attribute; in that case the source has to be numbered
# 2) src is a Node object, then nothing must be done
if isinstance(src, Node):
continue
smodels.append(source_model)
csm = source.CompositeSourceModel(gsim_lt, source_model_lt, smodels,
oqparam.optimize_same_id_sources)
for sm in csm.source_models:
counter = collections.Counter()
for sg in sm.src_groups:
for srcid in map(getid, sg):
counter[srcid] += 1
dupl = [srcid for srcid in counter if counter[srcid] > 1]
if dupl:
raise nrml.DuplicatedID('Found duplicated source IDs in %s: %s'
% (sm, dupl))
if not in_memory:
return csm
if oqparam.is_event_based():
# initialize the rupture serial numbers before splitting/filtering; in
# this way the serials are independent from the site collection
csm.init_serials(oqparam.ses_seed)
if oqparam.disagg_by_src:
csm = csm.grp_by_src() # one group per source
csm.info.gsim_lt.check_imts(oqparam.imtls)
parallel.Starmap.shutdown() # save memory
return csm | python | def get_composite_source_model(oqparam, monitor=None, in_memory=True,
srcfilter=SourceFilter(None, {})):
"""
Parse the XML and build a complete composite source model in memory.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:param monitor:
a `openquake.baselib.performance.Monitor` instance
:param in_memory:
if False, just parse the XML without instantiating the sources
:param srcfilter:
if not None, use it to prefilter the sources
"""
ucerf = oqparam.calculation_mode.startswith('ucerf')
source_model_lt = get_source_model_lt(oqparam, validate=not ucerf)
trts = source_model_lt.tectonic_region_types
trts_lower = {trt.lower() for trt in trts}
reqv = oqparam.inputs.get('reqv', {})
for trt in reqv: # these are lowercase because they come from the job.ini
if trt not in trts_lower:
raise ValueError('Unknown TRT=%s in %s [reqv]' %
(trt, oqparam.inputs['job_ini']))
gsim_lt = get_gsim_lt(oqparam, trts or ['*'])
p = source_model_lt.num_paths * gsim_lt.get_num_paths()
if oqparam.number_of_logic_tree_samples:
logging.info('Considering {:,d} logic tree paths out of {:,d}'.format(
oqparam.number_of_logic_tree_samples, p))
else: # full enumeration
if oqparam.is_event_based() and p > oqparam.max_potential_paths:
raise ValueError(
'There are too many potential logic tree paths (%d) '
'use sampling instead of full enumeration' % p)
logging.info('Potential number of logic tree paths = {:,d}'.format(p))
if source_model_lt.on_each_source:
logging.info('There is a logic tree on each source')
if monitor is None:
monitor = performance.Monitor()
smodels = []
for source_model in get_source_models(
oqparam, gsim_lt, source_model_lt, monitor, in_memory, srcfilter):
for src_group in source_model.src_groups:
src_group.sources = sorted(src_group, key=getid)
for src in src_group:
# there are two cases depending on the flag in_memory:
# 1) src is a hazardlib source and has a src_group_id
# attribute; in that case the source has to be numbered
# 2) src is a Node object, then nothing must be done
if isinstance(src, Node):
continue
smodels.append(source_model)
csm = source.CompositeSourceModel(gsim_lt, source_model_lt, smodels,
oqparam.optimize_same_id_sources)
for sm in csm.source_models:
counter = collections.Counter()
for sg in sm.src_groups:
for srcid in map(getid, sg):
counter[srcid] += 1
dupl = [srcid for srcid in counter if counter[srcid] > 1]
if dupl:
raise nrml.DuplicatedID('Found duplicated source IDs in %s: %s'
% (sm, dupl))
if not in_memory:
return csm
if oqparam.is_event_based():
# initialize the rupture serial numbers before splitting/filtering; in
# this way the serials are independent from the site collection
csm.init_serials(oqparam.ses_seed)
if oqparam.disagg_by_src:
csm = csm.grp_by_src() # one group per source
csm.info.gsim_lt.check_imts(oqparam.imtls)
parallel.Starmap.shutdown() # save memory
return csm | [
"def",
"get_composite_source_model",
"(",
"oqparam",
",",
"monitor",
"=",
"None",
",",
"in_memory",
"=",
"True",
",",
"srcfilter",
"=",
"SourceFilter",
"(",
"None",
",",
"{",
"}",
")",
")",
":",
"ucerf",
"=",
"oqparam",
".",
"calculation_mode",
".",
"startswith",
"(",
"'ucerf'",
")",
"source_model_lt",
"=",
"get_source_model_lt",
"(",
"oqparam",
",",
"validate",
"=",
"not",
"ucerf",
")",
"trts",
"=",
"source_model_lt",
".",
"tectonic_region_types",
"trts_lower",
"=",
"{",
"trt",
".",
"lower",
"(",
")",
"for",
"trt",
"in",
"trts",
"}",
"reqv",
"=",
"oqparam",
".",
"inputs",
".",
"get",
"(",
"'reqv'",
",",
"{",
"}",
")",
"for",
"trt",
"in",
"reqv",
":",
"# these are lowercase because they come from the job.ini",
"if",
"trt",
"not",
"in",
"trts_lower",
":",
"raise",
"ValueError",
"(",
"'Unknown TRT=%s in %s [reqv]'",
"%",
"(",
"trt",
",",
"oqparam",
".",
"inputs",
"[",
"'job_ini'",
"]",
")",
")",
"gsim_lt",
"=",
"get_gsim_lt",
"(",
"oqparam",
",",
"trts",
"or",
"[",
"'*'",
"]",
")",
"p",
"=",
"source_model_lt",
".",
"num_paths",
"*",
"gsim_lt",
".",
"get_num_paths",
"(",
")",
"if",
"oqparam",
".",
"number_of_logic_tree_samples",
":",
"logging",
".",
"info",
"(",
"'Considering {:,d} logic tree paths out of {:,d}'",
".",
"format",
"(",
"oqparam",
".",
"number_of_logic_tree_samples",
",",
"p",
")",
")",
"else",
":",
"# full enumeration",
"if",
"oqparam",
".",
"is_event_based",
"(",
")",
"and",
"p",
">",
"oqparam",
".",
"max_potential_paths",
":",
"raise",
"ValueError",
"(",
"'There are too many potential logic tree paths (%d) '",
"'use sampling instead of full enumeration'",
"%",
"p",
")",
"logging",
".",
"info",
"(",
"'Potential number of logic tree paths = {:,d}'",
".",
"format",
"(",
"p",
")",
")",
"if",
"source_model_lt",
".",
"on_each_source",
":",
"logging",
".",
"info",
"(",
"'There is a logic tree on each source'",
")",
"if",
"monitor",
"is",
"None",
":",
"monitor",
"=",
"performance",
".",
"Monitor",
"(",
")",
"smodels",
"=",
"[",
"]",
"for",
"source_model",
"in",
"get_source_models",
"(",
"oqparam",
",",
"gsim_lt",
",",
"source_model_lt",
",",
"monitor",
",",
"in_memory",
",",
"srcfilter",
")",
":",
"for",
"src_group",
"in",
"source_model",
".",
"src_groups",
":",
"src_group",
".",
"sources",
"=",
"sorted",
"(",
"src_group",
",",
"key",
"=",
"getid",
")",
"for",
"src",
"in",
"src_group",
":",
"# there are two cases depending on the flag in_memory:",
"# 1) src is a hazardlib source and has a src_group_id",
"# attribute; in that case the source has to be numbered",
"# 2) src is a Node object, then nothing must be done",
"if",
"isinstance",
"(",
"src",
",",
"Node",
")",
":",
"continue",
"smodels",
".",
"append",
"(",
"source_model",
")",
"csm",
"=",
"source",
".",
"CompositeSourceModel",
"(",
"gsim_lt",
",",
"source_model_lt",
",",
"smodels",
",",
"oqparam",
".",
"optimize_same_id_sources",
")",
"for",
"sm",
"in",
"csm",
".",
"source_models",
":",
"counter",
"=",
"collections",
".",
"Counter",
"(",
")",
"for",
"sg",
"in",
"sm",
".",
"src_groups",
":",
"for",
"srcid",
"in",
"map",
"(",
"getid",
",",
"sg",
")",
":",
"counter",
"[",
"srcid",
"]",
"+=",
"1",
"dupl",
"=",
"[",
"srcid",
"for",
"srcid",
"in",
"counter",
"if",
"counter",
"[",
"srcid",
"]",
">",
"1",
"]",
"if",
"dupl",
":",
"raise",
"nrml",
".",
"DuplicatedID",
"(",
"'Found duplicated source IDs in %s: %s'",
"%",
"(",
"sm",
",",
"dupl",
")",
")",
"if",
"not",
"in_memory",
":",
"return",
"csm",
"if",
"oqparam",
".",
"is_event_based",
"(",
")",
":",
"# initialize the rupture serial numbers before splitting/filtering; in",
"# this way the serials are independent from the site collection",
"csm",
".",
"init_serials",
"(",
"oqparam",
".",
"ses_seed",
")",
"if",
"oqparam",
".",
"disagg_by_src",
":",
"csm",
"=",
"csm",
".",
"grp_by_src",
"(",
")",
"# one group per source",
"csm",
".",
"info",
".",
"gsim_lt",
".",
"check_imts",
"(",
"oqparam",
".",
"imtls",
")",
"parallel",
".",
"Starmap",
".",
"shutdown",
"(",
")",
"# save memory",
"return",
"csm"
] | Parse the XML and build a complete composite source model in memory.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:param monitor:
a `openquake.baselib.performance.Monitor` instance
:param in_memory:
if False, just parse the XML without instantiating the sources
:param srcfilter:
if not None, use it to prefilter the sources | [
"Parse",
"the",
"XML",
"and",
"build",
"a",
"complete",
"composite",
"source",
"model",
"in",
"memory",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L816-L892 |
622 | gem/oq-engine | openquake/commonlib/readinput.py | get_mesh_hcurves | def get_mesh_hcurves(oqparam):
"""
Read CSV data in the format `lon lat, v1-vN, w1-wN, ...`.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
the mesh of points and the data as a dictionary
imt -> array of curves for each site
"""
imtls = oqparam.imtls
lon_lats = set()
data = AccumDict() # imt -> list of arrays
ncols = len(imtls) + 1 # lon_lat + curve_per_imt ...
csvfile = oqparam.inputs['hazard_curves']
for line, row in enumerate(csv.reader(csvfile), 1):
try:
if len(row) != ncols:
raise ValueError('Expected %d columns, found %d' %
(ncols, len(row)))
x, y = row[0].split()
lon_lat = valid.longitude(x), valid.latitude(y)
if lon_lat in lon_lats:
raise DuplicatedPoint(lon_lat)
lon_lats.add(lon_lat)
for i, imt_ in enumerate(imtls, 1):
values = valid.decreasing_probabilities(row[i])
if len(values) != len(imtls[imt_]):
raise ValueError('Found %d values, expected %d' %
(len(values), len(imtls[imt_])))
data += {imt_: [numpy.array(values)]}
except (ValueError, DuplicatedPoint) as err:
raise err.__class__('%s: file %s, line %d' % (err, csvfile, line))
lons, lats = zip(*sorted(lon_lats))
mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
return mesh, {imt: numpy.array(lst) for imt, lst in data.items()} | python | def get_mesh_hcurves(oqparam):
"""
Read CSV data in the format `lon lat, v1-vN, w1-wN, ...`.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
the mesh of points and the data as a dictionary
imt -> array of curves for each site
"""
imtls = oqparam.imtls
lon_lats = set()
data = AccumDict() # imt -> list of arrays
ncols = len(imtls) + 1 # lon_lat + curve_per_imt ...
csvfile = oqparam.inputs['hazard_curves']
for line, row in enumerate(csv.reader(csvfile), 1):
try:
if len(row) != ncols:
raise ValueError('Expected %d columns, found %d' %
(ncols, len(row)))
x, y = row[0].split()
lon_lat = valid.longitude(x), valid.latitude(y)
if lon_lat in lon_lats:
raise DuplicatedPoint(lon_lat)
lon_lats.add(lon_lat)
for i, imt_ in enumerate(imtls, 1):
values = valid.decreasing_probabilities(row[i])
if len(values) != len(imtls[imt_]):
raise ValueError('Found %d values, expected %d' %
(len(values), len(imtls[imt_])))
data += {imt_: [numpy.array(values)]}
except (ValueError, DuplicatedPoint) as err:
raise err.__class__('%s: file %s, line %d' % (err, csvfile, line))
lons, lats = zip(*sorted(lon_lats))
mesh = geo.Mesh(numpy.array(lons), numpy.array(lats))
return mesh, {imt: numpy.array(lst) for imt, lst in data.items()} | [
"def",
"get_mesh_hcurves",
"(",
"oqparam",
")",
":",
"imtls",
"=",
"oqparam",
".",
"imtls",
"lon_lats",
"=",
"set",
"(",
")",
"data",
"=",
"AccumDict",
"(",
")",
"# imt -> list of arrays",
"ncols",
"=",
"len",
"(",
"imtls",
")",
"+",
"1",
"# lon_lat + curve_per_imt ...",
"csvfile",
"=",
"oqparam",
".",
"inputs",
"[",
"'hazard_curves'",
"]",
"for",
"line",
",",
"row",
"in",
"enumerate",
"(",
"csv",
".",
"reader",
"(",
"csvfile",
")",
",",
"1",
")",
":",
"try",
":",
"if",
"len",
"(",
"row",
")",
"!=",
"ncols",
":",
"raise",
"ValueError",
"(",
"'Expected %d columns, found %d'",
"%",
"ncols",
",",
"len",
"(",
"row",
")",
")",
"x",
",",
"y",
"=",
"row",
"[",
"0",
"]",
".",
"split",
"(",
")",
"lon_lat",
"=",
"valid",
".",
"longitude",
"(",
"x",
")",
",",
"valid",
".",
"latitude",
"(",
"y",
")",
"if",
"lon_lat",
"in",
"lon_lats",
":",
"raise",
"DuplicatedPoint",
"(",
"lon_lat",
")",
"lon_lats",
".",
"add",
"(",
"lon_lat",
")",
"for",
"i",
",",
"imt_",
"in",
"enumerate",
"(",
"imtls",
",",
"1",
")",
":",
"values",
"=",
"valid",
".",
"decreasing_probabilities",
"(",
"row",
"[",
"i",
"]",
")",
"if",
"len",
"(",
"values",
")",
"!=",
"len",
"(",
"imtls",
"[",
"imt_",
"]",
")",
":",
"raise",
"ValueError",
"(",
"'Found %d values, expected %d'",
"%",
"(",
"len",
"(",
"values",
")",
",",
"len",
"(",
"imtls",
"(",
"[",
"imt_",
"]",
")",
")",
")",
")",
"data",
"+=",
"{",
"imt_",
":",
"[",
"numpy",
".",
"array",
"(",
"values",
")",
"]",
"}",
"except",
"(",
"ValueError",
",",
"DuplicatedPoint",
")",
"as",
"err",
":",
"raise",
"err",
".",
"__class__",
"(",
"'%s: file %s, line %d'",
"%",
"(",
"err",
",",
"csvfile",
",",
"line",
")",
")",
"lons",
",",
"lats",
"=",
"zip",
"(",
"*",
"sorted",
"(",
"lon_lats",
")",
")",
"mesh",
"=",
"geo",
".",
"Mesh",
"(",
"numpy",
".",
"array",
"(",
"lons",
")",
",",
"numpy",
".",
"array",
"(",
"lats",
")",
")",
"return",
"mesh",
",",
"{",
"imt",
":",
"numpy",
".",
"array",
"(",
"lst",
")",
"for",
"imt",
",",
"lst",
"in",
"data",
".",
"items",
"(",
")",
"}"
] | Read CSV data in the format `lon lat, v1-vN, w1-wN, ...`.
:param oqparam:
an :class:`openquake.commonlib.oqvalidation.OqParam` instance
:returns:
the mesh of points and the data as a dictionary
imt -> array of curves for each site | [
"Read",
"CSV",
"data",
"in",
"the",
"format",
"lon",
"lat",
"v1",
"-",
"vN",
"w1",
"-",
"wN",
"...",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1212-L1247 |
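The expected CSV layout (a space-separated `lon lat` pair in the first column, then one column per IMT holding space-separated decreasing probabilities) can be tried in a self-contained parsing sketch; the numbers are invented:

    import csv
    import io

    # One site, two IMTs, three probabilities each (hypothetical values)
    text = '10.0 45.0,0.9 0.5 0.1,0.8 0.4 0.05\n'
    for row in csv.reader(io.StringIO(text)):
        x, y = row[0].split()  # mirrors row[0].split() above
        curves = [[float(v) for v in col.split()] for col in row[1:]]
        print((float(x), float(y)), curves)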
623 | gem/oq-engine | openquake/commonlib/readinput.py | reduce_source_model | def reduce_source_model(smlt_file, source_ids, remove=True):
"""
Extract sources from the composite source model
"""
found = 0
to_remove = []
for paths in logictree.collect_info(smlt_file).smpaths.values():
for path in paths:
logging.info('Reading %s', path)
root = nrml.read(path)
model = Node('sourceModel', root[0].attrib)
origmodel = root[0]
if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
for src_node in origmodel:
if src_node['id'] in source_ids:
model.nodes.append(src_node)
else: # nrml/0.5
for src_group in origmodel:
sg = copy.copy(src_group)
sg.nodes = []
weights = src_group.get('srcs_weights')
if weights:
assert len(weights) == len(src_group.nodes)
else:
weights = [1] * len(src_group.nodes)
src_group['srcs_weights'] = reduced_weights = []
for src_node, weight in zip(src_group, weights):
if src_node['id'] in source_ids:
found += 1
sg.nodes.append(src_node)
reduced_weights.append(weight)
if sg.nodes:
model.nodes.append(sg)
shutil.copy(path, path + '.bak')
if model:
with open(path, 'wb') as f:
nrml.write([model], f, xmlns=root['xmlns'])
elif remove: # remove the files completely reduced
to_remove.append(path)
if found:
for path in to_remove:
os.remove(path) | python | def reduce_source_model(smlt_file, source_ids, remove=True):
"""
Extract sources from the composite source model
"""
found = 0
to_remove = []
for paths in logictree.collect_info(smlt_file).smpaths.values():
for path in paths:
logging.info('Reading %s', path)
root = nrml.read(path)
model = Node('sourceModel', root[0].attrib)
origmodel = root[0]
if root['xmlns'] == 'http://openquake.org/xmlns/nrml/0.4':
for src_node in origmodel:
if src_node['id'] in source_ids:
model.nodes.append(src_node)
else: # nrml/0.5
for src_group in origmodel:
sg = copy.copy(src_group)
sg.nodes = []
weights = src_group.get('srcs_weights')
if weights:
assert len(weights) == len(src_group.nodes)
else:
weights = [1] * len(src_group.nodes)
src_group['srcs_weights'] = reduced_weights = []
for src_node, weight in zip(src_group, weights):
if src_node['id'] in source_ids:
found += 1
sg.nodes.append(src_node)
reduced_weights.append(weight)
if sg.nodes:
model.nodes.append(sg)
shutil.copy(path, path + '.bak')
if model:
with open(path, 'wb') as f:
nrml.write([model], f, xmlns=root['xmlns'])
elif remove: # remove the files completely reduced
to_remove.append(path)
if found:
for path in to_remove:
os.remove(path) | [
"def",
"reduce_source_model",
"(",
"smlt_file",
",",
"source_ids",
",",
"remove",
"=",
"True",
")",
":",
"found",
"=",
"0",
"to_remove",
"=",
"[",
"]",
"for",
"paths",
"in",
"logictree",
".",
"collect_info",
"(",
"smlt_file",
")",
".",
"smpaths",
".",
"values",
"(",
")",
":",
"for",
"path",
"in",
"paths",
":",
"logging",
".",
"info",
"(",
"'Reading %s'",
",",
"path",
")",
"root",
"=",
"nrml",
".",
"read",
"(",
"path",
")",
"model",
"=",
"Node",
"(",
"'sourceModel'",
",",
"root",
"[",
"0",
"]",
".",
"attrib",
")",
"origmodel",
"=",
"root",
"[",
"0",
"]",
"if",
"root",
"[",
"'xmlns'",
"]",
"==",
"'http://openquake.org/xmlns/nrml/0.4'",
":",
"for",
"src_node",
"in",
"origmodel",
":",
"if",
"src_node",
"[",
"'id'",
"]",
"in",
"source_ids",
":",
"model",
".",
"nodes",
".",
"append",
"(",
"src_node",
")",
"else",
":",
"# nrml/0.5",
"for",
"src_group",
"in",
"origmodel",
":",
"sg",
"=",
"copy",
".",
"copy",
"(",
"src_group",
")",
"sg",
".",
"nodes",
"=",
"[",
"]",
"weights",
"=",
"src_group",
".",
"get",
"(",
"'srcs_weights'",
")",
"if",
"weights",
":",
"assert",
"len",
"(",
"weights",
")",
"==",
"len",
"(",
"src_group",
".",
"nodes",
")",
"else",
":",
"weights",
"=",
"[",
"1",
"]",
"*",
"len",
"(",
"src_group",
".",
"nodes",
")",
"src_group",
"[",
"'srcs_weights'",
"]",
"=",
"reduced_weigths",
"=",
"[",
"]",
"for",
"src_node",
",",
"weight",
"in",
"zip",
"(",
"src_group",
",",
"weights",
")",
":",
"if",
"src_node",
"[",
"'id'",
"]",
"in",
"source_ids",
":",
"found",
"+=",
"1",
"sg",
".",
"nodes",
".",
"append",
"(",
"src_node",
")",
"reduced_weigths",
".",
"append",
"(",
"weight",
")",
"if",
"sg",
".",
"nodes",
":",
"model",
".",
"nodes",
".",
"append",
"(",
"sg",
")",
"shutil",
".",
"copy",
"(",
"path",
",",
"path",
"+",
"'.bak'",
")",
"if",
"model",
":",
"with",
"open",
"(",
"path",
",",
"'wb'",
")",
"as",
"f",
":",
"nrml",
".",
"write",
"(",
"[",
"model",
"]",
",",
"f",
",",
"xmlns",
"=",
"root",
"[",
"'xmlns'",
"]",
")",
"elif",
"remove",
":",
"# remove the files completely reduced",
"to_remove",
".",
"append",
"(",
"path",
")",
"if",
"found",
":",
"for",
"path",
"in",
"to_remove",
":",
"os",
".",
"remove",
"(",
"path",
")"
] | Extract sources from the composite source model | [
"Extract",
"sources",
"from",
"the",
"composite",
"source",
"model"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1251-L1292 |
624 | gem/oq-engine | openquake/commonlib/readinput.py | get_checksum32 | def get_checksum32(oqparam, hazard=False):
"""
Build an unsigned 32 bit integer from the input files of a calculation.
:param oqparam: an OqParam instance
:param hazard: if True, consider only the hazard files
:returns: the checksum
"""
# NB: using adler32 & 0xffffffff is the documented way to get a checksum
# which is the same between Python 2 and Python 3
checksum = 0
for fname in get_input_files(oqparam, hazard):
checksum = _checksum(fname, checksum)
if hazard:
hazard_params = []
for key, val in vars(oqparam).items():
if key in ('rupture_mesh_spacing', 'complex_fault_mesh_spacing',
'width_of_mfd_bin', 'area_source_discretization',
'random_seed', 'ses_seed', 'truncation_level',
'maximum_distance', 'investigation_time',
'number_of_logic_tree_samples', 'imtls',
'ses_per_logic_tree_path', 'minimum_magnitude',
'prefilter_sources', 'sites',
'pointsource_distance', 'filter_distance'):
hazard_params.append('%s = %s' % (key, val))
data = '\n'.join(hazard_params).encode('utf8')
checksum = zlib.adler32(data, checksum) & 0xffffffff
return checksum | python | def get_checksum32(oqparam, hazard=False):
"""
Build an unsigned 32 bit integer from the input files of a calculation.
:param oqparam: an OqParam instance
:param hazard: if True, consider only the hazard files
:returns: the checksum
"""
# NB: using adler32 & 0xffffffff is the documented way to get a checksum
# which is the same between Python 2 and Python 3
checksum = 0
for fname in get_input_files(oqparam, hazard):
checksum = _checksum(fname, checksum)
if hazard:
hazard_params = []
for key, val in vars(oqparam).items():
if key in ('rupture_mesh_spacing', 'complex_fault_mesh_spacing',
'width_of_mfd_bin', 'area_source_discretization',
'random_seed', 'ses_seed', 'truncation_level',
'maximum_distance', 'investigation_time',
'number_of_logic_tree_samples', 'imtls',
'ses_per_logic_tree_path', 'minimum_magnitude',
'prefilter_sources', 'sites',
'pointsource_distance', 'filter_distance'):
hazard_params.append('%s = %s' % (key, val))
data = '\n'.join(hazard_params).encode('utf8')
checksum = zlib.adler32(data, checksum) & 0xffffffff
return checksum | [
"def",
"get_checksum32",
"(",
"oqparam",
",",
"hazard",
"=",
"False",
")",
":",
"# NB: using adler32 & 0xffffffff is the documented way to get a checksum",
"# which is the same between Python 2 and Python 3",
"checksum",
"=",
"0",
"for",
"fname",
"in",
"get_input_files",
"(",
"oqparam",
",",
"hazard",
")",
":",
"checksum",
"=",
"_checksum",
"(",
"fname",
",",
"checksum",
")",
"if",
"hazard",
":",
"hazard_params",
"=",
"[",
"]",
"for",
"key",
",",
"val",
"in",
"vars",
"(",
"oqparam",
")",
".",
"items",
"(",
")",
":",
"if",
"key",
"in",
"(",
"'rupture_mesh_spacing'",
",",
"'complex_fault_mesh_spacing'",
",",
"'width_of_mfd_bin'",
",",
"'area_source_discretization'",
",",
"'random_seed'",
",",
"'ses_seed'",
",",
"'truncation_level'",
",",
"'maximum_distance'",
",",
"'investigation_time'",
",",
"'number_of_logic_tree_samples'",
",",
"'imtls'",
",",
"'ses_per_logic_tree_path'",
",",
"'minimum_magnitude'",
",",
"'prefilter_sources'",
",",
"'sites'",
",",
"'pointsource_distance'",
",",
"'filter_distance'",
")",
":",
"hazard_params",
".",
"append",
"(",
"'%s = %s'",
"%",
"(",
"key",
",",
"val",
")",
")",
"data",
"=",
"'\\n'",
".",
"join",
"(",
"hazard_params",
")",
".",
"encode",
"(",
"'utf8'",
")",
"checksum",
"=",
"zlib",
".",
"adler32",
"(",
"data",
",",
"checksum",
")",
"&",
"0xffffffff",
"return",
"checksum"
] | Build an unsigned 32 bit integer from the input files of a calculation.
:param oqparam: an OqParam instance
:param hazard: if True, consider only the hazard files
:returns: the checksum | [
"Build",
"an",
"unsigned",
"32",
"bit",
"integer",
"from",
"the",
"input",
"files",
"of",
"a",
"calculation",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commonlib/readinput.py#L1388-L1415 |
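The chaining pattern described in the NB comment (feeding the previous checksum back into zlib.adler32 and masking with 0xffffffff) accumulates one 32-bit value over several inputs. A minimal demonstration with made-up byte chunks:

    import zlib

    checksum = 0
    for chunk in (b'sites.csv contents', b'random_seed = 42'):
        # each call folds a new chunk into the running checksum
        checksum = zlib.adler32(chunk, checksum) & 0xffffffff
    print(checksum)  # stable across Python 2 and Python 3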
625 | gem/oq-engine | openquake/commands/dump.py | smart_save | def smart_save(dbpath, archive, calc_id):
"""
Make a copy of the db, remove the incomplete jobs and add the copy
to the archive
"""
tmpdir = tempfile.mkdtemp()
newdb = os.path.join(tmpdir, os.path.basename(dbpath))
shutil.copy(dbpath, newdb)
try:
with sqlite3.connect(newdb) as conn:
conn.execute('DELETE FROM job WHERE status != "complete"')
if calc_id:
conn.execute('DELETE FROM job WHERE id != %d' % calc_id)
except:
safeprint('Please check the copy of the db in %s' % newdb)
raise
zipfiles([newdb], archive, 'a', safeprint)
shutil.rmtree(tmpdir) | python | def smart_save(dbpath, archive, calc_id):
"""
Make a copy of the db, remove the incomplete jobs and add the copy
to the archive
"""
tmpdir = tempfile.mkdtemp()
newdb = os.path.join(tmpdir, os.path.basename(dbpath))
shutil.copy(dbpath, newdb)
try:
with sqlite3.connect(newdb) as conn:
conn.execute('DELETE FROM job WHERE status != "complete"')
if calc_id:
conn.execute('DELETE FROM job WHERE id != %d' % calc_id)
except:
safeprint('Please check the copy of the db in %s' % newdb)
raise
zipfiles([newdb], archive, 'a', safeprint)
shutil.rmtree(tmpdir) | [
"def",
"smart_save",
"(",
"dbpath",
",",
"archive",
",",
"calc_id",
")",
":",
"tmpdir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"newdb",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"os",
".",
"path",
".",
"basename",
"(",
"dbpath",
")",
")",
"shutil",
".",
"copy",
"(",
"dbpath",
",",
"newdb",
")",
"try",
":",
"with",
"sqlite3",
".",
"connect",
"(",
"newdb",
")",
"as",
"conn",
":",
"conn",
".",
"execute",
"(",
"'DELETE FROM job WHERE status != \"complete\"'",
")",
"if",
"calc_id",
":",
"conn",
".",
"execute",
"(",
"'DELETE FROM job WHERE id != %d'",
"%",
"calc_id",
")",
"except",
":",
"safeprint",
"(",
"'Please check the copy of the db in %s'",
"%",
"newdb",
")",
"raise",
"zipfiles",
"(",
"[",
"newdb",
"]",
",",
"archive",
",",
"'a'",
",",
"safeprint",
")",
"shutil",
".",
"rmtree",
"(",
"tmpdir",
")"
] | Make a copy of the db, remove the incomplete jobs and add the copy
to the archive | [
"Make",
"a",
"copy",
"of",
"the",
"db",
"remove",
"the",
"incomplete",
"jobs",
"and",
"add",
"the",
"copy",
"to",
"the",
"archive"
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/dump.py#L28-L45 |
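The DELETE statements run on a throwaway copy of the db; the core of that cleanup can be tried in isolation against an in-memory database (the job schema here is a hypothetical reduction to the two fields the query touches):

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE job (id INTEGER, status TEXT)')
    conn.executemany('INSERT INTO job VALUES (?, ?)',
                     [(1, 'complete'), (2, 'executing')])
    conn.execute('DELETE FROM job WHERE status != "complete"')
    print(conn.execute('SELECT id FROM job').fetchall())  # [(1,)]
    conn.close()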
626 | gem/oq-engine | openquake/commands/dump.py | dump | def dump(archive, calc_id=0, user=None):
"""
Dump the openquake database and all the complete calculations into a zip
file. In a multiuser installation must be run as administrator.
"""
t0 = time.time()
assert archive.endswith('.zip'), archive
getfnames = 'select ds_calc_dir || ".hdf5" from job where ?A'
param = dict(status='complete')
if calc_id:
param['id'] = calc_id
if user:
param['user_name'] = user
fnames = [f for f, in db(getfnames, param) if os.path.exists(f)]
zipfiles(fnames, archive, 'w', safeprint)
pending_jobs = db('select id, status, description from job '
'where status="executing"')
if pending_jobs:
safeprint('WARNING: there were calculations executing during the dump,'
' they have not been copied')
for job_id, status, descr in pending_jobs:
safeprint('%d %s %s' % (job_id, status, descr))
# this also checks that the copied db is not corrupted
smart_save(db.path, archive, calc_id)
dt = time.time() - t0
safeprint('Archived %d calculations into %s in %d seconds'
% (len(fnames), archive, dt)) | python | def dump(archive, calc_id=0, user=None):
"""
Dump the openquake database and all the complete calculations into a zip
file. In a multiuser installation must be run as administrator.
"""
t0 = time.time()
assert archive.endswith('.zip'), archive
getfnames = 'select ds_calc_dir || ".hdf5" from job where ?A'
param = dict(status='complete')
if calc_id:
param['id'] = calc_id
if user:
param['user_name'] = user
fnames = [f for f, in db(getfnames, param) if os.path.exists(f)]
zipfiles(fnames, archive, 'w', safeprint)
pending_jobs = db('select id, status, description from job '
'where status="executing"')
if pending_jobs:
safeprint('WARNING: there were calculations executing during the dump,'
' they have been not copied')
for job_id, status, descr in pending_jobs:
safeprint('%d %s %s' % (job_id, status, descr))
# this also checks that the copied db is not corrupted
smart_save(db.path, archive, calc_id)
dt = time.time() - t0
safeprint('Archived %d calculations into %s in %d seconds'
% (len(fnames), archive, dt)) | [
"def",
"dump",
"(",
"archive",
",",
"calc_id",
"=",
"0",
",",
"user",
"=",
"None",
")",
":",
"t0",
"=",
"time",
".",
"time",
"(",
")",
"assert",
"archive",
".",
"endswith",
"(",
"'.zip'",
")",
",",
"archive",
"getfnames",
"=",
"'select ds_calc_dir || \".hdf5\" from job where ?A'",
"param",
"=",
"dict",
"(",
"status",
"=",
"'complete'",
")",
"if",
"calc_id",
":",
"param",
"[",
"'id'",
"]",
"=",
"calc_id",
"if",
"user",
":",
"param",
"[",
"'user_name'",
"]",
"=",
"user",
"fnames",
"=",
"[",
"f",
"for",
"f",
",",
"in",
"db",
"(",
"getfnames",
",",
"param",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"f",
")",
"]",
"zipfiles",
"(",
"fnames",
",",
"archive",
",",
"'w'",
",",
"safeprint",
")",
"pending_jobs",
"=",
"db",
"(",
"'select id, status, description from job '",
"'where status=\"executing\"'",
")",
"if",
"pending_jobs",
":",
"safeprint",
"(",
"'WARNING: there were calculations executing during the dump,'",
"' they have been not copied'",
")",
"for",
"job_id",
",",
"status",
",",
"descr",
"in",
"pending_jobs",
":",
"safeprint",
"(",
"'%d %s %s'",
"%",
"(",
"job_id",
",",
"status",
",",
"descr",
")",
")",
"# this also checks that the copied db is not corrupted",
"smart_save",
"(",
"db",
".",
"path",
",",
"archive",
",",
"calc_id",
")",
"dt",
"=",
"time",
".",
"time",
"(",
")",
"-",
"t0",
"safeprint",
"(",
"'Archived %d calculations into %s in %d seconds'",
"%",
"(",
"len",
"(",
"fnames",
")",
",",
"archive",
",",
"dt",
")",
")"
] | Dump the openquake database and all the complete calculations into a zip
file. In a multiuser installation it must be run as administrator. | [
"Dump",
"the",
"openquake",
"database",
"and",
"all",
"the",
"complete",
"calculations",
"into",
"a",
"zip",
"file",
".",
"In",
"a",
"multiuser",
"installation",
"must",
"be",
"run",
"as",
"administrator",
"."
] | 8294553a0b8aba33fd96437a35065d03547d0040 | https://github.com/gem/oq-engine/blob/8294553a0b8aba33fd96437a35065d03547d0040/openquake/commands/dump.py#L49-L77 |
627 | conan-io/conan-package-tools | setup.py | load_version | def load_version():
"""Loads a file content"""
filename = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
"cpt", "__init__.py"))
with open(filename, "rt") as version_file:
conan_init = version_file.read()
version = re.search("__version__ = '([0-9a-z.-]+)'", conan_init).group(1)
return version | python | def load_version():
"""Loads a file content"""
filename = os.path.abspath(os.path.join(os.path.dirname(os.path.abspath(__file__)),
"cpt", "__init__.py"))
with open(filename, "rt") as version_file:
conan_init = version_file.read()
version = re.search("__version__ = '([0-9a-z.-]+)'", conan_init).group(1)
return version | [
"def",
"load_version",
"(",
")",
":",
"filename",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"__file__",
")",
")",
",",
"\"cpt\"",
",",
"\"__init__.py\"",
")",
")",
"with",
"open",
"(",
"filename",
",",
"\"rt\"",
")",
"as",
"version_file",
":",
"conan_init",
"=",
"version_file",
".",
"read",
"(",
")",
"version",
"=",
"re",
".",
"search",
"(",
"\"__version__ = '([0-9a-z.-]+)'\"",
",",
"conan_init",
")",
".",
"group",
"(",
"1",
")",
"return",
"version"
] | Loads a file content | [
"Loads",
"a",
"file",
"content"
] | 3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324 | https://github.com/conan-io/conan-package-tools/blob/3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324/setup.py#L25-L32 |
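The regex lookup is the whole trick here; stripped of the path handling it reduces to a couple of lines (the file content below is invented):

    import re

    conan_init = "__version__ = '0.32.1'\n"
    version = re.search("__version__ = '([0-9a-z.-]+)'", conan_init).group(1)
    print(version)  # 0.32.1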
628 | conan-io/conan-package-tools | cpt/packager.py | ConanMultiPackager.builds | def builds(self, confs):
"""For retro compatibility directly assigning builds"""
self._named_builds = {}
self._builds = []
for values in confs:
if len(values) == 2:
self._builds.append(BuildConf(values[0], values[1], {}, {}, self.reference))
elif len(values) == 4:
self._builds.append(BuildConf(values[0], values[1], values[2], values[3],
self.reference))
elif len(values) != 5:
raise Exception("Invalid build configuration, has to be a tuple of "
"(settings, options, env_vars, build_requires, reference)")
else:
self._builds.append(BuildConf(*values)) | python | def builds(self, confs):
"""For retro compatibility directly assigning builds"""
self._named_builds = {}
self._builds = []
for values in confs:
if len(values) == 2:
self._builds.append(BuildConf(values[0], values[1], {}, {}, self.reference))
elif len(values) == 4:
self._builds.append(BuildConf(values[0], values[1], values[2], values[3],
self.reference))
elif len(values) != 5:
raise Exception("Invalid build configuration, has to be a tuple of "
"(settings, options, env_vars, build_requires, reference)")
else:
self._builds.append(BuildConf(*values)) | [
"def",
"builds",
"(",
"self",
",",
"confs",
")",
":",
"self",
".",
"_named_builds",
"=",
"{",
"}",
"self",
".",
"_builds",
"=",
"[",
"]",
"for",
"values",
"in",
"confs",
":",
"if",
"len",
"(",
"values",
")",
"==",
"2",
":",
"self",
".",
"_builds",
".",
"append",
"(",
"BuildConf",
"(",
"values",
"[",
"0",
"]",
",",
"values",
"[",
"1",
"]",
",",
"{",
"}",
",",
"{",
"}",
",",
"self",
".",
"reference",
")",
")",
"elif",
"len",
"(",
"values",
")",
"==",
"4",
":",
"self",
".",
"_builds",
".",
"append",
"(",
"BuildConf",
"(",
"values",
"[",
"0",
"]",
",",
"values",
"[",
"1",
"]",
",",
"values",
"[",
"2",
"]",
",",
"values",
"[",
"3",
"]",
",",
"self",
".",
"reference",
")",
")",
"elif",
"len",
"(",
"values",
")",
"!=",
"5",
":",
"raise",
"Exception",
"(",
"\"Invalid build configuration, has to be a tuple of \"",
"\"(settings, options, env_vars, build_requires, reference)\"",
")",
"else",
":",
"self",
".",
"_builds",
".",
"append",
"(",
"BuildConf",
"(",
"*",
"values",
")",
")"
] | For retro compatibility, directly assigning builds | [
"For",
"retro",
"compatibility",
"directly",
"assigning",
"builds"
] | 3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324 | https://github.com/conan-io/conan-package-tools/blob/3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324/cpt/packager.py#L357-L371 |
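Judging by the error message above, a BuildConf appears to be a 5-field namedtuple of (settings, options, env_vars, build_requires, reference); a standalone sketch of how a 2-tuple configuration gets expanded (the stand-in type and reference string are assumptions):

    from collections import namedtuple

    # Assumed stand-in for cpt's BuildConf
    BuildConf = namedtuple('BuildConf',
                           'settings options env_vars build_requires reference')

    settings = {'arch': 'x86_64', 'build_type': 'Release'}
    options = {'mylib:shared': True}
    # A 2-tuple gets empty env_vars/build_requires plus the packager reference
    conf = BuildConf(settings, options, {}, {}, 'mylib/1.0@user/stable')
    print(conf.settings, conf.reference)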
629 | conan-io/conan-package-tools | cpt/profiles.py | patch_default_base_profile | def patch_default_base_profile(conan_api, profile_abs_path):
"""If we have a profile including default, but the users default in config is that the default
is other, we have to change the include"""
text = tools.load(profile_abs_path)
if "include(default)" in text: # User didn't specified a custom profile
if Version(conan_version) < Version("1.12.0"):
cache = conan_api._client_cache
else:
cache = conan_api._cache
default_profile_name = os.path.basename(cache.default_profile_path)
if not os.path.exists(cache.default_profile_path):
conan_api.create_profile(default_profile_name, detect=True)
if default_profile_name != "default": # User have a different default profile name
# https://github.com/conan-io/conan-package-tools/issues/121
text = text.replace("include(default)", "include(%s)" % default_profile_name)
tools.save(profile_abs_path, text) | python | def patch_default_base_profile(conan_api, profile_abs_path):
"""If we have a profile including default, but the users default in config is that the default
is other, we have to change the include"""
text = tools.load(profile_abs_path)
if "include(default)" in text: # User didn't specified a custom profile
if Version(conan_version) < Version("1.12.0"):
cache = conan_api._client_cache
else:
cache = conan_api._cache
default_profile_name = os.path.basename(cache.default_profile_path)
if not os.path.exists(cache.default_profile_path):
conan_api.create_profile(default_profile_name, detect=True)
if default_profile_name != "default": # User have a different default profile name
# https://github.com/conan-io/conan-package-tools/issues/121
text = text.replace("include(default)", "include(%s)" % default_profile_name)
tools.save(profile_abs_path, text) | [
"def",
"patch_default_base_profile",
"(",
"conan_api",
",",
"profile_abs_path",
")",
":",
"text",
"=",
"tools",
".",
"load",
"(",
"profile_abs_path",
")",
"if",
"\"include(default)\"",
"in",
"text",
":",
"# User didn't specified a custom profile",
"if",
"Version",
"(",
"conan_version",
")",
"<",
"Version",
"(",
"\"1.12.0\"",
")",
":",
"cache",
"=",
"conan_api",
".",
"_client_cache",
"else",
":",
"cache",
"=",
"conan_api",
".",
"_cache",
"default_profile_name",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"cache",
".",
"default_profile_path",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"cache",
".",
"default_profile_path",
")",
":",
"conan_api",
".",
"create_profile",
"(",
"default_profile_name",
",",
"detect",
"=",
"True",
")",
"if",
"default_profile_name",
"!=",
"\"default\"",
":",
"# User have a different default profile name",
"# https://github.com/conan-io/conan-package-tools/issues/121",
"text",
"=",
"text",
".",
"replace",
"(",
"\"include(default)\"",
",",
"\"include(%s)\"",
"%",
"default_profile_name",
")",
"tools",
".",
"save",
"(",
"profile_abs_path",
",",
"text",
")"
] | If we have a profile including default, but the user's default in config is that the default
is other, we have to change the include | [
"If",
"we",
"have",
"a",
"profile",
"including",
"default",
"but",
"the",
"users",
"default",
"in",
"config",
"is",
"that",
"the",
"default",
"is",
"other",
"we",
"have",
"to",
"change",
"the",
"include"
] | 3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324 | https://github.com/conan-io/conan-package-tools/blob/3d0f5f4dc5d9dc899a57626e8d8a125fc28b8324/cpt/profiles.py#L51-L68 |
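The actual patching is a plain text substitution; with a hypothetical profile body and custom default profile name it looks like this:

    text = 'include(default)\n[settings]\nbuild_type=Release\n'
    default_profile_name = 'linux_gcc'  # hypothetical custom default
    if default_profile_name != 'default':
        text = text.replace('include(default)',
                            'include(%s)' % default_profile_name)
    print(text.splitlines()[0])  # include(linux_gcc)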
630 | edx/auth-backends | auth_backends/pipeline.py | get_user_if_exists | def get_user_if_exists(strategy, details, user=None, *args, **kwargs):
"""Return a User with the given username iff the User exists."""
if user:
return {'is_new': False}
try:
username = details.get('username')
# Return the user if it exists
return {
'is_new': False,
'user': User.objects.get(username=username)
}
except User.DoesNotExist:
# Fall to the default return value
pass
# Nothing to return since we don't have a user
return {} | python | def get_user_if_exists(strategy, details, user=None, *args, **kwargs):
"""Return a User with the given username iff the User exists."""
if user:
return {'is_new': False}
try:
username = details.get('username')
# Return the user if it exists
return {
'is_new': False,
'user': User.objects.get(username=username)
}
except User.DoesNotExist:
# Fall to the default return value
pass
# Nothing to return since we don't have a user
return {} | [
"def",
"get_user_if_exists",
"(",
"strategy",
",",
"details",
",",
"user",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"user",
":",
"return",
"{",
"'is_new'",
":",
"False",
"}",
"try",
":",
"username",
"=",
"details",
".",
"get",
"(",
"'username'",
")",
"# Return the user if it exists",
"return",
"{",
"'is_new'",
":",
"False",
",",
"'user'",
":",
"User",
".",
"objects",
".",
"get",
"(",
"username",
"=",
"username",
")",
"}",
"except",
"User",
".",
"DoesNotExist",
":",
"# Fall to the default return value",
"pass",
"# Nothing to return since we don't have a user",
"return",
"{",
"}"
] | Return a User with the given username iff the User exists. | [
"Return",
"a",
"User",
"with",
"the",
"given",
"username",
"iff",
"the",
"User",
"exists",
"."
] | 493f93e9d87d0237f0fea6d75c7b70646ad6d31e | https://github.com/edx/auth-backends/blob/493f93e9d87d0237f0fea6d75c7b70646ad6d31e/auth_backends/pipeline.py#L14-L31 |
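Functions like this are meant to be wired into the python-social-auth pipeline via Django settings; a hypothetical placement (the surrounding entries are illustrative, the exact pipeline depends on the project):

    SOCIAL_AUTH_PIPELINE = (
        'social_core.pipeline.social_auth.social_details',
        'auth_backends.pipeline.get_user_if_exists',
        'social_core.pipeline.social_auth.associate_user',
    )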
631 | edx/auth-backends | auth_backends/pipeline.py | update_email | def update_email(strategy, details, user=None, *args, **kwargs):
"""Update the user's email address using data from provider."""
if user:
email = details.get('email')
if email and user.email != email:
user.email = email
strategy.storage.user.changed(user) | python | def update_email(strategy, details, user=None, *args, **kwargs):
"""Update the user's email address using data from provider."""
if user:
email = details.get('email')
if email and user.email != email:
user.email = email
strategy.storage.user.changed(user) | [
"def",
"update_email",
"(",
"strategy",
",",
"details",
",",
"user",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"user",
":",
"email",
"=",
"details",
".",
"get",
"(",
"'email'",
")",
"if",
"email",
"and",
"user",
".",
"email",
"!=",
"email",
":",
"user",
".",
"email",
"=",
"email",
"strategy",
".",
"storage",
".",
"user",
".",
"changed",
"(",
"user",
")"
] | Update the user's email address using data from provider. | [
"Update",
"the",
"user",
"s",
"email",
"address",
"using",
"data",
"from",
"provider",
"."
] | 493f93e9d87d0237f0fea6d75c7b70646ad6d31e | https://github.com/edx/auth-backends/blob/493f93e9d87d0237f0fea6d75c7b70646ad6d31e/auth_backends/pipeline.py#L34-L41 |
632 | edx/auth-backends | auth_backends/backends.py | EdXOpenIdConnect.get_user_claims | def get_user_claims(self, access_token, claims=None, token_type='Bearer'):
"""Returns a dictionary with the values for each claim requested."""
data = self.get_json(
self.USER_INFO_URL,
headers={'Authorization': '{token_type} {token}'.format(token_type=token_type, token=access_token)}
)
if claims:
claims_names = set(claims)
data = {k: v for (k, v) in six.iteritems(data) if k in claims_names}
return data | python | def get_user_claims(self, access_token, claims=None, token_type='Bearer'):
"""Returns a dictionary with the values for each claim requested."""
data = self.get_json(
self.USER_INFO_URL,
headers={'Authorization': '{token_type} {token}'.format(token_type=token_type, token=access_token)}
)
if claims:
claims_names = set(claims)
data = {k: v for (k, v) in six.iteritems(data) if k in claims_names}
return data | [
"def",
"get_user_claims",
"(",
"self",
",",
"access_token",
",",
"claims",
"=",
"None",
",",
"token_type",
"=",
"'Bearer'",
")",
":",
"data",
"=",
"self",
".",
"get_json",
"(",
"self",
".",
"USER_INFO_URL",
",",
"headers",
"=",
"{",
"'Authorization'",
":",
"'{token_type} {token}'",
".",
"format",
"(",
"token_type",
"=",
"token_type",
",",
"token",
"=",
"access_token",
")",
"}",
")",
"if",
"claims",
":",
"claims_names",
"=",
"set",
"(",
"claims",
")",
"data",
"=",
"{",
"k",
":",
"v",
"for",
"(",
"k",
",",
"v",
")",
"in",
"six",
".",
"iteritems",
"(",
"data",
")",
"if",
"k",
"in",
"claims_names",
"}",
"return",
"data"
] | Returns a dictionary with the values for each claim requested. | [
"Returns",
"a",
"dictionary",
"with",
"the",
"values",
"for",
"each",
"claim",
"requested",
"."
] | 493f93e9d87d0237f0fea6d75c7b70646ad6d31e | https://github.com/edx/auth-backends/blob/493f93e9d87d0237f0fea6d75c7b70646ad6d31e/auth_backends/backends.py#L170-L181 |
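The claim filtering is a plain dict comprehension over the decoded JSON; with fabricated claim data:

    import six

    data = {'email': 'a@example.com', 'name': 'Alice', 'locale': 'en'}
    claims = ['email', 'name']
    claims_names = set(claims)
    print({k: v for (k, v) in six.iteritems(data) if k in claims_names})
    # {'email': 'a@example.com', 'name': 'Alice'}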
633 | napalm-automation/napalm-logs | napalm_logs/server.py | NapalmLogsServerProc._setup_ipc | def _setup_ipc(self):
'''
Setup the IPC pub and sub.
Subscribe to the listener IPC
and publish to the device specific IPC.
'''
log.debug('Setting up the server IPC puller to receive from the listener')
self.ctx = zmq.Context()
# subscribe to listener
self.sub = self.ctx.socket(zmq.PULL)
self.sub.bind(LST_IPC_URL)
try:
self.sub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm'])
# device publishers
log.debug('Creating the router IPC on the server')
self.pub = self.ctx.socket(zmq.ROUTER)
self.pub.bind(DEV_IPC_URL)
try:
self.pub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
# Pipe to the publishers
self.publisher_pub = self.ctx.socket(zmq.PUB)
self.publisher_pub.connect(PUB_PX_IPC_URL)
try:
self.publisher_pub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.publisher_pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) | python | def _setup_ipc(self):
'''
Setup the IPC pub and sub.
Subscribe to the listener IPC
and publish to the device specific IPC.
'''
log.debug('Setting up the server IPC puller to receive from the listener')
self.ctx = zmq.Context()
# subscribe to listener
self.sub = self.ctx.socket(zmq.PULL)
self.sub.bind(LST_IPC_URL)
try:
self.sub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm'])
# device publishers
log.debug('Creating the router IPC on the server')
self.pub = self.ctx.socket(zmq.ROUTER)
self.pub.bind(DEV_IPC_URL)
try:
self.pub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm'])
# Pipe to the publishers
self.publisher_pub = self.ctx.socket(zmq.PUB)
self.publisher_pub.connect(PUB_PX_IPC_URL)
try:
self.publisher_pub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.publisher_pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) | [
"def",
"_setup_ipc",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Setting up the server IPC puller to receive from the listener'",
")",
"self",
".",
"ctx",
"=",
"zmq",
".",
"Context",
"(",
")",
"# subscribe to listener",
"self",
".",
"sub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"PULL",
")",
"self",
".",
"sub",
".",
"bind",
"(",
"LST_IPC_URL",
")",
"try",
":",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"RCVHWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# device publishers",
"log",
".",
"debug",
"(",
"'Creating the router ICP on the server'",
")",
"self",
".",
"pub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"ROUTER",
")",
"self",
".",
"pub",
".",
"bind",
"(",
"DEV_IPC_URL",
")",
"try",
":",
"self",
".",
"pub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"pub",
".",
"setsockopt",
"(",
"zmq",
".",
"SNDHWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# Pipe to the publishers",
"self",
".",
"publisher_pub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"PUB",
")",
"self",
".",
"publisher_pub",
".",
"connect",
"(",
"PUB_PX_IPC_URL",
")",
"try",
":",
"self",
".",
"publisher_pub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"publisher_pub",
".",
"setsockopt",
"(",
"zmq",
".",
"SNDHWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")"
] | Set up the IPC pub and sub.
Subscribe to the listener IPC
and publish to the device specific IPC. | [
"Setup",
"the",
"IPC",
"pub",
"and",
"sub",
".",
"Subscript",
"to",
"the",
"listener",
"IPC",
"and",
"publish",
"to",
"the",
"device",
"specific",
"IPC",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L55-L90 |
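Usage note: every socket in the function above repeats the same zmq-2/zmq-3 fallback for the high-water mark. A minimal sketch of that pattern as a standalone helper (the helper name is hypothetical, not part of napalm-logs):

import zmq

def set_hwm(skt, hwm, recv=False):
    # zmq 2 exposes a single HWM option covering both directions
    try:
        skt.setsockopt(zmq.HWM, hwm)
    except AttributeError:
        # zmq 3+ removed zmq.HWM and split it into SNDHWM / RCVHWM
        skt.setsockopt(zmq.RCVHWM if recv else zmq.SNDHWM, hwm)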
634 | napalm-automation/napalm-logs | napalm_logs/server.py | NapalmLogsServerProc._cleanup_buffer | def _cleanup_buffer(self):
'''
Periodically clean up the buffer.
'''
if not self._buffer:
return
while True:
time.sleep(60)
log.debug('Cleaning up buffer')
items = self._buffer.items()
# The ``items`` function should also cleanup the buffer
log.debug('Collected items')
log.debug(list(items)) | python | def _cleanup_buffer(self):
'''
Periodically clean up the buffer.
'''
if not self._buffer:
return
while True:
time.sleep(60)
log.debug('Cleaning up buffer')
items = self._buffer.items()
# The ``items`` function should also cleanup the buffer
log.debug('Collected items')
log.debug(list(items)) | [
"def",
"_cleanup_buffer",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_buffer",
":",
"return",
"while",
"True",
":",
"time",
".",
"sleep",
"(",
"60",
")",
"log",
".",
"debug",
"(",
"'Cleaning up buffer'",
")",
"items",
"=",
"self",
".",
"_buffer",
".",
"items",
"(",
")",
"# The ``items`` function should also cleanup the buffer",
"log",
".",
"debug",
"(",
"'Collected items'",
")",
"log",
".",
"debug",
"(",
"list",
"(",
"items",
")",
")"
] | Periodically clean up the buffer. | [
"Periodically",
"cleanup",
"the",
"buffer",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L92-L104 |
635 | napalm-automation/napalm-logs | napalm_logs/server.py | NapalmLogsServerProc._compile_prefixes | def _compile_prefixes(self):
'''
Create a dict of all OS prefixes and their compiled regexes
'''
self.compiled_prefixes = {}
for dev_os, os_config in self.config.items():
if not os_config:
continue
self.compiled_prefixes[dev_os] = []
for prefix in os_config.get('prefixes', []):
values = prefix.get('values', {})
line = prefix.get('line', '')
if prefix.get('__python_fun__'):
self.compiled_prefixes[dev_os].append({
'__python_fun__': prefix['__python_fun__'],
'__python_mod__': prefix['__python_mod__']
})
continue # if python profiler defined for this prefix,
# no need to go further, but jump to the next prefix
# Add 'pri' and 'message' to the line, and values
line = '{{pri}}{}{{message}}'.format(line)
# PRI https://tools.ietf.org/html/rfc5424#section-6.2.1
values['pri'] = r'\<(\d+)\>'
values['message'] = '(.*)'
# We will now figure out which position each value is in so we can use it with the match statement
position = {}
for key in values.keys():
position[line.find('{' + key + '}')] = key
sorted_position = {}
for i, elem in enumerate(sorted(position.items())):
sorted_position[elem[1]] = i + 1
# Escape the line, then remove the escape for the curly brackets so they can be used when formatting
escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
# Replace a whitespace with \s+
escaped = escaped.replace(r'\ ', r'\s+')
self.compiled_prefixes[dev_os].append({
'prefix': re.compile(escaped.format(**values)),
'prefix_positions': sorted_position,
'raw_prefix': escaped.format(**values),
'values': values
}) | python | def _compile_prefixes(self):
'''
Create a dict of all OS prefixes and their compiled regexes
'''
self.compiled_prefixes = {}
for dev_os, os_config in self.config.items():
if not os_config:
continue
self.compiled_prefixes[dev_os] = []
for prefix in os_config.get('prefixes', []):
values = prefix.get('values', {})
line = prefix.get('line', '')
if prefix.get('__python_fun__'):
self.compiled_prefixes[dev_os].append({
'__python_fun__': prefix['__python_fun__'],
'__python_mod__': prefix['__python_mod__']
})
continue # if python profiler defined for this prefix,
# no need to go further, but jump to the next prefix
# Add 'pri' and 'message' to the line, and values
line = '{{pri}}{}{{message}}'.format(line)
# PRI https://tools.ietf.org/html/rfc5424#section-6.2.1
values['pri'] = r'\<(\d+)\>'
values['message'] = '(.*)'
# We will now figure out which position each value is in so we can use it with the match statement
position = {}
for key in values.keys():
position[line.find('{' + key + '}')] = key
sorted_position = {}
for i, elem in enumerate(sorted(position.items())):
sorted_position[elem[1]] = i + 1
# Escape the line, then remove the escape for the curly brackets so they can be used when formatting
escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
# Replace a whitespace with \s+
escaped = escaped.replace(r'\ ', r'\s+')
self.compiled_prefixes[dev_os].append({
'prefix': re.compile(escaped.format(**values)),
'prefix_positions': sorted_position,
'raw_prefix': escaped.format(**values),
'values': values
}) | [
"def",
"_compile_prefixes",
"(",
"self",
")",
":",
"self",
".",
"compiled_prefixes",
"=",
"{",
"}",
"for",
"dev_os",
",",
"os_config",
"in",
"self",
".",
"config",
".",
"items",
"(",
")",
":",
"if",
"not",
"os_config",
":",
"continue",
"self",
".",
"compiled_prefixes",
"[",
"dev_os",
"]",
"=",
"[",
"]",
"for",
"prefix",
"in",
"os_config",
".",
"get",
"(",
"'prefixes'",
",",
"[",
"]",
")",
":",
"values",
"=",
"prefix",
".",
"get",
"(",
"'values'",
",",
"{",
"}",
")",
"line",
"=",
"prefix",
".",
"get",
"(",
"'line'",
",",
"''",
")",
"if",
"prefix",
".",
"get",
"(",
"'__python_fun__'",
")",
":",
"self",
".",
"compiled_prefixes",
"[",
"dev_os",
"]",
".",
"append",
"(",
"{",
"'__python_fun__'",
":",
"prefix",
"[",
"'__python_fun__'",
"]",
",",
"'__python_mod__'",
":",
"prefix",
"[",
"'__python_mod__'",
"]",
"}",
")",
"continue",
"# if python profiler defined for this prefix,",
"# no need to go further, but jump to the next prefix",
"# Add 'pri' and 'message' to the line, and values",
"line",
"=",
"'{{pri}}{}{{message}}'",
".",
"format",
"(",
"line",
")",
"# PRI https://tools.ietf.org/html/rfc5424#section-6.2.1",
"values",
"[",
"'pri'",
"]",
"=",
"r'\\<(\\d+)\\>'",
"values",
"[",
"'message'",
"]",
"=",
"'(.*)'",
"# We will now figure out which position each value is in so we can use it with the match statement",
"position",
"=",
"{",
"}",
"for",
"key",
"in",
"values",
".",
"keys",
"(",
")",
":",
"position",
"[",
"line",
".",
"find",
"(",
"'{'",
"+",
"key",
"+",
"'}'",
")",
"]",
"=",
"key",
"sorted_position",
"=",
"{",
"}",
"for",
"i",
",",
"elem",
"in",
"enumerate",
"(",
"sorted",
"(",
"position",
".",
"items",
"(",
")",
")",
")",
":",
"sorted_position",
"[",
"elem",
"[",
"1",
"]",
"]",
"=",
"i",
"+",
"1",
"# Escape the line, then remove the escape for the curly bracets so they can be used when formatting",
"escaped",
"=",
"re",
".",
"escape",
"(",
"line",
")",
".",
"replace",
"(",
"r'\\{'",
",",
"'{'",
")",
".",
"replace",
"(",
"r'\\}'",
",",
"'}'",
")",
"# Replace a whitespace with \\s+",
"escaped",
"=",
"escaped",
".",
"replace",
"(",
"r'\\ '",
",",
"r'\\s+'",
")",
"self",
".",
"compiled_prefixes",
"[",
"dev_os",
"]",
".",
"append",
"(",
"{",
"'prefix'",
":",
"re",
".",
"compile",
"(",
"escaped",
".",
"format",
"(",
"*",
"*",
"values",
")",
")",
",",
"'prefix_positions'",
":",
"sorted_position",
",",
"'raw_prefix'",
":",
"escaped",
".",
"format",
"(",
"*",
"*",
"values",
")",
",",
"'values'",
":",
"values",
"}",
")"
] | Create a dict of all OS prefixes and their compiled regexes | [
"Create",
"a",
"dict",
"of",
"all",
"OS",
"prefixes",
"and",
"their",
"compiled",
"regexs"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L106-L146 |
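To see what the compilation above produces, here is a hedged, self-contained walk-through with an invented two-field prefix (real profiles ship as YAML with device-specific field names):

import re

line = '{date} {host} '
values = {'date': r'(\w+\s+\d+)', 'host': r'([^ ]+)'}
# same wrapping the function applies: prepend PRI, append the free-form message
line = '{{pri}}{}{{message}}'.format(line)
values['pri'] = r'\<(\d+)\>'
values['message'] = '(.*)'
escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
escaped = escaped.replace(r'\ ', r'\s+')
prefix = re.compile(escaped.format(**values))
m = prefix.search('<30>Jun 22 edge01 %SYS-5-CONFIG_I: Configured from console')
print(m.groups())  # ('30', 'Jun 22', 'edge01', '%SYS-5-CONFIG_I: Configured from console')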
636 | napalm-automation/napalm-logs | napalm_logs/server.py | NapalmLogsServerProc._identify_prefix | def _identify_prefix(self, msg, data):
'''
Check the message against each OS prefix and if matched return the
message dict
'''
prefix_id = -1
for prefix in data:
msg_dict = {}
prefix_id += 1
match = None
if '__python_fun__' in prefix:
log.debug('Trying to match using the %s custom python profiler', prefix['__python_mod__'])
try:
match = prefix['__python_fun__'](msg)
except Exception:
log.error('Exception while parsing %s with the %s python profiler',
msg, prefix['__python_mod__'], exc_info=True)
else:
log.debug('Matching using YAML-defined profiler:')
log.debug(prefix['raw_prefix'])
match = prefix['prefix'].search(msg)
if not match:
log.debug('Match not found')
continue
if '__python_fun__' in prefix:
log.debug('%s matched using the custom python profiler %s', msg, prefix['__python_mod__'])
msg_dict = match # the output as-is from the custom function
else:
positions = prefix.get('prefix_positions', {})
values = prefix.get('values')
msg_dict = {}
for key in values.keys():
msg_dict[key] = match.group(positions.get(key))
# Remove whitespace from the start or end of the message
msg_dict['__prefix_id__'] = prefix_id
msg_dict['message'] = msg_dict['message'].strip()
# The pri has to be an int as it is retrieved using regex '\<(\d+)\>'
if 'pri' in msg_dict:
msg_dict['facility'] = int(int(msg_dict['pri']) / 8)
msg_dict['severity'] = int(int(msg_dict['pri']) - (msg_dict['facility'] * 8))
return msg_dict | python | def _identify_prefix(self, msg, data):
'''
Check the message against each OS prefix and if matched return the
message dict
'''
prefix_id = -1
for prefix in data:
msg_dict = {}
prefix_id += 1
match = None
if '__python_fun__' in prefix:
log.debug('Trying to match using the %s custom python profiler', prefix['__python_mod__'])
try:
match = prefix['__python_fun__'](msg)
except Exception:
log.error('Exception while parsing %s with the %s python profiler',
msg, prefix['__python_mod__'], exc_info=True)
else:
log.debug('Matching using YAML-defined profiler:')
log.debug(prefix['raw_prefix'])
match = prefix['prefix'].search(msg)
if not match:
log.debug('Match not found')
continue
if '__python_fun__' in prefix:
log.debug('%s matched using the custom python profiler %s', msg, prefix['__python_mod__'])
msg_dict = match # the output as-is from the custom function
else:
positions = prefix.get('prefix_positions', {})
values = prefix.get('values')
msg_dict = {}
for key in values.keys():
msg_dict[key] = match.group(positions.get(key))
# Remove whitespace from the start or end of the message
msg_dict['__prefix_id__'] = prefix_id
msg_dict['message'] = msg_dict['message'].strip()
# The pri has to be an int as it is retrieved using regex '\<(\d+)\>'
if 'pri' in msg_dict:
msg_dict['facility'] = int(int(msg_dict['pri']) / 8)
msg_dict['severity'] = int(int(msg_dict['pri']) - (msg_dict['facility'] * 8))
return msg_dict | [
"def",
"_identify_prefix",
"(",
"self",
",",
"msg",
",",
"data",
")",
":",
"prefix_id",
"=",
"-",
"1",
"for",
"prefix",
"in",
"data",
":",
"msg_dict",
"=",
"{",
"}",
"prefix_id",
"+=",
"1",
"match",
"=",
"None",
"if",
"'__python_fun__'",
"in",
"prefix",
":",
"log",
".",
"debug",
"(",
"'Trying to match using the %s custom python profiler'",
",",
"prefix",
"[",
"'__python_mod__'",
"]",
")",
"try",
":",
"match",
"=",
"prefix",
"[",
"'__python_fun__'",
"]",
"(",
"msg",
")",
"except",
"Exception",
":",
"log",
".",
"error",
"(",
"'Exception while parsing %s with the %s python profiler'",
",",
"msg",
",",
"prefix",
"[",
"'__python_mod__'",
"]",
",",
"exc_info",
"=",
"True",
")",
"else",
":",
"log",
".",
"debug",
"(",
"'Matching using YAML-defined profiler:'",
")",
"log",
".",
"debug",
"(",
"prefix",
"[",
"'raw_prefix'",
"]",
")",
"match",
"=",
"prefix",
"[",
"'prefix'",
"]",
".",
"search",
"(",
"msg",
")",
"if",
"not",
"match",
":",
"log",
".",
"debug",
"(",
"'Match not found'",
")",
"continue",
"if",
"'__python_fun__'",
"in",
"prefix",
":",
"log",
".",
"debug",
"(",
"'%s matched using the custom python profiler %s'",
",",
"msg",
",",
"prefix",
"[",
"'__python_mod__'",
"]",
")",
"msg_dict",
"=",
"match",
"# the output as-is from the custom function",
"else",
":",
"positions",
"=",
"prefix",
".",
"get",
"(",
"'prefix_positions'",
",",
"{",
"}",
")",
"values",
"=",
"prefix",
".",
"get",
"(",
"'values'",
")",
"msg_dict",
"=",
"{",
"}",
"for",
"key",
"in",
"values",
".",
"keys",
"(",
")",
":",
"msg_dict",
"[",
"key",
"]",
"=",
"match",
".",
"group",
"(",
"positions",
".",
"get",
"(",
"key",
")",
")",
"# Remove whitespace from the start or end of the message",
"msg_dict",
"[",
"'__prefix_id__'",
"]",
"=",
"prefix_id",
"msg_dict",
"[",
"'message'",
"]",
"=",
"msg_dict",
"[",
"'message'",
"]",
".",
"strip",
"(",
")",
"# The pri has to be an int as it is retrived using regex '\\<(\\d+)\\>'",
"if",
"'pri'",
"in",
"msg_dict",
":",
"msg_dict",
"[",
"'facility'",
"]",
"=",
"int",
"(",
"int",
"(",
"msg_dict",
"[",
"'pri'",
"]",
")",
"/",
"8",
")",
"msg_dict",
"[",
"'severity'",
"]",
"=",
"int",
"(",
"int",
"(",
"msg_dict",
"[",
"'pri'",
"]",
")",
"-",
"(",
"msg_dict",
"[",
"'facility'",
"]",
"*",
"8",
")",
")",
"return",
"msg_dict"
] | Check the message against each OS prefix and if matched return the
message dict | [
"Check",
"the",
"message",
"again",
"each",
"OS",
"prefix",
"and",
"if",
"matched",
"return",
"the",
"message",
"dict"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L150-L191 |
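The facility/severity arithmetic at the end of the function is the RFC 5424 rule PRI = facility * 8 + severity; a worked example:

pri = 165                      # parsed from a '<165>' header
facility = pri // 8            # 20 -> local4
severity = pri - facility * 8  # 5  -> notice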
637 | napalm-automation/napalm-logs | napalm_logs/server.py | NapalmLogsServerProc._identify_os | def _identify_os(self, msg):
'''
Using the prefix of the syslog message,
we are able to identify the operating system and then continue parsing.
'''
ret = []
for dev_os, data in self.compiled_prefixes.items():
# TODO Should we prevent attempting to determine the OS for the blacklisted?
# [mircea] I think it's good from a logging perspective to know at least
# that the server found a match and that it won't be processed
# further. Later, we could potentially add an option to control this.
log.debug('Matching under %s', dev_os)
msg_dict = self._identify_prefix(msg, data)
if msg_dict:
log.debug('Adding %s to list of matched OS', dev_os)
ret.append((dev_os, msg_dict))
else:
log.debug('No match found for %s', dev_os)
if not ret:
log.debug('No OS matched, returning original log')
msg_dict = {'message': msg}
ret.append((None, msg_dict))
return ret | python | def _identify_os(self, msg):
'''
Using the prefix of the syslog message,
we are able to identify the operating system and then continue parsing.
'''
ret = []
for dev_os, data in self.compiled_prefixes.items():
# TODO Should we prevent attempting to determine the OS for the blacklisted?
# [mircea] I think it's good from a logging perspective to know at least
# that the server found a match and that it won't be processed
# further. Later, we could potentially add an option to control this.
log.debug('Matching under %s', dev_os)
msg_dict = self._identify_prefix(msg, data)
if msg_dict:
log.debug('Adding %s to list of matched OS', dev_os)
ret.append((dev_os, msg_dict))
else:
log.debug('No match found for %s', dev_os)
if not ret:
log.debug('No OS matched, returning original log')
msg_dict = {'message': msg}
ret.append((None, msg_dict))
return ret | [
"def",
"_identify_os",
"(",
"self",
",",
"msg",
")",
":",
"ret",
"=",
"[",
"]",
"for",
"dev_os",
",",
"data",
"in",
"self",
".",
"compiled_prefixes",
".",
"items",
"(",
")",
":",
"# TODO Should we prevent attepmting to determine the OS for the blacklisted?",
"# [mircea] I think its good from a logging perspective to know at least that",
"# that the server found the matching and it tells that it won't be processed",
"# further. Later, we could potentially add an option to control this.",
"log",
".",
"debug",
"(",
"'Matching under %s'",
",",
"dev_os",
")",
"msg_dict",
"=",
"self",
".",
"_identify_prefix",
"(",
"msg",
",",
"data",
")",
"if",
"msg_dict",
":",
"log",
".",
"debug",
"(",
"'Adding %s to list of matched OS'",
",",
"dev_os",
")",
"ret",
".",
"append",
"(",
"(",
"dev_os",
",",
"msg_dict",
")",
")",
"else",
":",
"log",
".",
"debug",
"(",
"'No match found for %s'",
",",
"dev_os",
")",
"if",
"not",
"ret",
":",
"log",
".",
"debug",
"(",
"'Not matched any OS, returning original log'",
")",
"msg_dict",
"=",
"{",
"'message'",
":",
"msg",
"}",
"ret",
".",
"append",
"(",
"(",
"None",
",",
"msg_dict",
")",
")",
"return",
"ret"
] | Using the prefix of the syslog message,
we are able to identify the operating system and then continue parsing. | [
"Using",
"the",
"prefix",
"of",
"the",
"syslog",
"message",
"we",
"are",
"able",
"to",
"identify",
"the",
"operating",
"system",
"and",
"then",
"continue",
"parsing",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/server.py#L193-L215 |
638 | napalm-automation/napalm-logs | napalm_logs/device.py | NapalmLogsDeviceProc._setup_ipc | def _setup_ipc(self):
'''
Subscribe to the right topic
in the device IPC and publish to the
publisher proxy.
'''
self.ctx = zmq.Context()
# subscribe to device IPC
log.debug('Creating the dealer IPC for %s', self._name)
self.sub = self.ctx.socket(zmq.DEALER)
if six.PY2:
self.sub.setsockopt(zmq.IDENTITY, self._name)
elif six.PY3:
self.sub.setsockopt(zmq.IDENTITY, bytes(self._name, 'utf-8'))
try:
self.sub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm'])
# subscribe to the corresponding IPC pipe
self.sub.connect(DEV_IPC_URL)
# publish to the publisher IPC
self.pub = self.ctx.socket(zmq.PUB)
self.pub.connect(PUB_PX_IPC_URL)
try:
self.pub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) | python | def _setup_ipc(self):
'''
Subscribe to the right topic
in the device IPC and publish to the
publisher proxy.
'''
self.ctx = zmq.Context()
# subscribe to device IPC
log.debug('Creating the dealer IPC for %s', self._name)
self.sub = self.ctx.socket(zmq.DEALER)
if six.PY2:
self.sub.setsockopt(zmq.IDENTITY, self._name)
elif six.PY3:
self.sub.setsockopt(zmq.IDENTITY, bytes(self._name, 'utf-8'))
try:
self.sub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm'])
# subscribe to the corresponding IPC pipe
self.sub.connect(DEV_IPC_URL)
# publish to the publisher IPC
self.pub = self.ctx.socket(zmq.PUB)
self.pub.connect(PUB_PX_IPC_URL)
try:
self.pub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) | [
"def",
"_setup_ipc",
"(",
"self",
")",
":",
"self",
".",
"ctx",
"=",
"zmq",
".",
"Context",
"(",
")",
"# subscribe to device IPC",
"log",
".",
"debug",
"(",
"'Creating the dealer IPC for %s'",
",",
"self",
".",
"_name",
")",
"self",
".",
"sub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"DEALER",
")",
"if",
"six",
".",
"PY2",
":",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"IDENTITY",
",",
"self",
".",
"_name",
")",
"elif",
"six",
".",
"PY3",
":",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"IDENTITY",
",",
"bytes",
"(",
"self",
".",
"_name",
",",
"'utf-8'",
")",
")",
"try",
":",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"RCVHWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# subscribe to the corresponding IPC pipe",
"self",
".",
"sub",
".",
"connect",
"(",
"DEV_IPC_URL",
")",
"# publish to the publisher IPC",
"self",
".",
"pub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"PUB",
")",
"self",
".",
"pub",
".",
"connect",
"(",
"PUB_PX_IPC_URL",
")",
"try",
":",
"self",
".",
"pub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"pub",
".",
"setsockopt",
"(",
"zmq",
".",
"SNDHWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")"
] | Subscribe to the right topic
in the device IPC and publish to the
publisher proxy. | [
"Subscribe",
"to",
"the",
"right",
"topic",
"in",
"the",
"device",
"IPC",
"and",
"publish",
"to",
"the",
"publisher",
"proxy",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L52-L82 |
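A hedged sketch of why the DEALER socket sets an IDENTITY: the server-side ROUTER (earlier rows) addresses frames by identity, so each device process only sees messages for its own platform. The IPC path below is illustrative only:

import zmq

ctx = zmq.Context()
dealer = ctx.socket(zmq.DEALER)
dealer.setsockopt(zmq.IDENTITY, b'junos')     # the OS name acts as the address
dealer.connect('ipc:///tmp/napalm-logs-dev')  # illustrative path only
# server side: router.send_multipart([b'junos', payload]) reaches this dealer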
639 | napalm-automation/napalm-logs | napalm_logs/device.py | NapalmLogsDeviceProc._compile_messages | def _compile_messages(self):
'''
Create a list of all OS messages and their compiled regexes
'''
self.compiled_messages = []
if not self._config:
return
for message_dict in self._config.get('messages', {}):
error = message_dict['error']
tag = message_dict['tag']
model = message_dict['model']
match_on = message_dict.get('match_on', 'tag')
if '__python_fun__' in message_dict:
self.compiled_messages.append({
'error': error,
'tag': tag,
'match_on': match_on,
'model': model,
'__python_fun__': message_dict['__python_fun__']
})
continue
values = message_dict['values']
line = message_dict['line']
mapping = message_dict['mapping']
# We will now figure out which position each value is in so we can use it with the match statement
position = {}
replace = {}
for key in values.keys():
if '|' in key:
new_key, replace[new_key] = key.replace(' ', '').split('|')
values[new_key] = values.pop(key)
key = new_key
position[line.find('{' + key + '}')] = key
sorted_position = {}
for i, elem in enumerate(sorted(position.items())):
sorted_position[elem[1]] = i + 1
# Escape the line, then remove the escape for the curly brackets so they can be used when formatting
escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
# Replace a whitespace with \s+
escaped = escaped.replace(r'\ ', r'\s+')
self.compiled_messages.append(
{
'error': error,
'tag': tag,
'match_on': match_on,
'line': re.compile(escaped.format(**values)),
'positions': sorted_position,
'values': values,
'replace': replace,
'model': model,
'mapping': mapping
}
)
log.debug('Compiled messages:')
log.debug(self.compiled_messages) | python | def _compile_messages(self):
'''
Create a list of all OS messages and their compiled regexes
'''
self.compiled_messages = []
if not self._config:
return
for message_dict in self._config.get('messages', {}):
error = message_dict['error']
tag = message_dict['tag']
model = message_dict['model']
match_on = message_dict.get('match_on', 'tag')
if '__python_fun__' in message_dict:
self.compiled_messages.append({
'error': error,
'tag': tag,
'match_on': match_on,
'model': model,
'__python_fun__': message_dict['__python_fun__']
})
continue
values = message_dict['values']
line = message_dict['line']
mapping = message_dict['mapping']
# We will now figure out which position each value is in so we can use it with the match statement
position = {}
replace = {}
for key in values.keys():
if '|' in key:
new_key, replace[new_key] = key.replace(' ', '').split('|')
values[new_key] = values.pop(key)
key = new_key
position[line.find('{' + key + '}')] = key
sorted_position = {}
for i, elem in enumerate(sorted(position.items())):
sorted_position[elem[1]] = i + 1
# Escape the line, then remove the escape for the curly brackets so they can be used when formatting
escaped = re.escape(line).replace(r'\{', '{').replace(r'\}', '}')
# Replace a whitespace with \s+
escaped = escaped.replace(r'\ ', r'\s+')
self.compiled_messages.append(
{
'error': error,
'tag': tag,
'match_on': match_on,
'line': re.compile(escaped.format(**values)),
'positions': sorted_position,
'values': values,
'replace': replace,
'model': model,
'mapping': mapping
}
)
log.debug('Compiled messages:')
log.debug(self.compiled_messages) | [
"def",
"_compile_messages",
"(",
"self",
")",
":",
"self",
".",
"compiled_messages",
"=",
"[",
"]",
"if",
"not",
"self",
".",
"_config",
":",
"return",
"for",
"message_dict",
"in",
"self",
".",
"_config",
".",
"get",
"(",
"'messages'",
",",
"{",
"}",
")",
":",
"error",
"=",
"message_dict",
"[",
"'error'",
"]",
"tag",
"=",
"message_dict",
"[",
"'tag'",
"]",
"model",
"=",
"message_dict",
"[",
"'model'",
"]",
"match_on",
"=",
"message_dict",
".",
"get",
"(",
"'match_on'",
",",
"'tag'",
")",
"if",
"'__python_fun__'",
"in",
"message_dict",
":",
"self",
".",
"compiled_messages",
".",
"append",
"(",
"{",
"'error'",
":",
"error",
",",
"'tag'",
":",
"tag",
",",
"'match_on'",
":",
"match_on",
",",
"'model'",
":",
"model",
",",
"'__python_fun__'",
":",
"message_dict",
"[",
"'__python_fun__'",
"]",
"}",
")",
"continue",
"values",
"=",
"message_dict",
"[",
"'values'",
"]",
"line",
"=",
"message_dict",
"[",
"'line'",
"]",
"mapping",
"=",
"message_dict",
"[",
"'mapping'",
"]",
"# We will now figure out which position each value is in so we can use it with the match statement",
"position",
"=",
"{",
"}",
"replace",
"=",
"{",
"}",
"for",
"key",
"in",
"values",
".",
"keys",
"(",
")",
":",
"if",
"'|'",
"in",
"key",
":",
"new_key",
",",
"replace",
"[",
"new_key",
"]",
"=",
"key",
".",
"replace",
"(",
"' '",
",",
"''",
")",
".",
"split",
"(",
"'|'",
")",
"values",
"[",
"new_key",
"]",
"=",
"values",
".",
"pop",
"(",
"key",
")",
"key",
"=",
"new_key",
"position",
"[",
"line",
".",
"find",
"(",
"'{'",
"+",
"key",
"+",
"'}'",
")",
"]",
"=",
"key",
"sorted_position",
"=",
"{",
"}",
"for",
"i",
",",
"elem",
"in",
"enumerate",
"(",
"sorted",
"(",
"position",
".",
"items",
"(",
")",
")",
")",
":",
"sorted_position",
"[",
"elem",
"[",
"1",
"]",
"]",
"=",
"i",
"+",
"1",
"# Escape the line, then remove the escape for the curly bracets so they can be used when formatting",
"escaped",
"=",
"re",
".",
"escape",
"(",
"line",
")",
".",
"replace",
"(",
"r'\\{'",
",",
"'{'",
")",
".",
"replace",
"(",
"r'\\}'",
",",
"'}'",
")",
"# Replace a whitespace with \\s+",
"escaped",
"=",
"escaped",
".",
"replace",
"(",
"r'\\ '",
",",
"r'\\s+'",
")",
"self",
".",
"compiled_messages",
".",
"append",
"(",
"{",
"'error'",
":",
"error",
",",
"'tag'",
":",
"tag",
",",
"'match_on'",
":",
"match_on",
",",
"'line'",
":",
"re",
".",
"compile",
"(",
"escaped",
".",
"format",
"(",
"*",
"*",
"values",
")",
")",
",",
"'positions'",
":",
"sorted_position",
",",
"'values'",
":",
"values",
",",
"'replace'",
":",
"replace",
",",
"'model'",
":",
"model",
",",
"'mapping'",
":",
"mapping",
"}",
")",
"log",
".",
"debug",
"(",
"'Compiled messages:'",
")",
"log",
".",
"debug",
"(",
"self",
".",
"compiled_messages",
")"
] | Create a list of all OS messages and their compiled regexes | [
"Create",
"a",
"list",
"of",
"all",
"OS",
"messages",
"and",
"their",
"compiled",
"regexs"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L84-L138 |
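One detail worth isolating: a values key may carry a cast hint after a pipe, e.g. 'peer_as | int' (the field name here is illustrative). The split above separates the capture name from the hint, which is later applied through napalm_logs.utils.cast:

key = 'peer_as | int'
new_key, cast_hint = key.replace(' ', '').split('|')
# new_key == 'peer_as', cast_hint == 'int'; the matched text for this
# group will be run through napalm_logs.utils.cast(value, 'int')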
640 | napalm-automation/napalm-logs | napalm_logs/device.py | NapalmLogsDeviceProc._parse | def _parse(self, msg_dict):
'''
Parse a syslog message and check what OpenConfig object should
be generated.
'''
error_present = False
# log.debug('Matching the message:')
# log.debug(msg_dict)
for message in self.compiled_messages:
# log.debug('Matching using:')
# log.debug(message)
match_on = message['match_on']
if match_on not in msg_dict:
# log.debug('%s is not a valid key in the partially parsed dict', match_on)
continue
if message['tag'] != msg_dict[match_on]:
continue
if '__python_fun__' in message:
return {
'model': message['model'],
'error': message['error'],
'__python_fun__': message['__python_fun__']
}
error_present = True
match = message['line'].search(msg_dict['message'])
if not match:
continue
positions = message.get('positions', {})
values = message.get('values')
ret = {
'model': message['model'],
'mapping': message['mapping'],
'replace': message['replace'],
'error': message['error']
}
for key in values.keys():
# Check if the value needs to be replaced
if key in message['replace']:
result = napalm_logs.utils.cast(match.group(positions.get(key)), message['replace'][key])
else:
result = match.group(positions.get(key))
ret[key] = result
return ret
if error_present is True:
log.info('Configured regex did not match for os: %s tag %s', self._name, msg_dict.get('tag', ''))
else:
log.info('Syslog message not configured for os: %s tag %s', self._name, msg_dict.get('tag', '')) | python | def _parse(self, msg_dict):
'''
Parse a syslog message and check what OpenConfig object should
be generated.
'''
error_present = False
# log.debug('Matching the message:')
# log.debug(msg_dict)
for message in self.compiled_messages:
# log.debug('Matching using:')
# log.debug(message)
match_on = message['match_on']
if match_on not in msg_dict:
# log.debug('%s is not a valid key in the partially parsed dict', match_on)
continue
if message['tag'] != msg_dict[match_on]:
continue
if '__python_fun__' in message:
return {
'model': message['model'],
'error': message['error'],
'__python_fun__': message['__python_fun__']
}
error_present = True
match = message['line'].search(msg_dict['message'])
if not match:
continue
positions = message.get('positions', {})
values = message.get('values')
ret = {
'model': message['model'],
'mapping': message['mapping'],
'replace': message['replace'],
'error': message['error']
}
for key in values.keys():
# Check if the value needs to be replaced
if key in message['replace']:
result = napalm_logs.utils.cast(match.group(positions.get(key)), message['replace'][key])
else:
result = match.group(positions.get(key))
ret[key] = result
return ret
if error_present is True:
log.info('Configured regex did not match for os: %s tag %s', self._name, msg_dict.get('tag', ''))
else:
log.info('Syslog message not configured for os: %s tag %s', self._name, msg_dict.get('tag', '')) | [
"def",
"_parse",
"(",
"self",
",",
"msg_dict",
")",
":",
"error_present",
"=",
"False",
"# log.debug('Matching the message:')",
"# log.debug(msg_dict)",
"for",
"message",
"in",
"self",
".",
"compiled_messages",
":",
"# log.debug('Matching using:')",
"# log.debug(message)",
"match_on",
"=",
"message",
"[",
"'match_on'",
"]",
"if",
"match_on",
"not",
"in",
"msg_dict",
":",
"# log.debug('%s is not a valid key in the partially parsed dict', match_on)",
"continue",
"if",
"message",
"[",
"'tag'",
"]",
"!=",
"msg_dict",
"[",
"match_on",
"]",
":",
"continue",
"if",
"'__python_fun__'",
"in",
"message",
":",
"return",
"{",
"'model'",
":",
"message",
"[",
"'model'",
"]",
",",
"'error'",
":",
"message",
"[",
"'error'",
"]",
",",
"'__python_fun__'",
":",
"message",
"[",
"'__python_fun__'",
"]",
"}",
"error_present",
"=",
"True",
"match",
"=",
"message",
"[",
"'line'",
"]",
".",
"search",
"(",
"msg_dict",
"[",
"'message'",
"]",
")",
"if",
"not",
"match",
":",
"continue",
"positions",
"=",
"message",
".",
"get",
"(",
"'positions'",
",",
"{",
"}",
")",
"values",
"=",
"message",
".",
"get",
"(",
"'values'",
")",
"ret",
"=",
"{",
"'model'",
":",
"message",
"[",
"'model'",
"]",
",",
"'mapping'",
":",
"message",
"[",
"'mapping'",
"]",
",",
"'replace'",
":",
"message",
"[",
"'replace'",
"]",
",",
"'error'",
":",
"message",
"[",
"'error'",
"]",
"}",
"for",
"key",
"in",
"values",
".",
"keys",
"(",
")",
":",
"# Check if the value needs to be replaced",
"if",
"key",
"in",
"message",
"[",
"'replace'",
"]",
":",
"result",
"=",
"napalm_logs",
".",
"utils",
".",
"cast",
"(",
"match",
".",
"group",
"(",
"positions",
".",
"get",
"(",
"key",
")",
")",
",",
"message",
"[",
"'replace'",
"]",
"[",
"key",
"]",
")",
"else",
":",
"result",
"=",
"match",
".",
"group",
"(",
"positions",
".",
"get",
"(",
"key",
")",
")",
"ret",
"[",
"key",
"]",
"=",
"result",
"return",
"ret",
"if",
"error_present",
"is",
"True",
":",
"log",
".",
"info",
"(",
"'Configured regex did not match for os: %s tag %s'",
",",
"self",
".",
"_name",
",",
"msg_dict",
".",
"get",
"(",
"'tag'",
",",
"''",
")",
")",
"else",
":",
"log",
".",
"info",
"(",
"'Syslog message not configured for os: %s tag %s'",
",",
"self",
".",
"_name",
",",
"msg_dict",
".",
"get",
"(",
"'tag'",
",",
"''",
")",
")"
] | Parse a syslog message and check what OpenConfig object should
be generated. | [
"Parse",
"a",
"syslog",
"message",
"and",
"check",
"what",
"OpenConfig",
"object",
"should",
"be",
"generated",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L140-L186 |
641 | napalm-automation/napalm-logs | napalm_logs/device.py | NapalmLogsDeviceProc._emit | def _emit(self, **kwargs):
'''
Emit an OpenConfig object given a certain combination of
fields mapped in the config to the corresponding hierarchy.
'''
oc_dict = {}
for mapping, result_key in kwargs['mapping']['variables'].items():
result = kwargs[result_key]
oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict)
for mapping, result in kwargs['mapping']['static'].items():
oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict)
return oc_dict | python | def _emit(self, **kwargs):
'''
Emit an OpenConfig object given a certain combination of
fields mapped in the config to the corresponding hierarchy.
'''
oc_dict = {}
for mapping, result_key in kwargs['mapping']['variables'].items():
result = kwargs[result_key]
oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict)
for mapping, result in kwargs['mapping']['static'].items():
oc_dict = napalm_logs.utils.setval(mapping.format(**kwargs), result, oc_dict)
return oc_dict | [
"def",
"_emit",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"oc_dict",
"=",
"{",
"}",
"for",
"mapping",
",",
"result_key",
"in",
"kwargs",
"[",
"'mapping'",
"]",
"[",
"'variables'",
"]",
".",
"items",
"(",
")",
":",
"result",
"=",
"kwargs",
"[",
"result_key",
"]",
"oc_dict",
"=",
"napalm_logs",
".",
"utils",
".",
"setval",
"(",
"mapping",
".",
"format",
"(",
"*",
"*",
"kwargs",
")",
",",
"result",
",",
"oc_dict",
")",
"for",
"mapping",
",",
"result",
"in",
"kwargs",
"[",
"'mapping'",
"]",
"[",
"'static'",
"]",
".",
"items",
"(",
")",
":",
"oc_dict",
"=",
"napalm_logs",
".",
"utils",
".",
"setval",
"(",
"mapping",
".",
"format",
"(",
"*",
"*",
"kwargs",
")",
",",
"result",
",",
"oc_dict",
")",
"return",
"oc_dict"
] | Emit an OpenConfig object given a certain combination of
fields mapped in the config to the corresponding hierarchy. | [
"Emit",
"an",
"OpenConfig",
"object",
"given",
"a",
"certain",
"combination",
"of",
"fields",
"mappeed",
"in",
"the",
"config",
"to",
"the",
"corresponding",
"hierarchy",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L188-L200 |
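A rough stand-in for napalm_logs.utils.setval, to show how the mapping paths become a nested OpenConfig-shaped dict; the '//' separator and exact semantics are assumptions, not the library's documented contract:

def setval(path, value, d, sep='//'):
    cur = d
    keys = path.split(sep)
    for key in keys[:-1]:
        cur = cur.setdefault(key, {})  # walk/create intermediate dicts
    cur[keys[-1]] = value
    return d

oc = setval('bgp//neighbors//neighbor//{neighbor}//state'.format(neighbor='10.0.0.1'),
            {'session-state': 'IDLE'}, {})
# {'bgp': {'neighbors': {'neighbor': {'10.0.0.1': {'state': {'session-state': 'IDLE'}}}}}}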
642 | napalm-automation/napalm-logs | napalm_logs/device.py | NapalmLogsDeviceProc._publish | def _publish(self, obj):
'''
Publish the OC object.
'''
bin_obj = umsgpack.packb(obj)
self.pub.send(bin_obj) | python | def _publish(self, obj):
'''
Publish the OC object.
'''
bin_obj = umsgpack.packb(obj)
self.pub.send(bin_obj) | [
"def",
"_publish",
"(",
"self",
",",
"obj",
")",
":",
"bin_obj",
"=",
"umsgpack",
".",
"packb",
"(",
"obj",
")",
"self",
".",
"pub",
".",
"send",
"(",
"bin_obj",
")"
] | Publish the OC object. | [
"Publish",
"the",
"OC",
"object",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/device.py#L202-L207 |
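For context, the serialization is a plain MessagePack round trip, so any subscriber can recover the dict (umsgpack is the u-msgpack-python package):

import umsgpack

obj = {'host': 'edge01', 'severity': 5}
wire = umsgpack.packb(obj)            # bytes placed on the PUB socket
assert umsgpack.unpackb(wire) == obj  # what a subscriber decodes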
643 | napalm-automation/napalm-logs | napalm_logs/auth.py | NapalmLogsAuthProc._handshake | def _handshake(self, conn, addr):
'''
Ensures that the client receives the AES key.
'''
# waiting for the magic request message
msg = conn.recv(len(MAGIC_REQ))
log.debug('Received message %s from %s', msg, addr)
if msg != MAGIC_REQ:
log.warning('%s is not a valid REQ message from %s', msg, addr)
return
log.debug('Sending the private key')
conn.send(self.__key)
# wait for explicit ACK
log.debug('Waiting for the client to confirm')
msg = conn.recv(len(MAGIC_ACK))
if msg != MAGIC_ACK:
return
log.debug('Sending the signature key')
conn.send(self.__sgn)
# wait for explicit ACK
log.debug('Waiting for the client to confirm')
msg = conn.recv(len(MAGIC_ACK))
if msg != MAGIC_ACK:
return
log.info('%s is now authenticated', addr)
self.keep_alive(conn) | python | def _handshake(self, conn, addr):
'''
Ensures that the client receives the AES key.
'''
# waiting for the magic request message
msg = conn.recv(len(MAGIC_REQ))
log.debug('Received message %s from %s', msg, addr)
if msg != MAGIC_REQ:
log.warning('%s is not a valid REQ message from %s', msg, addr)
return
log.debug('Sending the private key')
conn.send(self.__key)
# wait for explicit ACK
log.debug('Waiting for the client to confirm')
msg = conn.recv(len(MAGIC_ACK))
if msg != MAGIC_ACK:
return
log.debug('Sending the signature key')
conn.send(self.__sgn)
# wait for explicit ACK
log.debug('Waiting for the client to confirm')
msg = conn.recv(len(MAGIC_ACK))
if msg != MAGIC_ACK:
return
log.info('%s is now authenticated', addr)
self.keep_alive(conn) | [
"def",
"_handshake",
"(",
"self",
",",
"conn",
",",
"addr",
")",
":",
"# waiting for the magic request message",
"msg",
"=",
"conn",
".",
"recv",
"(",
"len",
"(",
"MAGIC_REQ",
")",
")",
"log",
".",
"debug",
"(",
"'Received message %s from %s'",
",",
"msg",
",",
"addr",
")",
"if",
"msg",
"!=",
"MAGIC_REQ",
":",
"log",
".",
"warning",
"(",
"'%s is not a valid REQ message from %s'",
",",
"msg",
",",
"addr",
")",
"return",
"log",
".",
"debug",
"(",
"'Sending the private key'",
")",
"conn",
".",
"send",
"(",
"self",
".",
"__key",
")",
"# wait for explicit ACK",
"log",
".",
"debug",
"(",
"'Waiting for the client to confirm'",
")",
"msg",
"=",
"conn",
".",
"recv",
"(",
"len",
"(",
"MAGIC_ACK",
")",
")",
"if",
"msg",
"!=",
"MAGIC_ACK",
":",
"return",
"log",
".",
"debug",
"(",
"'Sending the signature key'",
")",
"conn",
".",
"send",
"(",
"self",
".",
"__sgn",
")",
"# wait for explicit ACK",
"log",
".",
"debug",
"(",
"'Waiting for the client to confirm'",
")",
"msg",
"=",
"conn",
".",
"recv",
"(",
"len",
"(",
"MAGIC_ACK",
")",
")",
"if",
"msg",
"!=",
"MAGIC_ACK",
":",
"return",
"log",
".",
"info",
"(",
"'%s is now authenticated'",
",",
"addr",
")",
"self",
".",
"keep_alive",
"(",
"conn",
")"
] | Ensures that the client receives the AES key. | [
"Ensures",
"that",
"the",
"client",
"receives",
"the",
"AES",
"key",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L79-L104 |
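A hedged sketch of the client half of this exchange (napalm-logs ships a real client, napalm_logs.utils.ClientAuth; the address, port, and buffer sizes below are assumptions):

import socket
import ssl

from napalm_logs.config import MAGIC_REQ, MAGIC_ACK

skt = ssl.wrap_socket(socket.socket(socket.AF_INET, socket.SOCK_STREAM))
skt.connect(('127.0.0.1', 49018))  # assumed default auth port, adjust as needed
skt.send(MAGIC_REQ)                # request the keys
aes_key = skt.recv(1024)           # AES key
skt.send(MAGIC_ACK)
sgn_key = skt.recv(1024)           # signing key
skt.send(MAGIC_ACK)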
644 | napalm-automation/napalm-logs | napalm_logs/auth.py | NapalmLogsAuthProc.keep_alive | def keep_alive(self, conn):
'''
Maintains auth sessions
'''
while self.__up:
msg = conn.recv(len(AUTH_KEEP_ALIVE))
if msg != AUTH_KEEP_ALIVE:
log.error('Received something other than %s', AUTH_KEEP_ALIVE)
conn.close()
return
try:
conn.send(AUTH_KEEP_ALIVE_ACK)
except (IOError, socket.error) as err:
log.error('Unable to send auth keep alive: %s', err)
conn.close()
return | python | def keep_alive(self, conn):
'''
Maintains auth sessions
'''
while self.__up:
msg = conn.recv(len(AUTH_KEEP_ALIVE))
if msg != AUTH_KEEP_ALIVE:
log.error('Received something other than %s', AUTH_KEEP_ALIVE)
conn.close()
return
try:
conn.send(AUTH_KEEP_ALIVE_ACK)
except (IOError, socket.error) as err:
log.error('Unable to send auth keep alive: %s', err)
conn.close()
return | [
"def",
"keep_alive",
"(",
"self",
",",
"conn",
")",
":",
"while",
"self",
".",
"__up",
":",
"msg",
"=",
"conn",
".",
"recv",
"(",
"len",
"(",
"AUTH_KEEP_ALIVE",
")",
")",
"if",
"msg",
"!=",
"AUTH_KEEP_ALIVE",
":",
"log",
".",
"error",
"(",
"'Received something other than %s'",
",",
"AUTH_KEEP_ALIVE",
")",
"conn",
".",
"close",
"(",
")",
"return",
"try",
":",
"conn",
".",
"send",
"(",
"AUTH_KEEP_ALIVE_ACK",
")",
"except",
"(",
"IOError",
",",
"socket",
".",
"error",
")",
"as",
"err",
":",
"log",
".",
"error",
"(",
"'Unable to send auth keep alive: %s'",
",",
"err",
")",
"conn",
".",
"close",
"(",
")",
"return"
] | Maintains auth sessions | [
"Maintains",
"auth",
"sessions"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L106-L121 |
645 | napalm-automation/napalm-logs | napalm_logs/auth.py | NapalmLogsAuthProc.verify_cert | def verify_cert(self):
'''
Checks that the provided cert and key are valid and usable
'''
log.debug('Verifying the %s certificate, keyfile: %s',
self.certificate, self.keyfile)
try:
ssl.create_default_context().load_cert_chain(self.certificate, keyfile=self.keyfile)
except ssl.SSLError:
error_string = 'SSL certificate and key do not match'
log.error(error_string)
raise SSLMismatchException(error_string)
except IOError:
log.error('Unable to open either certificate or key file')
raise
log.debug('Certificate looks good.') | python | def verify_cert(self):
'''
Checks that the provided cert and key are valid and usable
'''
log.debug('Verifying the %s certificate, keyfile: %s',
self.certificate, self.keyfile)
try:
ssl.create_default_context().load_cert_chain(self.certificate, keyfile=self.keyfile)
except ssl.SSLError:
error_string = 'SSL certificate and key do not match'
log.error(error_string)
raise SSLMismatchException(error_string)
except IOError:
log.error('Unable to open either certificate or key file')
raise
log.debug('Certificate looks good.') | [
"def",
"verify_cert",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Verifying the %s certificate, keyfile: %s'",
",",
"self",
".",
"certificate",
",",
"self",
".",
"keyfile",
")",
"try",
":",
"ssl",
".",
"create_default_context",
"(",
")",
".",
"load_cert_chain",
"(",
"self",
".",
"certificate",
",",
"keyfile",
"=",
"self",
".",
"keyfile",
")",
"except",
"ssl",
".",
"SSLError",
":",
"error_string",
"=",
"'SSL certificate and key do not match'",
"log",
".",
"error",
"(",
"error_string",
")",
"raise",
"SSLMismatchException",
"(",
"error_string",
")",
"except",
"IOError",
":",
"log",
".",
"error",
"(",
"'Unable to open either certificate or key file'",
")",
"raise",
"log",
".",
"debug",
"(",
"'Certificate looks good.'",
")"
] | Checks that the provided cert and key are valid and usable | [
"Checks",
"that",
"the",
"provided",
"cert",
"and",
"key",
"are",
"valid",
"and",
"usable"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L123-L138 |
646 | napalm-automation/napalm-logs | napalm_logs/auth.py | NapalmLogsAuthProc._create_skt | def _create_skt(self):
'''
Create the authentication socket.
'''
log.debug('Creating the auth socket')
if ':' in self.auth_address:
self.socket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.socket.bind((self.auth_address, self.auth_port))
except socket.error as msg:
error_string = 'Unable to bind (auth) to port {} on {}: {}'.format(self.auth_port, self.auth_address, msg)
log.error(error_string, exc_info=True)
raise BindException(error_string) | python | def _create_skt(self):
'''
Create the authentication socket.
'''
log.debug('Creating the auth socket')
if ':' in self.auth_address:
self.socket = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.socket.bind((self.auth_address, self.auth_port))
except socket.error as msg:
error_string = 'Unable to bind (auth) to port {} on {}: {}'.format(self.auth_port, self.auth_address, msg)
log.error(error_string, exc_info=True)
raise BindException(error_string) | [
"def",
"_create_skt",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Creating the auth socket'",
")",
"if",
"':'",
"in",
"self",
".",
"auth_address",
":",
"self",
".",
"socket",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET6",
",",
"socket",
".",
"SOCK_STREAM",
")",
"else",
":",
"self",
".",
"socket",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_STREAM",
")",
"try",
":",
"self",
".",
"socket",
".",
"bind",
"(",
"(",
"self",
".",
"auth_address",
",",
"self",
".",
"auth_port",
")",
")",
"except",
"socket",
".",
"error",
"as",
"msg",
":",
"error_string",
"=",
"'Unable to bind (auth) to port {} on {}: {}'",
".",
"format",
"(",
"self",
".",
"auth_port",
",",
"self",
".",
"auth_address",
",",
"msg",
")",
"log",
".",
"error",
"(",
"error_string",
",",
"exc_info",
"=",
"True",
")",
"raise",
"BindException",
"(",
"error_string",
")"
] | Create the authentication socket. | [
"Create",
"the",
"authentication",
"socket",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L140-L154 |
647 | napalm-automation/napalm-logs | napalm_logs/auth.py | NapalmLogsAuthProc.start | def start(self):
'''
Listen to auth requests and send the AES key.
Each client connection starts a new thread.
'''
# Start suicide polling thread
log.debug('Starting the auth process')
self.verify_cert()
self._create_skt()
log.debug('The auth process can receive at most %d parallel connections', AUTH_MAX_CONN)
self.socket.listen(AUTH_MAX_CONN)
thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),))
thread.start()
signal.signal(signal.SIGTERM, self._exit_gracefully)
self.__up = True
while self.__up:
try:
(clientsocket, address) = self.socket.accept()
wrapped_auth_skt = ssl.wrap_socket(clientsocket,
server_side=True,
certfile=self.certificate,
keyfile=self.keyfile)
except ssl.SSLError:
log.exception('SSL error', exc_info=True)
continue
except socket.error as error:
if self.__up is False:
return
else:
msg = 'Received auth socket error: {}'.format(error)
log.error(msg, exc_info=True)
raise NapalmLogsExit(msg)
log.info('%s connected', address)
log.debug('Starting the handshake')
client_thread = threading.Thread(target=self._handshake,
args=(wrapped_auth_skt, address))
client_thread.start() | python | def start(self):
'''
Listen to auth requests and send the AES key.
Each client connection starts a new thread.
'''
# Start suicide polling thread
log.debug('Starting the auth process')
self.verify_cert()
self._create_skt()
log.debug('The auth process can receive at most %d parallel connections', AUTH_MAX_CONN)
self.socket.listen(AUTH_MAX_CONN)
thread = threading.Thread(target=self._suicide_when_without_parent, args=(os.getppid(),))
thread.start()
signal.signal(signal.SIGTERM, self._exit_gracefully)
self.__up = True
while self.__up:
try:
(clientsocket, address) = self.socket.accept()
wrapped_auth_skt = ssl.wrap_socket(clientsocket,
server_side=True,
certfile=self.certificate,
keyfile=self.keyfile)
except ssl.SSLError:
log.exception('SSL error', exc_info=True)
continue
except socket.error as error:
if self.__up is False:
return
else:
msg = 'Received auth socket error: {}'.format(error)
log.error(msg, exc_info=True)
raise NapalmLogsExit(msg)
log.info('%s connected', address)
log.debug('Starting the handshake')
client_thread = threading.Thread(target=self._handshake,
args=(wrapped_auth_skt, address))
client_thread.start() | [
"def",
"start",
"(",
"self",
")",
":",
"# Start suicide polling thread",
"log",
".",
"debug",
"(",
"'Starting the auth process'",
")",
"self",
".",
"verify_cert",
"(",
")",
"self",
".",
"_create_skt",
"(",
")",
"log",
".",
"debug",
"(",
"'The auth process can receive at most %d parallel connections'",
",",
"AUTH_MAX_CONN",
")",
"self",
".",
"socket",
".",
"listen",
"(",
"AUTH_MAX_CONN",
")",
"thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"_suicide_when_without_parent",
",",
"args",
"=",
"(",
"os",
".",
"getppid",
"(",
")",
",",
")",
")",
"thread",
".",
"start",
"(",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTERM",
",",
"self",
".",
"_exit_gracefully",
")",
"self",
".",
"__up",
"=",
"True",
"while",
"self",
".",
"__up",
":",
"try",
":",
"(",
"clientsocket",
",",
"address",
")",
"=",
"self",
".",
"socket",
".",
"accept",
"(",
")",
"wrapped_auth_skt",
"=",
"ssl",
".",
"wrap_socket",
"(",
"clientsocket",
",",
"server_side",
"=",
"True",
",",
"certfile",
"=",
"self",
".",
"certificate",
",",
"keyfile",
"=",
"self",
".",
"keyfile",
")",
"except",
"ssl",
".",
"SSLError",
":",
"log",
".",
"exception",
"(",
"'SSL error'",
",",
"exc_info",
"=",
"True",
")",
"continue",
"except",
"socket",
".",
"error",
"as",
"error",
":",
"if",
"self",
".",
"__up",
"is",
"False",
":",
"return",
"else",
":",
"msg",
"=",
"'Received auth socket error: {}'",
".",
"format",
"(",
"error",
")",
"log",
".",
"error",
"(",
"msg",
",",
"exc_info",
"=",
"True",
")",
"raise",
"NapalmLogsExit",
"(",
"msg",
")",
"log",
".",
"info",
"(",
"'%s connected'",
",",
"address",
")",
"log",
".",
"debug",
"(",
"'Starting the handshake'",
")",
"client_thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"_handshake",
",",
"args",
"=",
"(",
"wrapped_auth_skt",
",",
"address",
")",
")",
"client_thread",
".",
"start",
"(",
")"
] | Listen to auth requests and send the AES key.
Each client connection starts a new thread. | [
"Listen",
"to",
"auth",
"requests",
"and",
"send",
"the",
"AES",
"key",
".",
"Each",
"client",
"connection",
"starts",
"a",
"new",
"thread",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L156-L192 |
648 | napalm-automation/napalm-logs | napalm_logs/auth.py | NapalmLogsAuthProc.stop | def stop(self):
'''
Stop the auth proc.
'''
log.info('Stopping auth process')
self.__up = False
self.socket.close() | python | def stop(self):
'''
Stop the auth proc.
'''
log.info('Stopping auth process')
self.__up = False
self.socket.close() | [
"def",
"stop",
"(",
"self",
")",
":",
"log",
".",
"info",
"(",
"'Stopping auth process'",
")",
"self",
".",
"__up",
"=",
"False",
"self",
".",
"socket",
".",
"close",
"(",
")"
] | Stop the auth proc. | [
"Stop",
"the",
"auth",
"proc",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/auth.py#L194-L200 |
649 | napalm-automation/napalm-logs | napalm_logs/listener/kafka.py | KafkaListener.start | def start(self):
'''
Start the kafka consumer.
'''
log.debug('Creating the consumer using the bootstrap servers: %s and the group ID: %s',
self.bootstrap_servers,
self.group_id)
try:
self.consumer = kafka.KafkaConsumer(bootstrap_servers=self.bootstrap_servers,
group_id=self.group_id)
except kafka.errors.NoBrokersAvailable as err:
log.error(err, exc_info=True)
raise ListenerException(err)
log.debug('Subscribing to the %s topic', self.topic)
self.consumer.subscribe(topics=[self.topic]) | python | def start(self):
'''
Start the kafka consumer.
'''
log.debug('Creating the consumer using the bootstrap servers: %s and the group ID: %s',
self.bootstrap_servers,
self.group_id)
try:
self.consumer = kafka.KafkaConsumer(bootstrap_servers=self.bootstrap_servers,
group_id=self.group_id)
except kafka.errors.NoBrokersAvailable as err:
log.error(err, exc_info=True)
raise ListenerException(err)
log.debug('Subscribing to the %s topic', self.topic)
self.consumer.subscribe(topics=[self.topic]) | [
"def",
"start",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Creating the consumer using the bootstrap servers: %s and the group ID: %s'",
",",
"self",
".",
"bootstrap_servers",
",",
"self",
".",
"group_id",
")",
"try",
":",
"self",
".",
"consumer",
"=",
"kafka",
".",
"KafkaConsumer",
"(",
"bootstrap_servers",
"=",
"self",
".",
"bootstrap_servers",
",",
"group_id",
"=",
"self",
".",
"group_id",
")",
"except",
"kafka",
".",
"errors",
".",
"NoBrokersAvailable",
"as",
"err",
":",
"log",
".",
"error",
"(",
"err",
",",
"exc_info",
"=",
"True",
")",
"raise",
"ListenerException",
"(",
"err",
")",
"log",
".",
"debug",
"(",
"'Subscribing to the %s topic'",
",",
"self",
".",
"topic",
")",
"self",
".",
"consumer",
".",
"subscribe",
"(",
"topics",
"=",
"[",
"self",
".",
"topic",
"]",
")"
] | Start the kafka consumer. | [
"Startup",
"the",
"kafka",
"consumer",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/kafka.py#L41-L55 |
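Once start() has subscribed, records are drawn from the consumer like any kafka-python iterator; a hedged usage sketch (broker address and topic name are assumptions):

import kafka

consumer = kafka.KafkaConsumer(bootstrap_servers='localhost:9092',
                               group_id='napalm-logs')
consumer.subscribe(topics=['syslog.net'])
for record in consumer:
    print(record.value)  # the raw syslog payload produced onto the topic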
650 | napalm-automation/napalm-logs | napalm_logs/listener/kafka.py | KafkaListener.stop | def stop(self):
'''
Shut down the kafka consumer.
'''
log.info('Stopping the kafka listener class')
self.consumer.unsubscribe()
self.consumer.close() | python | def stop(self):
'''
Shut down the kafka consumer.
'''
log.info('Stopping the kafka listener class')
self.consumer.unsubscribe()
self.consumer.close() | [
"def",
"stop",
"(",
"self",
")",
":",
"log",
".",
"info",
"(",
"'Stopping te kafka listener class'",
")",
"self",
".",
"consumer",
".",
"unsubscribe",
"(",
")",
"self",
".",
"consumer",
".",
"close",
"(",
")"
] | Shut down the kafka consumer. | [
"Shutdown",
"kafka",
"consumer",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/kafka.py#L76-L82 |
651 | napalm-automation/napalm-logs | napalm_logs/transport/__init__.py | get_transport | def get_transport(name):
'''
Return the transport class.
'''
try:
log.debug('Using %s as transport', name)
return TRANSPORT_LOOKUP[name]
except KeyError:
msg = 'Transport {} is not available. Are the dependencies installed?'.format(name)
log.error(msg, exc_info=True)
raise InvalidTransportException(msg) | python | def get_transport(name):
'''
Return the transport class.
'''
try:
log.debug('Using %s as transport', name)
return TRANSPORT_LOOKUP[name]
except KeyError:
msg = 'Transport {} is not available. Are the dependencies installed?'.format(name)
log.error(msg, exc_info=True)
raise InvalidTransportException(msg) | [
"def",
"get_transport",
"(",
"name",
")",
":",
"try",
":",
"log",
".",
"debug",
"(",
"'Using %s as transport'",
",",
"name",
")",
"return",
"TRANSPORT_LOOKUP",
"[",
"name",
"]",
"except",
"KeyError",
":",
"msg",
"=",
"'Transport {} is not available. Are the dependencies installed?'",
".",
"format",
"(",
"name",
")",
"log",
".",
"error",
"(",
"msg",
",",
"exc_info",
"=",
"True",
")",
"raise",
"InvalidTransportException",
"(",
"msg",
")"
] | Return the transport class. | [
"Return",
"the",
"transport",
"class",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/transport/__init__.py#L50-L60 |
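The function is a thin guard around a lookup table; a hedged usage sketch (the import paths are assumptions, and TRANSPORT_LOOKUP is built from whichever transport modules imported successfully):

from napalm_logs.transport import get_transport
from napalm_logs.exceptions import InvalidTransportException

transport_class = get_transport('zmq')  # returns the class, not an instance
try:
    get_transport('bogus')
except InvalidTransportException:
    pass  # unknown names (or missing dependencies) raise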
652 | napalm-automation/napalm-logs | napalm_logs/listener/zeromq.py | ZMQListener.start | def start(self):
'''
Start the zmq consumer.
'''
zmq_uri = '{protocol}://{address}:{port}'.format(
protocol=self.protocol,
address=self.address,
port=self.port
) if self.port else\
'{protocol}://{address}'.format( # noqa
protocol=self.protocol,
address=self.address
)
log.debug('ZMQ URI: %s', zmq_uri)
self.ctx = zmq.Context()
if hasattr(zmq, self.type):
skt_type = getattr(zmq, self.type)
else:
skt_type = zmq.PULL
self.sub = self.ctx.socket(skt_type)
self.sub.connect(zmq_uri)
if self.hwm is not None:
try:
self.sub.setsockopt(zmq.HWM, self.hwm)
except AttributeError:
self.sub.setsockopt(zmq.RCVHWM, self.hwm)
if self.recvtimeout is not None:
log.debug('Setting RCVTIMEO to %d', self.recvtimeout)
self.sub.setsockopt(zmq.RCVTIMEO, self.recvtimeout)
if self.keepalive is not None:
log.debug('Setting TCP_KEEPALIVE to %d', self.keepalive)
self.sub.setsockopt(zmq.TCP_KEEPALIVE, self.keepalive)
if self.keepalive_idle is not None:
log.debug('Setting TCP_KEEPALIVE_IDLE to %d', self.keepalive_idle)
self.sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, self.keepalive_idle)
if self.keepalive_interval is not None:
log.debug('Setting TCP_KEEPALIVE_INTVL to %d', self.keepalive_interval)
self.sub.setsockopt(zmq.TCP_KEEPALIVE_INTVL, self.keepalive_interval) | python | def start(self):
'''
Start the zmq consumer.
'''
zmq_uri = '{protocol}://{address}:{port}'.format(
protocol=self.protocol,
address=self.address,
port=self.port
) if self.port else\
'{protocol}://{address}'.format( # noqa
protocol=self.protocol,
address=self.address
)
log.debug('ZMQ URI: %s', zmq_uri)
self.ctx = zmq.Context()
if hasattr(zmq, self.type):
skt_type = getattr(zmq, self.type)
else:
skt_type = zmq.PULL
self.sub = self.ctx.socket(skt_type)
self.sub.connect(zmq_uri)
if self.hwm is not None:
try:
self.sub.setsockopt(zmq.HWM, self.hwm)
except AttributeError:
self.sub.setsockopt(zmq.RCVHWM, self.hwm)
if self.recvtimeout is not None:
log.debug('Setting RCVTIMEO to %d', self.recvtimeout)
self.sub.setsockopt(zmq.RCVTIMEO, self.recvtimeout)
if self.keepalive is not None:
log.debug('Setting TCP_KEEPALIVE to %d', self.keepalive)
self.sub.setsockopt(zmq.TCP_KEEPALIVE, self.keepalive)
if self.keepalive_idle is not None:
log.debug('Setting TCP_KEEPALIVE_IDLE to %d', self.keepalive_idle)
self.sub.setsockopt(zmq.TCP_KEEPALIVE_IDLE, self.keepalive_idle)
if self.keepalive_interval is not None:
log.debug('Setting TCP_KEEPALIVE_INTVL to %d', self.keepalive_interval)
self.sub.setsockopt(zmq.TCP_KEEPALIVE_INTVL, self.keepalive_interval) | [
"def",
"start",
"(",
"self",
")",
":",
"zmq_uri",
"=",
"'{protocol}://{address}:{port}'",
".",
"format",
"(",
"protocol",
"=",
"self",
".",
"protocol",
",",
"address",
"=",
"self",
".",
"address",
",",
"port",
"=",
"self",
".",
"port",
")",
"if",
"self",
".",
"port",
"else",
"'{protocol}://{address}'",
".",
"format",
"(",
"# noqa",
"protocol",
"=",
"self",
".",
"protocol",
",",
"address",
"=",
"self",
".",
"address",
")",
"log",
".",
"debug",
"(",
"'ZMQ URI: %s'",
",",
"zmq_uri",
")",
"self",
".",
"ctx",
"=",
"zmq",
".",
"Context",
"(",
")",
"if",
"hasattr",
"(",
"zmq",
",",
"self",
".",
"type",
")",
":",
"skt_type",
"=",
"getattr",
"(",
"zmq",
",",
"self",
".",
"type",
")",
"else",
":",
"skt_type",
"=",
"zmq",
".",
"PULL",
"self",
".",
"sub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"skt_type",
")",
"self",
".",
"sub",
".",
"connect",
"(",
"zmq_uri",
")",
"if",
"self",
".",
"hwm",
"is",
"not",
"None",
":",
"try",
":",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"hwm",
")",
"except",
"AttributeError",
":",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"RCVHWM",
",",
"self",
".",
"hwm",
")",
"if",
"self",
".",
"recvtimeout",
"is",
"not",
"None",
":",
"log",
".",
"debug",
"(",
"'Setting RCVTIMEO to %d'",
",",
"self",
".",
"recvtimeout",
")",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"RCVTIMEO",
",",
"self",
".",
"recvtimeout",
")",
"if",
"self",
".",
"keepalive",
"is",
"not",
"None",
":",
"log",
".",
"debug",
"(",
"'Setting TCP_KEEPALIVE to %d'",
",",
"self",
".",
"keepalive",
")",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"TCP_KEEPALIVE",
",",
"self",
".",
"keepalive",
")",
"if",
"self",
".",
"keepalive_idle",
"is",
"not",
"None",
":",
"log",
".",
"debug",
"(",
"'Setting TCP_KEEPALIVE_IDLE to %d'",
",",
"self",
".",
"keepalive_idle",
")",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"TCP_KEEPALIVE_IDLE",
",",
"self",
".",
"keepalive_idle",
")",
"if",
"self",
".",
"keepalive_interval",
"is",
"not",
"None",
":",
"log",
".",
"debug",
"(",
"'Setting TCP_KEEPALIVE_INTVL to %d'",
",",
"self",
".",
"keepalive_interval",
")",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"TCP_KEEPALIVE_INTVL",
",",
"self",
".",
"keepalive_interval",
")"
] | Startup the zmq consumer. | [
"Startup",
"the",
"zmq",
"consumer",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/zeromq.py#L45-L82 |
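
Since the listener defaults to a PULL socket that connects to the configured URI, a quick way to exercise it is a PUSH socket that binds the same endpoint. A minimal sketch, assuming a tcp transport; the address, port, and payload are illustrative.

```python
import zmq

ctx = zmq.Context()
push = ctx.socket(zmq.PUSH)
push.bind('tcp://127.0.0.1:5556')  # the listener calls connect() on the same URI
push.send(b'<28>Jul  6 12:00:00 vmx01 rpd[1234]: example syslog payload')
push.close()
ctx.term()
```
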
653 | napalm-automation/napalm-logs | napalm_logs/listener/zeromq.py | ZMQListener.receive | def receive(self):
'''
Return the message received.
.. note::
In ZMQ we are unable to get the address where we got the message from.
'''
try:
msg = self.sub.recv()
except zmq.Again as error:
log.error('Unable to receive messages: %s', error, exc_info=True)
raise ListenerException(error)
log.debug('[%s] Received %s', time.time(), msg)
return msg, '' | python | def receive(self):
'''
Return the message received.
.. note::
In ZMQ we are unable to get the address where we got the message from.
'''
try:
msg = self.sub.recv()
except zmq.Again as error:
log.error('Unable to receive messages: %s', error, exc_info=True)
raise ListenerException(error)
log.debug('[%s] Received %s', time.time(), msg)
return msg, '' | [
"def",
"receive",
"(",
"self",
")",
":",
"try",
":",
"msg",
"=",
"self",
".",
"sub",
".",
"recv",
"(",
")",
"except",
"zmq",
".",
"Again",
"as",
"error",
":",
"log",
".",
"error",
"(",
"'Unable to receive messages: %s'",
",",
"error",
",",
"exc_info",
"=",
"True",
")",
"raise",
"ListenerException",
"(",
"error",
")",
"log",
".",
"debug",
"(",
"'[%s] Received %s'",
",",
"time",
".",
"time",
"(",
")",
",",
"msg",
")",
"return",
"msg",
",",
"''"
] | Return the message received.
.. note::
In ZMQ we are unable to get the address where we got the message from. | [
"Return",
"the",
"message",
"received",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/zeromq.py#L84-L97 |
654 | napalm-automation/napalm-logs | napalm_logs/listener/zeromq.py | ZMQListener.stop | def stop(self):
'''
Shutdown zmq listener.
'''
log.info('Stopping the zmq listener class')
self.sub.close()
self.ctx.term() | python | def stop(self):
'''
Shutdown zmq listener.
'''
log.info('Stopping the zmq listener class')
self.sub.close()
self.ctx.term() | [
"def",
"stop",
"(",
"self",
")",
":",
"log",
".",
"info",
"(",
"'Stopping the zmq listener class'",
")",
"self",
".",
"sub",
".",
"close",
"(",
")",
"self",
".",
"ctx",
".",
"term",
"(",
")"
] | Shutdown zmq listener. | [
"Shutdown",
"zmq",
"listener",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/zeromq.py#L99-L105 |
655 | napalm-automation/napalm-logs | napalm_logs/listener/udp.py | UDPListener.start | def start(self):
'''
Create the UDP listener socket.
'''
if ':' in self.address:
self.skt = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
else:
self.skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if self.reuse_port:
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, 'SO_REUSEPORT'):
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
else:
log.error('SO_REUSEPORT not supported')
try:
self.skt.bind((self.address, int(self.port)))
except socket.error as msg:
error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg)
log.error(error_string, exc_info=True)
raise BindException(error_string) | python | def start(self):
'''
Create the UDP listener socket.
'''
if ':' in self.address:
self.skt = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
else:
self.skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
if self.reuse_port:
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, 'SO_REUSEPORT'):
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
else:
log.error('SO_REUSEPORT not supported')
try:
self.skt.bind((self.address, int(self.port)))
except socket.error as msg:
error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg)
log.error(error_string, exc_info=True)
raise BindException(error_string) | [
"def",
"start",
"(",
"self",
")",
":",
"if",
"':'",
"in",
"self",
".",
"address",
":",
"self",
".",
"skt",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET6",
",",
"socket",
".",
"SOCK_DGRAM",
")",
"else",
":",
"self",
".",
"skt",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_DGRAM",
")",
"if",
"self",
".",
"reuse_port",
":",
"self",
".",
"skt",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEADDR",
",",
"1",
")",
"if",
"hasattr",
"(",
"socket",
",",
"'SO_REUSEPORT'",
")",
":",
"self",
".",
"skt",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEPORT",
",",
"1",
")",
"else",
":",
"log",
".",
"error",
"(",
"'SO_REUSEPORT not supported'",
")",
"try",
":",
"self",
".",
"skt",
".",
"bind",
"(",
"(",
"self",
".",
"address",
",",
"int",
"(",
"self",
".",
"port",
")",
")",
")",
"except",
"socket",
".",
"error",
"as",
"msg",
":",
"error_string",
"=",
"'Unable to bind to port {} on {}: {}'",
".",
"format",
"(",
"self",
".",
"port",
",",
"self",
".",
"address",
",",
"msg",
")",
"log",
".",
"error",
"(",
"error_string",
",",
"exc_info",
"=",
"True",
")",
"raise",
"BindException",
"(",
"error_string",
")"
] | Create the UDP listener socket. | [
"Create",
"the",
"UDP",
"listener",
"socket",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/udp.py#L43-L62 |
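
A one-datagram smoke test for the socket above; the address, port, and syslog payload are illustrative and must match the listener configuration.

```python
import socket

msg = b'<165>Jan  1 00:00:00 edge01 sshd[5817]: example message'
skt = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
skt.sendto(msg, ('127.0.0.1', 514))  # illustrative listener address/port
skt.close()
```
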
656 | napalm-automation/napalm-logs | napalm_logs/proc.py | NapalmLogsProc._suicide_when_without_parent | def _suicide_when_without_parent(self, parent_pid):
'''
Kill this process when the parent dies.
'''
while True:
time.sleep(5)
try:
# Check pid alive
os.kill(parent_pid, 0)
except OSError:
# Forcibly exit
# Regular sys.exit raises an exception
self.stop()
log.warning('The parent is not alive, exiting.')
os._exit(999) | python | def _suicide_when_without_parent(self, parent_pid):
'''
Kill this process when the parent dies.
'''
while True:
time.sleep(5)
try:
# Check pid alive
os.kill(parent_pid, 0)
except OSError:
# Forcibly exit
# Regular sys.exit raises an exception
self.stop()
log.warning('The parent is not alive, exiting.')
os._exit(999) | [
"def",
"_suicide_when_without_parent",
"(",
"self",
",",
"parent_pid",
")",
":",
"while",
"True",
":",
"time",
".",
"sleep",
"(",
"5",
")",
"try",
":",
"# Check pid alive",
"os",
".",
"kill",
"(",
"parent_pid",
",",
"0",
")",
"except",
"OSError",
":",
"# Forcibly exit",
"# Regular sys.exit raises an exception",
"self",
".",
"stop",
"(",
")",
"log",
".",
"warning",
"(",
"'The parent is not alive, exiting.'",
")",
"os",
".",
"_exit",
"(",
"999",
")"
] | Kill this process when the parent dies. | [
"Kill",
"this",
"process",
"when",
"the",
"parent",
"dies",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/proc.py#L20-L34 |
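
The watchdog above hinges on `os.kill(pid, 0)`: signal 0 delivers nothing and only performs the existence/permission check. Isolated, that probe looks like this:

```python
import os

def parent_alive(parent_pid):
    # signal 0 delivers no signal; it only checks that the PID exists
    # and that we are allowed to signal it
    try:
        os.kill(parent_pid, 0)
    except OSError:
        return False
    return True
```
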
657 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._setup_buffer | def _setup_buffer(self):
'''
Setup the buffer subsystem.
'''
if not self._buffer_cfg or not isinstance(self._buffer_cfg, dict):
return
buffer_name = list(self._buffer_cfg.keys())[0]
buffer_class = napalm_logs.buffer.get_interface(buffer_name)
log.debug('Setting up buffer interface "%s"', buffer_name)
if 'expire_time' not in self._buffer_cfg[buffer_name]:
self._buffer_cfg[buffer_name]['expire_time'] = CONFIG.BUFFER_EXPIRE_TIME
self._buffer = buffer_class(**self._buffer_cfg[buffer_name]) | python | def _setup_buffer(self):
'''
Setup the buffer subsystem.
'''
if not self._buffer_cfg or not isinstance(self._buffer_cfg, dict):
return
buffer_name = list(self._buffer_cfg.keys())[0]
buffer_class = napalm_logs.buffer.get_interface(buffer_name)
log.debug('Setting up buffer interface "%s"', buffer_name)
if 'expire_time' not in self._buffer_cfg[buffer_name]:
self._buffer_cfg[buffer_name]['expire_time'] = CONFIG.BUFFER_EXPIRE_TIME
self._buffer = buffer_class(**self._buffer_cfg[buffer_name]) | [
"def",
"_setup_buffer",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_buffer_cfg",
"or",
"not",
"isinstance",
"(",
"self",
".",
"_buffer_cfg",
",",
"dict",
")",
":",
"return",
"buffer_name",
"=",
"list",
"(",
"self",
".",
"_buffer_cfg",
".",
"keys",
"(",
")",
")",
"[",
"0",
"]",
"buffer_class",
"=",
"napalm_logs",
".",
"buffer",
".",
"get_interface",
"(",
"buffer_name",
")",
"log",
".",
"debug",
"(",
"'Setting up buffer interface \"%s\"'",
",",
"buffer_name",
")",
"if",
"'expire_time'",
"not",
"in",
"self",
".",
"_buffer_cfg",
"[",
"buffer_name",
"]",
":",
"self",
".",
"_buffer_cfg",
"[",
"buffer_name",
"]",
"[",
"'expire_time'",
"]",
"=",
"CONFIG",
".",
"BUFFER_EXPIRE_TIME",
"self",
".",
"_buffer",
"=",
"buffer_class",
"(",
"*",
"*",
"self",
".",
"_buffer_cfg",
"[",
"buffer_name",
"]",
")"
] | Setup the buffer subsystem. | [
"Setup",
"the",
"buffer",
"subsystem",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L134-L145 |
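
The method expects a single-key mapping, interface name to options, and only consults the first key. A hedged illustration of that shape; the `mem` interface name and its options are made up for the example.

```python
buffer_cfg = {'mem': {'expire_time': 5}}          # illustrative config
buffer_name = list(buffer_cfg.keys())[0]          # -> 'mem'
# buffer_class = napalm_logs.buffer.get_interface(buffer_name)
# buffer = buffer_class(**buffer_cfg[buffer_name])
```
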
658 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._setup_metrics | def _setup_metrics(self):
"""
Start metric exposition
"""
path = os.environ.get("prometheus_multiproc_dir")
if not os.path.exists(self.metrics_dir):
try:
log.info("Creating metrics directory")
os.makedirs(self.metrics_dir)
except OSError:
log.error("Failed to create metrics directory!")
raise ConfigurationException("Failed to create metrics directory!")
path = self.metrics_dir
elif path != self.metrics_dir:
path = self.metrics_dir
os.environ['prometheus_multiproc_dir'] = path
log.info("Cleaning metrics collection directory")
log.debug("Metrics directory set to: {}".format(path))
files = os.listdir(path)
for f in files:
if f.endswith(".db"):
os.remove(os.path.join(path, f))
log.debug("Starting metrics exposition")
if self.metrics_enabled:
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)
start_http_server(
port=self.metrics_port,
addr=self.metrics_address,
registry=registry
) | python | def _setup_metrics(self):
"""
Start metric exposition
"""
path = os.environ.get("prometheus_multiproc_dir")
if not os.path.exists(self.metrics_dir):
try:
log.info("Creating metrics directory")
os.makedirs(self.metrics_dir)
except OSError:
log.error("Failed to create metrics directory!")
raise ConfigurationException("Failed to create metrics directory!")
path = self.metrics_dir
elif path != self.metrics_dir:
path = self.metrics_dir
os.environ['prometheus_multiproc_dir'] = path
log.info("Cleaning metrics collection directory")
log.debug("Metrics directory set to: {}".format(path))
files = os.listdir(path)
for f in files:
if f.endswith(".db"):
os.remove(os.path.join(path, f))
log.debug("Starting metrics exposition")
if self.metrics_enabled:
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)
start_http_server(
port=self.metrics_port,
addr=self.metrics_address,
registry=registry
) | [
"def",
"_setup_metrics",
"(",
"self",
")",
":",
"path",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"\"prometheus_multiproc_dir\"",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"metrics_dir",
")",
":",
"try",
":",
"log",
".",
"info",
"(",
"\"Creating metrics directory\"",
")",
"os",
".",
"makedirs",
"(",
"self",
".",
"metrics_dir",
")",
"except",
"OSError",
":",
"log",
".",
"error",
"(",
"\"Failed to create metrics directory!\"",
")",
"raise",
"ConfigurationException",
"(",
"\"Failed to create metrics directory!\"",
")",
"path",
"=",
"self",
".",
"metrics_dir",
"elif",
"path",
"!=",
"self",
".",
"metrics_dir",
":",
"path",
"=",
"self",
".",
"metrics_dir",
"os",
".",
"environ",
"[",
"'prometheus_multiproc_dir'",
"]",
"=",
"path",
"log",
".",
"info",
"(",
"\"Cleaning metrics collection directory\"",
")",
"log",
".",
"debug",
"(",
"\"Metrics directory set to: {}\"",
".",
"format",
"(",
"path",
")",
")",
"files",
"=",
"os",
".",
"listdir",
"(",
"path",
")",
"for",
"f",
"in",
"files",
":",
"if",
"f",
".",
"endswith",
"(",
"\".db\"",
")",
":",
"os",
".",
"remove",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"f",
")",
")",
"log",
".",
"debug",
"(",
"\"Starting metrics exposition\"",
")",
"if",
"self",
".",
"metrics_enabled",
":",
"registry",
"=",
"CollectorRegistry",
"(",
")",
"multiprocess",
".",
"MultiProcessCollector",
"(",
"registry",
")",
"start_http_server",
"(",
"port",
"=",
"self",
".",
"metrics_port",
",",
"addr",
"=",
"self",
".",
"metrics_address",
",",
"registry",
"=",
"registry",
")"
] | Start metric exposition | [
"Start",
"metric",
"exposition"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L147-L177 |
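
A condensed sketch of the same prometheus_client multiprocess wiring; the directory and port are illustrative, not napalm-logs defaults.

```python
import os
from prometheus_client import CollectorRegistry, multiprocess, start_http_server

os.environ['prometheus_multiproc_dir'] = '/tmp/nl-metrics'  # must exist and be writable
registry = CollectorRegistry()
multiprocess.MultiProcessCollector(registry)  # aggregates the per-process .db files
start_http_server(port=9443, addr='0.0.0.0', registry=registry)
```
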
659 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._setup_log | def _setup_log(self):
'''
Setup the log object.
'''
logging_level = CONFIG.LOGGING_LEVEL.get(self.log_level.lower())
logging.basicConfig(format=self.log_format,
level=logging_level) | python | def _setup_log(self):
'''
Setup the log object.
'''
logging_level = CONFIG.LOGGING_LEVEL.get(self.log_level.lower())
logging.basicConfig(format=self.log_format,
level=logging_level) | [
"def",
"_setup_log",
"(",
"self",
")",
":",
"logging_level",
"=",
"CONFIG",
".",
"LOGGING_LEVEL",
".",
"get",
"(",
"self",
".",
"log_level",
".",
"lower",
"(",
")",
")",
"logging",
".",
"basicConfig",
"(",
"format",
"=",
"self",
".",
"log_format",
",",
"level",
"=",
"logging_level",
")"
] | Setup the log object. | [
"Setup",
"the",
"log",
"object",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L179-L185 |
660 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._whitelist_blacklist | def _whitelist_blacklist(self, os_name):
'''
Determines if the OS should be ignored,
depending on the whitelist-blacklist logic
configured by the user.
'''
return napalm_logs.ext.check_whitelist_blacklist(os_name,
whitelist=self.device_whitelist,
blacklist=self.device_blacklist) | python | def _whitelist_blacklist(self, os_name):
'''
Determines if the OS should be ignored,
depending on the whitelist-blacklist logic
configured by the user.
'''
return napalm_logs.ext.check_whitelist_blacklist(os_name,
whitelist=self.device_whitelist,
blacklist=self.device_blacklist) | [
"def",
"_whitelist_blacklist",
"(",
"self",
",",
"os_name",
")",
":",
"return",
"napalm_logs",
".",
"ext",
".",
"check_whitelist_blacklist",
"(",
"os_name",
",",
"whitelist",
"=",
"self",
".",
"device_whitelist",
",",
"blacklist",
"=",
"self",
".",
"device_blacklist",
")"
] | Determines if the OS should be ignored,
depending on the whitelist-blacklist logic
configured by the user. | [
"Determines",
"if",
"the",
"OS",
"should",
"be",
"ignored",
"depending",
"on",
"the",
"whitelist",
"-",
"blacklist",
"logic",
"configured",
"by",
"the",
"user",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L223-L231 |
661 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._extract_yaml_docstring | def _extract_yaml_docstring(stream):
'''
Extract the comments at the top of the YAML file,
from the stream handler.
Return the extracted comment as string.
'''
comment_lines = []
lines = stream.read().splitlines()
for line in lines:
line_strip = line.strip()
if not line_strip:
continue
if line_strip.startswith('#'):
comment_lines.append(
line_strip.replace('#', '', 1).strip()
)
else:
break
return ' '.join(comment_lines) | python | def _extract_yaml_docstring(stream):
'''
Extract the comments at the top of the YAML file,
from the stream handler.
Return the extracted comment as string.
'''
comment_lines = []
lines = stream.read().splitlines()
for line in lines:
line_strip = line.strip()
if not line_strip:
continue
if line_strip.startswith('#'):
comment_lines.append(
line_strip.replace('#', '', 1).strip()
)
else:
break
return ' '.join(comment_lines) | [
"def",
"_extract_yaml_docstring",
"(",
"stream",
")",
":",
"comment_lines",
"=",
"[",
"]",
"lines",
"=",
"stream",
".",
"read",
"(",
")",
".",
"splitlines",
"(",
")",
"for",
"line",
"in",
"lines",
":",
"line_strip",
"=",
"line",
".",
"strip",
"(",
")",
"if",
"not",
"line_strip",
":",
"continue",
"if",
"line_strip",
".",
"startswith",
"(",
"'#'",
")",
":",
"comment_lines",
".",
"append",
"(",
"line_strip",
".",
"replace",
"(",
"'#'",
",",
"''",
",",
"1",
")",
".",
"strip",
"(",
")",
")",
"else",
":",
"break",
"return",
"' '",
".",
"join",
"(",
"comment_lines",
")"
] | Extract the comments at the top of the YAML file,
from the stream handler.
Return the extracted comment as string. | [
"Extract",
"the",
"comments",
"at",
"the",
"top",
"of",
"the",
"YAML",
"file",
"from",
"the",
"stream",
"handler",
".",
"Return",
"the",
"extracted",
"comment",
"as",
"string",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L234-L252 |
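
The same extraction logic, run standalone on a made-up profile header:

```python
import io

stream = io.StringIO(
    '# Parses BGP neighbor state changes\n'
    '# into structured notifications.\n'
    'messages:\n'
    '  - error: BGP_NEIGHBOR_STATE_CHANGED\n'
)
comment_lines = []
for line in stream.read().splitlines():
    stripped = line.strip()
    if stripped.startswith('#'):
        comment_lines.append(stripped.replace('#', '', 1).strip())
    elif stripped:
        break  # first non-blank, non-comment line ends the header
print(' '.join(comment_lines))
# -> Parses BGP neighbor state changes into structured notifications.
```
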
662 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._verify_config_dict | def _verify_config_dict(self, valid, config, dev_os, key_path=None):
'''
Verify if the config dict is valid.
'''
if not key_path:
key_path = []
for key, value in valid.items():
self._verify_config_key(key, value, valid, config, dev_os, key_path) | python | def _verify_config_dict(self, valid, config, dev_os, key_path=None):
'''
Verify if the config dict is valid.
'''
if not key_path:
key_path = []
for key, value in valid.items():
self._verify_config_key(key, value, valid, config, dev_os, key_path) | [
"def",
"_verify_config_dict",
"(",
"self",
",",
"valid",
",",
"config",
",",
"dev_os",
",",
"key_path",
"=",
"None",
")",
":",
"if",
"not",
"key_path",
":",
"key_path",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"valid",
".",
"items",
"(",
")",
":",
"self",
".",
"_verify_config_key",
"(",
"key",
",",
"value",
",",
"valid",
",",
"config",
",",
"dev_os",
",",
"key_path",
")"
] | Verify if the config dict is valid. | [
"Verify",
"if",
"the",
"config",
"dict",
"is",
"valid",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L461-L468 |
663 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._verify_config | def _verify_config(self):
'''
Verify that the config is correct
'''
if not self.config_dict:
self._raise_config_exception('No config found')
# Check for device config, if there isn't anything then just log, do not raise an exception
for dev_os, dev_config in self.config_dict.items():
if not dev_config:
log.warning('No config found for %s', dev_os)
continue
# Compare the valid opts with the config
self._verify_config_dict(CONFIG.VALID_CONFIG, dev_config, dev_os)
log.debug('Read the config without error') | python | def _verify_config(self):
'''
Verify that the config is correct
'''
if not self.config_dict:
self._raise_config_exception('No config found')
# Check for device config, if there isn't anything then just log, do not raise an exception
for dev_os, dev_config in self.config_dict.items():
if not dev_config:
log.warning('No config found for %s', dev_os)
continue
# Compare the valid opts with the config
self._verify_config_dict(CONFIG.VALID_CONFIG, dev_config, dev_os)
log.debug('Read the config without error') | [
"def",
"_verify_config",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"config_dict",
":",
"self",
".",
"_raise_config_exception",
"(",
"'No config found'",
")",
"# Check for device conifg, if there isn't anything then just log, do not raise an exception",
"for",
"dev_os",
",",
"dev_config",
"in",
"self",
".",
"config_dict",
".",
"items",
"(",
")",
":",
"if",
"not",
"dev_config",
":",
"log",
".",
"warning",
"(",
"'No config found for %s'",
",",
"dev_os",
")",
"continue",
"# Compare the valid opts with the conifg",
"self",
".",
"_verify_config_dict",
"(",
"CONFIG",
".",
"VALID_CONFIG",
",",
"dev_config",
",",
"dev_os",
")",
"log",
".",
"debug",
"(",
"'Read the config without error'",
")"
] | Verify that the config is correct | [
"Verify",
"that",
"the",
"config",
"is",
"correct"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L470-L483 |
664 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._build_config | def _build_config(self):
'''
Build the config of the napalm syslog parser.
'''
if not self.config_dict:
if not self.config_path:
# No custom config path requested
# Read the native config files
self.config_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config'
)
log.info('Reading the configuration from %s', self.config_path)
self.config_dict = self._load_config(self.config_path)
if not self.extension_config_dict and\
self.extension_config_path and\
os.path.normpath(self.extension_config_path) != os.path.normpath(self.config_path): # same path?
# When extension config is not sent as dict
# But `extension_config_path` is specified
log.info('Reading extension configuration from %s', self.extension_config_path)
self.extension_config_dict = self._load_config(self.extension_config_path)
if self.extension_config_dict:
napalm_logs.utils.dictupdate(self.config_dict, self.extension_config_dict) | python | def _build_config(self):
'''
Build the config of the napalm syslog parser.
'''
if not self.config_dict:
if not self.config_path:
# No custom config path requested
# Read the native config files
self.config_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
'config'
)
log.info('Reading the configuration from %s', self.config_path)
self.config_dict = self._load_config(self.config_path)
if not self.extension_config_dict and\
self.extension_config_path and\
os.path.normpath(self.extension_config_path) != os.path.normpath(self.config_path): # same path?
# When extension config is not sent as dict
# But `extension_config_path` is specified
log.info('Reading extension configuration from %s', self.extension_config_path)
self.extension_config_dict = self._load_config(self.extension_config_path)
if self.extension_config_dict:
napalm_logs.utils.dictupdate(self.config_dict, self.extension_config_dict) | [
"def",
"_build_config",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"config_dict",
":",
"if",
"not",
"self",
".",
"config_path",
":",
"# No custom config path requested",
"# Read the native config files",
"self",
".",
"config_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"realpath",
"(",
"__file__",
")",
")",
",",
"'config'",
")",
"log",
".",
"info",
"(",
"'Reading the configuration from %s'",
",",
"self",
".",
"config_path",
")",
"self",
".",
"config_dict",
"=",
"self",
".",
"_load_config",
"(",
"self",
".",
"config_path",
")",
"if",
"not",
"self",
".",
"extension_config_dict",
"and",
"self",
".",
"extension_config_path",
"and",
"os",
".",
"path",
".",
"normpath",
"(",
"self",
".",
"extension_config_path",
")",
"!=",
"os",
".",
"path",
".",
"normpath",
"(",
"self",
".",
"config_path",
")",
":",
"# same path?",
"# When extension config is not sent as dict",
"# But `extension_config_path` is specified",
"log",
".",
"info",
"(",
"'Reading extension configuration from %s'",
",",
"self",
".",
"extension_config_path",
")",
"self",
".",
"extension_config_dict",
"=",
"self",
".",
"_load_config",
"(",
"self",
".",
"extension_config_path",
")",
"if",
"self",
".",
"extension_config_dict",
":",
"napalm_logs",
".",
"utils",
".",
"dictupdate",
"(",
"self",
".",
"config_dict",
",",
"self",
".",
"extension_config_dict",
")"
] | Build the config of the napalm syslog parser. | [
"Build",
"the",
"config",
"of",
"the",
"napalm",
"syslog",
"parser",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L485-L507 |
665 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._start_auth_proc | def _start_auth_proc(self):
'''
Start the authenticator process.
'''
log.debug('Computing the signing key hex')
verify_key = self.__signing_key.verify_key
sgn_verify_hex = verify_key.encode(encoder=nacl.encoding.HexEncoder)
log.debug('Starting the authenticator subprocess')
auth = NapalmLogsAuthProc(self.certificate,
self.keyfile,
self.__priv_key,
sgn_verify_hex,
self.auth_address,
self.auth_port)
proc = Process(target=auth.start)
proc.start()
proc.description = 'Auth process'
log.debug('Started auth process as %s with PID %s', proc._name, proc.pid)
return proc | python | def _start_auth_proc(self):
'''
Start the authenticator process.
'''
log.debug('Computing the signing key hex')
verify_key = self.__signing_key.verify_key
sgn_verify_hex = verify_key.encode(encoder=nacl.encoding.HexEncoder)
log.debug('Starting the authenticator subprocess')
auth = NapalmLogsAuthProc(self.certificate,
self.keyfile,
self.__priv_key,
sgn_verify_hex,
self.auth_address,
self.auth_port)
proc = Process(target=auth.start)
proc.start()
proc.description = 'Auth process'
log.debug('Started auth process as %s with PID %s', proc._name, proc.pid)
return proc | [
"def",
"_start_auth_proc",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Computing the signing key hex'",
")",
"verify_key",
"=",
"self",
".",
"__signing_key",
".",
"verify_key",
"sgn_verify_hex",
"=",
"verify_key",
".",
"encode",
"(",
"encoder",
"=",
"nacl",
".",
"encoding",
".",
"HexEncoder",
")",
"log",
".",
"debug",
"(",
"'Starting the authenticator subprocess'",
")",
"auth",
"=",
"NapalmLogsAuthProc",
"(",
"self",
".",
"certificate",
",",
"self",
".",
"keyfile",
",",
"self",
".",
"__priv_key",
",",
"sgn_verify_hex",
",",
"self",
".",
"auth_address",
",",
"self",
".",
"auth_port",
")",
"proc",
"=",
"Process",
"(",
"target",
"=",
"auth",
".",
"start",
")",
"proc",
".",
"start",
"(",
")",
"proc",
".",
"description",
"=",
"'Auth process'",
"log",
".",
"debug",
"(",
"'Started auth process as %s with PID %s'",
",",
"proc",
".",
"_name",
",",
"proc",
".",
"pid",
")",
"return",
"proc"
] | Start the authenticator process. | [
"Start",
"the",
"authenticator",
"process",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L509-L527 |
666 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._start_lst_proc | def _start_lst_proc(self,
listener_type,
listener_opts):
'''
Start the listener process.
'''
log.debug('Starting the listener process for %s', listener_type)
listener = NapalmLogsListenerProc(self.opts,
self.address,
self.port,
listener_type,
listener_opts=listener_opts)
proc = Process(target=listener.start)
proc.start()
proc.description = 'Listener process'
log.debug('Started listener process as %s with PID %s', proc._name, proc.pid)
return proc | python | def _start_lst_proc(self,
listener_type,
listener_opts):
'''
Start the listener process.
'''
log.debug('Starting the listener process for %s', listener_type)
listener = NapalmLogsListenerProc(self.opts,
self.address,
self.port,
listener_type,
listener_opts=listener_opts)
proc = Process(target=listener.start)
proc.start()
proc.description = 'Listener process'
log.debug('Started listener process as %s with PID %s', proc._name, proc.pid)
return proc | [
"def",
"_start_lst_proc",
"(",
"self",
",",
"listener_type",
",",
"listener_opts",
")",
":",
"log",
".",
"debug",
"(",
"'Starting the listener process for %s'",
",",
"listener_type",
")",
"listener",
"=",
"NapalmLogsListenerProc",
"(",
"self",
".",
"opts",
",",
"self",
".",
"address",
",",
"self",
".",
"port",
",",
"listener_type",
",",
"listener_opts",
"=",
"listener_opts",
")",
"proc",
"=",
"Process",
"(",
"target",
"=",
"listener",
".",
"start",
")",
"proc",
".",
"start",
"(",
")",
"proc",
".",
"description",
"=",
"'Listener process'",
"log",
".",
"debug",
"(",
"'Started listener process as %s with PID %s'",
",",
"proc",
".",
"_name",
",",
"proc",
".",
"pid",
")",
"return",
"proc"
] | Start the listener process. | [
"Start",
"the",
"listener",
"process",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L529-L545 |
667 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._start_srv_proc | def _start_srv_proc(self,
started_os_proc):
'''
Start the server process.
'''
log.debug('Starting the server process')
server = NapalmLogsServerProc(self.opts,
self.config_dict,
started_os_proc,
buffer=self._buffer)
proc = Process(target=server.start)
proc.start()
proc.description = 'Server process'
log.debug('Started server process as %s with PID %s', proc._name, proc.pid)
return proc | python | def _start_srv_proc(self,
started_os_proc):
'''
Start the server process.
'''
log.debug('Starting the server process')
server = NapalmLogsServerProc(self.opts,
self.config_dict,
started_os_proc,
buffer=self._buffer)
proc = Process(target=server.start)
proc.start()
proc.description = 'Server process'
log.debug('Started server process as %s with PID %s', proc._name, proc.pid)
return proc | [
"def",
"_start_srv_proc",
"(",
"self",
",",
"started_os_proc",
")",
":",
"log",
".",
"debug",
"(",
"'Starting the server process'",
")",
"server",
"=",
"NapalmLogsServerProc",
"(",
"self",
".",
"opts",
",",
"self",
".",
"config_dict",
",",
"started_os_proc",
",",
"buffer",
"=",
"self",
".",
"_buffer",
")",
"proc",
"=",
"Process",
"(",
"target",
"=",
"server",
".",
"start",
")",
"proc",
".",
"start",
"(",
")",
"proc",
".",
"description",
"=",
"'Server process'",
"log",
".",
"debug",
"(",
"'Started server process as %s with PID %s'",
",",
"proc",
".",
"_name",
",",
"proc",
".",
"pid",
")",
"return",
"proc"
] | Start the server process. | [
"Start",
"the",
"server",
"process",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L547-L561 |
668 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._start_pub_proc | def _start_pub_proc(self,
publisher_type,
publisher_opts,
pub_id):
'''
Start the publisher process.
'''
log.debug('Starting the publisher process for %s', publisher_type)
publisher = NapalmLogsPublisherProc(self.opts,
self.publish_address,
self.publish_port,
publisher_type,
self.serializer,
self.__priv_key,
self.__signing_key,
publisher_opts,
disable_security=self.disable_security,
pub_id=pub_id)
proc = Process(target=publisher.start)
proc.start()
proc.description = 'Publisher process'
log.debug('Started publisher process as %s with PID %s', proc._name, proc.pid)
return proc | python | def _start_pub_proc(self,
publisher_type,
publisher_opts,
pub_id):
'''
Start the publisher process.
'''
log.debug('Starting the publisher process for %s', publisher_type)
publisher = NapalmLogsPublisherProc(self.opts,
self.publish_address,
self.publish_port,
publisher_type,
self.serializer,
self.__priv_key,
self.__signing_key,
publisher_opts,
disable_security=self.disable_security,
pub_id=pub_id)
proc = Process(target=publisher.start)
proc.start()
proc.description = 'Publisher process'
log.debug('Started publisher process as %s with PID %s', proc._name, proc.pid)
return proc | [
"def",
"_start_pub_proc",
"(",
"self",
",",
"publisher_type",
",",
"publisher_opts",
",",
"pub_id",
")",
":",
"log",
".",
"debug",
"(",
"'Starting the publisher process for %s'",
",",
"publisher_type",
")",
"publisher",
"=",
"NapalmLogsPublisherProc",
"(",
"self",
".",
"opts",
",",
"self",
".",
"publish_address",
",",
"self",
".",
"publish_port",
",",
"publisher_type",
",",
"self",
".",
"serializer",
",",
"self",
".",
"__priv_key",
",",
"self",
".",
"__signing_key",
",",
"publisher_opts",
",",
"disable_security",
"=",
"self",
".",
"disable_security",
",",
"pub_id",
"=",
"pub_id",
")",
"proc",
"=",
"Process",
"(",
"target",
"=",
"publisher",
".",
"start",
")",
"proc",
".",
"start",
"(",
")",
"proc",
".",
"description",
"=",
"'Publisher process'",
"log",
".",
"debug",
"(",
"'Started publisher process as %s with PID %s'",
",",
"proc",
".",
"_name",
",",
"proc",
".",
"pid",
")",
"return",
"proc"
] | Start the publisher process. | [
"Start",
"the",
"publisher",
"process",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L573-L595 |
669 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._start_dev_proc | def _start_dev_proc(self,
device_os,
device_config):
'''
Start the device worker process.
'''
log.info('Starting the child process for %s', device_os)
dos = NapalmLogsDeviceProc(device_os,
self.opts,
device_config)
os_proc = Process(target=dos.start)
os_proc.start()
os_proc.description = '%s device process' % device_os
log.debug('Started process %s for %s, having PID %s', os_proc._name, device_os, os_proc.pid)
return os_proc | python | def _start_dev_proc(self,
device_os,
device_config):
'''
Start the device worker process.
'''
log.info('Starting the child process for %s', device_os)
dos = NapalmLogsDeviceProc(device_os,
self.opts,
device_config)
os_proc = Process(target=dos.start)
os_proc.start()
os_proc.description = '%s device process' % device_os
log.debug('Started process %s for %s, having PID %s', os_proc._name, device_os, os_proc.pid)
return os_proc | [
"def",
"_start_dev_proc",
"(",
"self",
",",
"device_os",
",",
"device_config",
")",
":",
"log",
".",
"info",
"(",
"'Starting the child process for %s'",
",",
"device_os",
")",
"dos",
"=",
"NapalmLogsDeviceProc",
"(",
"device_os",
",",
"self",
".",
"opts",
",",
"device_config",
")",
"os_proc",
"=",
"Process",
"(",
"target",
"=",
"dos",
".",
"start",
")",
"os_proc",
".",
"start",
"(",
")",
"os_proc",
".",
"description",
"=",
"'%s device process'",
"%",
"device_os",
"log",
".",
"debug",
"(",
"'Started process %s for %s, having PID %s'",
",",
"os_proc",
".",
"_name",
",",
"device_os",
",",
"os_proc",
".",
"pid",
")",
"return",
"os_proc"
] | Start the device worker process. | [
"Start",
"the",
"device",
"worker",
"process",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L597-L611 |
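
The `_start_*_proc` helpers above all share one shape, reduced here to its core; the names are illustrative.

```python
from multiprocessing import Process

class Worker:
    def start(self):
        pass  # long-running loop in the real processes

def spawn(worker, description):
    proc = Process(target=worker.start)
    proc.start()
    proc.description = description  # plain attribute, later read by _check_children
    return proc
```
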
670 | napalm-automation/napalm-logs | napalm_logs/base.py | NapalmLogs._check_children | def _check_children(self):
'''
Check all of the child processes are still running
'''
while self.up:
time.sleep(1)
for process in self._processes:
if process.is_alive() is True:
continue
log.debug('%s is dead. Stopping the napalm-logs engine.', process.description)
self.stop_engine() | python | def _check_children(self):
'''
Check all of the child processes are still running
'''
while self.up:
time.sleep(1)
for process in self._processes:
if process.is_alive() is True:
continue
log.debug('%s is dead. Stopping the napalm-logs engine.', process.description)
self.stop_engine() | [
"def",
"_check_children",
"(",
"self",
")",
":",
"while",
"self",
".",
"up",
":",
"time",
".",
"sleep",
"(",
"1",
")",
"for",
"process",
"in",
"self",
".",
"_processes",
":",
"if",
"process",
".",
"is_alive",
"(",
")",
"is",
"True",
":",
"continue",
"log",
".",
"debug",
"(",
"'%s is dead. Stopping the napalm-logs engine.'",
",",
"process",
".",
"description",
")",
"self",
".",
"stop_engine",
"(",
")"
] | Check all of the child processes are still running | [
"Check",
"all",
"of",
"the",
"child",
"processes",
"are",
"still",
"running"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/base.py#L664-L674 |
671 | napalm-automation/napalm-logs | napalm_logs/pub_proxy.py | NapalmLogsPublisherProxy._setup_ipc | def _setup_ipc(self):
'''
Setup the IPC PUB and SUB sockets for the proxy.
'''
log.debug('Setting up the internal IPC proxy')
self.ctx = zmq.Context()
# Frontend
self.sub = self.ctx.socket(zmq.SUB)
self.sub.bind(PUB_PX_IPC_URL)
self.sub.setsockopt(zmq.SUBSCRIBE, b'')
log.debug('Setting HWM for the proxy frontend: %d', self.hwm)
try:
self.sub.setsockopt(zmq.HWM, self.hwm)
# zmq 2
except AttributeError:
# zmq 3
self.sub.setsockopt(zmq.SNDHWM, self.hwm)
# Backend
self.pub = self.ctx.socket(zmq.PUB)
self.pub.bind(PUB_IPC_URL)
log.debug('Setting HWM for the proxy backend: %d', self.hwm)
try:
self.pub.setsockopt(zmq.HWM, self.hwm)
# zmq 2
except AttributeError:
# zmq 3
self.pub.setsockopt(zmq.SNDHWM, self.hwm) | python | def _setup_ipc(self):
'''
Setup the IPC PUB and SUB sockets for the proxy.
'''
log.debug('Setting up the internal IPC proxy')
self.ctx = zmq.Context()
# Frontend
self.sub = self.ctx.socket(zmq.SUB)
self.sub.bind(PUB_PX_IPC_URL)
self.sub.setsockopt(zmq.SUBSCRIBE, b'')
log.debug('Setting HWM for the proxy frontend: %d', self.hwm)
try:
self.sub.setsockopt(zmq.HWM, self.hwm)
# zmq 2
except AttributeError:
# zmq 3
self.sub.setsockopt(zmq.SNDHWM, self.hwm)
# Backend
self.pub = self.ctx.socket(zmq.PUB)
self.pub.bind(PUB_IPC_URL)
log.debug('Setting HWM for the proxy backend: %d', self.hwm)
try:
self.pub.setsockopt(zmq.HWM, self.hwm)
# zmq 2
except AttributeError:
# zmq 3
self.pub.setsockopt(zmq.SNDHWM, self.hwm) | [
"def",
"_setup_ipc",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Setting up the internal IPC proxy'",
")",
"self",
".",
"ctx",
"=",
"zmq",
".",
"Context",
"(",
")",
"# Frontend",
"self",
".",
"sub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"SUB",
")",
"self",
".",
"sub",
".",
"bind",
"(",
"PUB_PX_IPC_URL",
")",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"SUBSCRIBE",
",",
"b''",
")",
"log",
".",
"debug",
"(",
"'Setting HWM for the proxy frontend: %d'",
",",
"self",
".",
"hwm",
")",
"try",
":",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"hwm",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"SNDHWM",
",",
"self",
".",
"hwm",
")",
"# Backend",
"self",
".",
"pub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"PUB",
")",
"self",
".",
"pub",
".",
"bind",
"(",
"PUB_IPC_URL",
")",
"log",
".",
"debug",
"(",
"'Setting HWM for the proxy backend: %d'",
",",
"self",
".",
"hwm",
")",
"try",
":",
"self",
".",
"pub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"hwm",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"pub",
".",
"setsockopt",
"(",
"zmq",
".",
"SNDHWM",
",",
"self",
".",
"hwm",
")"
] | Setup the IPC PUB and SUB sockets for the proxy. | [
"Setup",
"the",
"IPC",
"PUB",
"and",
"SUB",
"sockets",
"for",
"the",
"proxy",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/pub_proxy.py#L38-L64 |
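
The frontend/backend pair set up above is typically shovelled by a zmq device; a minimal SUB-to-PUB sketch, with illustrative ipc paths standing in for PUB_PX_IPC_URL and PUB_IPC_URL:

```python
import zmq

ctx = zmq.Context()
frontend = ctx.socket(zmq.SUB)
frontend.bind('ipc:///tmp/example_px.ipc')
frontend.setsockopt(zmq.SUBSCRIBE, b'')
backend = ctx.socket(zmq.PUB)
backend.bind('ipc:///tmp/example_pub.ipc')
zmq.proxy(frontend, backend)  # blocks, forwarding frontend traffic to backend
```
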
672 | napalm-automation/napalm-logs | napalm_logs/publisher.py | NapalmLogsPublisherProc._setup_ipc | def _setup_ipc(self):
'''
Subscribe to the pub IPC
and publish the messages
on the right transport.
'''
self.ctx = zmq.Context()
log.debug('Setting up the %s publisher subscriber #%d', self._transport_type, self.pub_id)
self.sub = self.ctx.socket(zmq.SUB)
self.sub.connect(PUB_IPC_URL)
self.sub.setsockopt(zmq.SUBSCRIBE, b'')
try:
self.sub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm']) | python | def _setup_ipc(self):
'''
Subscribe to the pub IPC
and publish the messages
on the right transport.
'''
self.ctx = zmq.Context()
log.debug('Setting up the %s publisher subscriber #%d', self._transport_type, self.pub_id)
self.sub = self.ctx.socket(zmq.SUB)
self.sub.connect(PUB_IPC_URL)
self.sub.setsockopt(zmq.SUBSCRIBE, b'')
try:
self.sub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.sub.setsockopt(zmq.RCVHWM, self.opts['hwm']) | [
"def",
"_setup_ipc",
"(",
"self",
")",
":",
"self",
".",
"ctx",
"=",
"zmq",
".",
"Context",
"(",
")",
"log",
".",
"debug",
"(",
"'Setting up the %s publisher subscriber #%d'",
",",
"self",
".",
"_transport_type",
",",
"self",
".",
"pub_id",
")",
"self",
".",
"sub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"SUB",
")",
"self",
".",
"sub",
".",
"connect",
"(",
"PUB_IPC_URL",
")",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"SUBSCRIBE",
",",
"b''",
")",
"try",
":",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"sub",
".",
"setsockopt",
"(",
"zmq",
".",
"RCVHWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")"
] | Subscribe to the pub IPC
and publish the messages
on the right transport. | [
"Subscribe",
"to",
"the",
"pub",
"IPC",
"and",
"publish",
"the",
"messages",
"on",
"the",
"right",
"transport",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/publisher.py#L71-L87 |
673 | napalm-automation/napalm-logs | napalm_logs/publisher.py | NapalmLogsPublisherProc._prepare | def _prepare(self, serialized_obj):
'''
Prepare the object to be sent over the untrusted channel.
'''
# generating a nonce
nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
# encrypting using the nonce
encrypted = self.__safe.encrypt(serialized_obj, nonce)
# sign the message
signed = self.__signing_key.sign(encrypted)
return signed | python | def _prepare(self, serialized_obj):
'''
Prepare the object to be sent over the untrusted channel.
'''
# generating a nonce
nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
# encrypting using the nonce
encrypted = self.__safe.encrypt(serialized_obj, nonce)
# sign the message
signed = self.__signing_key.sign(encrypted)
return signed | [
"def",
"_prepare",
"(",
"self",
",",
"serialized_obj",
")",
":",
"# generating a nonce",
"nonce",
"=",
"nacl",
".",
"utils",
".",
"random",
"(",
"nacl",
".",
"secret",
".",
"SecretBox",
".",
"NONCE_SIZE",
")",
"# encrypting using the nonce",
"encrypted",
"=",
"self",
".",
"__safe",
".",
"encrypt",
"(",
"serialized_obj",
",",
"nonce",
")",
"# sign the message",
"signed",
"=",
"self",
".",
"__signing_key",
".",
"sign",
"(",
"encrypted",
")",
"return",
"signed"
] | Prepare the object to be sent over the untrusted channel. | [
"Prepare",
"the",
"object",
"to",
"be",
"sent",
"over",
"the",
"untrusted",
"channel",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/publisher.py#L110-L120 |
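
A hedged round-trip of the encrypt-then-sign scheme above; the keys are generated locally for illustration, whereas napalm-logs distributes them to clients over the TLS channel shown later in this file.

```python
import nacl.secret
import nacl.signing
import nacl.utils
import umsgpack

signing_key = nacl.signing.SigningKey.generate()
box = nacl.secret.SecretBox(nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE))

payload = umsgpack.packb({'error': 'INTERFACE_DOWN'})
nonce = nacl.utils.random(nacl.secret.SecretBox.NONCE_SIZE)
signed = signing_key.sign(box.encrypt(payload, nonce))  # what _prepare emits

# receiving side: verify the signature, then decrypt, then unpack
encrypted = signing_key.verify_key.verify(signed)
assert umsgpack.unpackb(box.decrypt(encrypted)) == {'error': 'INTERFACE_DOWN'}
```
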
674 | napalm-automation/napalm-logs | napalm_logs/listener/__init__.py | get_listener | def get_listener(name):
'''
Return the listener class.
'''
try:
log.debug('Using %s as listener', name)
return LISTENER_LOOKUP[name]
except KeyError:
msg = 'Listener {} is not available. Are the dependencies installed?'.format(name)
log.error(msg, exc_info=True)
raise InvalidListenerException(msg) | python | def get_listener(name):
'''
Return the listener class.
'''
try:
log.debug('Using %s as listener', name)
return LISTENER_LOOKUP[name]
except KeyError:
msg = 'Listener {} is not available. Are the dependencies installed?'.format(name)
log.error(msg, exc_info=True)
raise InvalidListenerException(msg) | [
"def",
"get_listener",
"(",
"name",
")",
":",
"try",
":",
"log",
".",
"debug",
"(",
"'Using %s as listener'",
",",
"name",
")",
"return",
"LISTENER_LOOKUP",
"[",
"name",
"]",
"except",
"KeyError",
":",
"msg",
"=",
"'Listener {} is not available. Are the dependencies installed?'",
".",
"format",
"(",
"name",
")",
"log",
".",
"error",
"(",
"msg",
",",
"exc_info",
"=",
"True",
")",
"raise",
"InvalidListenerException",
"(",
"msg",
")"
] | Return the listener class. | [
"Return",
"the",
"listener",
"class",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/__init__.py#L41-L51 |
675 | napalm-automation/napalm-logs | napalm_logs/utils/__init__.py | ClientAuth._start_keep_alive | def _start_keep_alive(self):
'''
Start the keep alive thread as a daemon
'''
keep_alive_thread = threading.Thread(target=self.keep_alive)
keep_alive_thread.daemon = True
keep_alive_thread.start() | python | def _start_keep_alive(self):
'''
Start the keep alive thread as a daemon
'''
keep_alive_thread = threading.Thread(target=self.keep_alive)
keep_alive_thread.daemon = True
keep_alive_thread.start() | [
"def",
"_start_keep_alive",
"(",
"self",
")",
":",
"keep_alive_thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"keep_alive",
")",
"keep_alive_thread",
".",
"daemon",
"=",
"True",
"keep_alive_thread",
".",
"start",
"(",
")"
] | Start the keep alive thread as a daemon | [
"Start",
"the",
"keep",
"alive",
"thread",
"as",
"a",
"daemon"
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L61-L67 |
676 | napalm-automation/napalm-logs | napalm_logs/utils/__init__.py | ClientAuth.keep_alive | def keep_alive(self):
'''
Send a keep alive request periodically to make sure that the server
is still alive. If not then try to reconnect.
'''
self.ssl_skt.settimeout(defaults.AUTH_KEEP_ALIVE_INTERVAL)
while self.__up:
try:
log.debug('Sending keep-alive message to the server')
self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE)
except socket.error:
log.error('Unable to send keep-alive message to the server.')
log.error('Re-init the SSL socket.')
self.reconnect()
log.debug('Trying to re-send the keep-alive message to the server.')
self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE)
msg = self.ssl_skt.recv(len(defaults.AUTH_KEEP_ALIVE_ACK))
log.debug('Received %s from the keep-alive server', msg)
if msg != defaults.AUTH_KEEP_ALIVE_ACK:
log.error('Received %s instead of %s from the auth keep-alive server',
msg, defaults.AUTH_KEEP_ALIVE_ACK)
log.error('Re-init the SSL socket.')
self.reconnect()
time.sleep(defaults.AUTH_KEEP_ALIVE_INTERVAL) | python | def keep_alive(self):
'''
Send a keep alive request periodically to make sure that the server
is still alive. If not then try to reconnect.
'''
self.ssl_skt.settimeout(defaults.AUTH_KEEP_ALIVE_INTERVAL)
while self.__up:
try:
log.debug('Sending keep-alive message to the server')
self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE)
except socket.error:
log.error('Unable to send keep-alive message to the server.')
log.error('Re-init the SSL socket.')
self.reconnect()
log.debug('Trying to re-send the keep-alive message to the server.')
self.ssl_skt.send(defaults.AUTH_KEEP_ALIVE)
msg = self.ssl_skt.recv(len(defaults.AUTH_KEEP_ALIVE_ACK))
log.debug('Received %s from the keep-alive server', msg)
if msg != defaults.AUTH_KEEP_ALIVE_ACK:
log.error('Received %s instead of %s from the auth keep-alive server',
msg, defaults.AUTH_KEEP_ALIVE_ACK)
log.error('Re-init the SSL socket.')
self.reconnect()
time.sleep(defaults.AUTH_KEEP_ALIVE_INTERVAL) | [
"def",
"keep_alive",
"(",
"self",
")",
":",
"self",
".",
"ssl_skt",
".",
"settimeout",
"(",
"defaults",
".",
"AUTH_KEEP_ALIVE_INTERVAL",
")",
"while",
"self",
".",
"__up",
":",
"try",
":",
"log",
".",
"debug",
"(",
"'Sending keep-alive message to the server'",
")",
"self",
".",
"ssl_skt",
".",
"send",
"(",
"defaults",
".",
"AUTH_KEEP_ALIVE",
")",
"except",
"socket",
".",
"error",
":",
"log",
".",
"error",
"(",
"'Unable to send keep-alive message to the server.'",
")",
"log",
".",
"error",
"(",
"'Re-init the SSL socket.'",
")",
"self",
".",
"reconnect",
"(",
")",
"log",
".",
"debug",
"(",
"'Trying to re-send the keep-alive message to the server.'",
")",
"self",
".",
"ssl_skt",
".",
"send",
"(",
"defaults",
".",
"AUTH_KEEP_ALIVE",
")",
"msg",
"=",
"self",
".",
"ssl_skt",
".",
"recv",
"(",
"len",
"(",
"defaults",
".",
"AUTH_KEEP_ALIVE_ACK",
")",
")",
"log",
".",
"debug",
"(",
"'Received %s from the keep-alive server'",
",",
"msg",
")",
"if",
"msg",
"!=",
"defaults",
".",
"AUTH_KEEP_ALIVE_ACK",
":",
"log",
".",
"error",
"(",
"'Received %s instead of %s form the auth keep-alive server'",
",",
"msg",
",",
"defaults",
".",
"AUTH_KEEP_ALIVE_ACK",
")",
"log",
".",
"error",
"(",
"'Re-init the SSL socket.'",
")",
"self",
".",
"reconnect",
"(",
")",
"time",
".",
"sleep",
"(",
"defaults",
".",
"AUTH_KEEP_ALIVE_INTERVAL",
")"
] | Send a keep alive request periodically to make sure that the server
is still alive. If not then try to reconnect. | [
"Send",
"a",
"keep",
"alive",
"request",
"periodically",
"to",
"make",
"sure",
"that",
"the",
"server",
"is",
"still",
"alive",
".",
"If",
"not",
"then",
"try",
"to",
"reconnect",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L69-L92 |
677 | napalm-automation/napalm-logs | napalm_logs/utils/__init__.py | ClientAuth.reconnect | def reconnect(self):
'''
Try to reconnect and re-authenticate with the server.
'''
log.debug('Closing the SSL socket.')
try:
self.ssl_skt.close()
except socket.error:
log.error('The socket seems to be closed already.')
log.debug('Re-opening the SSL socket.')
self.authenticate() | python | def reconnect(self):
'''
Try to reconnect and re-authenticate with the server.
'''
log.debug('Closing the SSL socket.')
try:
self.ssl_skt.close()
except socket.error:
log.error('The socket seems to be closed already.')
log.debug('Re-opening the SSL socket.')
self.authenticate() | [
"def",
"reconnect",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Closing the SSL socket.'",
")",
"try",
":",
"self",
".",
"ssl_skt",
".",
"close",
"(",
")",
"except",
"socket",
".",
"error",
":",
"log",
".",
"error",
"(",
"'The socket seems to be closed already.'",
")",
"log",
".",
"debug",
"(",
"'Re-opening the SSL socket.'",
")",
"self",
".",
"authenticate",
"(",
")"
] | Try to reconnect and re-authenticate with the server. | [
"Try",
"to",
"reconnect",
"and",
"re",
"-",
"authenticate",
"with",
"the",
"server",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L94-L104 |
678 | napalm-automation/napalm-logs | napalm_logs/utils/__init__.py | ClientAuth.authenticate | def authenticate(self):
'''
Authenticate the client and return the private
and signature keys.
Establish a connection through a secured socket,
then do the handshake using the napalm-logs
auth algorithm.
'''
log.debug('Authenticate to %s:%d, using the certificate %s',
self.address, self.port, self.certificate)
if ':' in self.address:
skt_ver = socket.AF_INET6
else:
skt_ver = socket.AF_INET
skt = socket.socket(skt_ver, socket.SOCK_STREAM)
self.ssl_skt = ssl.wrap_socket(skt,
ca_certs=self.certificate,
cert_reqs=ssl.CERT_REQUIRED)
try:
self.ssl_skt.connect((self.address, self.port))
self.auth_try_id = 0
except socket.error as err:
log.error('Unable to open the SSL socket.')
self.auth_try_id += 1
if not self.max_try or self.auth_try_id < self.max_try:
log.error('Trying to authenticate again in %d seconds', self.timeout)
time.sleep(self.timeout)
self.authenticate()
log.critical('Giving up, unable to authenticate to %s:%d using the certificate %s',
self.address, self.port, self.certificate)
raise ClientConnectException(err)
# Explicit INIT
self.ssl_skt.write(defaults.MAGIC_REQ)
# Receive the private key
private_key = self.ssl_skt.recv(defaults.BUFFER_SIZE)
# Send back explicit ACK
self.ssl_skt.write(defaults.MAGIC_ACK)
# Read the hex of the verification key
verify_key_hex = self.ssl_skt.recv(defaults.BUFFER_SIZE)
# Send back explicit ACK
self.ssl_skt.write(defaults.MAGIC_ACK)
self.priv_key = nacl.secret.SecretBox(private_key)
self.verify_key = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder) | python | def authenticate(self):
'''
Authenticate the client and return the private
and signature keys.
Establish a connection through a secured socket,
then do the handshake using the napalm-logs
auth algorithm.
'''
log.debug('Authenticate to %s:%d, using the certificate %s',
self.address, self.port, self.certificate)
if ':' in self.address:
skt_ver = socket.AF_INET6
else:
skt_ver = socket.AF_INET
skt = socket.socket(skt_ver, socket.SOCK_STREAM)
self.ssl_skt = ssl.wrap_socket(skt,
ca_certs=self.certificate,
cert_reqs=ssl.CERT_REQUIRED)
try:
self.ssl_skt.connect((self.address, self.port))
self.auth_try_id = 0
except socket.error as err:
log.error('Unable to open the SSL socket.')
self.auth_try_id += 1
if not self.max_try or self.auth_try_id < self.max_try:
log.error('Trying to authenticate again in %d seconds', self.timeout)
time.sleep(self.timeout)
self.authenticate()
log.critical('Giving up, unable to authenticate to %s:%d using the certificate %s',
self.address, self.port, self.certificate)
raise ClientConnectException(err)
# Explicit INIT
self.ssl_skt.write(defaults.MAGIC_REQ)
# Receive the private key
private_key = self.ssl_skt.recv(defaults.BUFFER_SIZE)
# Send back explicit ACK
self.ssl_skt.write(defaults.MAGIC_ACK)
# Read the hex of the verification key
verify_key_hex = self.ssl_skt.recv(defaults.BUFFER_SIZE)
# Send back explicit ACK
self.ssl_skt.write(defaults.MAGIC_ACK)
self.priv_key = nacl.secret.SecretBox(private_key)
self.verify_key = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder) | [
"def",
"authenticate",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Authenticate to %s:%d, using the certificate %s'",
",",
"self",
".",
"address",
",",
"self",
".",
"port",
",",
"self",
".",
"certificate",
")",
"if",
"':'",
"in",
"self",
".",
"address",
":",
"skt_ver",
"=",
"socket",
".",
"AF_INET6",
"else",
":",
"skt_ver",
"=",
"socket",
".",
"AF_INET",
"skt",
"=",
"socket",
".",
"socket",
"(",
"skt_ver",
",",
"socket",
".",
"SOCK_STREAM",
")",
"self",
".",
"ssl_skt",
"=",
"ssl",
".",
"wrap_socket",
"(",
"skt",
",",
"ca_certs",
"=",
"self",
".",
"certificate",
",",
"cert_reqs",
"=",
"ssl",
".",
"CERT_REQUIRED",
")",
"try",
":",
"self",
".",
"ssl_skt",
".",
"connect",
"(",
"(",
"self",
".",
"address",
",",
"self",
".",
"port",
")",
")",
"self",
".",
"auth_try_id",
"=",
"0",
"except",
"socket",
".",
"error",
"as",
"err",
":",
"log",
".",
"error",
"(",
"'Unable to open the SSL socket.'",
")",
"self",
".",
"auth_try_id",
"+=",
"1",
"if",
"not",
"self",
".",
"max_try",
"or",
"self",
".",
"auth_try_id",
"<",
"self",
".",
"max_try",
":",
"log",
".",
"error",
"(",
"'Trying to authenticate again in %d seconds'",
",",
"self",
".",
"timeout",
")",
"time",
".",
"sleep",
"(",
"self",
".",
"timeout",
")",
"self",
".",
"authenticate",
"(",
")",
"log",
".",
"critical",
"(",
"'Giving up, unable to authenticate to %s:%d using the certificate %s'",
",",
"self",
".",
"address",
",",
"self",
".",
"port",
",",
"self",
".",
"certificate",
")",
"raise",
"ClientConnectException",
"(",
"err",
")",
"# Explicit INIT",
"self",
".",
"ssl_skt",
".",
"write",
"(",
"defaults",
".",
"MAGIC_REQ",
")",
"# Receive the private key",
"private_key",
"=",
"self",
".",
"ssl_skt",
".",
"recv",
"(",
"defaults",
".",
"BUFFER_SIZE",
")",
"# Send back explicit ACK",
"self",
".",
"ssl_skt",
".",
"write",
"(",
"defaults",
".",
"MAGIC_ACK",
")",
"# Read the hex of the verification key",
"verify_key_hex",
"=",
"self",
".",
"ssl_skt",
".",
"recv",
"(",
"defaults",
".",
"BUFFER_SIZE",
")",
"# Send back explicit ACK",
"self",
".",
"ssl_skt",
".",
"write",
"(",
"defaults",
".",
"MAGIC_ACK",
")",
"self",
".",
"priv_key",
"=",
"nacl",
".",
"secret",
".",
"SecretBox",
"(",
"private_key",
")",
"self",
".",
"verify_key",
"=",
"nacl",
".",
"signing",
".",
"VerifyKey",
"(",
"verify_key_hex",
",",
"encoder",
"=",
"nacl",
".",
"encoding",
".",
"HexEncoder",
")"
] | Authenticate the client and return the private
and signature keys.
Establish a connection through a secured socket,
then do the handshake using the napalm-logs
auth algorithm. | [
"Authenticate",
"the",
"client",
"and",
"return",
"the",
"private",
"and",
"signature",
"keys",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L106-L150 |
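The handshake above is symmetric with the server side: INIT magic, receive the SecretBox key, ACK, receive the hex-encoded verify key, ACK. A minimal client-side sketch follows; the `ClientAuth` constructor arguments are assumptions inferred from the attributes the method reads (`self.certificate`, `self.address`, `self.port`), not a confirmed signature.

```python
from napalm_logs.utils import ClientAuth

# Hypothetical constructor arguments, inferred from the attributes used
# in authenticate(); check the real signature before relying on this.
auth = ClientAuth('/etc/napalm/logs.crt',  # CA cert used to verify the server
                  address='127.0.0.1',
                  port=49018)              # port number is a placeholder
auth.authenticate()                        # may already be run by __init__
# After a successful handshake the object exposes:
#   auth.priv_key   -> nacl.secret.SecretBox shared with the server
#   auth.verify_key -> nacl.signing.VerifyKey for signature checks
```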
679 | napalm-automation/napalm-logs | napalm_logs/utils/__init__.py | ClientAuth.decrypt | def decrypt(self, binary):
'''
Decrypt and unpack the original OpenConfig object,
serialized using MessagePack.
Raise BadSignatureException when the signature
was forged or corrupted.
'''
try:
encrypted = self.verify_key.verify(binary)
except BadSignatureError:
log.error('Signature was forged or corrupt', exc_info=True)
raise BadSignatureException('Signature was forged or corrupt')
try:
packed = self.priv_key.decrypt(encrypted)
except CryptoError:
log.error('Unable to decrypt', exc_info=True)
raise CryptoException('Unable to decrypt')
return umsgpack.unpackb(packed) | python | def decrypt(self, binary):
'''
Decrypt and unpack the original OpenConfig object,
serialized using MessagePack.
Raise BadSignatureException when the signature
was forged or corrupted.
'''
try:
encrypted = self.verify_key.verify(binary)
except BadSignatureError:
log.error('Signature was forged or corrupt', exc_info=True)
raise BadSignatureException('Signature was forged or corrupt')
try:
packed = self.priv_key.decrypt(encrypted)
except CryptoError:
log.error('Unable to decrypt', exc_info=True)
raise CryptoException('Unable to decrypt')
return umsgpack.unpackb(packed) | [
"def",
"decrypt",
"(",
"self",
",",
"binary",
")",
":",
"try",
":",
"encrypted",
"=",
"self",
".",
"verify_key",
".",
"verify",
"(",
"binary",
")",
"except",
"BadSignatureError",
":",
"log",
".",
"error",
"(",
"'Signature was forged or corrupt'",
",",
"exc_info",
"=",
"True",
")",
"raise",
"BadSignatureException",
"(",
"'Signature was forged or corrupt'",
")",
"try",
":",
"packed",
"=",
"self",
".",
"priv_key",
".",
"decrypt",
"(",
"encrypted",
")",
"except",
"CryptoError",
":",
"log",
".",
"error",
"(",
"'Unable to decrypt'",
",",
"exc_info",
"=",
"True",
")",
"raise",
"CryptoException",
"(",
"'Unable to decrypt'",
")",
"return",
"umsgpack",
".",
"unpackb",
"(",
"packed",
")"
] | Decrypt and unpack the original OpenConfig object,
serialized using MessagePack.
Raise BadSignatureException when the signature
was forged or corrupted. | [
"Decrypt",
"and",
"unpack",
"the",
"original",
"OpenConfig",
"object",
"serialized",
"using",
"MessagePack",
".",
"Raise",
"BadSignatureException",
"when",
"the",
"signature",
"was",
"forged",
"or",
"corrupted",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/utils/__init__.py#L152-L169 |
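The `verify`-then-`decrypt` order in `decrypt()` implies the server encrypts first and signs the ciphertext. A self-contained PyNaCl round-trip of the same pipeline, with keys generated locally for the sketch (napalm-logs delivers them through the handshake above):

```python
import umsgpack
import nacl.secret
import nacl.signing
import nacl.utils

# Stand-ins for the keys the handshake would normally deliver.
box = nacl.secret.SecretBox(nacl.utils.random(nacl.secret.SecretBox.KEY_SIZE))
signing_key = nacl.signing.SigningKey.generate()

# Server side: pack with MessagePack, encrypt, then sign the ciphertext.
obj = {'host': 'edge01', 'severity': 5}
signed = signing_key.sign(box.encrypt(umsgpack.packb(obj)))

# Client side: the exact inverse performed by ClientAuth.decrypt().
encrypted = signing_key.verify_key.verify(signed)
assert umsgpack.unpackb(box.decrypt(encrypted)) == obj
```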
680 | napalm-automation/napalm-logs | napalm_logs/listener/tcp.py | TCPListener._client_connection | def _client_connection(self, conn, addr):
'''
Handle the connection with one client.
'''
log.debug('Established connection with %s:%d', addr[0], addr[1])
conn.settimeout(self.socket_timeout)
try:
while self.__up:
msg = conn.recv(self.buffer_size)
if not msg:
# log.debug('Received empty message from %s', addr)
# disabled ^ as it was too noisy
continue
log.debug('[%s] Received %s from %s. Adding in the queue', time.time(), msg, addr)
self.buffer.put((msg, '{}:{}'.format(addr[0], addr[1])))
except socket.timeout:
if not self.__up:
return
log.debug('Connection %s:%d timed out', addr[0], addr[1])
raise ListenerException('Connection %s:%d timed out' % addr)
finally:
log.debug('Closing connection with %s', addr)
conn.close() | python | def _client_connection(self, conn, addr):
'''
Handle the connection with one client.
'''
log.debug('Established connection with %s:%d', addr[0], addr[1])
conn.settimeout(self.socket_timeout)
try:
while self.__up:
msg = conn.recv(self.buffer_size)
if not msg:
# log.debug('Received empty message from %s', addr)
# disabled ^ as it was too noisy
continue
log.debug('[%s] Received %s from %s. Adding in the queue', time.time(), msg, addr)
self.buffer.put((msg, '{}:{}'.format(addr[0], addr[1])))
except socket.timeout:
if not self.__up:
return
log.debug('Connection %s:%d timed out', addr[0], addr[1])
raise ListenerException('Connection %s:%d timed out' % addr)
finally:
log.debug('Closing connection with %s', addr)
conn.close() | [
"def",
"_client_connection",
"(",
"self",
",",
"conn",
",",
"addr",
")",
":",
"log",
".",
"debug",
"(",
"'Established connection with %s:%d'",
",",
"addr",
"[",
"0",
"]",
",",
"addr",
"[",
"1",
"]",
")",
"conn",
".",
"settimeout",
"(",
"self",
".",
"socket_timeout",
")",
"try",
":",
"while",
"self",
".",
"__up",
":",
"msg",
"=",
"conn",
".",
"recv",
"(",
"self",
".",
"buffer_size",
")",
"if",
"not",
"msg",
":",
"# log.debug('Received empty message from %s', addr)",
"# disabled ^ as it was too noisy",
"continue",
"log",
".",
"debug",
"(",
"'[%s] Received %s from %s. Adding in the queue'",
",",
"time",
".",
"time",
"(",
")",
",",
"msg",
",",
"addr",
")",
"self",
".",
"buffer",
".",
"put",
"(",
"(",
"msg",
",",
"'{}:{}'",
".",
"format",
"(",
"addr",
"[",
"0",
"]",
",",
"addr",
"[",
"1",
"]",
")",
")",
")",
"except",
"socket",
".",
"timeout",
":",
"if",
"not",
"self",
".",
"__up",
":",
"return",
"log",
".",
"debug",
"(",
"'Connection %s:%d timed out'",
",",
"addr",
"[",
"1",
"]",
",",
"addr",
"[",
"0",
"]",
")",
"raise",
"ListenerException",
"(",
"'Connection %s:%d timed out'",
"%",
"addr",
")",
"finally",
":",
"log",
".",
"debug",
"(",
"'Closing connection with %s'",
",",
"addr",
")",
"conn",
".",
"close",
"(",
")"
] | Handle the connection with one client. | [
"Handle",
"the",
"connecition",
"with",
"one",
"client",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L53-L75 |
681 | napalm-automation/napalm-logs | napalm_logs/listener/tcp.py | TCPListener._serve_clients | def _serve_clients(self):
'''
Accept clients and serve, one separate thread per client.
'''
self.__up = True
while self.__up:
log.debug('Waiting for a client to connect')
try:
conn, addr = self.skt.accept()
log.debug('Received connection from %s:%d', addr[0], addr[1])
except socket.error as error:
if not self.__up:
return
msg = 'Received listener socket error: {}'.format(error)
log.error(msg, exc_info=True)
raise ListenerException(msg)
client_thread = threading.Thread(target=self._client_connection, args=(conn, addr,))
client_thread.start() | python | def _serve_clients(self):
'''
Accept clients and serve, one separate thread per client.
'''
self.__up = True
while self.__up:
log.debug('Waiting for a client to connect')
try:
conn, addr = self.skt.accept()
log.debug('Received connection from %s:%d', addr[0], addr[1])
except socket.error as error:
if not self.__up:
return
msg = 'Received listener socket error: {}'.format(error)
log.error(msg, exc_info=True)
raise ListenerException(msg)
client_thread = threading.Thread(target=self._client_connection, args=(conn, addr,))
client_thread.start() | [
"def",
"_serve_clients",
"(",
"self",
")",
":",
"self",
".",
"__up",
"=",
"True",
"while",
"self",
".",
"__up",
":",
"log",
".",
"debug",
"(",
"'Waiting for a client to connect'",
")",
"try",
":",
"conn",
",",
"addr",
"=",
"self",
".",
"skt",
".",
"accept",
"(",
")",
"log",
".",
"debug",
"(",
"'Received connection from %s:%d'",
",",
"addr",
"[",
"0",
"]",
",",
"addr",
"[",
"1",
"]",
")",
"except",
"socket",
".",
"error",
"as",
"error",
":",
"if",
"not",
"self",
".",
"__up",
":",
"return",
"msg",
"=",
"'Received listener socket error: {}'",
".",
"format",
"(",
"error",
")",
"log",
".",
"error",
"(",
"msg",
",",
"exc_info",
"=",
"True",
")",
"raise",
"ListenerException",
"(",
"msg",
")",
"client_thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"_client_connection",
",",
"args",
"=",
"(",
"conn",
",",
"addr",
",",
")",
")",
"client_thread",
".",
"start",
"(",
")"
] | Accept clients and serve, one separate thread per client. | [
"Accept",
"cients",
"and",
"serve",
"one",
"separate",
"thread",
"per",
"client",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L77-L94 |
682 | napalm-automation/napalm-logs | napalm_logs/listener/tcp.py | TCPListener.start | def start(self):
'''
Start listening for messages.
'''
log.debug('Creating the TCP server')
if ':' in self.address:
self.skt = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.reuse_port:
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, 'SO_REUSEPORT'):
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
else:
log.error('SO_REUSEPORT not supported')
try:
self.skt.bind((self.address, int(self.port)))
except socket.error as msg:
error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg)
log.error(error_string, exc_info=True)
raise BindException(error_string)
log.debug('Accepting max %d parallel connections', self.max_clients)
self.skt.listen(self.max_clients)
self.thread_serve = threading.Thread(target=self._serve_clients)
self.thread_serve.start() | python | def start(self):
'''
Start listening for messages.
'''
log.debug('Creating the TCP server')
if ':' in self.address:
self.skt = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
self.skt = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if self.reuse_port:
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, 'SO_REUSEPORT'):
self.skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
else:
log.error('SO_REUSEPORT not supported')
try:
self.skt.bind((self.address, int(self.port)))
except socket.error as msg:
error_string = 'Unable to bind to port {} on {}: {}'.format(self.port, self.address, msg)
log.error(error_string, exc_info=True)
raise BindException(error_string)
log.debug('Accepting max %d parallel connections', self.max_clients)
self.skt.listen(self.max_clients)
self.thread_serve = threading.Thread(target=self._serve_clients)
self.thread_serve.start() | [
"def",
"start",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Creating the TCP server'",
")",
"if",
"':'",
"in",
"self",
".",
"address",
":",
"self",
".",
"skt",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET6",
",",
"socket",
".",
"SOCK_STREAM",
")",
"else",
":",
"self",
".",
"skt",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_STREAM",
")",
"if",
"self",
".",
"reuse_port",
":",
"self",
".",
"skt",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEADDR",
",",
"1",
")",
"if",
"hasattr",
"(",
"socket",
",",
"'SO_REUSEPORT'",
")",
":",
"self",
".",
"skt",
".",
"setsockopt",
"(",
"socket",
".",
"SOL_SOCKET",
",",
"socket",
".",
"SO_REUSEPORT",
",",
"1",
")",
"else",
":",
"log",
".",
"error",
"(",
"'SO_REUSEPORT not supported'",
")",
"try",
":",
"self",
".",
"skt",
".",
"bind",
"(",
"(",
"self",
".",
"address",
",",
"int",
"(",
"self",
".",
"port",
")",
")",
")",
"except",
"socket",
".",
"error",
"as",
"msg",
":",
"error_string",
"=",
"'Unable to bind to port {} on {}: {}'",
".",
"format",
"(",
"self",
".",
"port",
",",
"self",
".",
"address",
",",
"msg",
")",
"log",
".",
"error",
"(",
"error_string",
",",
"exc_info",
"=",
"True",
")",
"raise",
"BindException",
"(",
"error_string",
")",
"log",
".",
"debug",
"(",
"'Accepting max %d parallel connections'",
",",
"self",
".",
"max_clients",
")",
"self",
".",
"skt",
".",
"listen",
"(",
"self",
".",
"max_clients",
")",
"self",
".",
"thread_serve",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"_serve_clients",
")",
"self",
".",
"thread_serve",
".",
"start",
"(",
")"
] | Start listening for messages. | [
"Start",
"listening",
"for",
"messages",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L96-L120 |
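Taken together with `receive()` and `stop()` below, the listener lifecycle looks roughly like this; the `TCPListener` constructor arguments are assumptions inferred from the attributes used above (`self.address`, `self.port`), not a checked signature.

```python
from napalm_logs.listener.tcp import TCPListener

listener = TCPListener('0.0.0.0', 514)  # hypothetical constructor args
listener.start()                  # binds, listens, spawns _serve_clients()
try:
    while True:
        msg, address = listener.receive()  # polls the internal buffer
        if not msg:
            continue              # receive() returns ('', '') on shutdown
        print('{} -> {!r}'.format(address, msg))
except KeyboardInterrupt:
    listener.stop()               # flips __up and closes the socket
```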
683 | napalm-automation/napalm-logs | napalm_logs/listener/tcp.py | TCPListener.receive | def receive(self):
'''
Return one message dequeued from the listen buffer.
'''
while self.buffer.empty() and self.__up:
# This sequence is skipped when the buffer is not empty.
sleep_ms = random.randint(0, 1000)
# log.debug('The message queue is empty, waiting %d milliseconds', sleep_ms)
# disabled ^ as it was too noisy
time.sleep(sleep_ms / 1000.0)
if not self.buffer.empty():
return self.buffer.get(block=False)
return '', '' | python | def receive(self):
'''
Return one message dequeued from the listen buffer.
'''
while self.buffer.empty() and self.__up:
# This sequence is skipped when the buffer is not empty.
sleep_ms = random.randint(0, 1000)
# log.debug('The message queue is empty, waiting %d milliseconds', sleep_ms)
# disabled ^ as it was too noisy
time.sleep(sleep_ms / 1000.0)
if not self.buffer.empty():
return self.buffer.get(block=False)
return '', '' | [
"def",
"receive",
"(",
"self",
")",
":",
"while",
"self",
".",
"buffer",
".",
"empty",
"(",
")",
"and",
"self",
".",
"__up",
":",
"# This sequence is skipped when the buffer is not empty.",
"sleep_ms",
"=",
"random",
".",
"randint",
"(",
"0",
",",
"1000",
")",
"# log.debug('The message queue is empty, waiting %d miliseconds', sleep_ms)",
"# disabled ^ as it was too noisy",
"time",
".",
"sleep",
"(",
"sleep_ms",
"/",
"1000.0",
")",
"if",
"not",
"self",
".",
"buffer",
".",
"empty",
"(",
")",
":",
"return",
"self",
".",
"buffer",
".",
"get",
"(",
"block",
"=",
"False",
")",
"return",
"''",
",",
"''"
] | Return one message dequeued from the listen buffer. | [
"Return",
"one",
"message",
"dequeued",
"from",
"the",
"listen",
"buffer",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L122-L134 |
684 | napalm-automation/napalm-logs | napalm_logs/listener/tcp.py | TCPListener.stop | def stop(self):
'''
Closing the socket.
'''
log.info('Stopping the TCP listener')
self.__up = False
try:
self.skt.shutdown(socket.SHUT_RDWR)
except socket.error:
log.error('The following error may not be critical:', exc_info=True)
self.skt.close() | python | def stop(self):
'''
Closing the socket.
'''
log.info('Stopping the TCP listener')
self.__up = False
try:
self.skt.shutdown(socket.SHUT_RDWR)
except socket.error:
log.error('The following error may not be critical:', exc_info=True)
self.skt.close() | [
"def",
"stop",
"(",
"self",
")",
":",
"log",
".",
"info",
"(",
"'Stopping the TCP listener'",
")",
"self",
".",
"__up",
"=",
"False",
"try",
":",
"self",
".",
"skt",
".",
"shutdown",
"(",
"socket",
".",
"SHUT_RDWR",
")",
"except",
"socket",
".",
"error",
":",
"log",
".",
"error",
"(",
"'The following error may not be critical:'",
",",
"exc_info",
"=",
"True",
")",
"self",
".",
"skt",
".",
"close",
"(",
")"
] | Closing the socket. | [
"Closing",
"the",
"socket",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener/tcp.py#L136-L146 |
685 | napalm-automation/napalm-logs | napalm_logs/listener_proc.py | NapalmLogsListenerProc._setup_ipc | def _setup_ipc(self):
'''
Setup the listener IPC pusher.
'''
log.debug('Setting up the listener IPC pusher')
self.ctx = zmq.Context()
self.pub = self.ctx.socket(zmq.PUSH)
self.pub.connect(LST_IPC_URL)
log.debug('Setting HWM for the listener: %d', self.opts['hwm'])
try:
self.pub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) | python | def _setup_ipc(self):
'''
Setup the listener IPC pusher.
'''
log.debug('Setting up the listener IPC pusher')
self.ctx = zmq.Context()
self.pub = self.ctx.socket(zmq.PUSH)
self.pub.connect(LST_IPC_URL)
log.debug('Setting HWM for the listener: %d', self.opts['hwm'])
try:
self.pub.setsockopt(zmq.HWM, self.opts['hwm'])
# zmq 2
except AttributeError:
# zmq 3
self.pub.setsockopt(zmq.SNDHWM, self.opts['hwm']) | [
"def",
"_setup_ipc",
"(",
"self",
")",
":",
"log",
".",
"debug",
"(",
"'Setting up the listener IPC pusher'",
")",
"self",
".",
"ctx",
"=",
"zmq",
".",
"Context",
"(",
")",
"self",
".",
"pub",
"=",
"self",
".",
"ctx",
".",
"socket",
"(",
"zmq",
".",
"PUSH",
")",
"self",
".",
"pub",
".",
"connect",
"(",
"LST_IPC_URL",
")",
"log",
".",
"debug",
"(",
"'Setting HWM for the listener: %d'",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"try",
":",
"self",
".",
"pub",
".",
"setsockopt",
"(",
"zmq",
".",
"HWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")",
"# zmq 2",
"except",
"AttributeError",
":",
"# zmq 3",
"self",
".",
"pub",
".",
"setsockopt",
"(",
"zmq",
".",
"SNDHWM",
",",
"self",
".",
"opts",
"[",
"'hwm'",
"]",
")"
] | Setup the listener IPC pusher. | [
"Setup",
"the",
"listener",
"ICP",
"pusher",
"."
] | 4b89100a6e4f994aa004f3ea42a06dc803a7ccb0 | https://github.com/napalm-automation/napalm-logs/blob/4b89100a6e4f994aa004f3ea42a06dc803a7ccb0/napalm_logs/listener_proc.py#L62-L76 |
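The try/except around `zmq.HWM` is a pyzmq version shim: zmq 2 exposed one combined high-water mark, while zmq 3+ split it into send and receive marks, so the attribute lookup itself fails on newer builds. The same pattern in isolation:

```python
import zmq

def set_send_hwm(socket, hwm):
    """Set a send high-water mark on pyzmq built against zmq 2 or 3+."""
    try:
        socket.setsockopt(zmq.HWM, hwm)     # zmq 2: single combined option
    except AttributeError:
        socket.setsockopt(zmq.SNDHWM, hwm)  # zmq 3+: send-side option

ctx = zmq.Context()
push = ctx.socket(zmq.PUSH)
set_send_hwm(push, 1000)
```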
686 | poppy-project/pypot | pypot/utils/stoppablethread.py | make_update_loop | def make_update_loop(thread, update_func):
""" Makes a run loop which calls an update function at a predefined frequency. """
while not thread.should_stop():
if thread.should_pause():
thread.wait_to_resume()
start = time.time()
if hasattr(thread, '_updated'):
thread._updated.clear()
update_func()
if hasattr(thread, '_updated'):
thread._updated.set()
end = time.time()
dt = thread.period - (end - start)
if dt > 0:
time.sleep(dt) | python | def make_update_loop(thread, update_func):
""" Makes a run loop which calls an update function at a predefined frequency. """
while not thread.should_stop():
if thread.should_pause():
thread.wait_to_resume()
start = time.time()
if hasattr(thread, '_updated'):
thread._updated.clear()
update_func()
if hasattr(thread, '_updated'):
thread._updated.set()
end = time.time()
dt = thread.period - (end - start)
if dt > 0:
time.sleep(dt) | [
"def",
"make_update_loop",
"(",
"thread",
",",
"update_func",
")",
":",
"while",
"not",
"thread",
".",
"should_stop",
"(",
")",
":",
"if",
"thread",
".",
"should_pause",
"(",
")",
":",
"thread",
".",
"wait_to_resume",
"(",
")",
"start",
"=",
"time",
".",
"time",
"(",
")",
"if",
"hasattr",
"(",
"thread",
",",
"'_updated'",
")",
":",
"thread",
".",
"_updated",
".",
"clear",
"(",
")",
"update_func",
"(",
")",
"if",
"hasattr",
"(",
"thread",
",",
"'_updated'",
")",
":",
"thread",
".",
"_updated",
".",
"set",
"(",
")",
"end",
"=",
"time",
".",
"time",
"(",
")",
"dt",
"=",
"thread",
".",
"period",
"-",
"(",
"end",
"-",
"start",
")",
"if",
"dt",
">",
"0",
":",
"time",
".",
"sleep",
"(",
"dt",
")"
] | Makes a run loop which calls an update function at a predefined frequency. | [
"Makes",
"a",
"run",
"loop",
"which",
"calls",
"an",
"update",
"function",
"at",
"a",
"predefined",
"frequency",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/utils/stoppablethread.py#L166-L183 |
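The loop subtracts the update's own runtime from the period, so a fast `update_func` is still called at a steady frequency rather than at period-plus-runtime intervals. A stripped-down standalone version of the same timing pattern, without the stop/pause machinery:

```python
import time

def run_at_frequency(update_func, freq_hz, iterations):
    """Call update_func roughly freq_hz times per second."""
    period = 1.0 / freq_hz
    for _ in range(iterations):
        start = time.time()
        update_func()
        dt = period - (time.time() - start)
        if dt > 0:            # sleep only if the update finished early
            time.sleep(dt)

run_at_frequency(lambda: None, freq_hz=50, iterations=100)  # ~2 seconds
```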
687 | poppy-project/pypot | pypot/utils/stoppablethread.py | StoppableThread.start | def start(self):
""" Start the run method as a new thread.
It will first stop the thread if it is already running.
"""
if self.running:
self.stop()
self._thread = threading.Thread(target=self._wrapped_target)
self._thread.daemon = True
self._thread.start() | python | def start(self):
""" Start the run method as a new thread.
It will first stop the thread if it is already running.
"""
if self.running:
self.stop()
self._thread = threading.Thread(target=self._wrapped_target)
self._thread.daemon = True
self._thread.start() | [
"def",
"start",
"(",
"self",
")",
":",
"if",
"self",
".",
"running",
":",
"self",
".",
"stop",
"(",
")",
"self",
".",
"_thread",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"_wrapped_target",
")",
"self",
".",
"_thread",
".",
"daemon",
"=",
"True",
"self",
".",
"_thread",
".",
"start",
"(",
")"
] | Start the run method as a new thread.
It will first stop the thread if it is already running. | [
"Start",
"the",
"run",
"method",
"as",
"a",
"new",
"thread",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/utils/stoppablethread.py#L33-L44 |
688 | poppy-project/pypot | pypot/utils/stoppablethread.py | StoppableThread.wait_to_start | def wait_to_start(self, allow_failure=False):
""" Wait for the thread to actually starts. """
self._started.wait()
if self._crashed and not allow_failure:
self._thread.join()
raise RuntimeError('Setup failed, see {} Traceback '
'for details.'.format(self._thread.name)) | python | def wait_to_start(self, allow_failure=False):
""" Wait for the thread to actually starts. """
self._started.wait()
if self._crashed and not allow_failure:
self._thread.join()
raise RuntimeError('Setup failed, see {} Traceback '
'for details.'.format(self._thread.name)) | [
"def",
"wait_to_start",
"(",
"self",
",",
"allow_failure",
"=",
"False",
")",
":",
"self",
".",
"_started",
".",
"wait",
"(",
")",
"if",
"self",
".",
"_crashed",
"and",
"not",
"allow_failure",
":",
"self",
".",
"_thread",
".",
"join",
"(",
")",
"raise",
"RuntimeError",
"(",
"'Setup failed, see {} Traceback'",
"'for details.'",
".",
"format",
"(",
"self",
".",
"_thread",
".",
"name",
")",
")"
] | Wait for the thread to actually start. | [
"Wait",
"for",
"the",
"thread",
"to",
"actually",
"starts",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/utils/stoppablethread.py#L82-L89 |
689 | poppy-project/pypot | pypot/vrep/__init__.py | from_vrep | def from_vrep(config, vrep_host='127.0.0.1', vrep_port=19997, scene=None,
tracked_objects=[], tracked_collisions=[],
id=None, shared_vrep_io=None):
""" Create a robot from a V-REP instance.
:param config: robot configuration (either the path to the json or directly the dictionary)
:type config: str or dict
:param str vrep_host: host of the V-REP server
:param int vrep_port: port of the V-REP server
:param str scene: path to the V-REP scene to load and start
:param list tracked_objects: list of V-REP dummy object to track
:param list tracked_collisions: list of V-REP collision to track
:param int id: robot id in simulator (useful when using a scene with multiple robots)
:param vrep_io: use an already connected VrepIO (useful when using a scene with multiple robots)
:type vrep_io: :class:`~pypot.vrep.io.VrepIO`
This function tries to connect to a V-REP instance and expects to find motors with names corresponding to the ones found in the config.
.. note:: The :class:`~pypot.robot.robot.Robot` returned will also provide a convenience reset_simulation method which resets the simulation and the robot position to its initial stance.
.. note:: Using the same configuration, you should be able to switch from a real to a simulated robot just by switching from :func:`~pypot.robot.config.from_config` to :func:`~pypot.vrep.from_vrep`.
For instance::
import json
with open('my_config.json') as f:
config = json.load(f)
from pypot.robot import from_config
from pypot.vrep import from_vrep
real_robot = from_config(config)
simulated_robot = from_vrep(config, '127.0.0.1', 19997, 'poppy.ttt')
"""
if shared_vrep_io is None:
vrep_io = VrepIO(vrep_host, vrep_port)
else:
vrep_io = shared_vrep_io
vreptime = vrep_time(vrep_io)
pypot_time.time = vreptime.get_time
pypot_time.sleep = vreptime.sleep
if isinstance(config, basestring):
with open(config) as f:
config = json.load(f, object_pairs_hook=OrderedDict)
motors = [motor_from_confignode(config, name)
for name in config['motors'].keys()]
vc = VrepController(vrep_io, scene, motors, id=id)
vc._init_vrep_streaming()
sensor_controllers = []
if tracked_objects:
sensors = [ObjectTracker(name) for name in tracked_objects]
vot = VrepObjectTracker(vrep_io, sensors)
sensor_controllers.append(vot)
if tracked_collisions:
sensors = [VrepCollisionDetector(name) for name in tracked_collisions]
vct = VrepCollisionTracker(vrep_io, sensors)
sensor_controllers.append(vct)
robot = Robot(motor_controllers=[vc],
sensor_controllers=sensor_controllers)
for m in robot.motors:
m.goto_behavior = 'minjerk'
init_pos = {m: m.goal_position for m in robot.motors}
make_alias(config, robot)
def start_simu():
vrep_io.start_simulation()
for m, p in init_pos.iteritems():
m.goal_position = p
vc.start()
if tracked_objects:
vot.start()
if tracked_collisions:
vct.start()
while vrep_io.get_simulation_current_time() < 1.:
sys_time.sleep(0.1)
def stop_simu():
if tracked_objects:
vot.stop()
if tracked_collisions:
vct.stop()
vc.stop()
vrep_io.stop_simulation()
def reset_simu():
stop_simu()
sys_time.sleep(0.5)
start_simu()
robot.start_simulation = start_simu
robot.stop_simulation = stop_simu
robot.reset_simulation = reset_simu
def current_simulation_time(robot):
return robot._controllers[0].io.get_simulation_current_time()
Robot.current_simulation_time = property(lambda robot: current_simulation_time(robot))
def get_object_position(robot, object, relative_to_object=None):
return vrep_io.get_object_position(object, relative_to_object)
Robot.get_object_position = partial(get_object_position, robot)
def get_object_orientation(robot, object, relative_to_object=None):
return vrep_io.get_object_orientation(object, relative_to_object)
Robot.get_object_orientation = partial(get_object_orientation, robot)
return robot | python | def from_vrep(config, vrep_host='127.0.0.1', vrep_port=19997, scene=None,
tracked_objects=[], tracked_collisions=[],
id=None, shared_vrep_io=None):
""" Create a robot from a V-REP instance.
:param config: robot configuration (either the path to the json or directly the dictionary)
:type config: str or dict
:param str vrep_host: host of the V-REP server
:param int vrep_port: port of the V-REP server
:param str scene: path to the V-REP scene to load and start
:param list tracked_objects: list of V-REP dummy object to track
:param list tracked_collisions: list of V-REP collision to track
:param int id: robot id in simulator (useful when using a scene with multiple robots)
:param vrep_io: use an already connected VrepIO (useful when using a scene with multiple robots)
:type vrep_io: :class:`~pypot.vrep.io.VrepIO`
This function tries to connect to a V-REP instance and expects to find motors with names corresponding to the ones found in the config.
.. note:: The :class:`~pypot.robot.robot.Robot` returned will also provide a convenience reset_simulation method which resets the simulation and the robot position to its initial stance.
.. note:: Using the same configuration, you should be able to switch from a real to a simulated robot just by switching from :func:`~pypot.robot.config.from_config` to :func:`~pypot.vrep.from_vrep`.
For instance::
import json
with open('my_config.json') as f:
config = json.load(f)
from pypot.robot import from_config
from pypot.vrep import from_vrep
real_robot = from_config(config)
simulated_robot = from_vrep(config, '127.0.0.1', 19997, 'poppy.ttt')
"""
if shared_vrep_io is None:
vrep_io = VrepIO(vrep_host, vrep_port)
else:
vrep_io = shared_vrep_io
vreptime = vrep_time(vrep_io)
pypot_time.time = vreptime.get_time
pypot_time.sleep = vreptime.sleep
if isinstance(config, basestring):
with open(config) as f:
config = json.load(f, object_pairs_hook=OrderedDict)
motors = [motor_from_confignode(config, name)
for name in config['motors'].keys()]
vc = VrepController(vrep_io, scene, motors, id=id)
vc._init_vrep_streaming()
sensor_controllers = []
if tracked_objects:
sensors = [ObjectTracker(name) for name in tracked_objects]
vot = VrepObjectTracker(vrep_io, sensors)
sensor_controllers.append(vot)
if tracked_collisions:
sensors = [VrepCollisionDetector(name) for name in tracked_collisions]
vct = VrepCollisionTracker(vrep_io, sensors)
sensor_controllers.append(vct)
robot = Robot(motor_controllers=[vc],
sensor_controllers=sensor_controllers)
for m in robot.motors:
m.goto_behavior = 'minjerk'
init_pos = {m: m.goal_position for m in robot.motors}
make_alias(config, robot)
def start_simu():
vrep_io.start_simulation()
for m, p in init_pos.iteritems():
m.goal_position = p
vc.start()
if tracked_objects:
vot.start()
if tracked_collisions:
vct.start()
while vrep_io.get_simulation_current_time() < 1.:
sys_time.sleep(0.1)
def stop_simu():
if tracked_objects:
vot.stop()
if tracked_collisions:
vct.stop()
vc.stop()
vrep_io.stop_simulation()
def reset_simu():
stop_simu()
sys_time.sleep(0.5)
start_simu()
robot.start_simulation = start_simu
robot.stop_simulation = stop_simu
robot.reset_simulation = reset_simu
def current_simulation_time(robot):
return robot._controllers[0].io.get_simulation_current_time()
Robot.current_simulation_time = property(lambda robot: current_simulation_time(robot))
def get_object_position(robot, object, relative_to_object=None):
return vrep_io.get_object_position(object, relative_to_object)
Robot.get_object_position = partial(get_object_position, robot)
def get_object_orientation(robot, object, relative_to_object=None):
return vrep_io.get_object_orientation(object, relative_to_object)
Robot.get_object_orientation = partial(get_object_orientation, robot)
return robot | [
"def",
"from_vrep",
"(",
"config",
",",
"vrep_host",
"=",
"'127.0.0.1'",
",",
"vrep_port",
"=",
"19997",
",",
"scene",
"=",
"None",
",",
"tracked_objects",
"=",
"[",
"]",
",",
"tracked_collisions",
"=",
"[",
"]",
",",
"id",
"=",
"None",
",",
"shared_vrep_io",
"=",
"None",
")",
":",
"if",
"shared_vrep_io",
"is",
"None",
":",
"vrep_io",
"=",
"VrepIO",
"(",
"vrep_host",
",",
"vrep_port",
")",
"else",
":",
"vrep_io",
"=",
"shared_vrep_io",
"vreptime",
"=",
"vrep_time",
"(",
"vrep_io",
")",
"pypot_time",
".",
"time",
"=",
"vreptime",
".",
"get_time",
"pypot_time",
".",
"sleep",
"=",
"vreptime",
".",
"sleep",
"if",
"isinstance",
"(",
"config",
",",
"basestring",
")",
":",
"with",
"open",
"(",
"config",
")",
"as",
"f",
":",
"config",
"=",
"json",
".",
"load",
"(",
"f",
",",
"object_pairs_hook",
"=",
"OrderedDict",
")",
"motors",
"=",
"[",
"motor_from_confignode",
"(",
"config",
",",
"name",
")",
"for",
"name",
"in",
"config",
"[",
"'motors'",
"]",
".",
"keys",
"(",
")",
"]",
"vc",
"=",
"VrepController",
"(",
"vrep_io",
",",
"scene",
",",
"motors",
",",
"id",
"=",
"id",
")",
"vc",
".",
"_init_vrep_streaming",
"(",
")",
"sensor_controllers",
"=",
"[",
"]",
"if",
"tracked_objects",
":",
"sensors",
"=",
"[",
"ObjectTracker",
"(",
"name",
")",
"for",
"name",
"in",
"tracked_objects",
"]",
"vot",
"=",
"VrepObjectTracker",
"(",
"vrep_io",
",",
"sensors",
")",
"sensor_controllers",
".",
"append",
"(",
"vot",
")",
"if",
"tracked_collisions",
":",
"sensors",
"=",
"[",
"VrepCollisionDetector",
"(",
"name",
")",
"for",
"name",
"in",
"tracked_collisions",
"]",
"vct",
"=",
"VrepCollisionTracker",
"(",
"vrep_io",
",",
"sensors",
")",
"sensor_controllers",
".",
"append",
"(",
"vct",
")",
"robot",
"=",
"Robot",
"(",
"motor_controllers",
"=",
"[",
"vc",
"]",
",",
"sensor_controllers",
"=",
"sensor_controllers",
")",
"for",
"m",
"in",
"robot",
".",
"motors",
":",
"m",
".",
"goto_behavior",
"=",
"'minjerk'",
"init_pos",
"=",
"{",
"m",
":",
"m",
".",
"goal_position",
"for",
"m",
"in",
"robot",
".",
"motors",
"}",
"make_alias",
"(",
"config",
",",
"robot",
")",
"def",
"start_simu",
"(",
")",
":",
"vrep_io",
".",
"start_simulation",
"(",
")",
"for",
"m",
",",
"p",
"in",
"init_pos",
".",
"iteritems",
"(",
")",
":",
"m",
".",
"goal_position",
"=",
"p",
"vc",
".",
"start",
"(",
")",
"if",
"tracked_objects",
":",
"vot",
".",
"start",
"(",
")",
"if",
"tracked_collisions",
":",
"vct",
".",
"start",
"(",
")",
"while",
"vrep_io",
".",
"get_simulation_current_time",
"(",
")",
"<",
"1.",
":",
"sys_time",
".",
"sleep",
"(",
"0.1",
")",
"def",
"stop_simu",
"(",
")",
":",
"if",
"tracked_objects",
":",
"vot",
".",
"stop",
"(",
")",
"if",
"tracked_collisions",
":",
"vct",
".",
"stop",
"(",
")",
"vc",
".",
"stop",
"(",
")",
"vrep_io",
".",
"stop_simulation",
"(",
")",
"def",
"reset_simu",
"(",
")",
":",
"stop_simu",
"(",
")",
"sys_time",
".",
"sleep",
"(",
"0.5",
")",
"start_simu",
"(",
")",
"robot",
".",
"start_simulation",
"=",
"start_simu",
"robot",
".",
"stop_simulation",
"=",
"stop_simu",
"robot",
".",
"reset_simulation",
"=",
"reset_simu",
"def",
"current_simulation_time",
"(",
"robot",
")",
":",
"return",
"robot",
".",
"_controllers",
"[",
"0",
"]",
".",
"io",
".",
"get_simulation_current_time",
"(",
")",
"Robot",
".",
"current_simulation_time",
"=",
"property",
"(",
"lambda",
"robot",
":",
"current_simulation_time",
"(",
"robot",
")",
")",
"def",
"get_object_position",
"(",
"robot",
",",
"object",
",",
"relative_to_object",
"=",
"None",
")",
":",
"return",
"vrep_io",
".",
"get_object_position",
"(",
"object",
",",
"relative_to_object",
")",
"Robot",
".",
"get_object_position",
"=",
"partial",
"(",
"get_object_position",
",",
"robot",
")",
"def",
"get_object_orientation",
"(",
"robot",
",",
"object",
",",
"relative_to_object",
"=",
"None",
")",
":",
"return",
"vrep_io",
".",
"get_object_orientation",
"(",
"object",
",",
"relative_to_object",
")",
"Robot",
".",
"get_object_orientation",
"=",
"partial",
"(",
"get_object_orientation",
",",
"robot",
")",
"return",
"robot"
] | Create a robot from a V-REP instance.
:param config: robot configuration (either the path to the json or directly the dictionary)
:type config: str or dict
:param str vrep_host: host of the V-REP server
:param int vrep_port: port of the V-REP server
:param str scene: path to the V-REP scene to load and start
:param list tracked_objects: list of V-REP dummy object to track
:param list tracked_collisions: list of V-REP collision to track
:param int id: robot id in simulator (useful when using a scene with multiple robots)
:param vrep_io: use an already connected VrepIO (useful when using a scene with multiple robots)
:type vrep_io: :class:`~pypot.vrep.io.VrepIO`
This function tries to connect to a V-REP instance and expects to find motors with names corresponding to the ones found in the config.
.. note:: The :class:`~pypot.robot.robot.Robot` returned will also provide a convenience reset_simulation method which resets the simulation and the robot position to its initial stance.
.. note:: Using the same configuration, you should be able to switch from a real to a simulated robot just by switching from :func:`~pypot.robot.config.from_config` to :func:`~pypot.vrep.from_vrep`.
For instance::
import json
with open('my_config.json') as f:
config = json.load(f)
from pypot.robot import from_config
from pypot.vrep import from_vrep
real_robot = from_config(config)
simulated_robot = from_vrep(config, '127.0.0.1', 19997, 'poppy.ttt') | [
"Create",
"a",
"robot",
"from",
"a",
"V",
"-",
"REP",
"instance",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/__init__.py#L52-L176 |
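Beyond the docstring's basic example, `tracked_objects` and `tracked_collisions` wire extra sensor controllers into the returned robot. A hedged sketch; the scene, object, and collision names are placeholders for whatever the V-REP scene actually defines, and attribute-style sensor access is assumed from pypot's usual conventions:

```python
import json
from pypot.vrep import from_vrep

with open('my_config.json') as f:
    config = json.load(f)

# 'cube' and 'floor_collision' must match dummies/collision objects
# defined in your own V-REP scene -- placeholder names here.
robot = from_vrep(config, '127.0.0.1', 19997, 'poppy.ttt',
                  tracked_objects=['cube'],
                  tracked_collisions=['floor_collision'])
robot.start_simulation()
print(robot.cube.position)              # kept fresh by VrepObjectTracker
print(robot.floor_collision.colliding)  # kept fresh by VrepCollisionTracker
robot.reset_simulation()
```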
690 | poppy-project/pypot | pypot/dynamixel/io/io.py | DxlIO.set_wheel_mode | def set_wheel_mode(self, ids):
""" Sets the specified motors to wheel mode. """
self.set_control_mode(dict(zip(ids, itertools.repeat('wheel')))) | python | def set_wheel_mode(self, ids):
""" Sets the specified motors to wheel mode. """
self.set_control_mode(dict(zip(ids, itertools.repeat('wheel')))) | [
"def",
"set_wheel_mode",
"(",
"self",
",",
"ids",
")",
":",
"self",
".",
"set_control_mode",
"(",
"dict",
"(",
"zip",
"(",
"ids",
",",
"itertools",
".",
"repeat",
"(",
"'wheel'",
")",
")",
")",
")"
] | Sets the specified motors to wheel mode. | [
"Sets",
"the",
"specified",
"motors",
"to",
"wheel",
"mode",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/io/io.py#L30-L32 |
691 | poppy-project/pypot | pypot/dynamixel/io/io.py | DxlIO.set_joint_mode | def set_joint_mode(self, ids):
""" Sets the specified motors to joint mode. """
self.set_control_mode(dict(zip(ids, itertools.repeat('joint')))) | python | def set_joint_mode(self, ids):
""" Sets the specified motors to joint mode. """
self.set_control_mode(dict(zip(ids, itertools.repeat('joint')))) | [
"def",
"set_joint_mode",
"(",
"self",
",",
"ids",
")",
":",
"self",
".",
"set_control_mode",
"(",
"dict",
"(",
"zip",
"(",
"ids",
",",
"itertools",
".",
"repeat",
"(",
"'joint'",
")",
")",
")",
")"
] | Sets the specified motors to joint mode. | [
"Sets",
"the",
"specified",
"motors",
"to",
"joint",
"mode",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/io/io.py#L34-L36 |
692 | poppy-project/pypot | pypot/dynamixel/io/io.py | DxlIO.set_angle_limit | def set_angle_limit(self, limit_for_id, **kwargs):
""" Sets the angle limit to the specified motors. """
convert = kwargs['convert'] if 'convert' in kwargs else self._convert
if 'wheel' in self.get_control_mode(limit_for_id.keys()):
raise ValueError('can not change the angle limit of a motor in wheel mode')
if (0, 0) in limit_for_id.values():
raise ValueError('can not set limit to (0, 0)')
self._set_angle_limit(limit_for_id, convert=convert) | python | def set_angle_limit(self, limit_for_id, **kwargs):
""" Sets the angle limit to the specified motors. """
convert = kwargs['convert'] if 'convert' in kwargs else self._convert
if 'wheel' in self.get_control_mode(limit_for_id.keys()):
raise ValueError('can not change the angle limit of a motor in wheel mode')
if (0, 0) in limit_for_id.values():
raise ValueError('can not set limit to (0, 0)')
self._set_angle_limit(limit_for_id, convert=convert) | [
"def",
"set_angle_limit",
"(",
"self",
",",
"limit_for_id",
",",
"*",
"*",
"kwargs",
")",
":",
"convert",
"=",
"kwargs",
"[",
"'convert'",
"]",
"if",
"'convert'",
"in",
"kwargs",
"else",
"self",
".",
"_convert",
"if",
"'wheel'",
"in",
"self",
".",
"get_control_mode",
"(",
"limit_for_id",
".",
"keys",
"(",
")",
")",
":",
"raise",
"ValueError",
"(",
"'can not change the angle limit of a motor in wheel mode'",
")",
"if",
"(",
"0",
",",
"0",
")",
"in",
"limit_for_id",
".",
"values",
"(",
")",
":",
"raise",
"ValueError",
"(",
"'can not set limit to (0, 0)'",
")",
"self",
".",
"_set_angle_limit",
"(",
"limit_for_id",
",",
"convert",
"=",
"convert",
")"
] | Sets the angle limit to the specified motors. | [
"Sets",
"the",
"angle",
"limit",
"to",
"the",
"specified",
"motors",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/dynamixel/io/io.py#L55-L65 |
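`limit_for_id` maps motor ids to `(lower, upper)` tuples in degrees; the two guards above reject wheel-mode motors and the `(0, 0)` pair, which Dynamixel firmware reserves as the wheel-mode sentinel. For example (the serial port is a placeholder):

```python
import pypot.dynamixel

dxl_io = pypot.dynamixel.DxlIO('/dev/ttyUSB0')  # port is an assumption
dxl_io.set_joint_mode([1, 2])       # limits only make sense in joint mode
dxl_io.set_angle_limit({1: (-90.0, 90.0), 2: (-45.0, 45.0)})
# dxl_io.set_angle_limit({1: (0, 0)})  # would raise ValueError (see guard)
dxl_io.close()
```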
693 | poppy-project/pypot | pypot/robot/robot.py | Robot.close | def close(self):
""" Cleans the robot by stopping synchronization and all controllers."""
self.stop_sync()
[c.io.close() for c in self._controllers if c.io is not None] | python | def close(self):
""" Cleans the robot by stopping synchronization and all controllers."""
self.stop_sync()
[c.io.close() for c in self._controllers if c.io is not None] | [
"def",
"close",
"(",
"self",
")",
":",
"self",
".",
"stop_sync",
"(",
")",
"[",
"c",
".",
"io",
".",
"close",
"(",
")",
"for",
"c",
"in",
"self",
".",
"_controllers",
"if",
"c",
".",
"io",
"is",
"not",
"None",
"]"
] | Cleans the robot by stopping synchronization and all controllers. | [
"Cleans",
"the",
"robot",
"by",
"stopping",
"synchronization",
"and",
"all",
"controllers",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L51-L54 |
694 | poppy-project/pypot | pypot/robot/robot.py | Robot.goto_position | def goto_position(self, position_for_motors, duration, control=None, wait=False):
""" Moves a subset of the motors to a position within a specific duration.
:param dict position_for_motors: which motors you want to move {motor_name: pos, motor_name: pos,...}
:param float duration: duration of the move
:param str control: control type ('dummy', 'minjerk')
:param bool wait: whether or not to wait for the end of the move
.. note:: In case of dynamixel motors, the speed is automatically adjusted so the goal position is reached after the chosen duration.
"""
for i, (motor_name, position) in enumerate(position_for_motors.iteritems()):
w = False if i < len(position_for_motors) - 1 else wait
m = getattr(self, motor_name)
m.goto_position(position, duration, control, wait=w) | python | def goto_position(self, position_for_motors, duration, control=None, wait=False):
""" Moves a subset of the motors to a position within a specific duration.
:param dict position_for_motors: which motors you want to move {motor_name: pos, motor_name: pos,...}
:param float duration: duration of the move
:param str control: control type ('dummy', 'minjerk')
:param bool wait: whether or not to wait for the end of the move
.. note:: In case of dynamixel motors, the speed is automatically adjusted so the goal position is reached after the chosen duration.
"""
for i, (motor_name, position) in enumerate(position_for_motors.iteritems()):
w = False if i < len(position_for_motors) - 1 else wait
m = getattr(self, motor_name)
m.goto_position(position, duration, control, wait=w) | [
"def",
"goto_position",
"(",
"self",
",",
"position_for_motors",
",",
"duration",
",",
"control",
"=",
"None",
",",
"wait",
"=",
"False",
")",
":",
"for",
"i",
",",
"(",
"motor_name",
",",
"position",
")",
"in",
"enumerate",
"(",
"position_for_motors",
".",
"iteritems",
"(",
")",
")",
":",
"w",
"=",
"False",
"if",
"i",
"<",
"len",
"(",
"position_for_motors",
")",
"-",
"1",
"else",
"wait",
"m",
"=",
"getattr",
"(",
"self",
",",
"motor_name",
")",
"m",
".",
"goto_position",
"(",
"position",
",",
"duration",
",",
"control",
",",
"wait",
"=",
"w",
")"
] | Moves a subset of the motors to a position within a specific duration.
:param dict position_for_motors: which motors you want to move {motor_name: pos, motor_name: pos,...}
:param float duration: duration of the move
:param str control: control type ('dummy', 'minjerk')
:param bool wait: whether or not to wait for the end of the move
.. note:: In case of dynamixel motors, the speed is automatically adjusted so the goal position is reached after the chosen duration. | [
"Moves",
"a",
"subset",
"of",
"the",
"motors",
"to",
"a",
"position",
"within",
"a",
"specific",
"duration",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L126-L141 |
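A typical call moves several motors at once and, as the `w = False ... else wait` line arranges, blocks only on the last one. The motor names below are placeholders for whatever the robot's config defines:

```python
# Hypothetical motor names; use the names from your own config.
robot.goto_position({'head_z': 0.0, 'l_shoulder_y': -20.0},
                    duration=2.0, wait=True)  # returns after ~2 s
```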
695 | poppy-project/pypot | pypot/robot/robot.py | Robot.power_up | def power_up(self):
""" Changes all settings to guarantee the motors will be used at their maximum power. """
for m in self.motors:
m.compliant = False
m.moving_speed = 0
m.torque_limit = 100.0 | python | def power_up(self):
""" Changes all settings to guarantee the motors will be used at their maximum power. """
for m in self.motors:
m.compliant = False
m.moving_speed = 0
m.torque_limit = 100.0 | [
"def",
"power_up",
"(",
"self",
")",
":",
"for",
"m",
"in",
"self",
".",
"motors",
":",
"m",
".",
"compliant",
"=",
"False",
"m",
".",
"moving_speed",
"=",
"0",
"m",
".",
"torque_limit",
"=",
"100.0"
] | Changes all settings to guarantee the motors will be used at their maximum power. | [
"Changes",
"all",
"settings",
"to",
"guarantee",
"the",
"motors",
"will",
"be",
"used",
"at",
"their",
"maximum",
"power",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L143-L148 |
696 | poppy-project/pypot | pypot/robot/robot.py | Robot.to_config | def to_config(self):
""" Generates the config for the current robot.
.. note:: The generated config should be used as a basis and must probably be modified.
"""
from ..dynamixel.controller import DxlController
dxl_controllers = [c for c in self._controllers
if isinstance(c, DxlController)]
config = {}
config['controllers'] = {}
for i, c in enumerate(dxl_controllers):
name = 'dxl_controller_{}'.format(i)
config['controllers'][name] = {
'port': c.io.port,
'sync_read': c.io._sync_read,
'attached_motors': [m.name for m in c.motors],
}
config['motors'] = {}
for m in self.motors:
config['motors'][m.name] = {
'id': m.id,
'type': m.model,
'offset': m.offset,
'orientation': 'direct' if m.direct else 'indirect',
'angle_limit': m.angle_limit,
}
if m.angle_limit == (0, 0):
config['motors'][m.name]['wheel_mode'] = True
config['motorgroups'] = {}
return config | python | def to_config(self):
""" Generates the config for the current robot.
.. note:: The generated config should be used as a basis and must probably be modified.
"""
from ..dynamixel.controller import DxlController
dxl_controllers = [c for c in self._controllers
if isinstance(c, DxlController)]
config = {}
config['controllers'] = {}
for i, c in enumerate(dxl_controllers):
name = 'dxl_controller_{}'.format(i)
config['controllers'][name] = {
'port': c.io.port,
'sync_read': c.io._sync_read,
'attached_motors': [m.name for m in c.motors],
}
config['motors'] = {}
for m in self.motors:
config['motors'][m.name] = {
'id': m.id,
'type': m.model,
'offset': m.offset,
'orientation': 'direct' if m.direct else 'indirect',
'angle_limit': m.angle_limit,
}
if m.angle_limit == (0, 0):
config['motors'][m.name]['wheel_mode'] = True
config['motorgroups'] = {}
return config | [
"def",
"to_config",
"(",
"self",
")",
":",
"from",
".",
".",
"dynamixel",
".",
"controller",
"import",
"DxlController",
"dxl_controllers",
"=",
"[",
"c",
"for",
"c",
"in",
"self",
".",
"_controllers",
"if",
"isinstance",
"(",
"c",
",",
"DxlController",
")",
"]",
"config",
"=",
"{",
"}",
"config",
"[",
"'controllers'",
"]",
"=",
"{",
"}",
"for",
"i",
",",
"c",
"in",
"enumerate",
"(",
"dxl_controllers",
")",
":",
"name",
"=",
"'dxl_controller_{}'",
".",
"format",
"(",
"i",
")",
"config",
"[",
"'controllers'",
"]",
"[",
"name",
"]",
"=",
"{",
"'port'",
":",
"c",
".",
"io",
".",
"port",
",",
"'sync_read'",
":",
"c",
".",
"io",
".",
"_sync_read",
",",
"'attached_motors'",
":",
"[",
"m",
".",
"name",
"for",
"m",
"in",
"c",
".",
"motors",
"]",
",",
"}",
"config",
"[",
"'motors'",
"]",
"=",
"{",
"}",
"for",
"m",
"in",
"self",
".",
"motors",
":",
"config",
"[",
"'motors'",
"]",
"[",
"m",
".",
"name",
"]",
"=",
"{",
"'id'",
":",
"m",
".",
"id",
",",
"'type'",
":",
"m",
".",
"model",
",",
"'offset'",
":",
"m",
".",
"offset",
",",
"'orientation'",
":",
"'direct'",
"if",
"m",
".",
"direct",
"else",
"'indirect'",
",",
"'angle_limit'",
":",
"m",
".",
"angle_limit",
",",
"}",
"if",
"m",
".",
"angle_limit",
"==",
"(",
"0",
",",
"0",
")",
":",
"config",
"[",
"'motors'",
"]",
"[",
"'wheel_mode'",
"]",
"=",
"True",
"config",
"[",
"'motorgroups'",
"]",
"=",
"{",
"}",
"return",
"config"
] | Generates the config for the current robot.
.. note:: The generated config should be used as a basis and must probably be modified. | [
"Generates",
"the",
"config",
"for",
"the",
"current",
"robot",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/robot/robot.py#L150-L187 |
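Since the note says the generated config is only a basis, a common pattern is to dump it to JSON and edit the offsets and limits by hand before reloading it:

```python
import json

with open('generated_config.json', 'w') as f:
    json.dump(robot.to_config(), f, indent=2)
# After hand-editing, reload with pypot.robot.from_config(...).
```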
697 | poppy-project/pypot | pypot/vrep/controller.py | VrepController.update | def update(self):
""" Synchronization update loop.
At each update all motor positions are read from vrep and set to the motors. The motors' target positions are also sent to v-rep.
"""
# Read all the angle limits
h, _, l, _ = self.io.call_remote_api('simxGetObjectGroupData',
remote_api.sim_object_joint_type,
16,
streaming=True)
limits4handle = {hh: (ll, lr) for hh, ll, lr in zip(h, l[::2], l[1::2])}
for m in self.motors:
tmax = torque_max[m.model]
# Read values from V-REP and set them to the Motor
p = round(
rad2deg(self.io.get_motor_position(motor_name=self._motor_name(m))), 1)
m.__dict__['present_position'] = p
l = 100. * self.io.get_motor_force(motor_name=self._motor_name(m)) / tmax
m.__dict__['present_load'] = l
m.__dict__['_load_fifo'].append(abs(l))
m.__dict__['present_temperature'] = 25 + \
round(2.5 * sum(m.__dict__['_load_fifo']) / len(m.__dict__['_load_fifo']), 1)
ll, lr = limits4handle[self.io._object_handles[self._motor_name(m)]]
m.__dict__['lower_limit'] = rad2deg(ll)
m.__dict__['upper_limit'] = rad2deg(ll) + rad2deg(lr)
# Send new values from Motor to V-REP
p = deg2rad(round(m.__dict__['goal_position'], 1))
self.io.set_motor_position(motor_name=self._motor_name(m), position=p)
t = m.__dict__['torque_limit'] * tmax / 100.
if m.__dict__['compliant']:
t = 0.
self.io.set_motor_force(motor_name=self._motor_name(m), force=t) | python | def update(self):
""" Synchronization update loop.
At each update all motor positions are read from vrep and set to the motors. The motors' target positions are also sent to v-rep.
"""
# Read all the angle limits
h, _, l, _ = self.io.call_remote_api('simxGetObjectGroupData',
remote_api.sim_object_joint_type,
16,
streaming=True)
limits4handle = {hh: (ll, lr) for hh, ll, lr in zip(h, l[::2], l[1::2])}
for m in self.motors:
tmax = torque_max[m.model]
# Read values from V-REP and set them to the Motor
p = round(
rad2deg(self.io.get_motor_position(motor_name=self._motor_name(m))), 1)
m.__dict__['present_position'] = p
l = 100. * self.io.get_motor_force(motor_name=self._motor_name(m)) / tmax
m.__dict__['present_load'] = l
m.__dict__['_load_fifo'].append(abs(l))
m.__dict__['present_temperature'] = 25 + \
round(2.5 * sum(m.__dict__['_load_fifo']) / len(m.__dict__['_load_fifo']), 1)
ll, lr = limits4handle[self.io._object_handles[self._motor_name(m)]]
m.__dict__['lower_limit'] = rad2deg(ll)
m.__dict__['upper_limit'] = rad2deg(ll) + rad2deg(lr)
# Send new values from Motor to V-REP
p = deg2rad(round(m.__dict__['goal_position'], 1))
self.io.set_motor_position(motor_name=self._motor_name(m), position=p)
t = m.__dict__['torque_limit'] * tmax / 100.
if m.__dict__['compliant']:
t = 0.
self.io.set_motor_force(motor_name=self._motor_name(m), force=t) | [
"def",
"update",
"(",
"self",
")",
":",
"# Read all the angle limits",
"h",
",",
"_",
",",
"l",
",",
"_",
"=",
"self",
".",
"io",
".",
"call_remote_api",
"(",
"'simxGetObjectGroupData'",
",",
"remote_api",
".",
"sim_object_joint_type",
",",
"16",
",",
"streaming",
"=",
"True",
")",
"limits4handle",
"=",
"{",
"hh",
":",
"(",
"ll",
",",
"lr",
")",
"for",
"hh",
",",
"ll",
",",
"lr",
"in",
"zip",
"(",
"h",
",",
"l",
"[",
":",
":",
"2",
"]",
",",
"l",
"[",
"1",
":",
":",
"2",
"]",
")",
"}",
"for",
"m",
"in",
"self",
".",
"motors",
":",
"tmax",
"=",
"torque_max",
"[",
"m",
".",
"model",
"]",
"# Read values from V-REP and set them to the Motor",
"p",
"=",
"round",
"(",
"rad2deg",
"(",
"self",
".",
"io",
".",
"get_motor_position",
"(",
"motor_name",
"=",
"self",
".",
"_motor_name",
"(",
"m",
")",
")",
")",
",",
"1",
")",
"m",
".",
"__dict__",
"[",
"'present_position'",
"]",
"=",
"p",
"l",
"=",
"100.",
"*",
"self",
".",
"io",
".",
"get_motor_force",
"(",
"motor_name",
"=",
"self",
".",
"_motor_name",
"(",
"m",
")",
")",
"/",
"tmax",
"m",
".",
"__dict__",
"[",
"'present_load'",
"]",
"=",
"l",
"m",
".",
"__dict__",
"[",
"'_load_fifo'",
"]",
".",
"append",
"(",
"abs",
"(",
"l",
")",
")",
"m",
".",
"__dict__",
"[",
"'present_temperature'",
"]",
"=",
"25",
"+",
"round",
"(",
"2.5",
"*",
"sum",
"(",
"m",
".",
"__dict__",
"[",
"'_load_fifo'",
"]",
")",
"/",
"len",
"(",
"m",
".",
"__dict__",
"[",
"'_load_fifo'",
"]",
")",
",",
"1",
")",
"ll",
",",
"lr",
"=",
"limits4handle",
"[",
"self",
".",
"io",
".",
"_object_handles",
"[",
"self",
".",
"_motor_name",
"(",
"m",
")",
"]",
"]",
"m",
".",
"__dict__",
"[",
"'lower_limit'",
"]",
"=",
"rad2deg",
"(",
"ll",
")",
"m",
".",
"__dict__",
"[",
"'upper_limit'",
"]",
"=",
"rad2deg",
"(",
"ll",
")",
"+",
"rad2deg",
"(",
"lr",
")",
"# Send new values from Motor to V-REP",
"p",
"=",
"deg2rad",
"(",
"round",
"(",
"m",
".",
"__dict__",
"[",
"'goal_position'",
"]",
",",
"1",
")",
")",
"self",
".",
"io",
".",
"set_motor_position",
"(",
"motor_name",
"=",
"self",
".",
"_motor_name",
"(",
"m",
")",
",",
"position",
"=",
"p",
")",
"t",
"=",
"m",
".",
"__dict__",
"[",
"'torque_limit'",
"]",
"*",
"tmax",
"/",
"100.",
"if",
"m",
".",
"__dict__",
"[",
"'compliant'",
"]",
":",
"t",
"=",
"0.",
"self",
".",
"io",
".",
"set_motor_force",
"(",
"motor_name",
"=",
"self",
".",
"_motor_name",
"(",
"m",
")",
",",
"force",
"=",
"t",
")"
] | Synchronization update loop.
At each update all motor positions are read from vrep and set to the motors. The motors' target positions are also sent to v-rep. | [
"Synchronization",
"update",
"loop",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/controller.py#L41-L82 |
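The simulated temperature is an affine function of the recent absolute-load history: 25 °C at rest, plus 2.5 °C per percent of average load. The model in isolation, matching the expression above:

```python
def simulated_temperature(load_fifo):
    """Affine load -> temperature model from VrepController.update()."""
    return 25 + round(2.5 * sum(load_fifo) / len(load_fifo), 1)

assert simulated_temperature([10.0, 10.0]) == 50.0  # 25 + 2.5 * 10
```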
698 | poppy-project/pypot | pypot/vrep/controller.py | VrepObjectTracker.update | def update(self):
""" Updates the position and orientation of the tracked objects. """
for s in self.sensors:
s.position = self.io.get_object_position(object_name=s.name)
s.orientation = self.io.get_object_orientation(object_name=s.name) | python | def update(self):
""" Updates the position and orientation of the tracked objects. """
for s in self.sensors:
s.position = self.io.get_object_position(object_name=s.name)
s.orientation = self.io.get_object_orientation(object_name=s.name) | [
"def",
"update",
"(",
"self",
")",
":",
"for",
"s",
"in",
"self",
".",
"sensors",
":",
"s",
".",
"position",
"=",
"self",
".",
"io",
".",
"get_object_position",
"(",
"object_name",
"=",
"s",
".",
"name",
")",
"s",
".",
"orientation",
"=",
"self",
".",
"io",
".",
"get_object_orientation",
"(",
"object_name",
"=",
"s",
".",
"name",
")"
] | Updates the position and orientation of the tracked objects. | [
"Updates",
"the",
"position",
"and",
"orientation",
"of",
"the",
"tracked",
"objects",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/controller.py#L149-L153 |
699 | poppy-project/pypot | pypot/vrep/controller.py | VrepCollisionTracker.update | def update(self):
""" Update the state of the collision detectors. """
for s in self.sensors:
s.colliding = self.io.get_collision_state(collision_name=s.name) | python | def update(self):
""" Update the state of the collision detectors. """
for s in self.sensors:
s.colliding = self.io.get_collision_state(collision_name=s.name) | [
"def",
"update",
"(",
"self",
")",
":",
"for",
"s",
"in",
"self",
".",
"sensors",
":",
"s",
".",
"colliding",
"=",
"self",
".",
"io",
".",
"get_collision_state",
"(",
"collision_name",
"=",
"s",
".",
"name",
")"
] | Update the state of the collision detectors. | [
"Update",
"the",
"state",
"of",
"the",
"collision",
"detectors",
"."
] | d9c6551bbc87d45d9d1f0bc15e35b616d0002afd | https://github.com/poppy-project/pypot/blob/d9c6551bbc87d45d9d1f0bc15e35b616d0002afd/pypot/vrep/controller.py#L180-L184 |