code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def associn(m, path, value):
""" Copy-on-write associates a value in a nested dict """
def assoc_recursively(m, path, value):
if not path:
return value
p = path[0]
return assoc(m, p, assoc_recursively(m.get(p,{}), path[1:], value))
return assoc_recursively(m, path,... | Copy-on-write associates a value in a nested dict |
def _start_new_cdx_file(self):
'''Create and set current CDX file.'''
self._cdx_filename = '{0}.cdx'.format(self._prefix_filename)
if not self._params.appending:
wpull.util.truncate_file(self._cdx_filename)
self._write_cdx_header()
elif not os.path.exists(self._c... | Create and set current CDX file. |
def findAll(self, strSeq) :
"""Same as find but returns a list of all occurences"""
arr = self.encode(strSeq)
lst = []
lst = self._kmp_find(arr[0], self, lst)
return lst | Same as find but returns a list of all occurences |
def do_step(self, values, xy_values,coeff, width):
"""Calculates forces between two diagrams and pushes them apart by tenth of width"""
forces = {k:[] for k,i in enumerate(xy_values)}
for (index1, value1), (index2,value2) in combinations(enumerate(xy_values),2):
f = self.calc_2d_forc... | Calculates forces between two diagrams and pushes them apart by tenth of width |
def get_sort_field(attr, model):
"""
Get's the field to sort on for the given
attr.
Currently returns attr if it is a field on
the given model.
If the models has an attribute matching that name
and that value has an attribute 'sort_field' than
that value is used.
TODO: Provide a w... | Get's the field to sort on for the given
attr.
Currently returns attr if it is a field on
the given model.
If the models has an attribute matching that name
and that value has an attribute 'sort_field' than
that value is used.
TODO: Provide a way to sort based on a non field
attribute... |
def commit(self, offset=None, limit=None, dryrun=False):
""" Start the rsync download """
self.stream.command = "rsync -avRK --files-from={path} {source} {destination}"
self.stream.append_tasks_to_streamlets(offset=offset, limit=limit)
self.stream.commit_streamlets()
self.stream... | Start the rsync download |
def _compare_columns(self, new_columns, old_columns):
''' a helper method for generating differences between column properties '''
# print(new_columns)
# print(old_columns)
add_columns = {}
remove_columns = {}
rename_columns = {}
retype_... | a helper method for generating differences between column properties |
def get_container_info(self, obj):
"""Returns the info for a Container
"""
info = self.get_base_info(obj)
info.update({})
return info | Returns the info for a Container |
def get_minimum_score_metadata(self):
"""Gets the metadata for the minimum score.
return: (osid.Metadata) - metadata for the minimum score
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for osid.resource.ResourceForm.get_group_metada... | Gets the metadata for the minimum score.
return: (osid.Metadata) - metadata for the minimum score
*compliance: mandatory -- This method must be implemented.* |
def to_dict(self):
"""
Returns:
dict: Combined global and thread-specific logging context
"""
with self._lock:
result = {}
if self._gpayload:
result.update(self._gpayload)
if self._tpayload:
result.update(get... | Returns:
dict: Combined global and thread-specific logging context |
def face_angles_sparse(mesh):
"""
A sparse matrix representation of the face angles.
Returns
----------
sparse: scipy.sparse.coo_matrix with:
dtype: float
shape: (len(mesh.vertices), len(mesh.faces))
"""
matrix = coo_matrix((mesh.face_angles.flatten(),
... | A sparse matrix representation of the face angles.
Returns
----------
sparse: scipy.sparse.coo_matrix with:
dtype: float
shape: (len(mesh.vertices), len(mesh.faces)) |
def unique_rows(arr, return_index=False, return_inverse=False):
"""Returns a copy of arr with duplicate rows removed.
From Stackoverflow "Find unique rows in numpy.array."
Parameters
----------
arr : :py:class:`Array`, (`m`, `n`)
The array to find the unique rows of.
return_ind... | Returns a copy of arr with duplicate rows removed.
From Stackoverflow "Find unique rows in numpy.array."
Parameters
----------
arr : :py:class:`Array`, (`m`, `n`)
The array to find the unique rows of.
return_index : bool, optional
If True, the indices of the unique rows in ... |
def serialize_operator_greater_than(self, op):
"""
Serializer for :meth:`SpiffWorkflow.operators.NotEqual`.
Example::
<greater-than>
<value>text</value>
<value><attribute>foobar</attribute></value>
</greater-than>
"""
elem... | Serializer for :meth:`SpiffWorkflow.operators.NotEqual`.
Example::
<greater-than>
<value>text</value>
<value><attribute>foobar</attribute></value>
</greater-than> |
def parse(self, template):
"""
Parse a template string starting at some index.
This method uses the current tag delimiter.
Arguments:
template: a unicode string that is the template to parse.
index: the index at which to start parsing.
Returns:
... | Parse a template string starting at some index.
This method uses the current tag delimiter.
Arguments:
template: a unicode string that is the template to parse.
index: the index at which to start parsing.
Returns:
a ParsedTemplate instance. |
def __definitions_descriptor(self):
"""Describes the definitions section of the OpenAPI spec.
Returns:
Dictionary describing the definitions of the spec.
"""
# Filter out any keys that aren't 'properties' or 'type'
result = {}
for def_key, def_value in self.__parser.schemas().iteritems():... | Describes the definitions section of the OpenAPI spec.
Returns:
Dictionary describing the definitions of the spec. |
def run(self):
"""Evaluate the command line arguments, performing the appropriate
actions so the application can be started.
"""
# The list command prevents any other processing of args
if self._args.list:
self._print_installed_apps(self._args.controller)
... | Evaluate the command line arguments, performing the appropriate
actions so the application can be started. |
def service(self):
""" Returns a Splunk service object for this command invocation or None.
The service object is created from the Splunkd URI and authentication token passed to the command invocation in
the search results info file. This data is not passed to a command invocation by default. Y... | Returns a Splunk service object for this command invocation or None.
The service object is created from the Splunkd URI and authentication token passed to the command invocation in
the search results info file. This data is not passed to a command invocation by default. You must request it by
s... |
async def get_parameters(self, parameters=None):
"""Get the settings for the requested component(s) of QTM in XML format.
:param parameters: A list of parameters to request.
Could be 'all' or any combination
of 'general', '3d', '6d', 'analog', 'force', 'gazevector', 'image'.
... | Get the settings for the requested component(s) of QTM in XML format.
:param parameters: A list of parameters to request.
Could be 'all' or any combination
of 'general', '3d', '6d', 'analog', 'force', 'gazevector', 'image'.
:rtype: An XML string containing the requested settings... |
def login_required(func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary.
"""
def decorator(view_func):
@functools.wraps(view_func, assigned=available_attrs(view_func))
... | Decorator for views that checks that the user is logged in, redirecting
to the log in page if necessary. |
def health():
"""Check the health of this service."""
up_time = time.time() - START_TIME
response = dict(service=__service_id__,
uptime='{:.2f}s'.format(up_time))
return response, HTTPStatus.OK | Check the health of this service. |
def _load_config():
"""Helper to load prefs from ~/.vispy/vispy.json"""
fname = _get_config_fname()
if fname is None or not op.isfile(fname):
return dict()
with open(fname, 'r') as fid:
config = json.load(fid)
return config | Helper to load prefs from ~/.vispy/vispy.json |
def pseudo_partial_waves(self):
"""Dictionary with the pseudo partial waves indexed by state."""
pseudo_partial_waves = OrderedDict()
for (mesh, values, attrib) in self._parse_all_radfuncs("pseudo_partial_wave"):
state = attrib["state"]
#val_state = self.valence_states[st... | Dictionary with the pseudo partial waves indexed by state. |
def get_gradient_x(shape, px):
"""Calculate the gradient in the x direction to the line at px
The y gradient operator is a block diagonal matrix, where each block is the size of the image width.
The matrix itself is made up of (img_height x img_height) blocks, most of which are all zeros.
"""
impor... | Calculate the gradient in the x direction to the line at px
The y gradient operator is a block diagonal matrix, where each block is the size of the image width.
The matrix itself is made up of (img_height x img_height) blocks, most of which are all zeros. |
def tag_list(self, tags):
"""
Generates a list of tags identifying those previously selected.
Returns a list of tuples of the form (<tag name>, <CSS class name>).
Uses the string names rather than the tags themselves in order to work
with tag lists built from forms not fully su... | Generates a list of tags identifying those previously selected.
Returns a list of tuples of the form (<tag name>, <CSS class name>).
Uses the string names rather than the tags themselves in order to work
with tag lists built from forms not fully submitted. |
def create_default_item_node(field, state):
"""Create a definition list item node that describes the default value
of a Field config.
Parameters
----------
field : ``lsst.pex.config.Field``
A configuration field.
state : ``docutils.statemachine.State``
Usually the directive's ``... | Create a definition list item node that describes the default value
of a Field config.
Parameters
----------
field : ``lsst.pex.config.Field``
A configuration field.
state : ``docutils.statemachine.State``
Usually the directive's ``state`` attribute.
Returns
-------
``d... |
def get_store_local_final_result(self):
"""Store/Retrieve the final result.
Retrieve the final result for FW create/delete from DB and store it
locally.
"""
fw_dict = self.get_fw_dict()
fw_data, fw_data_dict = self.get_fw(fw_dict.get('fw_id'))
res = fw_data.resul... | Store/Retrieve the final result.
Retrieve the final result for FW create/delete from DB and store it
locally. |
def get_value(self):
"""
Evaluate self.expr to get the parameter's value
"""
if (self._value is None) and (self.expr is not None):
self._value = self.expr.get_value()
return self._value | Evaluate self.expr to get the parameter's value |
def sky2pix_ellipse(self, pos, a, b, pa):
"""
Convert an ellipse from sky to pixel coordinates.
Parameters
----------
pos : (float, float)
The (ra, dec) of the ellipse center (degrees).
a, b, pa: float
The semi-major axis, semi-minor axis and posi... | Convert an ellipse from sky to pixel coordinates.
Parameters
----------
pos : (float, float)
The (ra, dec) of the ellipse center (degrees).
a, b, pa: float
The semi-major axis, semi-minor axis and position angle of the ellipse (degrees).
Returns
... |
def sample(self, n):
""" Samples data into a Pandas DataFrame.
Args:
n: number of sampled counts.
Returns:
A dataframe containing sampled data.
Raises:
Exception if n is larger than number of rows.
"""
row_total_count = 0
row_counts = []
for file in self.files:
w... | Samples data into a Pandas DataFrame.
Args:
n: number of sampled counts.
Returns:
A dataframe containing sampled data.
Raises:
Exception if n is larger than number of rows. |
def get_token_network_events(
chain: BlockChainService,
token_network_address: Address,
contract_manager: ContractManager,
events: Optional[List[str]] = ALL_EVENTS,
from_block: BlockSpecification = GENESIS_BLOCK_NUMBER,
to_block: BlockSpecification = 'latest',
) -> List[D... | Helper to get all events of the ChannelManagerContract at `token_address`. |
def align(aligner, reads):
'''
Test if reads can get aligned to the lambda genome,
if not: write to stdout
'''
i = 0
for record in SeqIO.parse(reads, "fastq"):
try:
next(aligner.map(str(record.seq)))
i += 1
except StopIteration:
print(record.fo... | Test if reads can get aligned to the lambda genome,
if not: write to stdout |
def percentage_progress(self):
"""
Returns a float between 0 and 1, representing the current job's progress in its task.
If total_progress is not given or 0, just return self.progress.
:return: float corresponding to the total percentage progress of the job.
"""
if self... | Returns a float between 0 and 1, representing the current job's progress in its task.
If total_progress is not given or 0, just return self.progress.
:return: float corresponding to the total percentage progress of the job. |
def handle_delivered(
chain_state: ChainState,
state_change: ReceiveDelivered,
) -> TransitionResult[ChainState]:
""" Check if the "Delivered" message exists in the global queue and delete if found."""
queueid = QueueIdentifier(state_change.sender, CHANNEL_IDENTIFIER_GLOBAL_QUEUE)
inplace_de... | Check if the "Delivered" message exists in the global queue and delete if found. |
def _fill_sample_count(self, node):
"""Counts and fills sample counts inside call tree."""
node['sampleCount'] += sum(
self._fill_sample_count(child) for child in node['children'])
return node['sampleCount'] | Counts and fills sample counts inside call tree. |
def any_channel_validate_token_create(self, data, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/channel_framework#validate-token"
api_path = "/api/v2/any_channel/validate_token"
return self.call(api_path, method="POST", data=data, **kwargs) | https://developer.zendesk.com/rest_api/docs/core/channel_framework#validate-token |
def to_string(s, encoding='utf-8'):
"""
Accept unicode(py2) or bytes(py3)
Returns:
py2 type: str
py3 type: str
"""
if six.PY2:
return s.encode(encoding)
if isinstance(s, bytes):
return s.decode(encoding)
return s | Accept unicode(py2) or bytes(py3)
Returns:
py2 type: str
py3 type: str |
def parse(soup):
"""Parses the results for a company search and return the results
if is_direct_match. If no company is found, a list of suggestions
are returned as dict. If one such recommendation is found to be an
exact match, re-perform request for this exact match
"""
if is_direct_match(sou... | Parses the results for a company search and return the results
if is_direct_match. If no company is found, a list of suggestions
are returned as dict. If one such recommendation is found to be an
exact match, re-perform request for this exact match |
def rules(self):
"""Iterate over the defined Rules."""
rule = lib.EnvGetNextDefrule(self._env, ffi.NULL)
while rule != ffi.NULL:
yield Rule(self._env, rule)
rule = lib.EnvGetNextDefrule(self._env, rule) | Iterate over the defined Rules. |
def parse_field_path(api_repr):
"""Parse a **field path** from into a list of nested field names.
See :func:`field_path` for more on **field paths**.
Args:
api_repr (str):
The unique Firestore api representation which consists of
either simple or UTF-8 field names. It canno... | Parse a **field path** from into a list of nested field names.
See :func:`field_path` for more on **field paths**.
Args:
api_repr (str):
The unique Firestore api representation which consists of
either simple or UTF-8 field names. It cannot exceed
1500 bytes, and ca... |
def stopped(self):
"""Return if the stream is stopped."""
if self.tune and self.tune.get('@stopped'):
return True if self.tune.get('@stopped') == 'true' else False
else:
raise PyMediaroomError("No information in <node> about @stopped") | Return if the stream is stopped. |
def unicode_compatible(cls):
"""
A decorator that defines ``__str__`` and ``__unicode__`` methods
under Python 2.
"""
if not is_py3:
cls.__unicode__ = cls.__str__
cls.__str__ = lambda self: self.__unicode__().encode('utf-8')
return cls | A decorator that defines ``__str__`` and ``__unicode__`` methods
under Python 2. |
def default(self, obj, **kwargs):
"""Handles the adapting of special types from mongo"""
if isinstance(obj, datetime.datetime):
return time.mktime(obj.timetuple())
if isinstance(obj, Timestamp):
return obj.time
if isinstance(obj, ObjectId):
return ob... | Handles the adapting of special types from mongo |
def _sample(self, position, stepsize):
"""
Returns a sample using a single iteration of NUTS
"""
# Re-sampling momentum
momentum = np.random.normal(0, 1, len(position))
# Initializations
depth = 0
position_backward, position_forward = position, position
... | Returns a sample using a single iteration of NUTS |
def _get_name_and_version(name, version, for_filename=False):
"""Return the distribution name with version.
If for_filename is true, return a filename-escaped form."""
if for_filename:
# For both name and version any runs of non-alphanumeric or '.'
# characters are replaced with a single '-... | Return the distribution name with version.
If for_filename is true, return a filename-escaped form. |
def meta(*bases, **kwargs):
"""
Allows unique syntax similar to Python 3 for working with metaclasses in
both Python 2 and Python 3.
Examples
--------
>>> class BadMeta(type): # An usual metaclass definition
... def __new__(mcls, name, bases, namespace):
... if "bad" not in namespace: # A bad con... | Allows unique syntax similar to Python 3 for working with metaclasses in
both Python 2 and Python 3.
Examples
--------
>>> class BadMeta(type): # An usual metaclass definition
... def __new__(mcls, name, bases, namespace):
... if "bad" not in namespace: # A bad constraint
... raise Exception(... |
def NameGroups(data_arr,id_key):
"""Get group name associated with ID.
TODO - not yet implemented
"""
new_data_arr = []
for data in data_arr:
try:
data_arr[id_key] = clc._GROUP_MAPPING[data[id_key]]
except:
pass
new_data_arr.append(data)
if clc.args: clc.v1.output.Status("ERROR",2,"Group ... | Get group name associated with ID.
TODO - not yet implemented |
def _if_statement(test, if_function, else_function) -> None:
"""
Evaluate an if statement within a @magicquil block.
If the test value is a Quil Addr then unwind it into quil code equivalent to an if then statement using jumps. Both
sides of the if statement need to be evaluated and placed into separat... | Evaluate an if statement within a @magicquil block.
If the test value is a Quil Addr then unwind it into quil code equivalent to an if then statement using jumps. Both
sides of the if statement need to be evaluated and placed into separate Programs, which is why we create new
program contexts for their eva... |
def checkSimbad(g, target, maxobj=5, timeout=5):
"""
Sends off a request to Simbad to check whether a target is recognised.
Returns with a list of results, or raises an exception if it times out
"""
url = 'http://simbad.u-strasbg.fr/simbad/sim-script'
q = 'set limit ' + str(maxobj) + \
'... | Sends off a request to Simbad to check whether a target is recognised.
Returns with a list of results, or raises an exception if it times out |
def update(self, table_name, primary_key, instance):
""" replaces document identified by the primary_key or creates one if a matching document does not exist"""
assert isinstance(primary_key, dict)
assert isinstance(instance, BaseDocument)
collection = self._db[table_name]
# wor... | replaces document identified by the primary_key or creates one if a matching document does not exist |
def bond_microcanonical_statistics(
perc_graph, num_nodes, num_edges, seed,
spanning_cluster=True,
auxiliary_node_attributes=None, auxiliary_edge_attributes=None,
spanning_sides=None,
**kwargs
):
"""
Evolve a single run over all microstates (bond occupation numbers)
Return the cluster s... | Evolve a single run over all microstates (bond occupation numbers)
Return the cluster statistics for each microstate
Parameters
----------
perc_graph : networkx.Graph
The substrate graph on which percolation is to take place
num_nodes : int
Number ``N`` of sites in the graph
... |
def _ReadFlowResponseCounts(self, request_keys, cursor=None):
"""Reads counts of responses for the given requests."""
query = """
SELECT
flow_requests.client_id, flow_requests.flow_id,
flow_requests.request_id, COUNT(*)
FROM flow_responses, flow_requests
WHERE ({conditions}) A... | Reads counts of responses for the given requests. |
def get_xritdecompress_cmd():
"""Find a valid binary for the xRITDecompress command."""
cmd = os.environ.get('XRIT_DECOMPRESS_PATH', None)
if not cmd:
raise IOError("XRIT_DECOMPRESS_PATH is not defined (complete path to xRITDecompress)")
question = ("Did you set the environment variable XRIT_DE... | Find a valid binary for the xRITDecompress command. |
async def serialize_rctsig_prunable(self, ar, type, inputs, outputs, mixin):
"""
Serialize rct sig
:param ar:
:type ar: x.Archive
:param type:
:param inputs:
:param outputs:
:param mixin:
:return:
"""
if type == RctType.Null:
... | Serialize rct sig
:param ar:
:type ar: x.Archive
:param type:
:param inputs:
:param outputs:
:param mixin:
:return: |
def _prepPointsForSegments(points):
"""
Move any off curves at the end of the contour
to the beginning of the contour. This makes
segmentation easier.
"""
while 1:
point = points[-1]
if point.segmentType:
break
else:
point = points.pop()
... | Move any off curves at the end of the contour
to the beginning of the contour. This makes
segmentation easier. |
def set_language(self, request, org):
"""Set the current language from the org configuration."""
if org:
lang = org.language or settings.DEFAULT_LANGUAGE
translation.activate(lang) | Set the current language from the org configuration. |
def get_default_fields(self):
"""
get all fields of model, execpt id
"""
field_names = self._meta.get_all_field_names()
if 'id' in field_names:
field_names.remove('id')
return field_names | get all fields of model, execpt id |
def get_data_port_m(self, data_port_id):
"""Searches and returns the model of a data port of a given state
The method searches a port with the given id in the data ports of the given state model. If the state model
is a container state, not only the input and output data ports are looked at, bu... | Searches and returns the model of a data port of a given state
The method searches a port with the given id in the data ports of the given state model. If the state model
is a container state, not only the input and output data ports are looked at, but also the scoped variables.
:param data_po... |
def multiclass_logloss(actual, predicted, eps=1e-15):
"""Multi class version of Logarithmic Loss metric.
:param actual: Array containing the actual target classes
:param predicted: Matrix with class predictions, one probability per class
"""
# Convert 'actual' to a binary array if it's not already:... | Multi class version of Logarithmic Loss metric.
:param actual: Array containing the actual target classes
:param predicted: Matrix with class predictions, one probability per class |
def list_issues(
self, status=None, tags=None, assignee=None, author=None,
milestones=None, priority=None, no_stones=None, since=None,
order=None
):
"""
List all issues of a project.
:param status: filters the status of the issues
:param tags: file... | List all issues of a project.
:param status: filters the status of the issues
:param tags: filers the tags of the issues
:param assignee: filters the assignee of the issues
:param author: filters the author of the issues
:param milestones: filters the milestones of the issues (li... |
def reverse_whois(self, query, exclude=[], scope='current', mode=None, **kwargs):
"""List of one or more terms to search for in the Whois record,
as a Python list or separated with the pipe character ( | ).
"""
return self._results('reverse-whois', '/v1/reverse-whois', terms=delimited... | List of one or more terms to search for in the Whois record,
as a Python list or separated with the pipe character ( | ). |
def filters(self, *filters):
"""
Add a list of Filter ingredients to the query. These can either be
Filter objects or strings representing filters on the service's shelf.
``.filters()`` are additive, calling .filters() more than once will add
to the list of filters being used by ... | Add a list of Filter ingredients to the query. These can either be
Filter objects or strings representing filters on the service's shelf.
``.filters()`` are additive, calling .filters() more than once will add
to the list of filters being used by the recipe.
The Filter expression will b... |
def cfms(self, cfms):
'''Set the CFM values for this object's degrees of freedom.
Parameters
----------
cfms : float or sequence of float
A CFM value to set on all degrees of freedom, or a list
containing one such value for each degree of freedom.
'''
... | Set the CFM values for this object's degrees of freedom.
Parameters
----------
cfms : float or sequence of float
A CFM value to set on all degrees of freedom, or a list
containing one such value for each degree of freedom. |
def compute_rollover(self, current_time: int) -> int:
"""
Work out the rollover time based on the specified time.
If we are rolling over at midnight or weekly, then the interval is
already known. need to figure out is WHEN the next interval is.
In other words, if you are rolling... | Work out the rollover time based on the specified time.
If we are rolling over at midnight or weekly, then the interval is
already known. need to figure out is WHEN the next interval is.
In other words, if you are rolling over at midnight, then your base
interval is 1 day, but you want ... |
def OIDC_UNAUTHENTICATED_SESSION_MANAGEMENT_KEY(self):
"""
OPTIONAL. Supply a fixed string to use as browser-state key for unauthenticated clients.
"""
# Memoize generated value
if not self._unauthenticated_session_management_key:
self._unauthenticated_session_manage... | OPTIONAL. Supply a fixed string to use as browser-state key for unauthenticated clients. |
def plot_returns(perf_attrib_data, cost=None, ax=None):
"""
Plot total, specific, and common returns.
Parameters
----------
perf_attrib_data : pd.DataFrame
df with factors, common returns, and specific returns as columns,
and datetimes as index. Assumes the `total_returns` column is... | Plot total, specific, and common returns.
Parameters
----------
perf_attrib_data : pd.DataFrame
df with factors, common returns, and specific returns as columns,
and datetimes as index. Assumes the `total_returns` column is NOT
cost adjusted.
- Example:
... |
def _setup(app, *, schema, title=None, app_key=APP_KEY, db=None):
"""Initialize the admin-on-rest admin"""
admin = web.Application(loop=app.loop)
app[app_key] = admin
loader = jinja2.FileSystemLoader([TEMPLATES_ROOT, ])
aiohttp_jinja2.setup(admin, loader=loader, app_key=TEMPLATE_APP_KEY)
if t... | Initialize the admin-on-rest admin |
def process_event(self, event_name: str, data: dict) -> None:
"""
Update learning rate and momentum variables after event (given by `event_name`)
Args:
event_name: name of event after which the method was called.
Set of values: `"after_validation"`, `"after_batch... | Update learning rate and momentum variables after event (given by `event_name`)
Args:
event_name: name of event after which the method was called.
Set of values: `"after_validation"`, `"after_batch"`, `"after_epoch"`, `"after_train_log"`
data: dictionary with paramet... |
def start_http_server(self, port, host='0.0.0.0', endpoint='/metrics'):
"""
Start an HTTP server for exposing the metrics.
This will be an individual Flask application,
not the one registered with this class.
:param port: the HTTP port to expose the metrics endpoint on
:... | Start an HTTP server for exposing the metrics.
This will be an individual Flask application,
not the one registered with this class.
:param port: the HTTP port to expose the metrics endpoint on
:param host: the HTTP host to listen on (default: `0.0.0.0`)
:param endpoint: the URL... |
def save_as(self, fname, obj=None):
""" Save DICOM file given a GDCM DICOM object.
Examples of a GDCM DICOM object:
* gdcm.Writer()
* gdcm.Reader()
* gdcm.Anonymizer()
:param fname: DICOM file name to be saved
:param obj: DICOM object to be saved, if None, Anonym... | Save DICOM file given a GDCM DICOM object.
Examples of a GDCM DICOM object:
* gdcm.Writer()
* gdcm.Reader()
* gdcm.Anonymizer()
:param fname: DICOM file name to be saved
:param obj: DICOM object to be saved, if None, Anonymizer() is used |
def _multi_permission_mask(mode):
"""
Support multiple, comma-separated Unix chmod symbolic modes.
>>> _multi_permission_mask('a=r,u+w')(0) == 0o644
True
"""
def compose(f, g):
return lambda *args, **kwargs: g(f(*args, **kwargs))
return functools.reduce(compose, map(_permission_mask... | Support multiple, comma-separated Unix chmod symbolic modes.
>>> _multi_permission_mask('a=r,u+w')(0) == 0o644
True |
def do_file(self, line):
"""PErform some file operation"""
opts = self.FILE_OPTS
if not self.all_ontologies:
self._help_nofiles()
return
line = line.split()
if not line or line[0] not in opts:
self.help_file()
return
if ... | PErform some file operation |
def blackbody_spectral_radiance(T, wavelength):
r'''Returns the spectral radiance, in units of W/m^2/sr/µm.
.. math::
I_{\lambda,blackbody,e}(\lambda,T)=\frac{2hc_o^2}
{\lambda^5[\exp(hc_o/\lambda k T)-1]}
Parameters
----------
T : float
Temperature of the surface, [K]
... | r'''Returns the spectral radiance, in units of W/m^2/sr/µm.
.. math::
I_{\lambda,blackbody,e}(\lambda,T)=\frac{2hc_o^2}
{\lambda^5[\exp(hc_o/\lambda k T)-1]}
Parameters
----------
T : float
Temperature of the surface, [K]
wavelength : float
Length of the wave to be ... |
def get_metric_group_definitions(self):
"""
Get the faked metric group definitions for this context object
that are to be returned from its create operation.
If a 'metric-groups' property had been specified for this context,
only those faked metric group definitions of its manag... | Get the faked metric group definitions for this context object
that are to be returned from its create operation.
If a 'metric-groups' property had been specified for this context,
only those faked metric group definitions of its manager object that
are in that list, are included in the... |
def buildFromJsbString(self, jsb, nocompressjs=False):
"""
Build from the given config file using ``sencha build``.
:param jsb: The JSB config as a string.
:param nocompressjs: Compress the javascript? If ``True``, run ``sencha build --nocompress``.
"""
tempconffile = 't... | Build from the given config file using ``sencha build``.
:param jsb: The JSB config as a string.
:param nocompressjs: Compress the javascript? If ``True``, run ``sencha build --nocompress``. |
def interleave_keys(a, b):
"""Interleave bits from two sort keys to form a joint sort key.
Examples that are similar in both of the provided keys will have similar
values for the key defined by this function. Useful for tasks with two
text fields like machine translation or natural language inference.
... | Interleave bits from two sort keys to form a joint sort key.
Examples that are similar in both of the provided keys will have similar
values for the key defined by this function. Useful for tasks with two
text fields like machine translation or natural language inference. |
def remove_children(self, reset_parent=True):
"""
Remove all the children of this node.
:param bool reset_parent: if ``True``, set to ``None`` the parent attribute
of the children
"""
if reset_parent:
for child in self.children:
... | Remove all the children of this node.
:param bool reset_parent: if ``True``, set to ``None`` the parent attribute
of the children |
def read_into(self, buf: bytearray, partial: bool = False) -> Awaitable[int]:
"""Asynchronously read a number of bytes.
``buf`` must be a writable buffer into which data will be read.
If ``partial`` is true, the callback is run as soon as any bytes
have been read. Otherwise, it is run... | Asynchronously read a number of bytes.
``buf`` must be a writable buffer into which data will be read.
If ``partial`` is true, the callback is run as soon as any bytes
have been read. Otherwise, it is run when the ``buf`` has been
entirely filled with read data.
.. versionadd... |
def _graph_connected_component(graph, node_id):
"""
Find the largest graph connected components the contains one
given node
Parameters
----------
graph : array-like, shape: (n_samples, n_samples)
adjacency matrix of the graph, non-zero weight means an edge
between the nodes
... | Find the largest graph connected components the contains one
given node
Parameters
----------
graph : array-like, shape: (n_samples, n_samples)
adjacency matrix of the graph, non-zero weight means an edge
between the nodes
node_id : int
The index of the query node of the gr... |
def qual_name(self) -> QualName:
    """Return the receiver's qualified name.

    The key is split on the first ``":"``. When a separator is present,
    the text after it is treated as the local name and the text before it
    as the namespace prefix; otherwise the whole key is the local name and
    the receiver's own namespace is used.
    """
    prefix, sep, local = self._key.partition(":")
    if sep:
        return (local, prefix)
    return (prefix, self.namespace)
def get_quizzes(self, course_id):
"""
List quizzes for a given course
https://canvas.instructure.com/doc/api/quizzes.html#method.quizzes_api.index
"""
url = QUIZZES_API.format(course_id)
data = self._get_resource(url)
quizzes = []
for datum in data:
... | List quizzes for a given course
https://canvas.instructure.com/doc/api/quizzes.html#method.quizzes_api.index |
def read_event(suppress=False):
    """
    Block until a keyboard event happens, then return that event.

    :param suppress: forwarded to ``hook``; presumably controls whether the
        captured event is withheld from the rest of the system — TODO confirm
        against the ``hook`` implementation.
    :return: the first event delivered by the hook.
    """
    # One-slot queue: the hook callback deposits the first event here.
    queue = _queue.Queue(maxsize=1)
    hooked = hook(queue.put, suppress=suppress)
    # Wait for exactly one event, then remove the hook before returning so
    # no further events are captured.  (The original wrapped this in a
    # `while True:` loop that always returned on its first iteration —
    # dead control flow, removed.)
    event = queue.get()
    unhook(hooked)
    return event
def standardize(self):
"""
standardize functional groups
:return: number of found groups
"""
self.reset_query_marks()
seen = set()
total = 0
for n, atom in self.atoms():
if n in seen:
continue
for k, center in centr... | standardize functional groups
:return: number of found groups |
def _checkConsistency(richInputs, fsm, inputContext):
"""
Verify that the outputs that can be generated by fsm have their
requirements satisfied by the given rich inputs.
@param richInputs: A L{list} of all of the types which will serve as rich
inputs to an L{IFiniteStateMachine}.
@type ric... | Verify that the outputs that can be generated by fsm have their
requirements satisfied by the given rich inputs.
@param richInputs: A L{list} of all of the types which will serve as rich
inputs to an L{IFiniteStateMachine}.
@type richInputs: L{list} of L{IRichInput} providers
@param fsm: The L... |
def find_package_data():
""" Returns package_data, because setuptools is too stupid to handle nested directories.
Returns:
dict: key is "ambry", value is list of paths.
"""
l = list()
for start in ('ambry/support', 'ambry/bundle/default_files'):
for root, dirs, files in os.walk(sta... | Returns package_data, because setuptools is too stupid to handle nested directories.
Returns:
dict: key is "ambry", value is list of paths. |
def _damerau_levenshtein(a, b):
"""Returns Damerau-Levenshtein edit distance from a to b."""
memo = {}
def distance(x, y):
"""Recursively defined string distance with memoization."""
if (x, y) in memo:
return memo[x, y]
if not x:
d = len(y)
elif not y:
d = len(x)
else:
... | Returns Damerau-Levenshtein edit distance from a to b. |
def train_net(net, train_path, num_classes, batch_size,
data_shape, mean_pixels, resume, finetune, pretrained, epoch,
prefix, ctx, begin_epoch, end_epoch, frequent, learning_rate,
momentum, weight_decay, lr_refactor_step, lr_refactor_ratio,
freeze_layer_pattern=''... | Wrapper for training phase.
Parameters:
----------
net : str
symbol name for the network structure
train_path : str
record file path for training
num_classes : int
number of object classes, not including background
batch_size : int
training batch-size
data_sh... |
def get_modis_tile_list(ds):
"""Helper function to identify MODIS tiles that intersect input geometry
modis_gird.py contains dictionary of tile boundaries (tile name and WKT polygon ring from bbox)
See: https://modis-land.gsfc.nasa.gov/MODLAND_grid.html
"""
from demcoreg import modis_grid
modi... | Helper function to identify MODIS tiles that intersect input geometry
modis_gird.py contains dictionary of tile boundaries (tile name and WKT polygon ring from bbox)
See: https://modis-land.gsfc.nasa.gov/MODLAND_grid.html |
def wait_for_browser_close(b):
    """
    Block until the given TBrowser is closed.

    Does nothing when *b* is falsy.  When the module-level ``__ACTIVE``
    flag is cleared (NOTE(review): presumably meaning the normal wait
    machinery is unavailable — confirm), the call is handed off to
    ``wait_failover`` instead.
    """
    if not b:
        return
    if not __ACTIVE:
        wait_failover(wait_for_browser_close)
        return
    wait_for_frame(b.GetBrowserImp().GetMainFrame())
def delete_user_pin(self, user_token, pin_id):
"""
Delete a user pin.
:param str user_token: The token of the user.
:param str pin_id: The id of the pin to delete.
:raises `requests.exceptions.HTTPError`: If an HTTP error occurred.
"""
response = _request('DELET... | Delete a user pin.
:param str user_token: The token of the user.
:param str pin_id: The id of the pin to delete.
:raises `requests.exceptions.HTTPError`: If an HTTP error occurred. |
def put_attributes(self, item_name, attributes,
replace=True, expected_value=None):
"""
Store attributes for a given item.
:type item_name: string
:param item_name: The name of the item whose attributes are being stored.
:type attribute_names: dict or dic... | Store attributes for a given item.
:type item_name: string
:param item_name: The name of the item whose attributes are being stored.
:type attribute_names: dict or dict-like object
:param attribute_names: The name/value pairs to store as attributes
:type expected_value: list
... |
def volume_delete(name, profile=None, **kwargs):
'''
Destroy the volume
name
Name of the volume
profile
Profile to build on
CLI Example:
.. code-block:: bash
salt '*' nova.volume_delete myblock profile=openstack
'''
conn = _auth(profile, **kwargs)
return... | Destroy the volume
name
Name of the volume
profile
Profile to build on
CLI Example:
.. code-block:: bash
salt '*' nova.volume_delete myblock profile=openstack |
def concat(self, other):
"""
Returns the concatenation with another :class:`caspo.core.logicalnetwork.LogicalNetworkList` object instance.
It is assumed (not checked) that both have the same underlying hypergraph.
Parameters
----------
other : :class:`caspo.core.logicaln... | Returns the concatenation with another :class:`caspo.core.logicalnetwork.LogicalNetworkList` object instance.
It is assumed (not checked) that both have the same underlying hypergraph.
Parameters
----------
other : :class:`caspo.core.logicalnetwork.LogicalNetworkList`
The li... |
def rand_bivar(X, rho):
"""Transform two unrelated random variables into correlated bivariate data
X : ndarray
two univariate random variables
with N observations as <N x 2> matrix
rho : float
The Pearson correlations coefficient
as number between [-1, +1]
"""
impor... | Transform two unrelated random variables into correlated bivariate data
X : ndarray
two univariate random variables
with N observations as <N x 2> matrix
rho : float
The Pearson correlations coefficient
as number between [-1, +1] |
def execute_on_all_members(self, task):
"""
Executes a task on all of the known cluster members.
:param task: (Task), the task executed on the all of the members.
:return: (Map), :class:`~hazelcast.future.Future` tuples representing pending completion of the task on each member.
... | Executes a task on all of the known cluster members.
:param task: (Task), the task executed on the all of the members.
:return: (Map), :class:`~hazelcast.future.Future` tuples representing pending completion of the task on each member. |
def examples(self):
"""Returns the examples of the synset.
Returns
-------
list of str
List of its variants' examples.
"""
examples = []
for example in [variant.examples for variant in self._raw_synset.variants if len(variant.examples)... | Returns the examples of the synset.
Returns
-------
list of str
List of its variants' examples. |
def run_later(self, callable_, timeout, *args, **kwargs):
"""Schedules the specified callable for delayed execution.
Returns a TimerTask instance that can be used to cancel pending
execution.
"""
self.lock.acquire()
try:
if self.die:
raise Ru... | Schedules the specified callable for delayed execution.
Returns a TimerTask instance that can be used to cancel pending
execution. |
def hide_arp_holder_arp_entry_interfacetype_HundredGigabitEthernet_HundredGigabitEthernet(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_arp_holder = ET.SubElement(config, "hide-arp-holder", xmlns="urn:brocade.com:mgmt:brocade-arp")
arp_entry = ET.... | Auto Generated Code |
def get_image(row, output_dir):
"""Downloads the image that corresponds to the given row.
Prints a notification if the download fails."""
if not download_image(image_id=row[0],
url=row[1],
x1=float(row[2]),
y1=float(row[3]),
... | Downloads the image that corresponds to the given row.
Prints a notification if the download fails. |
def set_epoch(self, year):
    """Updates the epoch for all subsequent conversions.

    Loads the coefficient file for the given epoch via ``fa.loadapxsh``
    and records the year on the instance.

    Parameters
    ==========
    year : float
        Decimal year
    """
    # np.float was only an alias for the builtin float; it was deprecated
    # in NumPy 1.20 and removed in 1.24, so call float() directly.
    fa.loadapxsh(self.datafile, float(year))
    self.year = year
Parameters
==========
year : float
Decimal year |
def discrete(self):
"""
A sequence of connected vertices in space, corresponding to
self.paths.
Returns
---------
discrete : (len(self.paths),)
A sequence of (m*, dimension) float
"""
discrete = np.array([self.discretize_path(i)
... | A sequence of connected vertices in space, corresponding to
self.paths.
Returns
---------
discrete : (len(self.paths),)
A sequence of (m*, dimension) float |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.