code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def execute(self, transition):
"""
Queue a transition for execution.
:param transition: The transition
"""
self._transitions.append(transition)
if self._thread is None or not self._thread.isAlive():
self._thread = threading.Thread(target=self._transition_loop... | Queue a transition for execution.
:param transition: The transition |
def _isinstance(self, model, raise_error=True):
"""Checks if the specified model instance matches the class model.
By default this method will raise a `ValueError` if the model is not of
expected type.
Args:
model (Model) : The instance to be type checked
raise... | Checks if the specified model instance matches the class model.
By default this method will raise a `ValueError` if the model is not of
expected type.
Args:
model (Model) : The instance to be type checked
raise_error (bool) : Flag to specify whether to raise error on
... |
def Search(self,key):
"""Search alert list by providing partial name, ID, or other key.
"""
results = []
for alert in self.alerts:
if alert.id.lower().find(key.lower()) != -1: results.append(alert)
elif alert.name.lower().find(key.lower()) != -1: results.append(alert)
return(results) | Search alert list by providing partial name, ID, or other key. |
def argval(self):
""" Returns the value of the arg (if any) or None.
If the arg. is not an integer, an error be triggered.
"""
if self.arg is None or any(x is None for x in self.arg):
return None
for x in self.arg:
if not isinstance(x, int):
... | Returns the value of the arg (if any) or None.
If the arg. is not an integer, an error be triggered. |
def top_n_list(lang, n, wordlist='best', ascii_only=False):
"""
Return a frequency list of length `n` in descending order of frequency.
This list contains words from `wordlist`, of the given language.
If `ascii_only`, then only ascii words are considered.
"""
results = []
for word in iter_wo... | Return a frequency list of length `n` in descending order of frequency.
This list contains words from `wordlist`, of the given language.
If `ascii_only`, then only ascii words are considered. |
def upsert_entities(self, entities, sync=False):
"""
Upsert a list of entities to the database
:param entities: The entities to sync
:param sync: Do a sync instead of an upsert
"""
# Select the entities we are upserting for update to reduce deadlocks
if entities:... | Upsert a list of entities to the database
:param entities: The entities to sync
:param sync: Do a sync instead of an upsert |
def combine(items, k=None):
"""
Create a matrix in wich each row is a tuple containing one of solutions or
solution k-esima.
"""
length_items = len(items)
lengths = [len(i) for i in items]
length = reduce(lambda x, y: x * y, lengths)
repeats = [reduce(lambda x, y: x * y, lengths[i:])
... | Create a matrix in wich each row is a tuple containing one of solutions or
solution k-esima. |
def _validate(data_type, parent_path):
"""Implementation for the `validate` function."""
if isinstance(data_type, _CLASS_TYPES):
raise TypeError(
"The data type is expected to be an instance object, but got the "
"type '%s' instead." % (_format_type(data_type),))
base = _fin... | Implementation for the `validate` function. |
def _output_from_file(self, entry='git_describe'):
"""
Read the version from a .version file that may exist alongside __init__.py.
This file can be generated by piping the following output to file:
git describe --long --match v*.*
"""
try:
vfile = os.path.jo... | Read the version from a .version file that may exist alongside __init__.py.
This file can be generated by piping the following output to file:
git describe --long --match v*.* |
def pearson_correlation_coefficient(predictions, labels, weights_fn=None):
"""Calculate pearson correlation coefficient.
Args:
predictions: The raw predictions.
labels: The actual labels.
weights_fn: Weighting function.
Returns:
The pearson correlation coefficient.
"""
del weights_fn
_, pe... | Calculate pearson correlation coefficient.
Args:
predictions: The raw predictions.
labels: The actual labels.
weights_fn: Weighting function.
Returns:
The pearson correlation coefficient. |
def copydb(self, sourcedb, destslab, destdbname=None, progresscb=None):
'''
Copy an entire database in this slab to a new database in potentially another slab.
Args:
sourcedb (LmdbDatabase): which database in this slab to copy rows from
destslab (LmdbSlab): which slab to... | Copy an entire database in this slab to a new database in potentially another slab.
Args:
sourcedb (LmdbDatabase): which database in this slab to copy rows from
destslab (LmdbSlab): which slab to copy rows to
destdbname (str): the name of the database to copy rows to in dest... |
def get_translated_items(fapi, file_uri, use_cache, cache_dir=None):
""" Returns the last modified from smarterling
"""
items = None
cache_file = os.path.join(cache_dir, sha1(file_uri)) if use_cache else None
if use_cache and os.path.exists(cache_file):
print("Using cache file %s for transla... | Returns the last modified from smarterling |
def check(cls, dap, network=False, yamls=True, raises=False, logger=logger):
'''Checks if the dap is valid, reports problems
Parameters:
network -- whether to run checks that requires network connection
output -- where to write() problems, might be None
raises -- whe... | Checks if the dap is valid, reports problems
Parameters:
network -- whether to run checks that requires network connection
output -- where to write() problems, might be None
raises -- whether to raise an exception immediately after problem is detected |
def key_pair(i, region):
"""Returns the ith default (aws_key_pair_name, key_pair_path)."""
if i == 0:
return ("{}_{}".format(RAY, region),
os.path.expanduser("~/.ssh/{}_{}.pem".format(RAY, region)))
return ("{}_{}_{}".format(RAY, i, region),
os.path.expanduser("~/.ssh/{}_... | Returns the ith default (aws_key_pair_name, key_pair_path). |
def generate_single_return_period(args):
"""
This function calculates a single return period for a single reach
"""
qout_file, return_period_file, rivid_index_list, step, num_years, \
method, mp_lock = args
skewvals = [-3.0, -2.8, -2.6, -2.4, -2.2, -2.0, -1.8, -1.6, -1.4, -1.2,
... | This function calculates a single return period for a single reach |
def startMultiple(self, zones):
"""Start multiple zones."""
path = 'zone/start_multiple'
payload = {'zones': zones}
return self.rachio.put(path, payload) | Start multiple zones. |
def reset(self, params, repetition):
"""
Take the steps necessary to reset the experiment before each repetition:
- Make sure random seed is different for each repetition
- Create the L2-L4-L6a network
- Generate objects used by the experiment
- Learn all objects used by the experiment
... | Take the steps necessary to reset the experiment before each repetition:
- Make sure random seed is different for each repetition
- Create the L2-L4-L6a network
- Generate objects used by the experiment
- Learn all objects used by the experiment |
def metamodel_from_file(file_name, **kwargs):
"""
Creates new metamodel from the given file.
Args:
file_name(str): The name of the file with textX language description.
other params: See metamodel_from_str.
"""
with codecs.open(file_name, 'r', 'utf-8') as f:
lang_desc = f.re... | Creates new metamodel from the given file.
Args:
file_name(str): The name of the file with textX language description.
other params: See metamodel_from_str. |
def set_console(stream=STDOUT, foreground=None, background=None, style=None):
"""Set console foreground and background attributes."""
if foreground is None:
foreground = _default_foreground
if background is None:
background = _default_background
if style is None:
style = _default... | Set console foreground and background attributes. |
def init():
'''
Get an sqlite3 connection, and initialize the package database if necessary
'''
if not os.path.exists(__opts__['spm_cache_dir']):
log.debug('Creating SPM cache directory at %s', __opts__['spm_db'])
os.makedirs(__opts__['spm_cache_dir'])
if not os.path.exists(__opts__... | Get an sqlite3 connection, and initialize the package database if necessary |
def as_bits( region_start, region_length, intervals ):
"""
Convert a set of intervals overlapping a region of a chromosome into
a bitset for just that region with the bits covered by the intervals
set.
"""
bits = BitSet( region_length )
for chr, start, stop in intervals:
bits.set_r... | Convert a set of intervals overlapping a region of a chromosome into
a bitset for just that region with the bits covered by the intervals
set. |
def _worker_thread_upload(self):
# type: (Uploader) -> None
"""Worker thread upload
:param Uploader self: this
"""
max_set_len = self._general_options.concurrency.transfer_threads << 2
while not self.termination_check:
try:
if len(self._transfe... | Worker thread upload
:param Uploader self: this |
def ping(self, message=None):
'''Write a ping ``frame``.
'''
return self.write(self.parser.ping(message), encode=False) | Write a ping ``frame``. |
def _get_callable_from_trace_tuple(
self, trace_tuple: TraceTuple
) -> Tuple[str, str]:
"""Returns either (caller, caller_port) or (callee, callee_port).
"""
trace_frame = trace_tuple.trace_frame
if trace_tuple.placeholder:
return trace_frame.caller, trace_frame.c... | Returns either (caller, caller_port) or (callee, callee_port). |
def add_body(self, body):
"""
Add a :class:`Body` to the system. This function also sets the
``system`` attribute of the body.
:param body:
The :class:`Body` to add.
"""
body.system = self
self.bodies.append(body)
self.unfrozen = np.concatena... | Add a :class:`Body` to the system. This function also sets the
``system`` attribute of the body.
:param body:
The :class:`Body` to add. |
def load_graphs():
'''load graphs from mavgraphs.xml'''
mestate.graphs = []
gfiles = ['mavgraphs.xml']
if 'HOME' in os.environ:
for dirname, dirnames, filenames in os.walk(os.path.join(os.environ['HOME'], ".mavproxy")):
for filename in filenames:
if f... | load graphs from mavgraphs.xml |
def main() -> None:
""""Execute the main routine."""
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("--outdir", help="output directory", default=os.path.dirname(__file__))
args = parser.parse_args()
outdir = pathlib.Path(args.outdir)
if not outdir.exists():
ra... | Execute the main routine. |
def get_boundaries_of_elements_in_dict(models_dict, clearance=0.):
""" Get boundaries of all handed models
The function checks all model meta data positions to increase boundary starting with a state or scoped variables.
It is finally iterated over all states, data and logical port models and linkage if su... | Get boundaries of all handed models
The function checks all model meta data positions to increase boundary starting with a state or scoped variables.
It is finally iterated over all states, data and logical port models and linkage if sufficient for respective
graphical editor. At the end a clearance is add... |
def has_same_bins(self, other: "HistogramBase") -> bool:
"""Whether two histograms share the same binning."""
if self.shape != other.shape:
return False
elif self.ndim == 1:
return np.allclose(self.bins, other.bins)
elif self.ndim > 1:
for i in range(s... | Whether two histograms share the same binning. |
def handle_pubcomp(self):
"""Handle incoming PUBCOMP packet."""
self.logger.info("PUBCOMP received")
ret, mid = self.in_packet.read_uint16()
if ret != NC.ERR_SUCCESS:
return ret
evt = event.EventPubcomp(mid)
self.push_event(evt)
return NC.ERR_SUCCE... | Handle incoming PUBCOMP packet. |
def run_step(context):
"""Parse input file and replace a search string.
This also does string substitutions from context on the fileReplacePairs.
It does this before it search & replaces the in file.
Be careful of order. If fileReplacePairs is not an ordered collection,
replacements could evaluate... | Parse input file and replace a search string.
This also does string substitutions from context on the fileReplacePairs.
It does this before it search & replaces the in file.
Be careful of order. If fileReplacePairs is not an ordered collection,
replacements could evaluate in any given order. If this i... |
async def close(self) -> None:
"""
Explicit exit. If so configured, populate cache to prove all creds in
wallet offline if need be, archive cache, and purge prior cache archives.
:return: current object
"""
LOGGER.debug('HolderProver.close >>>')
if self.cfg.get... | Explicit exit. If so configured, populate cache to prove all creds in
wallet offline if need be, archive cache, and purge prior cache archives.
:return: current object |
def to_mask(self, method='exact', subpixels=5):
"""
Return a list of `~photutils.ApertureMask` objects, one for each
aperture position.
Parameters
----------
method : {'exact', 'center', 'subpixel'}, optional
The method used to determine the overlap of the ap... | Return a list of `~photutils.ApertureMask` objects, one for each
aperture position.
Parameters
----------
method : {'exact', 'center', 'subpixel'}, optional
The method used to determine the overlap of the aperture on
the pixel grid. Not all options are available... |
def to_iso8601(dt, tz=None):
"""
Returns an ISO-8601 representation of a given datetime instance.
>>> to_iso8601(datetime.datetime.now())
'2014-10-01T23:21:33.718508Z'
:param dt: a :class:`~datetime.datetime` instance
:param tz: a :class:`~datetime.tzinfo` to use; if None - use a defau... | Returns an ISO-8601 representation of a given datetime instance.
>>> to_iso8601(datetime.datetime.now())
'2014-10-01T23:21:33.718508Z'
:param dt: a :class:`~datetime.datetime` instance
:param tz: a :class:`~datetime.tzinfo` to use; if None - use a default one |
def list(self, filter_title=None, filter_ids=None, page=None):
"""
:type filter_title: str
:param filter_title: Filter by dashboard title
:type filter_ids: list of ints
:param filter_ids: Filter by dashboard ids
:type page: int
:param page: Pagination index
... | :type filter_title: str
:param filter_title: Filter by dashboard title
:type filter_ids: list of ints
:param filter_ids: Filter by dashboard ids
:type page: int
:param page: Pagination index
:rtype: dict
:return: The JSON response of the API, with an additional... |
def _apply_orthogonal_view(self):
"""Orthogonal view with respect to current aspect ratio
"""
left, right, bottom, top = self.get_view_coordinates()
glOrtho(left, right, bottom, top, -10, 0) | Orthogonal view with respect to current aspect ratio |
def calculate_retry_delay(attempt, max_delay=300):
"""Calculates an exponential backoff for retry attempts with a small
amount of jitter."""
delay = int(random.uniform(2, 4) ** attempt)
if delay > max_delay:
# After reaching the max delay, stop using expontential growth
# and keep the de... | Calculates an exponential backoff for retry attempts with a small
amount of jitter. |
def to_dict(self):
""" Transform the current specification to a dictionary
"""
data = {"model": {}}
data["model"]["description"] = self.description
data["model"]["entity_name"] = self.entity_name
data["model"]["package"] = self.package
data["model"]["resource_n... | Transform the current specification to a dictionary |
def declare_example(self, source):
"""Execute the given code, adding it to the runner's namespace."""
with patch_modules():
code = compile(source, "<docs>", "exec")
exec(code, self.namespace) | Execute the given code, adding it to the runner's namespace. |
def recalculate_satistics(self):
'''
update self.Data[specimen]['pars'] for all specimens.
'''
gframe = wx.BusyInfo(
"Re-calculating statistics for all specimens\n Please wait..", self)
for specimen in list(self.Data.keys()):
if 'pars' not in list(self.Da... | update self.Data[specimen]['pars'] for all specimens. |
def init_heartbeat(self):
"""start the heart beating"""
# heartbeat doesn't share context, because it mustn't be blocked
# by the GIL, which is accessed by libzmq when freeing zero-copy messages
hb_ctx = zmq.Context()
self.heartbeat = Heartbeat(hb_ctx, (self.ip, self.hb_port))
... | start the heart beating |
def _bind_method(self, name, unconditionally=False):
"""Generate a Matlab function and bind it to the instance
This is where the magic happens. When an unknown attribute of the
Matlab class is requested, it is assumed to be a call to a
Matlab function, and is generated and bound to the ... | Generate a Matlab function and bind it to the instance
This is where the magic happens. When an unknown attribute of the
Matlab class is requested, it is assumed to be a call to a
Matlab function, and is generated and bound to the instance.
This works because getattr() falls back to __... |
def _get_arrays(self, wavelengths, **kwargs):
"""Get sampled spectrum or bandpass in user units."""
x = self._validate_wavelengths(wavelengths)
y = self(x, **kwargs)
if isinstance(wavelengths, u.Quantity):
w = x.to(wavelengths.unit, u.spectral())
else:
w ... | Get sampled spectrum or bandpass in user units. |
def delete_api_key(apiKey, region=None, key=None, keyid=None, profile=None):
'''
Deletes a given apiKey
CLI Example:
.. code-block:: bash
salt myminion boto_apigateway.delete_api_key apikeystring
'''
try:
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
... | Deletes a given apiKey
CLI Example:
.. code-block:: bash
salt myminion boto_apigateway.delete_api_key apikeystring |
def create(self, name):
"""Creates a new bucket.
Args:
name: a unique name for the new bucket.
Returns:
The newly created bucket.
Raises:
Exception if there was an error creating the bucket.
"""
return Bucket(name, context=self._context).create(self._project_id) | Creates a new bucket.
Args:
name: a unique name for the new bucket.
Returns:
The newly created bucket.
Raises:
Exception if there was an error creating the bucket. |
def _prime_user_perm_caches(self):
"""
Prime both the user and group caches and put them on the ``self.user``.
In addition add a cache filled flag on ``self.user``.
"""
perm_cache, group_perm_cache = self._get_user_cached_perms()
self.user._authority_perm_cache = perm_cac... | Prime both the user and group caches and put them on the ``self.user``.
In addition add a cache filled flag on ``self.user``. |
def set_continue(self, name, action, seqno, value=None, default=False,
disable=False):
"""Configures the routemap continue value
Args:
name (string): The full name of the routemap.
action (string): The action to take for this routemap clause.
seq... | Configures the routemap continue value
Args:
name (string): The full name of the routemap.
action (string): The action to take for this routemap clause.
seqno (integer): The sequence number for the routemap clause.
value (integer): The value to configure for the ... |
def get_vouchers(self, vid_encoded=None,
uid_from=None, uid_to=None, gid=None,
valid_after=None, valid_before=None,
last=None, first=None):
"""
FETCHES a filtered list of vouchers.
:type vid_encoded: ``alphanumeric(64)``
... | FETCHES a filtered list of vouchers.
:type vid_encoded: ``alphanumeric(64)``
:param vid_encoded:
Voucher ID, as a string with CRC.
:type uid_from: ``bigint``
:param uid_from:
Filter by source account UID.
:type uid_to: ``bigint``
:param ui... |
def make_stream_tls_features(self, stream, features):
"""Update the <features/> element with StartTLS feature.
[receving entity only]
:Parameters:
- `features`: the <features/> element of the stream.
:Types:
- `features`: :etree:`ElementTree.Element`
:r... | Update the <features/> element with StartTLS feature.
[receving entity only]
:Parameters:
- `features`: the <features/> element of the stream.
:Types:
- `features`: :etree:`ElementTree.Element`
:returns: update <features/> element.
:returntype: :etree:`... |
def missing_pids(self):
"""Filter persistent identifiers."""
missing = []
for p in self.pids:
try:
PersistentIdentifier.get(p.pid_type, p.pid_value)
except PIDDoesNotExistError:
missing.append(p)
return missing | Filter persistent identifiers. |
def filter_step(G, covY, pred, yt):
"""Filtering step of Kalman filter.
Parameters
----------
G: (dy, dx) numpy array
mean of Y_t | X_t is G * X_t
covX: (dx, dx) numpy array
covariance of Y_t | X_t
pred: MeanAndCov object
predictive distribution at time t
Returns
... | Filtering step of Kalman filter.
Parameters
----------
G: (dy, dx) numpy array
mean of Y_t | X_t is G * X_t
covX: (dx, dx) numpy array
covariance of Y_t | X_t
pred: MeanAndCov object
predictive distribution at time t
Returns
-------
pred: MeanAndCov object
... |
def replace_namespaced_custom_object_scale(self, group, version, namespace, plural, name, body, **kwargs): # noqa: E501
"""replace_namespaced_custom_object_scale # noqa: E501
replace scale of the specified namespace scoped custom object # noqa: E501
This method makes a synchronous HTTP reque... | replace_namespaced_custom_object_scale # noqa: E501
replace scale of the specified namespace scoped custom object # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_... |
def ReadHuntCounters(self, hunt_id):
"""Reads hunt counters."""
num_clients = self.CountHuntFlows(hunt_id)
num_successful_clients = self.CountHuntFlows(
hunt_id, filter_condition=db.HuntFlowsCondition.SUCCEEDED_FLOWS_ONLY)
num_failed_clients = self.CountHuntFlows(
hunt_id, filter_conditi... | Reads hunt counters. |
def get_item_hrefs(result_collection):
"""
Given a result_collection (returned by a previous API call that
returns a collection, like get_bundle_list() or search()), return a
list of item hrefs.
'result_collection' a JSON object returned by a previous API
call.
Returns a list, which may be... | Given a result_collection (returned by a previous API call that
returns a collection, like get_bundle_list() or search()), return a
list of item hrefs.
'result_collection' a JSON object returned by a previous API
call.
Returns a list, which may be empty if no items were found. |
def get_url(self, url, dest, makedirs=False, saltenv='base',
no_cache=False, cachedir=None, source_hash=None):
'''
Get a single file from a URL.
'''
url_data = urlparse(url)
url_scheme = url_data.scheme
url_path = os.path.join(
url_data.net... | Get a single file from a URL. |
def _calc_delta(self,ensemble,scaling_matrix=None):
'''
calc the scaled ensemble differences from the mean
'''
mean = np.array(ensemble.mean(axis=0))
delta = ensemble.as_pyemu_matrix()
for i in range(ensemble.shape[0]):
delta.x[i,:] -= mean
if scaling... | calc the scaled ensemble differences from the mean |
def SubmitJob(self, *params, **kw):
"""Asynchronously execute the specified GP task. This will return a
Geoprocessing Job object. Parameters are passed in either in order
or as keywords."""
fp = self.__expandparamstodict(params, kw)
return self._get_subfolder('submitJob/',... | Asynchronously execute the specified GP task. This will return a
Geoprocessing Job object. Parameters are passed in either in order
or as keywords. |
def extract(self, start, end):
"""Extracts the selected time frame as a new object.
:param int start: Start time.
:param int end: End time.
:returns: class:`pympi.Elan.Eaf` object containing the extracted frame.
"""
from copy import deepcopy
eaf_out = deepcopy(se... | Extracts the selected time frame as a new object.
:param int start: Start time.
:param int end: End time.
:returns: class:`pympi.Elan.Eaf` object containing the extracted frame. |
def index(in_bam, config, check_timestamp=True):
"""Index a BAM file, skipping if index present.
Centralizes BAM indexing providing ability to switch indexing approaches.
"""
assert is_bam(in_bam), "%s in not a BAM file" % in_bam
index_file = "%s.bai" % in_bam
alt_index_file = "%s.bai" % os.pat... | Index a BAM file, skipping if index present.
Centralizes BAM indexing providing ability to switch indexing approaches. |
def _create_update_from_file(mode='create', uuid=None, path=None):
'''
Create vm from file
'''
ret = {}
if not os.path.isfile(path) or path is None:
ret['Error'] = 'File ({0}) does not exists!'.format(path)
return ret
# vmadm validate create|update [-f <filename>]
cmd = 'vmad... | Create vm from file |
def watt_m(simulated_array, observed_array, replace_nan=None, replace_inf=None,
remove_neg=False, remove_zero=False):
"""Compute Watterson's M (M).
.. image:: /pictures/M.png
**Range:** -1 ≤ M < 1, does not indicate bias, larger is better.
**Notes:**
Parameters
----------
simu... | Compute Watterson's M (M).
.. image:: /pictures/M.png
**Range:** -1 ≤ M < 1, does not indicate bias, larger is better.
**Notes:**
Parameters
----------
simulated_array: one dimensional ndarray
An array of simulated data from the time series.
observed_array: one dimensional ndarr... |
def lightcurve_moments(ftimes, fmags, ferrs):
'''This calculates the weighted mean, stdev, median, MAD, percentiles, skew,
kurtosis, fraction of LC beyond 1-stdev, and IQR.
Parameters
----------
ftimes,fmags,ferrs : np.array
The input mag/flux time-series with all non-finite elements remov... | This calculates the weighted mean, stdev, median, MAD, percentiles, skew,
kurtosis, fraction of LC beyond 1-stdev, and IQR.
Parameters
----------
ftimes,fmags,ferrs : np.array
The input mag/flux time-series with all non-finite elements removed.
Returns
-------
dict
A dict... |
def _all_get_table_col(self, key, column, fullname):
""" Creates a pytables column instance.
The type of column depends on the type of `column[0]`.
Note that data in `column` must be homogeneous!
"""
val = column[0]
try:
# # We do not want to loose int_
... | Creates a pytables column instance.
The type of column depends on the type of `column[0]`.
Note that data in `column` must be homogeneous! |
def _send(self, key, value, metric_type):
"""Send the specified value to the statsd daemon via UDP without a
direct socket connection.
:param str key: The key name to send
:param int or float value: The value for the key
"""
try:
payload = self._build_payloa... | Send the specified value to the statsd daemon via UDP without a
direct socket connection.
:param str key: The key name to send
:param int or float value: The value for the key |
def mdaArray(arry, dtype=numpy.float, mask=None):
"""
Array constructor for masked distributed array
@param arry numpy-like array
@param mask mask array (or None if all data elements are valid)
"""
a = numpy.array(arry, dtype)
res = MaskedDistArray(a.shape, a.dtype)
res[:] = a
res.ma... | Array constructor for masked distributed array
@param arry numpy-like array
@param mask mask array (or None if all data elements are valid) |
def main():
'''
Main part of command line utility
'''
arguments = docopt.docopt(__doc__, version='Naval Fate 2.0')
if arguments['show_diag']:
diag.show()
if arguments['show_reporting']:
diag.reporting()
diag.show()
if arguments['ping_couchdb']:
try:
... | Main part of command line utility |
def export_users(self, body):
"""Export all users to a file using a long running job.
Check job status with get(). URL pointing to the export file will be
included in the status once the job is complete.
Args:
body (dict): Please see: https://auth0.com/docs/api/management/v... | Export all users to a file using a long running job.
Check job status with get(). URL pointing to the export file will be
included in the status once the job is complete.
Args:
body (dict): Please see: https://auth0.com/docs/api/management/v2#!/Jobs/post_users_exports |
def expireat(self, key, when):
"""Emulate expireat"""
expire_time = datetime.fromtimestamp(when)
key = self._encode(key)
if key in self.redis:
self.timeouts[key] = expire_time
return True
return False | Emulate expireat |
def get_pdos(dos, lm_orbitals=None, atoms=None, elements=None):
"""Extract the projected density of states from a CompleteDos object.
Args:
dos (:obj:`~pymatgen.electronic_structure.dos.CompleteDos`): The
density of states.
elements (:obj:`dict`, optional): The elements and orbitals... | Extract the projected density of states from a CompleteDos object.
Args:
dos (:obj:`~pymatgen.electronic_structure.dos.CompleteDos`): The
density of states.
elements (:obj:`dict`, optional): The elements and orbitals to extract
from the projected density of states. Should be... |
def default_antenna1(self, context):
""" Default antenna1 values """
ant1, ant2 = default_base_ant_pairs(self, context)
(tl, tu), (bl, bu) = context.dim_extents('ntime', 'nbl')
ant1_result = np.empty(context.shape, context.dtype)
ant1_result[:,:] = ant1[np.newaxis,bl:bu]
return ant1_result | Default antenna1 values |
def compile(self, pretty=True):
""" Compile all code and return a dict {name: code} where the keys
are determined by the keyword arguments passed to __init__().
Parameters
----------
pretty : bool
If True, use a slower method to mangle object names. This produces
... | Compile all code and return a dict {name: code} where the keys
are determined by the keyword arguments passed to __init__().
Parameters
----------
pretty : bool
If True, use a slower method to mangle object names. This produces
GLSL that is more readable.
... |
def mtabstr2doestr(st1):
"""mtabstr2doestr"""
seperator = '$ =============='
alist = st1.split(seperator)
#this removes all the tabs that excel
#puts after the seperator and before the next line
for num in range(0, len(alist)):
alist[num] = alist[num].lstrip()
st2 = ''
for num i... | mtabstr2doestr |
def get_pmids(self):
"""Get list of all PMIDs associated with edges in the network."""
pmids = []
for ea in self._edge_attributes.values():
edge_pmids = ea.get('pmids')
if edge_pmids:
pmids += edge_pmids
return list(set(pmids)) | Get list of all PMIDs associated with edges in the network. |
def valid_conkey(self, conkey):
"""Check that the conkey is a valid one. Return True if valid. A
condition key is valid if it is one in the _COND_PREFIXES
list. With the prefix removed, the remaining string must be
either a number or the empty string."""
for prefix in _COND_PREF... | Check that the conkey is a valid one. Return True if valid. A
condition key is valid if it is one in the _COND_PREFIXES
list. With the prefix removed, the remaining string must be
either a number or the empty string. |
def getBagTags(bagInfoPath):
"""
get bag tags
"""
try:
bagInfoString = open(bagInfoPath, "r").read().decode('utf-8')
except UnicodeDecodeError:
bagInfoString = open(bagInfoPath, "r").read().decode('iso-8859-1')
bagTags = anvl.readANVLString(bagInfoString)
return bagTags | get bag tags |
def retention_period(self, value):
"""Set the retention period for items in the bucket.
:type value: int
:param value:
number of seconds to retain items after upload or release from
event-based lock.
:raises ValueError: if the bucket's retention policy is locked... | Set the retention period for items in the bucket.
:type value: int
:param value:
number of seconds to retain items after upload or release from
event-based lock.
:raises ValueError: if the bucket's retention policy is locked. |
def matchingAnalyseIndexes(self, tokenJson):
'''Determines whether given token (tokenJson) satisfies all the rules listed
in the WordTemplate and returns a list of analyse indexes that correspond
to tokenJson[ANALYSIS] elements that are matching all the rules.
An empty li... | Determines whether given token (tokenJson) satisfies all the rules listed
in the WordTemplate and returns a list of analyse indexes that correspond
to tokenJson[ANALYSIS] elements that are matching all the rules.
An empty list is returned if none of the analyses match (all the rul... |
def form(value):
"""
Format numbers in a nice way.
>>> form(0)
'0'
>>> form(0.0)
'0.0'
>>> form(0.0001)
'1.000E-04'
>>> form(1003.4)
'1,003'
>>> form(103.4)
'103'
>>> form(9.3)
'9.30000'
>>> form(-1.2)
'-1.2'
"""
if isinstance(value, FLOAT + INT):... | Format numbers in a nice way.
>>> form(0)
'0'
>>> form(0.0)
'0.0'
>>> form(0.0001)
'1.000E-04'
>>> form(1003.4)
'1,003'
>>> form(103.4)
'103'
>>> form(9.3)
'9.30000'
>>> form(-1.2)
'-1.2' |
def _visible(self, element):
    """Return True when *element* may carry visible page text.

    Elements whose tag name appears in ``self._disallowed_names`` and
    HTML comment nodes are treated as invisible.
    """
    if element.name in self._disallowed_names:
        return False
    # HTML comments survive extraction as "<!--...-->" markup; drop them.
    is_comment = re.match(u'<!--.*-->', six.text_type(element.extract()))
    return not is_comment
def putResult(self, result):
    """Register the *result* by putting it on all the output tubes.

    NOTE(review): this acquires one lock but releases a *different* one —
    a baton-pass that appears to serialize output ordering across peer
    workers (each waits on the previous worker's lock and then unblocks
    the next). Presumably the matching acquire/release pair lives in the
    peer workers; confirm that protocol before reordering these lines.
    """
    self._lock_prev_output.acquire()
    # Fan the result out to every registered output tube. The second
    # tuple element (0) is a companion value whose meaning is defined by
    # the tube consumers, which are not visible here.
    for tube in self._tubes_result_output:
        tube.put((result, 0))
    self._lock_next_output.release()
def _random_subprocessors(self):
"""Produces an iterator of subprocessors. If there are fewer than
self._proc_limit subprocessors to consider (by knocking out a
minimal subset of working qubits incident to broken couplers),
we work exhaustively. Otherwise, we generate a random set of
... | Produces an iterator of subprocessors. If there are fewer than
self._proc_limit subprocessors to consider (by knocking out a
minimal subset of working qubits incident to broken couplers),
we work exhaustively. Otherwise, we generate a random set of
``self._proc_limit`` subprocessors.
... |
def add_child(self, child):
    """Attach *child* as a dependency of this node.

    :param child: the node to append; must be a DependencyNode
    :raises TypeError: if *child* is not a DependencyNode
    """
    if isinstance(child, DependencyNode):
        self._children.append(child)
    else:
        raise TypeError('"child" must be a DependencyNode')
def register_up(self):
"""Called by WorkerThread objects to register themselves.
Acquire the condition variable for the WorkerThread objects.
Increment the running-thread count. If we are the last thread to
start, set status to 'up'. This allows startall() to complete
if it wa... | Called by WorkerThread objects to register themselves.
Acquire the condition variable for the WorkerThread objects.
Increment the running-thread count. If we are the last thread to
start, set status to 'up'. This allows startall() to complete
if it was called with wait=True. |
def add_obograph_digraph(self, og, node_type=None, predicates=None, xref_graph=None, logical_definitions=None,
property_chain_axioms=None,
parse_meta=True,
**args):
"""
Converts a single obograph to Digraph edges and ... | Converts a single obograph to Digraph edges and adds to an existing networkx DiGraph |
def _send(self, event):
"""Generic function for sending commands to Alarm.com
:param event: Event command to send to alarm.com
"""
_LOGGER.debug('Sending %s to Alarm.com', event)
try:
with async_timeout.timeout(10, loop=self._loop):
response = yield ... | Generic function for sending commands to Alarm.com
:param event: Event command to send to alarm.com |
def elem_to_container(elem, container=dict, **options):
"""
Convert XML ElementTree Element to a collection of container objects.
Elements are transformed to a node under special tagged nodes, attrs, text
and children, to store the type of these elements basically, however, in
some special cases li... | Convert XML ElementTree Element to a collection of container objects.
Elements are transformed to a node under special tagged nodes, attrs, text
and children, to store the type of these elements basically, however, in
some special cases like the followings, these nodes are attached to the
parent node d... |
def headers(self):
"""
Get http headers or the http/https request.
@return: A dictionary of header/values.
@rtype: dict
"""
action = self.method.soap.action
stock = { 'Content-Type' : 'text/xml; charset=utf-8', 'SOAPAction': action }
result = dict(stock, *... | Get http headers or the http/https request.
@return: A dictionary of header/values.
@rtype: dict |
def _init_metadata(self):
    """Initialize metadata for this composed form record.

    Runs the metadata initializers of both composed record types
    explicitly before deferring to the normal MRO chain via super().
    The order mirrors the record composition; do not reorder casually.
    """
    QuestionFilesFormRecord._init_metadata(self)
    FirstAngleProjectionFormRecord._init_metadata(self)
    super(MultiChoiceOrthoQuestionFormRecord, self)._init_metadata()
def _decorate_namespace_property(bases: List[type], namespace: MutableMapping[str, Any], key: str) -> None:
"""Collect contracts for all getters/setters/deleters corresponding to ``key`` and decorate them."""
# pylint: disable=too-many-locals
# pylint: disable=too-many-branches
# pylint: disable=too-man... | Collect contracts for all getters/setters/deleters corresponding to ``key`` and decorate them. |
def register(cls, range_mixin):
"""
Decorator for registering range set mixins for global use. This works
the same as :meth:`~spans.settypes.MetaRangeSet.add`
:param range_mixin: A :class:`~spans.types.Range` mixin class to
to register a decorated range set m... | Decorator for registering range set mixins for global use. This works
the same as :meth:`~spans.settypes.MetaRangeSet.add`
:param range_mixin: A :class:`~spans.types.Range` mixin class to
to register a decorated range set mixin class for
:return: A decorator to use o... |
def get_parent(self):
"""Get Parent.
Fetch parent product if it exists.
Use `parent_asin` to check if a parent exist before fetching.
:return:
An instance of :class:`~.AmazonProduct` representing the
parent product.
"""
if not self.parent:
... | Get Parent.
Fetch parent product if it exists.
Use `parent_asin` to check if a parent exist before fetching.
:return:
An instance of :class:`~.AmazonProduct` representing the
parent product. |
def load(self):
"""Load the state from the JSON file in the config dir."""
if not op.exists(self.path):
logger.debug("The GUI state file `%s` doesn't exist.", self.path)
# TODO: create the default state.
return
assert op.exists(self.path)
logger.debug(... | Load the state from the JSON file in the config dir. |
def line_iterator_to_intermediary(line_iterator):
""" Parse an iterator of str (one string per line) to the intermediary syntax"""
current_table = None
tables = []
relations = []
errors = []
for line_nb, line, raw_line in filter_lines_from_comments(line_iterator):
try:
new_ob... | Parse an iterator of str (one string per line) to the intermediary syntax |
def remove_class(self, ioclass):
    """Remove a VNXIOClass instance from this policy.

    :param ioclass: the IO class to remove; matching is done by name.
    """
    current_ioclasses = self.ioclasses
    # Use a list comprehension instead of filter(): on Python 3 filter()
    # returns a lazy one-shot iterator, so modify() would receive a
    # generator rather than a concrete, re-iterable sequence.
    new_ioclasses = [c for c in current_ioclasses if c.name != ioclass.name]
    self.modify(new_ioclasses=new_ioclasses)
def surface_to_image(surface):
    """Render the current buffer *surface* to an IPython image.

    :param surface: an object supporting ``write_to_png(file_like)``
    :return: an ``IPython.display.Image`` holding the PNG bytes
    """
    from IPython.display import Image
    # Capture the PNG bytes before the buffer is closed by the with-block.
    with BytesIO() as png_buffer:
        surface.write_to_png(png_buffer)
        png_data = png_buffer.getvalue()
    return Image(data=png_data)
def tagmask(self, tags):
    """Build a boolean mask over *tags* for this asset.

    :param tags: sequence of strings of the form ``"tagname=tagvalue"``
    :returns: a boolean numpy array with True where the asset has the tag
    """
    mask = numpy.zeros(len(tags), bool)
    for t, tag in enumerate(tags):
        # Split on the first '=' only, so tag values that themselves
        # contain '=' are handled instead of raising ValueError.
        tagname, tagvalue = tag.split('=', 1)
        mask[t] = self.tagvalue(tagname) == tagvalue
    return mask
def get(self, endpoint, params=None):
"""Send an HTTP GET request to QuadrigaCX.
:param endpoint: API endpoint.
:type endpoint: str | unicode
:param params: URL parameters.
:type params: dict
:return: Response body from QuadrigaCX.
:rtype: dict
:raise qua... | Send an HTTP GET request to QuadrigaCX.
:param endpoint: API endpoint.
:type endpoint: str | unicode
:param params: URL parameters.
:type params: dict
:return: Response body from QuadrigaCX.
:rtype: dict
:raise quadriga.exceptions.RequestError: If HTTP OK was not... |
def clearness_index(ghi, solar_zenith, extra_radiation, min_cos_zenith=0.065,
max_clearness_index=2.0):
"""
Calculate the clearness index.
The clearness index is the ratio of global to extraterrestrial
irradiance on a horizontal plane.
Parameters
----------
ghi : numeri... | Calculate the clearness index.
The clearness index is the ratio of global to extraterrestrial
irradiance on a horizontal plane.
Parameters
----------
ghi : numeric
Global horizontal irradiance in W/m^2.
solar_zenith : numeric
True (not refraction-corrected) solar zenith angle ... |
def calculate_manual_reading(basic_data: BasicMeterData) -> Reading:
""" Calculate the interval between two manual readings """
t_start = basic_data.previous_register_read_datetime
t_end = basic_data.current_register_read_datetime
read_start = basic_data.previous_register_read
read_end = basic_data.... | Calculate the interval between two manual readings |
def one(iterable, cmp=None):
"""
Return the object in the given iterable that evaluates to True.
If the given iterable has more than one object that evaluates to True,
or if there is no object that fulfills such condition, return False.
If a callable ``cmp`` is given, it's used to evaluate each el... | Return the object in the given iterable that evaluates to True.
If the given iterable has more than one object that evaluates to True,
or if there is no object that fulfills such condition, return False.
If a callable ``cmp`` is given, it's used to evaluate each element.
>>> one((True, False, Fa... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.