code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _updateModelDBResults(self):
""" Retrieves the current results and updates the model's record in
the Model database.
"""
# -----------------------------------------------------------------------
# Get metrics
metrics = self._getMetrics()
# ----------------------------------------------... | Retrieves the current results and updates the model's record in
the Model database. |
def sort_cyclic_graph_best_effort(graph, pick_first='head'):
"""Fallback for cases in which the graph has cycles."""
ordered = []
visited = set()
# Go first on the pick_first chain then go back again on the others
# that were not visited. Given the way the graph is built both chains
# will alway... | Fallback for cases in which the graph has cycles. |
def download_manifest_v2(self, manifest, replica,
num_retries=10,
min_delay_seconds=0.25,
download_dir='.'):
"""
Process the given manifest file in TSV (tab-separated values) format and download the files referenced b... | Process the given manifest file in TSV (tab-separated values) format and download the files referenced by it.
The files are downloaded in the version 2 format.
This download format will serve as the main storage format for downloaded files. If a user specifies a different
format for download (c... |
def edit(self, **args):
'''
Doesn't require manual fetching of gistID of a gist
passing gistName will return edit the gist
'''
self.gist_name = ''
if 'description' in args:
self.description = args['description']
else:
self.description = ''
if 'name' in args and 'id' in args:
self.gist_name = ... | Doesn't require manual fetching of gistID of a gist
passing gistName will return edit the gist |
def change_selected(self,new_fit):
"""
updates passed in fit or index as current fit for the editor (does not affect parent),
if no parameters are passed in it sets first fit as current
@param: new_fit -> fit object to highlight as selected
"""
if len(self.fit_list)==0: r... | updates passed in fit or index as current fit for the editor (does not affect parent),
if no parameters are passed in it sets first fit as current
@param: new_fit -> fit object to highlight as selected |
def has_false(self, e, extra_constraints=(), solver=None, model_callback=None): #pylint:disable=unused-argument
"""
Should return False if `e` can possibly be False.
:param e: The AST.
:param extra_constraints: Extra constraints (as ASTs) to add to the solver for thi... | Should return False if `e` can possibly be False.
:param e: The AST.
:param extra_constraints: Extra constraints (as ASTs) to add to the solver for this solve.
:param solver: A solver, for backends that require it.
:param model_callback: a function ... |
def _ansi_color(code, theme):
"""
Converts an ansi code to a QColor, taking the color scheme (theme) into account.
"""
red = 170 if code & 1 else 0
green = 170 if code & 2 else 0
blue = 170 if code & 4 else 0
color = QtGui.QColor(red, green, blue)
if theme is not None:
mappings =... | Converts an ansi code to a QColor, taking the color scheme (theme) into account. |
def many(parser):
"""Applies the parser to input zero or more times.
Returns a list of parser results.
"""
results = []
terminate = object()
while local_ps.value:
result = optional(parser, terminate)
if result == terminate:
break
results.append(result)
... | Applies the parser to input zero or more times.
Returns a list of parser results. |
def size(self):
""" Returns the total number of queued jobs on the queue """
if self.id.endswith("/"):
subqueues = self.get_known_subqueues()
if len(subqueues) == 0:
return 0
else:
with context.connections.redis.pipeline(transaction=Fa... | Returns the total number of queued jobs on the queue |
def process_event(self, event_id):
"""Process event in Celery."""
with db.session.begin_nested():
event = Event.query.get(event_id)
event._celery_task = self # internal binding to a Celery task
event.receiver.run(event) # call run directly to avoid circular calls
flag_modified(... | Process event in Celery. |
def _update_capacity(self, data):
""" Update the consumed capacity metrics """
if 'ConsumedCapacity' in data:
# This is all for backwards compatibility
consumed = data['ConsumedCapacity']
if not isinstance(consumed, list):
consumed = [consumed]
... | Update the consumed capacity metrics |
def __valueKeyWithHeaderIndex(self, values):
"""
This is hellper function, so that we can mach decision values with row index
as represented in header index.
Args:
values (dict): Normaly this will have dict of header values and values from decision
Return:
>>> return()
{
values[headerName] : in... | This is hellper function, so that we can mach decision values with row index
as represented in header index.
Args:
values (dict): Normaly this will have dict of header values and values from decision
Return:
>>> return()
{
values[headerName] : int(headerName index in header array),
...
} |
def swap(self, old_chunks, new_chunk):
"""Swaps old consecutive chunks with new chunk.
Args:
old_chunks (:obj:`budou.chunk.ChunkList`): List of consecutive Chunks to
be removed.
new_chunk (:obj:`budou.chunk.Chunk`): A Chunk to be inserted.
"""
indexes = [self.index(chunk) for chun... | Swaps old consecutive chunks with new chunk.
Args:
old_chunks (:obj:`budou.chunk.ChunkList`): List of consecutive Chunks to
be removed.
new_chunk (:obj:`budou.chunk.Chunk`): A Chunk to be inserted. |
def load(cls, primary_key, convert_key=True):
"""
Retrieve a model instance by primary key.
:param primary_key: The primary key of the model instance.
:returns: Corresponding :py:class:`Model` instance.
:raises: ``KeyError`` if object with given primary key does
not ... | Retrieve a model instance by primary key.
:param primary_key: The primary key of the model instance.
:returns: Corresponding :py:class:`Model` instance.
:raises: ``KeyError`` if object with given primary key does
not exist. |
def read(self):
""" read default csp settings from json file """
with open(self.default_file) as json_file:
try:
return json.load(json_file)
except Exception as e:
raise 'empty file' | read default csp settings from json file |
def _init_settings(self):
""" Init setting """
self._show_whitespaces = False
self._tab_length = 4
self._use_spaces_instead_of_tabs = True
self.setTabStopWidth(self._tab_length *
self.fontMetrics().width(" "))
self._set_whitespaces_flags(self.... | Init setting |
def summary(self, raw):
"""Use the Backscatter.io summary data to create a view."""
taxonomies = list()
level = 'info'
namespace = 'Backscatter.io'
if self.service == 'observations':
summary = raw.get('results', dict()).get('summary', dict())
taxonomies =... | Use the Backscatter.io summary data to create a view. |
def _handleAuthorizedEvents(self, component, action, data, user, client):
"""Isolated communication link for authorized events."""
try:
if component == "debugger":
self.log(component, action, data, user, client, lvl=info)
if not user and component in self.author... | Isolated communication link for authorized events. |
def alphabetize_attributes(self):
"""
Orders attributes names alphabetically, except for the class attribute, which is kept last.
"""
self.attributes.sort(key=lambda name: (name == self.class_attr_name, name)) | Orders attributes names alphabetically, except for the class attribute, which is kept last. |
def setup_and_check(self, data, title='', readonly=False,
xlabels=None, ylabels=None):
"""
Setup ArrayEditor:
return False if data is not supported, True otherwise
"""
self.data = data
readonly = readonly or not self.data.flags.writeable
... | Setup ArrayEditor:
return False if data is not supported, True otherwise |
def linkify_sd_by_s(self, hosts, services):
"""Replace dependent_service_description and service_description
in service dependency by the real object
:param hosts: host list, used to look for a specific one
:type hosts: alignak.objects.host.Hosts
:param services: service list to... | Replace dependent_service_description and service_description
in service dependency by the real object
:param hosts: host list, used to look for a specific one
:type hosts: alignak.objects.host.Hosts
:param services: service list to look for a specific one
:type services: aligna... |
def cluster(self, input_fasta_list, reverse_pipe):
'''
cluster - Clusters reads at 100% identity level and writes them to
file. Resets the input_fasta variable as the FASTA file containing the
clusters.
Parameters
----------
input_fasta_list : list
l... | cluster - Clusters reads at 100% identity level and writes them to
file. Resets the input_fasta variable as the FASTA file containing the
clusters.
Parameters
----------
input_fasta_list : list
list of strings, each a path to input fasta files to be clustered.
... |
def encrypt_file(file_path, sender, recipients):
"Returns encrypted binary file content if successful"
for recipient_key in recipients:
crypto.assert_type_and_length('recipient_key', recipient_key, (str, crypto.UserLock))
crypto.assert_type_and_length("sender_key", sender, crypto.UserLock)
if (n... | Returns encrypted binary file content if successful |
def hide_columns(self, subset):
"""
Hide columns from rendering.
.. versionadded:: 0.23.0
Parameters
----------
subset : IndexSlice
An argument to ``DataFrame.loc`` that identifies which columns
are hidden.
Returns
-------
... | Hide columns from rendering.
.. versionadded:: 0.23.0
Parameters
----------
subset : IndexSlice
An argument to ``DataFrame.loc`` that identifies which columns
are hidden.
Returns
-------
self : Styler |
def __read_frame(self):
"""*Attempt* to read a frame. If we get an EAGAIN on the frame header,
it'll raise to our caller. If we get it *after* we already got the
header, wait-out the rest of the frame.
"""
if self.__frame_header_cache is None:
_logger.debug("Readin... | *Attempt* to read a frame. If we get an EAGAIN on the frame header,
it'll raise to our caller. If we get it *after* we already got the
header, wait-out the rest of the frame. |
def image(self):
"""
Generates the image using self.genImage(),
then rotates it to self.direction and returns it.
"""
self._image = self.genImage()
self._image = funcs.rotateImage(self._image, self.direction)
return self._image | Generates the image using self.genImage(),
then rotates it to self.direction and returns it. |
def grp_by_src(self):
"""
:returns: a new CompositeSourceModel with one group per source
"""
smodels = []
grp_id = 0
for sm in self.source_models:
src_groups = []
smodel = sm.__class__(sm.names, sm.weight, sm.path, src_groups,
... | :returns: a new CompositeSourceModel with one group per source |
def set_default_by_index(self, index):
""" Set the default dataset by its index.
After changing the default dataset, all calls without explicitly specifying the
dataset by index or alias will be redirected to this dataset.
Args:
index (int): The index of the dataset that sh... | Set the default dataset by its index.
After changing the default dataset, all calls without explicitly specifying the
dataset by index or alias will be redirected to this dataset.
Args:
index (int): The index of the dataset that should be made the default.
Raises:
... |
def overall_error_rate(self):
"""Overall error rate metrics (error_rate, substitution_rate, deletion_rate, and insertion_rate)
Returns
-------
dict
results in a dictionary format
"""
substitution_rate = metric.substitution_rate(
Nref=self.overal... | Overall error rate metrics (error_rate, substitution_rate, deletion_rate, and insertion_rate)
Returns
-------
dict
results in a dictionary format |
def _get_maxcov_downsample(data):
"""Calculate maximum coverage downsampling for whole genome samples.
Returns None if we're not doing downsampling.
"""
from bcbio.bam import ref
from bcbio.ngsalign import alignprep, bwa
from bcbio.variation import coverage
fastq_file = data["files"][0]
... | Calculate maximum coverage downsampling for whole genome samples.
Returns None if we're not doing downsampling. |
def clean(inst):
"""Routine to return VEFI data cleaned to the specified level
Parameters
-----------
inst : (pysat.Instrument)
Instrument class object, whose attribute clean_level is used to return
the desired level of data selectivity.
Returns
--------
Void : (NoneType)
... | Routine to return VEFI data cleaned to the specified level
Parameters
-----------
inst : (pysat.Instrument)
Instrument class object, whose attribute clean_level is used to return
the desired level of data selectivity.
Returns
--------
Void : (NoneType)
data in inst is m... |
def on_epoch_end(self, epoch, smooth_loss, last_metrics, **kwargs):
"Logs training loss, validation loss and custom metrics & log prediction samples & save model"
if self.save_model:
# Adapted from fast.ai "SaveModelCallback"
current = self.get_monitor_value()
if cur... | Logs training loss, validation loss and custom metrics & log prediction samples & save model |
def unpack(fmt, data):
"""unpack(fmt, string) -> (v1, v2, ...)
Unpack the string, containing packed C structure data, according
to fmt. Requires len(string)==calcsize(fmt).
See struct.__doc__ for more on format strings."""
formatdef, endianness, i = getmode(fmt)
j = 0
num = 0
result = []
len... | unpack(fmt, string) -> (v1, v2, ...)
Unpack the string, containing packed C structure data, according
to fmt. Requires len(string)==calcsize(fmt).
See struct.__doc__ for more on format strings. |
def _make_plus_helper(obj, fields):
""" add a + prefix to any fields in obj that aren't in fields """
new_obj = {}
for key, value in obj.items():
if key in fields or key.startswith('_'):
# if there's a subschema apply it to a list or subdict
if fields.get(key):
... | add a + prefix to any fields in obj that aren't in fields |
def checkStock(self):
"""check stocks in preference"""
if not self.preferences:
logger.debug("no preferences")
return None
soup = BeautifulSoup(
self.xpath(path['stock-table'])[0].html, "html.parser")
count = 0
# iterate through product in left... | check stocks in preference |
def run(self):
"""
执行任务
"""
while not self._stoped:
self._tx_event.wait()
self._tx_event.clear()
try:
func = self._tx_queue.get_nowait()
if isinstance(func, str):
self._stoped = True
... | 执行任务 |
def localCitesOf(self, rec):
"""Takes in a Record, WOS string, citation string or Citation and returns a RecordCollection of all records that cite it.
# Parameters
_rec_ : `Record, str or Citation`
> The object that is being cited
# Returns
`RecordCollection`
... | Takes in a Record, WOS string, citation string or Citation and returns a RecordCollection of all records that cite it.
# Parameters
_rec_ : `Record, str or Citation`
> The object that is being cited
# Returns
`RecordCollection`
> A `RecordCollection` containing only... |
def union_join(left, right, left_as='left', right_as='right'):
"""
Join function truest to the SQL style join. Merges both objects together in a sum-type,
saving references to each parent in ``left`` and ``right`` attributes.
>>> Dog = namedtuple('Dog', ['name', 'woof', 'weight'])
>>> dog ... | Join function truest to the SQL style join. Merges both objects together in a sum-type,
saving references to each parent in ``left`` and ``right`` attributes.
>>> Dog = namedtuple('Dog', ['name', 'woof', 'weight'])
>>> dog = Dog('gatsby', 'Ruff!', 15)
>>> Cat = namedtuple('Cat', ['name', '... |
def magic(adata,
name_list=None,
k=10,
a=15,
t='auto',
n_pca=100,
knn_dist='euclidean',
random_state=None,
n_jobs=None,
verbose=False,
copy=None,
**kwargs):
"""Markov Affinity-based Graph Imputation of Cell... | Markov Affinity-based Graph Imputation of Cells (MAGIC) API [vanDijk18]_.
MAGIC is an algorithm for denoising and transcript recover of single cells
applied to single-cell sequencing data. MAGIC builds a graph from the data
and uses diffusion to smooth out noise and recover the data manifold.
More inf... |
def _is_streaming_request(self):
"""check request is stream request or not"""
arg2 = self.argstreams[1]
arg3 = self.argstreams[2]
return not (isinstance(arg2, InMemStream) and
isinstance(arg3, InMemStream) and
((arg2.auto_close and arg3.auto_close)... | check request is stream request or not |
def DropLocation():
""" Get the directory that file drop is watching """
template = Template(template=PathDirs().cfg_file)
drop_loc = template.option('main', 'files')[1]
drop_loc = expanduser(drop_loc)
drop_loc = abspath(drop_loc)
return (True, drop_loc) | Get the directory that file drop is watching |
def _prune_node(self, node):
"""
Prune the given node if context exits cleanly.
"""
if self.is_pruning:
# node is mutable, so capture the key for later pruning now
prune_key, node_body = self._node_to_db_mapping(node)
should_prune = (node_body is not N... | Prune the given node if context exits cleanly. |
def dist_in_usersite(dist):
"""
Return True if given Distribution is installed in user site.
"""
norm_path = normalize_path(dist_location(dist))
return norm_path.startswith(normalize_path(user_site)) | Return True if given Distribution is installed in user site. |
def p_declarations(self, p):
"""declarations : declarations declaration
| declaration"""
n = len(p)
if n == 3:
p[0] = p[1] + [p[2]]
elif n == 2:
p[0] = [p[1]] | declarations : declarations declaration
| declaration |
def namedb_get_preorder(cur, preorder_hash, current_block_number, include_expired=False, expiry_time=None):
"""
Get a preorder record by hash.
If include_expired is set, then so must expiry_time
Return None if not found.
"""
select_query = None
args = None
if include_expired:
... | Get a preorder record by hash.
If include_expired is set, then so must expiry_time
Return None if not found. |
def lookup_expand(self, stmt, names):
"""Find schema nodes under `stmt`, also in used groupings.
`names` is a list with qualified names of the schema nodes to
look up. All 'uses'/'grouping' pairs between `stmt` and found
schema nodes are marked for expansion.
"""
if not ... | Find schema nodes under `stmt`, also in used groupings.
`names` is a list with qualified names of the schema nodes to
look up. All 'uses'/'grouping' pairs between `stmt` and found
schema nodes are marked for expansion. |
def _get_target_nearest(self):
"""Get nearest target for each origin"""
reps_query = """
SELECT DISTINCT ON(g2.cartodb_id)
g1.cartodb_id As origin_id,
g2.the_geom,
g2.cartodb_id + {maxorigin} as cartodb_id,
g2.the_geom_webm... | Get nearest target for each origin |
def connectionLost(self, reason):
"""If we already have an AMP connection registered on the factory,
get rid of it.
"""
if self.connection is not None:
del self.factory.protocols[self.connection] | If we already have an AMP connection registered on the factory,
get rid of it. |
def aggregate(self, **filters):
"""Conduct an aggregate query"""
url = URL.aggregate.format(**locals())
return self.get_pages(url, **filters) | Conduct an aggregate query |
def set(self, style={}):
"""overrides style values at the current stack level"""
_style = {}
for attr in style:
if attr in self.cmds and not style[attr] in self.cmds[attr]:
print 'WARNING: ESC/POS PRINTING: ignoring invalid value: '+utfstr(style[attr])+' for style: '+... | overrides style values at the current stack level |
def convert_result(converter):
"""Decorator that can convert the result of a function call."""
def decorate(fn):
@inspection.wraps(fn)
def new_fn(*args, **kwargs):
return converter(fn(*args, **kwargs))
return new_fn
return decorate | Decorator that can convert the result of a function call. |
def autoLayout( self,
padX = None,
padY = None,
direction = Qt.Horizontal,
layout = 'Layered',
animate = 0,
centerOn = None,
center = None,
debug=False ):
... | Automatically lays out all the nodes in the scene using the \
autoLayoutNodes method.
:param padX | <int> || None | default is 2 * cell width
padY | <int> || None | default is 2 * cell height
direction | <Qt.Direction>
layout | <s... |
def add_usr_local_bin_to_path(log=False):
""" adds /usr/local/bin to $PATH """
if log:
bookshelf2.logging_helpers.log_green('inserts /usr/local/bin into PATH')
with settings(hide('warnings', 'running', 'stdout', 'stderr'),
capture=True):
try:
sudo('echo "export... | adds /usr/local/bin to $PATH |
def _write(self, f):
"""Serialize an NDEF record to a file-like object."""
log.debug("writing ndef record at offset {0}".format(f.tell()))
record_type = self.type
record_name = self.name
record_data = self.data
if record_type == '':
header_flags = 0;... | Serialize an NDEF record to a file-like object. |
def present(name=None,
table_name=None,
region=None,
key=None,
keyid=None,
profile=None,
read_capacity_units=None,
write_capacity_units=None,
alarms=None,
alarms_from_pillar="boto_dynamodb_alarms",
ha... | Ensure the DynamoDB table exists. Table throughput can be updated after
table creation.
Global secondary indexes (GSIs) are managed with some exceptions:
- If a GSI deletion is detected, a failure will occur (deletes should be
done manually in the AWS console).
- If multiple GSIs are added in a... |
def remove_directory(self, directory_name, *args, **kwargs):
""" :meth:`.WNetworkClientProto.remove_directory` method implementation
"""
client = self.dav_client()
remote_path = self.join_path(self.session_path(), directory_name)
if client.is_dir(remote_path) is False:
raise ValueError('Unable to remove n... | :meth:`.WNetworkClientProto.remove_directory` method implementation |
def _get_metadata(network_id, user_id):
"""
Get all the metadata in a network, across all scenarios
returns a dictionary of dict objects, keyed on dataset ID
"""
log.info("Getting Metadata")
dataset_qry = db.DBSession.query(
Dataset
).outerjoin(DatasetOwner, and_(Data... | Get all the metadata in a network, across all scenarios
returns a dictionary of dict objects, keyed on dataset ID |
def get_document(self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Retrieves the specified document.
Example:
>>> import ... | Retrieves the specified document.
Example:
>>> import dialogflow_v2beta1
>>>
>>> client = dialogflow_v2beta1.DocumentsClient()
>>>
>>> name = client.document_path('[PROJECT]', '[KNOWLEDGE_BASE]', '[DOCUMENT]')
>>>
>>> response ... |
def write_word_at(self, index: int, value: Union[int, BitVec, bool, Bool]) -> None:
"""Writes a 32 byte word to memory at the specified index`
:param index: index to write to
:param value: the value to write to memory
"""
try:
# Attempt to concretize value
... | Writes a 32 byte word to memory at the specified index`
:param index: index to write to
:param value: the value to write to memory |
def bit_flip(
p: Optional[float] = None
) -> Union[common_gates.XPowGate, BitFlipChannel]:
r"""
Construct a BitFlipChannel that flips a qubit state
with probability of a flip given by p. If p is None, return
a guaranteed flip in the form of an X operation.
This channel evolves a density matrix ... | r"""
Construct a BitFlipChannel that flips a qubit state
with probability of a flip given by p. If p is None, return
a guaranteed flip in the form of an X operation.
This channel evolves a density matrix via
$$
\rho \rightarrow M_0 \rho M_0^\dagger + M_1 \rho M_1^\dagger
$$
... |
def enable_rm_ha(self, new_rm_host_id, zk_service_name=None):
"""
Enable high availability for a YARN ResourceManager.
@param new_rm_host_id: id of the host where the second ResourceManager
will be added.
@param zk_service_name: Name of the ZooKeeper service to use for auto-f... | Enable high availability for a YARN ResourceManager.
@param new_rm_host_id: id of the host where the second ResourceManager
will be added.
@param zk_service_name: Name of the ZooKeeper service to use for auto-failover.
If YARN service depends on a ZooKeeper service then th... |
def _parse_args(self, args, known_only):
"""Helper function to do the main argument parsing.
This function goes through args and does the bulk of the flag parsing.
It will find the corresponding flag in our flag dictionary, and call its
.parse() method on the flag value.
Args:
args: [str], a... | Helper function to do the main argument parsing.
This function goes through args and does the bulk of the flag parsing.
It will find the corresponding flag in our flag dictionary, and call its
.parse() method on the flag value.
Args:
args: [str], a list of strings with the arguments to parse.
... |
def cmd_list(args):
"""List all element in pen"""
for penlist in penStore.data:
puts(penlist + " (" + str(len(penStore.data[penlist])) + ")") | List all element in pen |
def list_cameras():
""" List all attached USB cameras that are supported by libgphoto2.
:return: All recognized cameras
:rtype: list of :py:class:`Camera`
"""
ctx = lib.gp_context_new()
camlist_p = new_gp_object("CameraList")
port_list_p = new_gp_object("GPPortInfoList")
lib.gp_p... | List all attached USB cameras that are supported by libgphoto2.
:return: All recognized cameras
:rtype: list of :py:class:`Camera` |
def get(self, request, *args, **kwargs):
"""
Method for handling GET requests. Passes the
following arguments to the context:
* **obj** - The object to publish
* **done_url** - The result of the `get_done_url` method
"""
self.object = self.get_object()
r... | Method for handling GET requests. Passes the
following arguments to the context:
* **obj** - The object to publish
* **done_url** - The result of the `get_done_url` method |
def items(self, prefix=None, delimiter=None):
"""Get an iterator for the items within this bucket.
Args:
prefix: an optional prefix to match items.
delimiter: an optional string to simulate directory-like semantics. The returned items
will be those whose names do not contain the delimite... | Get an iterator for the items within this bucket.
Args:
prefix: an optional prefix to match items.
delimiter: an optional string to simulate directory-like semantics. The returned items
will be those whose names do not contain the delimiter after the prefix. For
the remaining item... |
def record(self):
# type: () -> bytes
'''
A method to generate the string representing this UDF Entity ID.
Parameters:
None.
Returns:
A string representing this UDF Entity ID.
'''
if not self._initialized:
raise pycdlibexception.PyCd... | A method to generate the string representing this UDF Entity ID.
Parameters:
None.
Returns:
A string representing this UDF Entity ID. |
def log_conditional_likelihood(self, x):
"""
likelihood \sum_t log p(y_t | x_t)
Optionally override this in base classes
"""
T, D = self.T, self.D_latent
assert x.shape == (T, D)
ll = 0
for t in range(self.T):
ll += self.local_log_likelihood(x... | likelihood \sum_t log p(y_t | x_t)
Optionally override this in base classes |
def _api_call(function):
"""
Decorator to call a pywebview API, checking for _webview_ready and raisings
appropriate Exceptions on failure.
"""
@wraps(function)
def wrapper(*args, **kwargs):
try:
if not _webview_ready.wait(15):
raise Exception('Main window fai... | Decorator to call a pywebview API, checking for _webview_ready and raisings
appropriate Exceptions on failure. |
def get_profile(profile, caller, runner):
'''
Get profile.
:param profile:
:return:
'''
profiles = profile.split(',')
data = {}
for profile in profiles:
if os.path.basename(profile) == profile:
profile = profile.split('.')[0] # Trim extension if someone added it
... | Get profile.
:param profile:
:return: |
def _position(self):
"""Get media position."""
position = 0
if self.state != STATE_IDLE:
resp = self._player.query_position(_FORMAT_TIME)
position = resp[1] // _NANOSEC_MULT
return position | Get media position. |
def get_handler_stats(self):
''' Return handler read statistics
Returns a dictionary of managed handler data read statistics. The
format is primarily controlled by the
:func:`SocketStreamCapturer.dump_all_handler_stats` function::
{
<capture address>: <list ... | Return handler read statistics
Returns a dictionary of managed handler data read statistics. The
format is primarily controlled by the
:func:`SocketStreamCapturer.dump_all_handler_stats` function::
{
<capture address>: <list of handler capture statistics>
... |
def email_send(text_template, html_template, data, subject, emails, headers=None):
"""Send an HTML/Plaintext email with the following fields.
text_template: URL to a Django template for the text email's contents
html_template: URL to a Django tempalte for the HTML email's contents
data: The context to ... | Send an HTML/Plaintext email with the following fields.
text_template: URL to a Django template for the text email's contents
html_template: URL to a Django tempalte for the HTML email's contents
data: The context to pass to the templates
subject: The subject of the email
emails: The addresses to s... |
def build(self, builder):
"""Build XML by appending to builder"""
builder.start("Protocol", {})
for child in self.study_event_refs:
child.build(builder)
for alias in self.aliases:
alias.build(builder)
builder.end("Protocol") | Build XML by appending to builder |
def cmd_isn(ip, port, count, iface, graph, verbose):
"""Create TCP connections and print the TCP initial sequence
numbers for each one.
\b
$ sudo habu.isn -c 5 www.portantier.com
1962287220
1800895007
589617930
3393793979
469428558
Note: You can get a graphical representation (... | Create TCP connections and print the TCP initial sequence
numbers for each one.
\b
$ sudo habu.isn -c 5 www.portantier.com
1962287220
1800895007
589617930
3393793979
469428558
Note: You can get a graphical representation (needs the matplotlib package)
using the '-g' option to b... |
def _delete_port_profile_from_ucsm(self, handle, port_profile, ucsm_ip):
"""Deletes Port Profile from UCS Manager."""
port_profile_dest = (const.PORT_PROFILESETDN + const.VNIC_PATH_PREFIX +
port_profile)
handle.StartTransaction()
# Find port profile on the U... | Deletes Port Profile from UCS Manager. |
def update(context, id, etag, name, country, parent_id, active, external):
"""update(context, id, etag, name, country, parent_id, active, external)
Update a team.
>>> dcictl team-update [OPTIONS]
:param string id: ID of the team to update [required]
:param string etag: Entity tag of the resource ... | update(context, id, etag, name, country, parent_id, active, external)
Update a team.
>>> dcictl team-update [OPTIONS]
:param string id: ID of the team to update [required]
:param string etag: Entity tag of the resource [required]
:param string name: Name of the team [required]
:param string c... |
def _get_chrbands(self, limit, taxon):
"""
For the given taxon, it will fetch the chr band file.
We will not deal with the coordinate information with this parser.
Here, we only are concerned with building the partonomy.
:param limit:
:return:
"""
model =... | For the given taxon, it will fetch the chr band file.
We will not deal with the coordinate information with this parser.
Here, we only are concerned with building the partonomy.
:param limit:
:return: |
def apply_u_umlaut(stem: str):
"""
Changes the vowel of the last syllable of the given stem if the vowel is affected by an u-umlaut.
>>> apply_u_umlaut("far")
'för'
>>> apply_u_umlaut("ör")
'ör'
>>> apply_u_umlaut("axl")
'öxl'
>>> apply_u_umlaut("hafn")
'höfn'
:param stem:
... | Changes the vowel of the last syllable of the given stem if the vowel is affected by an u-umlaut.
>>> apply_u_umlaut("far")
'för'
>>> apply_u_umlaut("ör")
'ör'
>>> apply_u_umlaut("axl")
'öxl'
>>> apply_u_umlaut("hafn")
'höfn'
:param stem:
:return: |
def process_form(self, instance, field, form, empty_marker = None,
emptyReturnsMarker = False):
""" Some special field handling for disabled fields, which don't
get submitted by the browser but still need to be written away.
"""
bsc = getToolByName(instance, 'bika_se... | Some special field handling for disabled fields, which don't
get submitted by the browser but still need to be written away. |
def notes_to_positions(notes, root):
""" Get notes positions.
ex) notes_to_positions(["C", "E", "G"], "C") -> [0, 4, 7]
:param list[str] notes: list of notes
:param str root: the root note
:rtype: list[int]
:return: list of note positions
"""
root_pos = note_to_val(root)
current_po... | Get notes positions.
ex) notes_to_positions(["C", "E", "G"], "C") -> [0, 4, 7]
:param list[str] notes: list of notes
:param str root: the root note
:rtype: list[int]
:return: list of note positions |
def current_git_dir():
    """Locate the nearest ``.git`` directory at or above the current directory.

    Walks upward from the current working directory toward the filesystem
    root and returns the absolute path of the first ``.git`` directory
    found, or ``None`` if none exists.
    """
    path = os.path.abspath(os.curdir)
    while True:
        candidate = os.path.join(path, '.git')
        if os.path.isdir(candidate):
            return candidate
        parent = os.path.dirname(path)
        if parent == path:
            # Reached the filesystem root. The original `while path != '/'`
            # test never terminates on Windows, where roots look like 'C:\\';
            # dirname() reaching a fixed point is the portable stop condition,
            # and it also lets the root directory itself be checked.
            return None
        path = parent
def _get_pos(self):
"""
Get current position for scroll bar.
"""
if self._h >= len(self._options):
return 0
else:
return self._start_line / (len(self._options) - self._h) | Get current position for scroll bar. |
def getStrings(lang_dict):
"""
Return a FunctionFS descriptor suitable for serialisation.
lang_dict (dict)
Key: language ID (ex: 0x0409 for en-us)
Value: list of unicode objects
All values must have the same number of items.
"""
field_list = []
kw = {}
try:
s... | Return a FunctionFS descriptor suitable for serialisation.
lang_dict (dict)
Key: language ID (ex: 0x0409 for en-us)
Value: list of unicode objects
All values must have the same number of items. |
def rackconnect(vm_):
    '''
    Return the ``rackconnect`` cloud config value for the given VM.

    Determines whether we should wait for rackconnect automation before
    running. Either ``False`` (the default) or ``True``.
    '''
    wait_for_rackconnect = config.get_cloud_config_value(
        'rackconnect',
        vm_,
        __opts__,
        default=False,
        search_global=False,
    )
    return wait_for_rackconnect
Either 'False' (default) or 'True'. |
def update_helper_political_level(self):
"""To update the helper about the country and the admin_level."""
current_country = self.country_comboBox.currentText()
index = self.admin_level_comboBox.currentIndex()
current_level = self.admin_level_comboBox.itemData(index)
content = No... | To update the helper about the country and the admin_level. |
def ADOSC(frame, fast=3, slow=10, high_col='high', low_col='low', close_col='close', vol_col='Volume'):
    """Chaikin A/D oscillator.

    Applies ``talib.ADOSC`` to the named high/low/close/volume columns of
    *frame* using the given fast and slow periods.
    """
    price_columns = [high_col, low_col, close_col, vol_col]
    return _frame_to_series(frame, price_columns, talib.ADOSC, fast, slow)
def _max(self):
"""Getter for the maximum series value"""
return (
self.range[1] if (self.range and self.range[1] is not None) else
(max(self._values) if self._values else None)
) | Getter for the maximum series value |
def outputFieldMarkdown(self):
    """
    Sends the field definitions to standard out.

    Column widths start from the widest field entries, then are widened
    further if the printed header itself turns out to be wider.
    """
    name_width, desc_width = self.getFieldsColumnLengths()
    header_name_width, header_desc_width = self.printFieldsHeader(name_width, desc_width)
    name_width = max(header_name_width, name_width)
    desc_width = max(header_desc_width, desc_width)
    self.printFields(name_width, desc_width)
def switch_to_window(self, window, wait=None):
"""
If ``window`` is a lambda, it switches to the first window for which ``window`` returns a
value other than False or None. If a window that matches can't be found, the window will be
switched back and :exc:`WindowError` will be raised.
... | If ``window`` is a lambda, it switches to the first window for which ``window`` returns a
value other than False or None. If a window that matches can't be found, the window will be
switched back and :exc:`WindowError` will be raised.
Args:
window (Window | lambda): The window that ... |
def _calc_traceback_limit(tb):
"""Calculates limit-parameter to strip away pytypes' internals when used
with API from traceback module.
"""
limit = 1
tb2 = tb
while not tb2.tb_next is None:
try:
maybe_pytypes = tb2.tb_next.tb_frame.f_code.co_filename.split(os.sep)[-2]
... | Calculates limit-parameter to strip away pytypes' internals when used
with API from traceback module. |
def walker(top, names):
"""
Walks a directory and records all packages and file extensions.
"""
global packages, extensions
if any(exc in top for exc in excludes):
return
package = top[top.rfind('holoviews'):].replace(os.path.sep, '.')
packages.append(package)
for name in names:
... | Walks a directory and records all packages and file extensions. |
def deftypes(self):
    """Generator over all type definitions in the body.

    A member is yielded when it carries a ``_ctype`` that is either a
    TYPEDEF, or an anonymous (unnamed) composed type.
    """
    for member in self.body:
        if not hasattr(member, '_ctype'):
            continue
        if member._ctype._storage == Storages.TYPEDEF:
            yield member
        elif member._name == '' and isinstance(member._ctype, ComposedType):
            yield member
def linearization_error(nodes):
r"""Compute the maximum error of a linear approximation.
.. note::
There is also a Fortran implementation of this function, which
will be used if it can be built.
.. note::
This is a helper for :class:`.Linearization`, which is used during the
... | r"""Compute the maximum error of a linear approximation.
.. note::
There is also a Fortran implementation of this function, which
will be used if it can be built.
.. note::
This is a helper for :class:`.Linearization`, which is used during the
curve-curve intersection process.
... |
def delete_user(self, email):
"""Delete a user from the database
Args:
email(str)
Returns:
user_obj(dict)
"""
LOG.info("Deleting user %s", email)
user_obj = self.user_collection.delete_one({'_id': email})
ret... | Delete a user from the database
Args:
email(str)
Returns:
user_obj(dict) |
def random_color(dtype=np.uint8):
"""
Return a random RGB color using datatype specified.
Parameters
----------
dtype: numpy dtype of result
Returns
----------
color: (4,) dtype, random color that looks OK
"""
hue = np.random.random() + .61803
hue %= 1.0
color = np.arra... | Return a random RGB color using datatype specified.
Parameters
----------
dtype: numpy dtype of result
Returns
----------
color: (4,) dtype, random color that looks OK |
def add_widget_to_content(self, widget):
    """Append *widget* to the section's top level column.

    Subclasses should call this to add content; a small spacing is
    inserted before each widget.
    """
    content_column = self.__section_content_column
    content_column.add_spacing(4)
    content_column.add(widget)
def initialize(self, stormconf, context):
    """Set up the spout state.

    Reads the ``TermCycleSpout/terms`` list from the config and builds
    an endless cycling iterator over those terms.
    """
    spout_cfg = get_config()['TermCycleSpout']
    self.terms = spout_cfg['terms']
    self.term_seq = itertools.cycle(self.terms)
1. Prepare sequence of terms based on config: TermCycleSpout/terms. |
def _draw_image(self, ci):
"""
Draw image object to reportlabs canvas.
:param ci: CanvasImage object
"""
img = img_adjust(ci.image, ci.opacity, tempdir=self.dir)
self.can.drawImage(img, x=ci.x, y=ci.y, width=ci.w, height=ci.h, mask=ci.mask,
pre... | Draw image object to reportlabs canvas.
:param ci: CanvasImage object |
def timeout(seconds, error_message=None):
"""Timeout checking just for Linux-like platform, not working in Windows platform."""
def decorated(func):
result = ""
def _handle_timeout(signum, frame):
errmsg = error_message or 'Timeout: The action <%s> is timeout!' % func.__name__
... | Timeout checking just for Linux-like platform, not working in Windows platform. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.