code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _save_translations(sender, instance, *args, **kwargs):
"""
This signal saves model translations.
"""
# If we are in a site with one language there is no need of saving translations
if site_is_monolingual():
return False
cls = sender
# If its class has no "translatable_fields" then there are no translat... | This signal saves model translations. |
def consume(self, seq):
    """Count every k-mer occurring in ``seq``."""
    kmer_stream = iter_kmers(seq, self.k, canonical=self.canonical)
    for km in kmer_stream:
        self._incr(km)
def _find_home_or_away(self, row):
"""
Determine whether the player is on the home or away team.
Next to every player is their school's name. This name can be matched
with the previously parsed home team's name to determine if the player
is a member of the home or away team.
... | Determine whether the player is on the home or away team.
Next to every player is their school's name. This name can be matched
with the previously parsed home team's name to determine if the player
is a member of the home or away team.
Parameters
----------
row : PyQue... |
def remote_server_command(command, environment, user_profile, **kwargs):
"""
Wraps web_command function with docker bindings needed to connect to
a remote server (such as datacats.com) and run commands there
(for example, when you want to copy your catalog to that server).
The files binded ... | Wraps web_command function with docker bindings needed to connect to
a remote server (such as datacats.com) and run commands there
(for example, when you want to copy your catalog to that server).
The files binded to the docker image include the user's ssh credentials:
ssh_config file,
... |
def generate_gap_bed(fname, outname):
""" Generate a BED file with gap locations.
Parameters
----------
fname : str
Filename of input FASTA file.
outname : str
Filename of output BED file.
"""
f = Fasta(fname)
with open(outname, "w") as bed:
for chrom in f.keys... | Generate a BED file with gap locations.
Parameters
----------
fname : str
Filename of input FASTA file.
outname : str
Filename of output BED file. |
def pkgdb(opts):
    '''
    Return modules for SPM's package database
    .. versionadded:: 2015.8.0
    '''
    # Package-database modules live under <salt base>/spm.
    spm_base = os.path.join(SALT_BASE_PATH, 'spm')
    module_dirs = _module_dirs(opts, 'pkgdb', base_path=spm_base)
    return LazyLoader(module_dirs, opts, tag='pkgdb')
def uninstall(self, pkgname, *args, **kwargs):
"""A context manager which allows uninstallation of packages from the environment
:param str pkgname: The name of a package to uninstall
>>> env = Environment("/path/to/env/root")
>>> with env.uninstall("pytz", auto_confirm=True, verbose=F... | A context manager which allows uninstallation of packages from the environment
:param str pkgname: The name of a package to uninstall
>>> env = Environment("/path/to/env/root")
>>> with env.uninstall("pytz", auto_confirm=True, verbose=False) as uninstaller:
cleaned = uninstalle... |
def registration_form_received(self, stanza):
"""Handle registration form received.
[client only]
Call self.registration_callback with the registration form received
as the argument. Use the value returned by the callback will be a
filled-in form.
:Parameters:
... | Handle registration form received.
[client only]
Call self.registration_callback with the registration form received
as the argument. Use the value returned by the callback will be a
filled-in form.
:Parameters:
- `stanza`: the stanza received.
:Types:
... |
def JZ(cpu, target):
    """
    Jumps short if zero.
    :param cpu: current CPU.
    :param target: destination operand.
    """
    # Select the jump target when ZF is set, otherwise fall through.
    taken = target.read()
    fallthrough = cpu.PC
    cpu.PC = Operators.ITEBV(cpu.address_bit_size, cpu.ZF, taken, fallthrough)
def load(self, fileobj):
'''Load the dict from the file object'''
# try formats from most restrictive to least restrictive
for loader in (pickle.load, json.load, csv.reader):
fileobj.seek(0)
try:
return self.initial_update(loader(fileobj))
exce... | Load the dict from the file object |
def get_stroke_glide_indices(A_g_hf, fs_a, J, t_max):
'''Get stroke and glide indices from high-pass accelerometer data
Args
----
A_g_hf: 1-D ndarray
Animal frame triaxial accelerometer matrix at sampling rate fs_a.
fs_a: int
Number of accelerometer samples per second
J: float... | Get stroke and glide indices from high-pass accelerometer data
Args
----
A_g_hf: 1-D ndarray
Animal frame triaxial accelerometer matrix at sampling rate fs_a.
fs_a: int
Number of accelerometer samples per second
J: float
Frequency threshold for detecting a fluke stroke in ... |
def AddArguments(cls, argument_group):
"""Adds command line arguments the helper supports to an argument group.
This function takes an argument parser or an argument group object and adds
to it all the command line arguments this helper supports.
Args:
argument_group (argparse._ArgumentGroup|arg... | Adds command line arguments the helper supports to an argument group.
This function takes an argument parser or an argument group object and adds
to it all the command line arguments this helper supports.
Args:
argument_group (argparse._ArgumentGroup|argparse.ArgumentParser):
argparse grou... |
def config_dict(config):
    """
    Given a Sphinx config object, return a dictionary of config
    values.
    """
    # ``config.values`` lists the declared option names; read each one
    # off the config object by attribute access.
    return {key: getattr(config, key) for key in config.values}
def retry(tries, delay=0, back_off=1, raise_msg=''):
"""Retries a function or method until it got True.
- ``delay`` sets the initial delay in seconds
- ``back_off`` sets the factor by which
- ``raise_msg`` if not '', it'll raise an Exception
"""
if back_off < 1:
raise ValueError('back_... | Retries a function or method until it got True.
- ``delay`` sets the initial delay in seconds
- ``back_off`` sets the factor by which
- ``raise_msg`` if not '', it'll raise an Exception |
def bootstrap(ns_var_name: str = NS_VAR_NAME, core_ns_name: str = CORE_NS) -> None:
"""Bootstrap the environment with functions that are are difficult to
express with the very minimal lisp environment."""
core_ns_sym = sym.symbol(core_ns_name)
ns_var_sym = sym.symbol(ns_var_name, ns=core_ns_name)
__... | Bootstrap the environment with functions that are are difficult to
express with the very minimal lisp environment. |
def _storage_list_keys(bucket, pattern):
""" List all storage keys in a specified bucket that match a pattern. """
data = [{'Name': item.metadata.name,
'Type': item.metadata.content_type,
'Size': item.metadata.size,
'Updated': item.metadata.updated_on}
for item in _storage... | List all storage keys in a specified bucket that match a pattern. |
def build_slabs(self):
"""
Builds the reconstructed slab by:
(1) Obtaining the unreconstructed slab using the specified
parameters for the SlabGenerator.
(2) Applying the appropriate lattice transformation in the
a and b lattice vectors.
... | Builds the reconstructed slab by:
(1) Obtaining the unreconstructed slab using the specified
parameters for the SlabGenerator.
(2) Applying the appropriate lattice transformation in the
a and b lattice vectors.
(3) Remove any specified sites from both ... |
def cmServicePrompt():
    """CM SERVICE PROMPT Section 9.2.5a"""
    header = TpPd(pd=0x5)
    msg_type = MessageType(mesType=0x25)  # 00100101
    pd_sapi = PdAndSapi()
    # Layer the three sections into a single packet.
    return header / msg_type / pd_sapi
def on_song_changed(self, song):
"""bind song changed signal with this"""
if song is None or song.lyric is None:
self._lyric = None
self._pos_s_map = {}
else:
self._lyric = song.lyric.content
self._pos_s_map = parse(self._lyric)
self._pos_l... | bind song changed signal with this |
def has_path(nodes, A, B):
r"""Test if nodes from a breadth_first_order search lead from A to
B.
Parameters
----------
nodes : array_like
Nodes from breadth_first_oder_seatch
A : array_like
The set of educt states
B : array_like
The set of product states
Returns... | r"""Test if nodes from a breadth_first_order search lead from A to
B.
Parameters
----------
nodes : array_like
Nodes from breadth_first_oder_seatch
A : array_like
The set of educt states
B : array_like
The set of product states
Returns
-------
has_path : boo... |
def add_string_label(self, str_):
""" Maps ("folds") the given string, returning an unique label ID.
This allows several constant labels to be initialized to the same address
thus saving memory space.
:param str_: the string to map
:return: the unique label ID
"""
... | Maps ("folds") the given string, returning an unique label ID.
This allows several constant labels to be initialized to the same address
thus saving memory space.
:param str_: the string to map
:return: the unique label ID |
def rank_loss(sentence_emb, image_emb, margin=0.2):
"""Experimental rank loss, thanks to kkurach@ for the code."""
with tf.name_scope("rank_loss"):
# Normalize first as this is assumed in cosine similarity later.
sentence_emb = tf.nn.l2_normalize(sentence_emb, 1)
image_emb = tf.nn.l2_normalize(image_emb... | Experimental rank loss, thanks to kkurach@ for the code. |
def setwinsize(self, r, c):
"""This sets the terminal window size of the child tty. This will cause
a SIGWINCH signal to be sent to the child. This does not change the
physical window size. It changes the size reported to TTY-aware
applications like vi or curses -- applications that res... | This sets the terminal window size of the child tty. This will cause
a SIGWINCH signal to be sent to the child. This does not change the
physical window size. It changes the size reported to TTY-aware
applications like vi or curses -- applications that respond to the
SIGWINCH signal. |
def estimate_pos_and_err_parabolic(tsvals):
"""Solve for the position and uncertainty of source in one dimension
assuming that you are near the maximum and the errors are parabolic
Parameters
----------
tsvals : `~numpy.ndarray`
The TS values at the maximum TS, and for each pixel on e... | Solve for the position and uncertainty of source in one dimension
assuming that you are near the maximum and the errors are parabolic
Parameters
----------
tsvals : `~numpy.ndarray`
The TS values at the maximum TS, and for each pixel on either side
Returns
-------
The positio... |
def _effectinit_raise_col_padding_on_focus(self, name, **kwargs):
"""Init the column padding on focus effect.
Keyword arguments can contain enlarge_time and padding.
"""
self._effects[name] = kwargs
if "enlarge_time" not in kwargs:
kwargs['enlarge_time'] = 0.5
... | Init the column padding on focus effect.
Keyword arguments can contain enlarge_time and padding. |
def eval(self, expr, n, extra_constraints=(), solver=None, model_callback=None):
"""
This function returns up to `n` possible solutions for expression `expr`.
:param expr: expression (an AST) to evaluate
:param n: number of results to return
:param solver: a solver object, nativ... | This function returns up to `n` possible solutions for expression `expr`.
:param expr: expression (an AST) to evaluate
:param n: number of results to return
:param solver: a solver object, native to the backend, to assist in
the evaluation (for example, a z3.Solver)
... |
def _is_reference(bpe):
"""Return True if the element is an entity reference."""
if isinstance(bpe, _bp('ProteinReference')) or \
isinstance(bpe, _bpimpl('ProteinReference')) or \
isinstance(bpe, _bp('SmallMoleculeReference')) or \
isinstance(bpe, _bpimpl('SmallMoleculeReference')) or \
... | Return True if the element is an entity reference. |
def close(self):
    '''close the graph'''
    # Signal the graph loop to exit.
    self.close_graph.set()
    # Give the child up to two seconds to terminate.
    alive = self.is_alive()
    if alive:
        self.child.join(2)
def get_api_client():
"""Gets the reference to the API cient (singleton)."""
with _api_lock:
global _api_client
if not _api_client:
conf_file = os.path.join(os.environ.get("HOME"),
".python-grid5000.yaml")
_api_client = Client.from_yam... | Gets the reference to the API cient (singleton). |
def list_teams(profile="github", ignore_cache=False):
'''
Lists all teams with the organization.
profile
The name of the profile configuration to use. Defaults to ``github``.
ignore_cache
Bypasses the use of cached teams.
CLI Example:
.. code-block:: bash
salt mymini... | Lists all teams with the organization.
profile
The name of the profile configuration to use. Defaults to ``github``.
ignore_cache
Bypasses the use of cached teams.
CLI Example:
.. code-block:: bash
salt myminion github.list_teams
.. versionadded:: 2016.11.0 |
def __advice_stack_frame_protection(self, frame):
"""
Overriding of this is only permitted if and only if your name is
Megumin and you have a pet/familiar named Chomusuke.
"""
if frame is None:
logger.debug(
'currentframe() returned None; frame protec... | Overriding of this is only permitted if and only if your name is
Megumin and you have a pet/familiar named Chomusuke. |
def describe_policy(policyName,
region=None, key=None, keyid=None, profile=None):
'''
Given a policy name describe its properties.
Returns a dictionary of interesting properties.
CLI Example:
.. code-block:: bash
salt myminion boto_iot.describe_policy mypolicy
'''
... | Given a policy name describe its properties.
Returns a dictionary of interesting properties.
CLI Example:
.. code-block:: bash
salt myminion boto_iot.describe_policy mypolicy |
def write(self, fptr):
    """Write a UUID box to file."""
    # Box layout: 4-byte length, 4-byte type 'uuid', 16-byte UUID, payload.
    payload = self.raw_data
    box_length = 4 + 4 + 16 + len(payload)
    header = struct.pack('>I4s', box_length, b'uuid')
    fptr.write(header)
    fptr.write(self.uuid.bytes)
    fptr.write(payload)
def _infer(self, request):
"""Returns JSON for the `vz-line-chart`s for a feature.
Args:
request: A request that should contain 'inference_address', 'model_name',
'model_type, 'model_version', 'model_signature' and 'label_vocab_path'.
Returns:
A list of JSON objects, one for each chart... | Returns JSON for the `vz-line-chart`s for a feature.
Args:
request: A request that should contain 'inference_address', 'model_name',
'model_type, 'model_version', 'model_signature' and 'label_vocab_path'.
Returns:
A list of JSON objects, one for each chart. |
def compare_profiles(profile1, profile2):
"""
Given two profiles, determine the ratio of similarity, i.e.
the hamming distance between the strings.
Args:
profile1/2 (str): profile string
Returns:
similarity_ratio (float): the ratio of similiarity (0-1)
"... | Given two profiles, determine the ratio of similarity, i.e.
the hamming distance between the strings.
Args:
profile1/2 (str): profile string
Returns:
similarity_ratio (float): the ratio of similiarity (0-1) |
def insert_first(self, val):
    """Insert in head
    :param val: Object to insert
    :return: True iff insertion completed successfully
    """
    # The new node points at the old head and becomes the new head.
    new_head = Node(val, next_node=self.head)
    self.head = new_head
    return True
def cleanup_lines( lines, **kwargs ):
''' Cleans up annotation after syntactic pre-processing and processing:
-- Removes embedded clause boundaries "<{>" and "<}>";
-- Removes CLBC markings from analysis;
-- Removes additional information between < and > from analysis;
-- Removes add... | Cleans up annotation after syntactic pre-processing and processing:
-- Removes embedded clause boundaries "<{>" and "<}>";
-- Removes CLBC markings from analysis;
-- Removes additional information between < and > from analysis;
-- Removes additional information between " and " from analy... |
def config(self):
    """Get a listing of mobile client configuration settings."""
    response = self._call(mc_calls.Config)
    # Dig the entry list out of the response body, tolerating a
    # missing 'data' section.
    data = response.body.get('data', {})
    return data.get('entries', [])
def change_puk(ctx, puk, new_puk):
"""
Change the PUK code.
If the PIN is lost or blocked it can be reset using a PUK.
The PUK must be between 6 and 8 characters long, and supports any type of
alphanumeric characters.
"""
controller = ctx.obj['controller']
if not puk:
puk = _pro... | Change the PUK code.
If the PIN is lost or blocked it can be reset using a PUK.
The PUK must be between 6 and 8 characters long, and supports any type of
alphanumeric characters. |
def apply_substitutions(monomial, monomial_substitutions, pure=False):
"""Helper function to remove monomials from the basis."""
if is_number_type(monomial):
return monomial
original_monomial = monomial
changed = True
if not pure:
substitutions = monomial_substitutions
else:
... | Helper function to remove monomials from the basis. |
def add_router_interface(self, context, router_info):
"""Adds an interface to a router created on Arista HW router.
This deals with both IPv6 and IPv4 configurations.
"""
if router_info:
self._select_dicts(router_info['ip_version'])
cidr = router_info['cidr']
... | Adds an interface to a router created on Arista HW router.
This deals with both IPv6 and IPv4 configurations. |
def initiate_tasks(self):
    """ Loads all tasks using `TaskLoader` from respective configuration option """
    # Resolve the configured task search paths, then hand them to the loader.
    task_paths = self.configuration[Configuration.ALGORITHM][Configuration.TASKS][Configuration.PATHS]
    self.tasks_classes = TaskLoader().load_tasks(paths=task_paths)
def create_snapshot(self, datacenter_id, volume_id,
name=None, description=None):
"""
Creates a snapshot of the specified volume.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param volume_id: The u... | Creates a snapshot of the specified volume.
:param datacenter_id: The unique ID of the data center.
:type datacenter_id: ``str``
:param volume_id: The unique ID of the volume.
:type volume_id: ``str``
:param name: The name given to the volume.
... |
def substitute_minor_for_major(progression, substitute_index,
ignore_suffix=False):
"""Substitute minor chords for its major equivalent.
'm' and 'm7' suffixes recognized, and ['II', 'III', 'VI'] if there is no
suffix.
Examples:
>>> substitute_minor_for_major(['VI'], 0)
['I']
>>> su... | Substitute minor chords for its major equivalent.
'm' and 'm7' suffixes recognized, and ['II', 'III', 'VI'] if there is no
suffix.
Examples:
>>> substitute_minor_for_major(['VI'], 0)
['I']
>>> substitute_minor_for_major(['Vm'], 0)
['bVIIM']
>>> substitute_minor_for_major(['VIm7'], 0)
... |
def run(path, code, params=None, ignore=None, select=None, **meta):
"""Pylint code checking.
:return list: List of errors.
"""
logger.debug('Start pylint')
clear_cache = params.pop('clear_cache', False)
if clear_cache:
MANAGER.astroid_cache.clear()
... | Pylint code checking.
:return list: List of errors. |
def update_redirect(self):
"""
Call it on your own endpoint's to update the back history navigation.
If you bypass it, the next submit or back will go over it.
"""
page_history = Stack(session.get("page_history", []))
page_history.push(request.url)
session... | Call it on your own endpoint's to update the back history navigation.
If you bypass it, the next submit or back will go over it. |
def get_response(self, environ=None):
"""Get a list of headers."""
response = super(SameContentException, self).get_response(
environ=environ
)
if self.etag is not None:
response.set_etag(self.etag)
if self.last_modified is not None:
response.h... | Get a list of headers. |
def get_catfact():
"""Get a cat fact from catfact.ninja and return it as a string.
Functions for Soundhound, Google, IBM Watson, or other APIs can be added
to create the desired functionality into this bot.
"""
response = requests.get(CAT_FACTS_URL, verify=False)
response.raise_for_status()
... | Get a cat fact from catfact.ninja and return it as a string.
Functions for Soundhound, Google, IBM Watson, or other APIs can be added
to create the desired functionality into this bot. |
def compute_transformed(context):
"""Compute transformed key for opening database"""
key_composite = compute_key_composite(
password=context._._.password,
keyfile=context._._.keyfile
)
kdf_parameters = context._.header.value.dynamic_header.kdf_parameters.data.dict
if context._._.tr... | Compute transformed key for opening database |
def as_base_units(self):
"""
Converts all units to base SI units, including derived units.
Returns:
(base_units_dict, scaling factor). base_units_dict will not
contain any constants, which are gathered in the scaling factor.
"""
b = collections.defaultdic... | Converts all units to base SI units, including derived units.
Returns:
(base_units_dict, scaling factor). base_units_dict will not
contain any constants, which are gathered in the scaling factor. |
def stream(self, sha):
    """For now, all lookup is done by git itself"""
    # git expects a hex sha; we get a binary one.
    obj_info = self._git.stream_object_data(bin_to_hex(sha))
    hexsha, typename, size, data_stream = obj_info
    return OStream(hex_to_bin(hexsha), typename, size, data_stream)
def _enable_lock(func):
"""
The decorator for ensuring thread-safe when current cache instance is concurrent status.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self.is_concurrent:
only_read = kwargs.get('only_read')
if only_rea... | The decorator for ensuring thread-safe when current cache instance is concurrent status. |
def save_post(self, title, text, user_id, tags, draft=False,
post_date=None, last_modified_date=None, meta_data=None,
post_id=None):
"""
Persist the blog post data. If ``post_id`` is ``None`` or ``post_id``
is invalid, the post must be inserted into the storag... | Persist the blog post data. If ``post_id`` is ``None`` or ``post_id``
is invalid, the post must be inserted into the storage. If ``post_id``
is a valid id, then the data must be updated.
:param title: The title of the blog post
:type title: str
:param text: The text of the blog ... |
def _get_pretty_table(self, indent: int = 0, align: int = ALIGN_CENTER, border: bool = False) -> PrettyTable:
"""
Returns the table format of the scheme, i.e.:
<table name>
+----------------+----------------
| <field1> | <field2>...
+----------------+--------... | Returns the table format of the scheme, i.e.:
<table name>
+----------------+----------------
| <field1> | <field2>...
+----------------+----------------
| value1(field1) | value1(field2)
| value2(field1) | value2(field2)
| value3(field1) | value3(... |
def wait(self):
    """Waits for all submitted jobs to complete."""
    pending = len(self.submissions)
    logging.info("waiting for {} jobs to complete".format(pending))
    # Poll once a second until another thread flips ``shutdown``.
    while not self.shutdown:
        time.sleep(1)
def get_logistic_regression_coefs_l2(self, category,
clf=RidgeClassifierCV()):
''' Computes l2-penalized logistic regression score.
Parameters
----------
category : str
category name to score
category : str
categor... | Computes l2-penalized logistic regression score.
Parameters
----------
category : str
category name to score
category : str
category name to score
Returns
-------
(coefficient array, accuracy, majority class baseline accuracy) |
def _add_err(self, exinfo):
"""
Sets the error on this MultiResult. Will be ignored if an error is
already set.
:param exinfo: Return value from ``sys.exc_info()``
"""
if self._err:
return
self._err = exinfo
self.all_ok = False | Sets the error on this MultiResult. Will be ignored if an error is
already set.
:param exinfo: Return value from ``sys.exc_info()`` |
def continuous_binary_search(f, lo, hi, gap=1e-4):
"""Binary search for a function
:param f: boolean monotone function with f(hi) = True
:param int lo:
:param int hi: with hi >= lo
:param float gap:
:returns: first value x in [lo,hi] such that f(x),
x is computed up to some precisi... | Binary search for a function
:param f: boolean monotone function with f(hi) = True
:param int lo:
:param int hi: with hi >= lo
:param float gap:
:returns: first value x in [lo,hi] such that f(x),
x is computed up to some precision
:complexity: `O(log((hi-lo)/gap))` |
def watch(self, flag):
    """Whether or not the Template is being watched."""
    # The CLIPS API takes the flag as an int (0/1).
    watch_flag = int(flag)
    lib.EnvSetDeftemplateWatch(self._env, watch_flag, self._tpl)
def _multi_take(self, tup):
"""
Create the indexers for the passed tuple of keys, and execute the take
operation. This allows the take operation to be executed all at once -
rather than once for each dimension - improving efficiency.
Parameters
----------
tup : t... | Create the indexers for the passed tuple of keys, and execute the take
operation. This allows the take operation to be executed all at once -
rather than once for each dimension - improving efficiency.
Parameters
----------
tup : tuple
Tuple of indexers, one per axis... |
def write_Bar(file, bar, bpm=120, repeat=0, verbose=False):
"""Write a mingus.Bar to a MIDI file.
Both the key and the meter are written to the file as well.
"""
m = MidiFile()
t = MidiTrack(bpm)
m.tracks = [t]
while repeat >= 0:
t.play_Bar(bar)
repeat -= 1
return m.writ... | Write a mingus.Bar to a MIDI file.
Both the key and the meter are written to the file as well. |
def score(infile, outfile, classifier, xgb_autotune, apply_weights, xeval_fraction, xeval_num_iter, ss_initial_fdr, ss_iteration_fdr, ss_num_iter, ss_main_score, group_id, parametric, pfdr, pi0_lambda, pi0_method, pi0_smooth_df, pi0_smooth_log_pi0, lfdr_truncate, lfdr_monotone, lfdr_transformation, lfdr_adj, lfdr_eps, ... | Conduct semi-supervised learning and error-rate estimation for MS1, MS2 and transition-level data. |
def get_container_metadata(self, container, prefix=None):
    """
    Returns a dictionary containing the metadata for the container.
    """
    # Delegate straight to the manager.
    manager = self._manager
    return manager.get_metadata(container, prefix=prefix)
def print_topics(self, Nwords=10):
    """
    Print the top ``Nwords`` words for each topic.

    :param Nwords: number of top words to show per topic (default 10).
    """
    print('Topic\tTop %i words' % Nwords)
    for k, words in self.list_topics(Nwords):
        # ``words`` is a sequence of (word, score) pairs; zip(*words)
        # transposes it so [0] is the tuple of words alone.
        top_words = list(zip(*words))[0]
        # ``str`` replaces the Python 2-only ``unicode`` builtin, which
        # raises NameError on Python 3.
        print(str(k).ljust(3) + '\t' + ' '.join(top_words))
def absstart(self):
    """Returns the absolute start of the element by including docstrings
    outside of the element definition if applicable."""
    # A positive ``docstart`` means a docstring precedes the definition;
    # a missing attribute behaves like 0 (no docstring).
    if getattr(self, "docstart", 0) > 0:
        return self.docstart
    return self.start
def main(mash_output, hash_cutoff, sample_id, assembly_file):
"""
Main function that allows to dump a mash dist txt file to a json file
Parameters
----------
mash_output: str
A string with the input file.
hash_cutoff: str
the percentage cutoff for the percentage of shared hashes... | Main function that allows to dump a mash dist txt file to a json file
Parameters
----------
mash_output: str
A string with the input file.
hash_cutoff: str
the percentage cutoff for the percentage of shared hashes between query
and plasmid in database that is allowed for the pla... |
def crossover_with(self, other, points=2):
"""Perform 2-point crossover on this bit condition and another of
the same length, returning the two resulting children.
Usage:
offspring1, offspring2 = condition1.crossover_with(condition2)
Arguments:
other: A second B... | Perform 2-point crossover on this bit condition and another of
the same length, returning the two resulting children.
Usage:
offspring1, offspring2 = condition1.crossover_with(condition2)
Arguments:
other: A second BitCondition of the same length as this one.
... |
def clear(zpool, device=None):
'''
Clears device errors in a pool.
.. warning::
The device must not be part of an active pool configuration.
zpool : string
name of storage pool
device : string
(optional) specific device to clear
.. versionadded:: 2018.3.1
CLI Exa... | Clears device errors in a pool.
.. warning::
The device must not be part of an active pool configuration.
zpool : string
name of storage pool
device : string
(optional) specific device to clear
.. versionadded:: 2018.3.1
CLI Example:
.. code-block:: bash
sa... |
def _get_seqprop_to_seqprop_alignment(self, seqprop1, seqprop2):
"""Return the alignment stored in self.sequence_alignments given a seqprop + another seqprop"""
if isinstance(seqprop1, str):
seqprop1_id = seqprop1
else:
seqprop1_id = seqprop1.id
if isinstance(seqp... | Return the alignment stored in self.sequence_alignments given a seqprop + another seqprop |
def clip_image(image, clip_min, clip_max):
    """ Clip an image, or an image batch, with upper and lower threshold. """
    # np.clip is documented as equivalent to
    # np.minimum(clip_max, np.maximum(image, clip_min)).
    return np.clip(image, clip_min, clip_max)
def last_in_date_group(df,
data_query_cutoff_times,
assets,
reindex=True,
have_sids=True,
extra_groupers=None):
"""
Determine the last piece of information known on each date in the date
index... | Determine the last piece of information known on each date in the date
index for each group. Input df MUST be sorted such that the correct last
item is chosen from each group.
Parameters
----------
df : pd.DataFrame
The DataFrame containing the data to be grouped. Must be sorted so that
... |
def get_name_init(self, name):
    """Get initial name of symbol.
    """
    # Make sure the symbol is known before looking up its mapper.
    self._register_name(name)
    mapper = self._var_name_mappers[name]
    return mapper.get_init()
def get_composition_lookup_session_for_repository(self, repository_id, proxy):
"""Gets the OsidSession associated with the composition lookup
service for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
... | Gets the OsidSession associated with the composition lookup
service for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.CompositionLookupSession) - the new
CompositionLo... |
def _open(self, mode='r'):
"""Open the password file in the specified mode
"""
open_file = None
writeable = 'w' in mode or 'a' in mode or '+' in mode
try:
# NOTE: currently the MemOpener does not split off any filename
# which causes errors on close(... | Open the password file in the specified mode |
def _heartbeat_manager(self):
"""
Heartbeat DAG file processor and start it if it is not alive.
:return:
"""
if self._process and not self._process.is_alive() and not self.done:
self.start() | Heartbeat DAG file processor and start it if it is not alive.
:return: |
def _make_readline_peeker(self):
"""Make a readline-like function which peeks into the source."""
counter = itertools.count(0)
def readline():
try:
return self._peek_buffer(next(counter))
except StopIteration:
return ''
return readl... | Make a readline-like function which peeks into the source. |
def parse_data(self, data, msg_signature=None, timestamp=None, nonce=None):
"""
解析微信服务器发送过来的数据并保存类中
:param data: HTTP Request 的 Body 数据
:param msg_signature: EncodingAESKey 的 msg_signature
:param timestamp: EncodingAESKey 用时间戳
:param nonce: EncodingAESKey 用随机数
:ra... | 解析微信服务器发送过来的数据并保存类中
:param data: HTTP Request 的 Body 数据
:param msg_signature: EncodingAESKey 的 msg_signature
:param timestamp: EncodingAESKey 用时间戳
:param nonce: EncodingAESKey 用随机数
:raises ParseError: 解析微信服务器数据错误, 数据不合法 |
def _map_type_to_dict(self, type_name):
""" Maps a an instance type representation string (e.g. 'RESULT')
to the corresponding dictionary in root.
"""
root = self._root_instance
if type_name == RESULT:
return root._results
elif type_name == PARAMETER:
... | Maps a an instance type representation string (e.g. 'RESULT')
to the corresponding dictionary in root. |
def unix_ts(dtval):
    '''Convert datetime into a unix timestamp.
    This is the equivalent to Python 3's int(datetime.timestamp()).
    :param dtval: timezone-aware datetime to convert
    '''
    # Stdlib UTC tzinfo replaces dateutil's tzutc(); the two compare
    # equal for epoch arithmetic, so behavior is unchanged while the
    # third-party dependency goes away.
    from datetime import timezone
    epoch = datetime(1970, 1, 1, 0, 0, tzinfo=timezone.utc)
    delta = dtval - epoch
    # Whole seconds only; microseconds are discarded.
    return delta.days * 24 * 3600 + delta.seconds
def Lexicon(**rules):
    """Create a dictionary mapping symbols to alternative words.

    >>> Lexicon(Art = "the | a | an")
    {'Art': ['the', 'a', 'an']}
    """
    # Split each right-hand side on '|' and strip the surrounding
    # whitespace from every alternative.
    return {
        symbol: [alternative.strip() for alternative in rhs.split('|')]
        for symbol, rhs in rules.items()
    }
def erfcc(x):
"""
Returns the complementary error function erfc(x) with fractional
error everywhere less than 1.2e-7. Adapted from Numerical Recipies.
Usage: lerfcc(x)
"""
z = abs(x)
t = 1.0 / (1.0 + 0.5 * z)
ans = t * math.exp(
-z * z - 1.26551223 + t * (1.00002368 + t * (0.37409196 + t * (... | Returns the complementary error function erfc(x) with fractional
error everywhere less than 1.2e-7. Adapted from Numerical Recipes.
Usage: lerfcc(x) |
async def _wrap_gen(self, ID: str):
"""异步迭代器包装.
Parameters:
ID (str): - 任务ID
Yield:
(Any): - 从异步迭代器结果队列中获取的结果
Raise:
(StopAsyncIteration): - 异步迭代器终止时抛出该异常
"""
while True:
result = await self._gens_queue[ID].get()
... | 异步迭代器包装.
Parameters:
ID (str): - 任务ID
Yield:
(Any): - 从异步迭代器结果队列中获取的结果
Raise:
(StopAsyncIteration): - 异步迭代器终止时抛出该异常 |
def showMenu(self, point=None):
    """
    Displays the menu for this view widget.

    :param point | <QPoint>  (accepted for API compatibility; the menu is
                              positioned at the current cursor location)
    """
    popup = self.createMenu(self)
    # Show the menu modally at the cursor position.
    popup.exec_(QtGui.QCursor.pos())
    # Schedule Qt-side deletion once control returns to the event loop.
    popup.deleteLater()
def cpp_best_split_full_model(X, Uy, C, S, U, noderange, delta,
                              save_memory=False):
    """Wrapper calling the C++ splitting function.

    NOTE(review): ``save_memory`` is accepted for interface compatibility
    but is not forwarded to the C++ implementation.
    """
    args = (X, Uy, C, S, U, noderange, delta)
    return CSP.best_split_full_model(*args)
def split_leading_indent(line, max_indents=None):
"""Split line into leading indent and main."""
indent = ""
while (
(max_indents is None or max_indents > 0)
and line.startswith((openindent, closeindent))
) or line.lstrip() != line:
if max_indents is not None and line.startswith(... | Split line into leading indent and main. |
def closure(self):
"""
Returns a new `Independencies()`-object that additionally contains those `IndependenceAssertions`
that are implied by the the current independencies (using with the `semi-graphoid axioms
<https://en.wikipedia.org/w/index.php?title=Conditional_independence&oldid=708... | Returns a new `Independencies()`-object that additionally contains those `IndependenceAssertions`
that are implied by the current independencies (using the `semi-graphoid axioms
<https://en.wikipedia.org/w/index.php?title=Conditional_independence&oldid=708760689#Rules_of_conditional_independenc... |
def cut_gmail_quote(html_message):
''' Cuts the outermost block element with class gmail_quote. '''
gmail_quote = cssselect('div.gmail_quote', html_message)
if gmail_quote and (gmail_quote[0].text is None or not RE_FWD.match(gmail_quote[0].text)):
gmail_quote[0].getparent().remove(gmail_quote[0])
... | Cuts the outermost block element with class gmail_quote. |
def http_basic_auth_get_user(request):
"""Inspect the given request to find a logged user. If not found, the header HTTP_AUTHORIZATION
is read for 'Basic Auth' login and password, and try to authenticate against default UserModel.
Always return a User instance (possibly anonymous, meaning authentication fai... | Inspect the given request to find a logged user. If not found, the header HTTP_AUTHORIZATION
is read for 'Basic Auth' login and password, and try to authenticate against default UserModel.
Always return a User instance (possibly anonymous, meaning authentication failed) |
def html2groff(data, name):
"""Convert HTML text from cplusplus.com to Groff-formatted text."""
# Remove sidebar
try:
data = data[data.index('<div class="C_doc">'):]
except ValueError:
pass
# Pre replace all
for rp in pre_rps:
data = re.compile(rp[0], rp[2]).sub(rp[1], d... | Convert HTML text from cplusplus.com to Groff-formatted text. |
def write_Composition(composition, filename, zip=False):
"""Create an XML file (or MXL if compressed) for a given composition."""
text = from_Composition(composition)
if not zip:
f = open(filename + '.xml', 'w')
f.write(text)
f.close()
else:
import zipfile
import ... | Create an XML file (or MXL if compressed) for a given composition. |
def run(self):
"""
This AI simple moves the characters towards the opposite
edges of the grid for 3 steps or until event halts the
simulation
"""
x, y = 1,0 # set the direction
num_steps = 0
while self.s.get_state() != 'Halted':
self.s.comman... | This AI simple moves the characters towards the opposite
edges of the grid for 3 steps or until event halts the
simulation |
def do_edit_settings(fake):
"""Opens legit settings in editor."""
path = resources.user.open('config.ini').name
click.echo('Legit Settings:\n')
for (option, _, description) in legit_settings.config_defaults:
click.echo(columns([crayons.yellow(option), 25], [description, None]))
click.echo... | Opens legit settings in editor. |
def stop(self):
    """Stop the stream if one exists and it is not already stopped."""
    stream = self.stream
    if not stream:
        return
    if stream.session.state == STATE_STOPPED:
        return
    stream.stop()
def _to_bytes(self, data, key='', expired=None, noc=0, ncalls=0):
"""Serialize (and encrypt if `key` is provided) the data and represent it as string.
**Parameters**
:param data: any python serializable (pickable) object
:param key: If the key is provided and `pycrypto` is inst... | Serialize (and encrypt if `key` is provided) the data and represent it as string.
**Parameters**
:param data: any python serializable (pickable) object
:param key: If the key is provided and `pycrypto` is installed, cached
data will be encrypted (If `pycrypto` i... |
def cprint(color, prefix, message):
"""
prints a message in a given color
:param color: the color as defined in the theme
:param prefix: the prefix (a string)
:param message: the message
:return:
"""
message = message or ""
prefix = prefix or ""
... | prints a message in a given color
:param color: the color as defined in the theme
:param prefix: the prefix (a string)
:param message: the message
:return: |
def info(name):
'''
Return information about a certificate
.. note::
Will output tls.cert_info if that's available, or OpenSSL text if not
:param name: CommonName of cert
CLI example:
.. code-block:: bash
salt 'gitlab.example.com' acme.info dev.example.com
'''
cert_f... | Return information about a certificate
.. note::
Will output tls.cert_info if that's available, or OpenSSL text if not
:param name: CommonName of cert
CLI example:
.. code-block:: bash
salt 'gitlab.example.com' acme.info dev.example.com |
def nodeListGetString(self, list, inLine):
"""Build the string equivalent to the text contained in the
Node list made of TEXTs and ENTITY_REFs """
if list is None: list__o = None
else: list__o = list._o
ret = libxml2mod.xmlNodeListGetString(self._o, list__o, inLine)
re... | Build the string equivalent to the text contained in the
Node list made of TEXTs and ENTITY_REFs |
def heat_process(body, message):
"""
This function deal with the heat notification.
First, find process from customer_process that not include wildcard.
if not find from customer_process, then find process from customer_process_wildcard.
if not find from customer_process_wildcard, then use ternya d... | This function deal with the heat notification.
First, find process from customer_process that not include wildcard.
if not find from customer_process, then find process from customer_process_wildcard.
if not find from customer_process_wildcard, then use ternya default process.
:param body: dict of open... |
def walk_train_dirs(root_dir: str) -> Iterable[Tuple[str, Iterable[str]]]:
"""
Modify os.walk with the following:
- return only root_dir and sub-dirs
- return only training sub-dirs
- stop recursion at training dirs
:param root_dir: root dir to be walked
:return: generator of (r... | Modify os.walk with the following:
- return only root_dir and sub-dirs
- return only training sub-dirs
- stop recursion at training dirs
:param root_dir: root dir to be walked
:return: generator of (root_dir, training sub-dirs) pairs |
def _remove_germline_filter(rec, name):
"""Check if germline based on STATUS/SS and REJECT flag.
Handles VarDict, FreeBayes, MuTect, MuTect2 and VarScan.
"""
if _is_germline(rec):
if rec.FILTER and name in rec.FILTER:
return vcfutils.cyvcf_remove_filter(rec, name)
elif not _is_s... | Check if germline based on STATUS/SS and REJECT flag.
Handles VarDict, FreeBayes, MuTect, MuTect2 and VarScan. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.