code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def estimate_completion(self):
"""
Estimate completion time for a task.
:returns: deferred that when fired returns a datetime object for the
estimated, or the actual datetime, or None if we could not
estimate a time for this task method.
"""
i... | Estimate completion time for a task.
:returns: deferred that when fired returns a datetime object for the
estimated, or the actual datetime, or None if we could not
estimate a time for this task method. |
def get(self, volume_id):
"""
Get a volume.
Args:
volume_id (str): Volume name.
Returns:
(:py:class:`Volume`): The volume.
Raises:
:py:class:`docker.errors.NotFound`
If the volume does not exist.
:py:class:`docker... | Get a volume.
Args:
volume_id (str): Volume name.
Returns:
(:py:class:`Volume`): The volume.
Raises:
:py:class:`docker.errors.NotFound`
If the volume does not exist.
:py:class:`docker.errors.APIError`
If the serve... |
def read_obo(cls, path, flatten=True, part_of_cc_only=False):
""" Parse an OBO file and store GO term information.
Parameters
----------
path: str
Path of the OBO file.
flatten: bool, optional
If set to False, do not generate a list of all ancestors and
... | Parse an OBO file and store GO term information.
Parameters
----------
path: str
Path of the OBO file.
flatten: bool, optional
If set to False, do not generate a list of all ancestors and
descendants for each GO term.
part_of_cc_only: bool, op... |
def finalize_prov_profile(self, name):
# type: (Optional[Text]) -> List[Identifier]
"""Transfer the provenance related files to the RO."""
# NOTE: Relative posix path
if name is None:
# master workflow, fixed filenames
filename = "primary.cwlprov"
else:
... | Transfer the provenance related files to the RO. |
def send(self):
"""
Entrypoint to send data to Zabbix
If debug is enabled, items are sent one by one
If debug isn't enable, we send items in bulk
Returns a list of results (1 if no debug, as many as items in other case)
"""
if self.logger: # pragma: no cover
... | Entrypoint to send data to Zabbix
If debug is enabled, items are sent one by one
If debug isn't enable, we send items in bulk
Returns a list of results (1 if no debug, as many as items in other case) |
def receive_data(self, data):
# type: (bytes) -> None
"""
Pass some received data to the connection for handling.
A list of events that the remote peer triggered by sending this data can
be retrieved with :meth:`~wsproto.connection.Connection.events`.
:param data: The d... | Pass some received data to the connection for handling.
A list of events that the remote peer triggered by sending this data can
be retrieved with :meth:`~wsproto.connection.Connection.events`.
:param data: The data received from the remote peer on the network.
:type data: ``bytes`` |
def main():
"""Start main part of the wait script."""
logger.info('Checking for available topics: %r', repr(REQUIRED_TOPICS))
client = connect_kafka(hosts=KAFKA_HOSTS)
check_topics(client, REQUIRED_TOPICS) | Start main part of the wait script. |
def flip_alleles(genotypes):
"""Flip the alleles of an Genotypes instance."""
warnings.warn("deprecated: use 'Genotypes.flip_coded'", DeprecationWarning)
genotypes.reference, genotypes.coded = (genotypes.coded,
genotypes.reference)
genotypes.genotypes = 2 - ge... | Flip the alleles of an Genotypes instance. |
def bin_spikes(spike_times, binsz):
"""Sort spike times into bins
:param spike_times: times of spike instances
:type spike_times: list
:param binsz: length of time bin to use
:type binsz: float
:returns: list of bin indicies, one for each element in spike_times
"""
bins = np.empty((len(... | Sort spike times into bins
:param spike_times: times of spike instances
:type spike_times: list
:param binsz: length of time bin to use
:type binsz: float
:returns: list of bin indicies, one for each element in spike_times |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'heading') and self.heading is not None:
_dict['heading'] = self.heading._to_dict()
return _dict | Return a json dictionary representing this model. |
def with_units(self, val, ua, ub):
"""Return value with unit.
args:
val (mixed): result
ua (str): 1st unit
ub (str): 2nd unit
raises:
SyntaxError
returns:
str
"""
if not val:
return str(val)
i... | Return value with unit.
args:
val (mixed): result
ua (str): 1st unit
ub (str): 2nd unit
raises:
SyntaxError
returns:
str |
def get_all_for_project(self, name, **kwargs):
"""
Gets the Build Records produced from the BuildConfiguration by name.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when r... | Gets the Build Records produced from the BuildConfiguration by name.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):... |
def build_net(self, is_training):
"""Build the whole neural network for the QA model."""
cfg = self.cfg
with tf.device('/cpu:0'):
word_embed = tf.get_variable(
name='word_embed', initializer=self.embed, dtype=tf.float32, trainable=False)
char_embed = tf.ge... | Build the whole neural network for the QA model. |
def split(self, t):
"""returns two segments, whose union is this segment and which join at
self.point(t)."""
pt = self.point(t)
return Line(self.start, pt), Line(pt, self.end) | returns two segments, whose union is this segment and which join at
self.point(t). |
def embed(args):
"""
%prog embed evidencefile scaffolds.fasta contigs.fasta
Use SSPACE evidencefile to scaffold contigs into existing scaffold
structure, as in `scaffolds.fasta`. Contigs.fasta were used by SSPACE
directly to scaffold.
Rules:
1. Only update existing structure by embedding c... | %prog embed evidencefile scaffolds.fasta contigs.fasta
Use SSPACE evidencefile to scaffold contigs into existing scaffold
structure, as in `scaffolds.fasta`. Contigs.fasta were used by SSPACE
directly to scaffold.
Rules:
1. Only update existing structure by embedding contigs small enough to fit.
... |
def go_from(self, vertex):
"""
Tell the edge to go out from this vertex.
Args:
vertex (Vertex): vertex to go from.
"""
if self.vertex_out:
self.vertex_out.edges_out.remove(self)
self.vertex_out = vertex
vertex.edges_out.add(self) | Tell the edge to go out from this vertex.
Args:
vertex (Vertex): vertex to go from. |
def format_item(self, item, defaults=None, stencil=None):
""" Format an item.
"""
from pyrobase.osutil import shell_escape
try:
item_text = fmt.to_console(formatting.format_item(self.options.output_format, item, defaults))
except (NameError, ValueError, TypeError), e... | Format an item. |
def _handle_sdp_target_state_updated(sdp_state: SDPState):
"""Respond to an SDP target state change event.
This function sets the current state of SDP to the target state if that is
possible.
TODO(BMo) This cant be done as a blocking function as it is here!
"""
LOG.info('Handling SDP target st... | Respond to an SDP target state change event.
This function sets the current state of SDP to the target state if that is
possible.
TODO(BMo) This cant be done as a blocking function as it is here! |
def write(self, learn:Learner, trn_batch:Tuple, val_batch:Tuple, iteration:int, tbwriter:SummaryWriter)->None:
"Writes training and validation batch images to Tensorboard."
self._write_for_dstype(learn=learn, batch=val_batch, iteration=iteration, tbwriter=tbwriter, ds_type=DatasetType.Valid)
sel... | Writes training and validation batch images to Tensorboard. |
def libvlc_video_set_marquee_string(p_mi, option, psz_text):
'''Set a marquee string option.
@param p_mi: libvlc media player.
@param option: marq option to set See libvlc_video_marquee_string_option_t.
@param psz_text: marq option value.
'''
f = _Cfunctions.get('libvlc_video_set_marquee_string'... | Set a marquee string option.
@param p_mi: libvlc media player.
@param option: marq option to set See libvlc_video_marquee_string_option_t.
@param psz_text: marq option value. |
def place_on_gpu(data):
"""Utility to place data on GPU, where data could be a torch.Tensor, a tuple
or list of Tensors, or a tuple or list of tuple or lists of Tensors"""
data_type = type(data)
if data_type in (list, tuple):
data = [place_on_gpu(data[i]) for i in range(len(data))]
data ... | Utility to place data on GPU, where data could be a torch.Tensor, a tuple
or list of Tensors, or a tuple or list of tuple or lists of Tensors |
def terminate(self):
"""
Send termination signal to DAG parsing processor manager
and expect it to terminate all DAG file processors.
"""
self.log.info("Sending termination message to manager.")
self._child_signal_conn.send(DagParsingSignal.TERMINATE_MANAGER) | Send termination signal to DAG parsing processor manager
and expect it to terminate all DAG file processors. |
def get_contributors(self, anon=github.GithubObject.NotSet):
"""
:calls: `GET /repos/:owner/:repo/contributors <http://developer.github.com/v3/repos>`_
:param anon: string
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
url_p... | :calls: `GET /repos/:owner/:repo/contributors <http://developer.github.com/v3/repos>`_
:param anon: string
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser` |
def main():
"""function to """
# parse arg to find file(s)
parser = argparse.ArgumentParser()
parser.add_argument("-f", "--file",
help="convert the markdown file to HTML")
parser.add_argument("-d", "--directory",
help="convert the markdown files in the... | function to |
def uri_to_iri(value):
"""
Converts an ASCII URI byte string into a unicode IRI
:param value:
An ASCII-encoded byte string of the URI
:return:
A unicode string of the IRI
"""
if not isinstance(value, byte_cls):
raise TypeError(unwrap(
'''
value ... | Converts an ASCII URI byte string into a unicode IRI
:param value:
An ASCII-encoded byte string of the URI
:return:
A unicode string of the IRI |
def _from_binary_ea(cls, binary_stream):
"""See base class."""
_ea_list = []
offset = 0
#_MOD_LOGGER.debug(f"Creating Ea object from binary stream {binary_stream.tobytes()}...")
_MOD_LOGGER.debug("Creating Ea object from binary '%s'...", binary_stream.tobytes())
while True:
entry = EaEn... | See base class. |
def _authn_context_decl_ref(decl_ref, authn_auth=None):
"""
Construct the authn context with a authn context declaration reference
:param decl_ref: The authn context declaration reference
:param authn_auth: Authenticating Authority
:return: An AuthnContext instance
"""
return factory(saml.Au... | Construct the authn context with a authn context declaration reference
:param decl_ref: The authn context declaration reference
:param authn_auth: Authenticating Authority
:return: An AuthnContext instance |
def _logins(users, user_attrs=None):
'''
FIXME: DOCS...
'''
# FIXME: check for support attrs
# Supported attrs:
# login # DEFAULT, no auth required
# email
# bio
# company
# created_at
# hireable
# location
# updated_at
# url
# 'login' will be the dict ... | FIXME: DOCS... |
def _updateTransitionMatrix(self):
"""
Updates the hidden-state transition matrix and the initial distribution
"""
# TRANSITION MATRIX
C = self.model.count_matrix() + self.prior_C # posterior count matrix
# check if we work with these options
if self.reversible... | Updates the hidden-state transition matrix and the initial distribution |
def buildPaginationHeader(resultCount, resultsPerPage, pageArg, url):
'''Build link header for result pagination'''
lastPage = resultCount / resultsPerPage
if pageArg:
page = int(pageArg)
next_url = re.sub("page=[0-9]+", "page={}".format(page + 1), url)
prev_url = re.sub("page=[0-9]... | Build link header for result pagination |
def _do_shell(self, line):
"""Send a command to the Unix shell.\n==> Usage: shell ls ~"""
if not line:
return
sp = Popen(line,
shell=True,
stdin=PIPE,
stdout=PIPE,
stderr=PIPE,
close_fds=no... | Send a command to the Unix shell.\n==> Usage: shell ls ~ |
def create_space(self, space_name, add_users=True):
"""
Create a new space with the given name in the current target
organization.
"""
body = {
'name': space_name,
'organization_guid': self.api.config.get_organization_guid()
}
# MAINT: may... | Create a new space with the given name in the current target
organization. |
def eval_string(self, s):
"""
Returns the tristate value of the expression 's', represented as 0, 1,
and 2 for n, m, and y, respectively. Raises KconfigError if syntax
errors are detected in 's'. Warns if undefined symbols are referenced.
As an example, if FOO and BAR are trista... | Returns the tristate value of the expression 's', represented as 0, 1,
and 2 for n, m, and y, respectively. Raises KconfigError if syntax
errors are detected in 's'. Warns if undefined symbols are referenced.
As an example, if FOO and BAR are tristate symbols at least one of
which has t... |
def animate(self,*args,**kwargs): #pragma: no cover
"""
NAME:
animate
PURPOSE:
animate an Orbit
INPUT:
d1= first dimension to plot ('x', 'y', 'R', 'vR', 'vT', 'z', 'vz', ...); can be list with up to three entries for three subplots
d2= second d... | NAME:
animate
PURPOSE:
animate an Orbit
INPUT:
d1= first dimension to plot ('x', 'y', 'R', 'vR', 'vT', 'z', 'vz', ...); can be list with up to three entries for three subplots
d2= second dimension to plot; can be list with up to three entries for three subplot... |
def add_user(self, workspace, params={}, **options):
"""The user can be referenced by their globally unique user ID or their email address.
Returns the full user record for the invited user.
Parameters
----------
workspace : {Id} The workspace or organization to invite the user... | The user can be referenced by their globally unique user ID or their email address.
Returns the full user record for the invited user.
Parameters
----------
workspace : {Id} The workspace or organization to invite the user to.
[data] : {Object} Data for the request
- u... |
def ensure_dir_exists(f, fullpath=False):
"""
Ensure the existence of the (parent) directory of f
"""
if fullpath is False:
# Get parent directory
d = os.path.dirname(f)
else:
# Create the full path
d = f
if not os.path.exists(d):
os.makedirs(d) | Ensure the existence of the (parent) directory of f |
def preprocess(input_file,
output_file,
defines=None,
options=None,
content_types_db=None,
_preprocessed_files=None,
_depth=0):
"""
Preprocesses the specified file.
:param input_filename:
The input path.
:... | Preprocesses the specified file.
:param input_filename:
The input path.
:param output_filename:
The output file (NOT path).
:param defines:
a dictionary of defined variables that will be
understood in preprocessor statements. Keys must be strings and,
currently, only... |
def set_leaf_dist(self, attr_value, dist):
"""
Sets the probability distribution at a leaf node.
"""
assert self.attr_name
assert self.tree.data.is_valid(self.attr_name, attr_value), \
"Value %s is invalid for attribute %s." \
% (attr_value, self.attr_... | Sets the probability distribution at a leaf node. |
def init_db(self):
"""
Init database and prepare tables
"""
# database file
db_path = self.get_data_file("data.sqlite")
# comect and create cursor
self.db = sqlite3.connect(db_path)
self.cursor = self.db.cursor()
# prep tables
self.db_ex... | Init database and prepare tables |
def list_build_set_records(id=None, name=None, page_size=200, page_index=0, sort="", q=""):
"""
List all build set records for a BuildConfigurationSet
"""
content = list_build_set_records_raw(id, name, page_size, page_index, sort, q)
if content:
return utils.format_json_list(content) | List all build set records for a BuildConfigurationSet |
def template_instances(cls, dataset, capacity=0):
"""
Uses the Instances as template to create an empty dataset.
:param dataset: the original dataset
:type dataset: Instances
:param capacity: how many data rows to reserve initially (see compactify)
:type capacity: int
... | Uses the Instances as template to create an empty dataset.
:param dataset: the original dataset
:type dataset: Instances
:param capacity: how many data rows to reserve initially (see compactify)
:type capacity: int
:return: the empty dataset
:rtype: Instances |
def entry_point():
""" An entry point for setuptools. This is required because
`if __name__ == '__main__'` is not fired when the entry point
is 'main()'. This just wraps the old behavior in a function so
it can be called from setuptools.
"""
try:
mainret = main()
except (... | An entry point for setuptools. This is required because
`if __name__ == '__main__'` is not fired when the entry point
is 'main()'. This just wraps the old behavior in a function so
it can be called from setuptools. |
def file_ns_handler(importer, path_item, packageName, module):
"""Compute an ns-package subpath for a filesystem or zipfile importer"""
subpath = os.path.join(path_item, packageName.split('.')[-1])
normalized = _normalize_cached(subpath)
for item in module.__path__:
if _normalize_cached(item) =... | Compute an ns-package subpath for a filesystem or zipfile importer |
def set_min(self, fmin):
"""
Updates minimum value
"""
if round(100000*fmin) != 100000*fmin:
raise DriverError('utils.widgets.Expose.set_min: ' +
'fmin must be a multiple of 0.00001')
self.fmin = fmin
self.set(self.fmin) | Updates minimum value |
def _from_dict(cls, _dict):
"""Initialize a ListConfigurationsResponse object from a json dictionary."""
args = {}
if 'configurations' in _dict:
args['configurations'] = [
Configuration._from_dict(x)
for x in (_dict.get('configurations'))
]... | Initialize a ListConfigurationsResponse object from a json dictionary. |
def __protocolize(base_url):
"""Internal add-protocol-to-url helper"""
if not base_url.startswith("http://") and not base_url.startswith("https://"):
base_url = "https://" + base_url
# Some API endpoints can't handle extra /'s in path requests
base_url = base_url.rstrip("/")... | Internal add-protocol-to-url helper |
def U(self):
"""
Property to support lazy evaluation of residuals
"""
if self._U is None:
sinv = N.diag(1/self.singular_values)
self._U = dot(self.arr,self.V.T,sinv)
return self._U | Property to support lazy evaluation of residuals |
def create_user(name,
username,
email,
password,
token_manager=None,
app_url=defaults.APP_URL):
"""
create a new user with the specified name, username email and password
"""
headers = token_manager.get_access_token_headers... | create a new user with the specified name, username email and password |
def get_repository_search_session(self):
"""Gets the repository search session.
return: (osid.repository.RepositorySearchSession) - a
RepositorySearchSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_search() is fals... | Gets the repository search session.
return: (osid.repository.RepositorySearchSession) - a
RepositorySearchSession
raise: OperationFailed - unable to complete request
raise: Unimplemented - supports_repository_search() is false
compliance: optional - This method must be... |
def getResults(uri):
'''
Method that recovers the text for each result in infobel.com
:param uri: Infobel uri
:return: A list of textual information to be processed
'''
# Using i3visio browser to avoid certain issues...
i3Browser = browser.Browser()
data... | Method that recovers the text for each result in infobel.com
:param uri: Infobel uri
:return: A list of textual information to be processed |
def search_converted_models(root=None):
"""
Searches for all converted models generated by
unit tests in folders tests and with function
*dump_data_and_model*.
"""
if root is None:
root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "tests"))
root = os.path.normp... | Searches for all converted models generated by
unit tests in folders tests and with function
*dump_data_and_model*. |
def compute_tls13_handshake_secrets(self):
"""
Ciphers key and IV are updated accordingly for Handshake data.
self.handshake_messages should be ClientHello...ServerHello.
"""
if self.tls13_early_secret is None:
warning("No early secret. This is abnormal.")
hk... | Ciphers key and IV are updated accordingly for Handshake data.
self.handshake_messages should be ClientHello...ServerHello. |
def get_output(self):
"""
:yield: stdout_line, stderr_line, running
Generator that outputs lines captured from stdout and stderr
These can be consumed to output on a widget in an IDE
"""
if self.process.poll() is not None:
self.close()
yield Non... | :yield: stdout_line, stderr_line, running
Generator that outputs lines captured from stdout and stderr
These can be consumed to output on a widget in an IDE |
def post_replicate(request):
"""MNReplication.replicate(session, sysmeta, sourceNode) → boolean."""
d1_gmn.app.views.assert_db.post_has_mime_parts(
request, (('field', 'sourceNode'), ('file', 'sysmeta'))
)
sysmeta_pyxb = d1_gmn.app.sysmeta.deserialize(request.FILES['sysmeta'])
d1_gmn.app.loc... | MNReplication.replicate(session, sysmeta, sourceNode) → boolean. |
def collapse_whitespace(message):
"""Collapses consecutive whitespace into a single space"""
return u' '.join(map(lambda s: s.strip(),
filter(None, message.strip().splitlines()))) | Collapses consecutive whitespace into a single space |
def createInput(self):
"""create a random input vector"""
print "-" * 70 + "Creating a random input vector" + "-" * 70
#clear the inputArray to zero before creating a new input vector
self.inputArray[0:] = 0
for i in range(self.inputSize):
#randrange returns 0 or 1
self.inputArray[i] ... | create a random input vector |
def is_local(self, hadoop_conf=None, hadoop_home=None):
"""\
Is Hadoop configured to run in local mode?
By default, it is. [pseudo-]distributed mode must be
explicitly configured.
"""
conf = self.hadoop_params(hadoop_conf, hadoop_home)
keys = ('mapreduce.framewor... | \
Is Hadoop configured to run in local mode?
By default, it is. [pseudo-]distributed mode must be
explicitly configured. |
def get_status(self, mxit_id, scope='profile/public'):
"""
Retrieve the Mxit user's current status
No user authentication required
"""
status = _get(
token=self.oauth.get_app_token(scope),
uri='/user/public/statusmessage/' + urllib.quote(mxit_id)
)... | Retrieve the Mxit user's current status
No user authentication required |
def _create_binary_trigger(trigger):
"""Create an 8-bit binary trigger from an InputTrigger, TrueTrigger, FalseTrigger."""
ops = {
0: ">",
1: "<",
2: ">=",
3: "<=",
4: "==",
5: 'always'
}
op_codes = {y: x for x, y in ops.items()}
source = 0
if i... | Create an 8-bit binary trigger from an InputTrigger, TrueTrigger, FalseTrigger. |
def retention_policy_get(database,
name,
user=None,
password=None,
host=None,
port=None):
'''
Get an existing retention policy.
database
The database to operate on.
name... | Get an existing retention policy.
database
The database to operate on.
name
Name of the policy to modify.
CLI Example:
.. code-block:: bash
salt '*' influxdb08.retention_policy_get metrics default |
def get_surveys(self):
"""Gets all surveys in account
Args:
None
Returns:
list: a list of all surveys
"""
payload = {
'Request': 'getSurveys',
'Format': 'JSON'
}
r = self._session.get(QUALT... | Gets all surveys in account
Args:
None
Returns:
list: a list of all surveys |
def samples(self, gp, Y_metadata=None):
"""
Returns a set of samples of observations based on a given value of the latent variable.
:param gp: latent variable
"""
orig_shape = gp.shape
gp = gp.flatten()
#orig_shape = gp.shape
gp = gp.flatten()
Ysi... | Returns a set of samples of observations based on a given value of the latent variable.
:param gp: latent variable |
def add_context(self, err_context, succ_context=None):
""" Prepend msg to add some context information
:param pmsg: context info
:return: None
"""
self.err_context = err_context
self.succ_context = succ_context | Prepend msg to add some context information
:param pmsg: context info
:return: None |
def process(self, data=None):
"""Fetch incoming data from the Flask request object when no data is supplied
to the process method. By default, the RequestHandler expects the
incoming data to be sent as JSON.
"""
return super(RequestHandler, self).process(data=data or self.get_r... | Fetch incoming data from the Flask request object when no data is supplied
to the process method. By default, the RequestHandler expects the
incoming data to be sent as JSON. |
def Romeo_2002(Re, eD):
r'''Calculates Darcy friction factor using the method in Romeo (2002)
[2]_ as shown in [1]_.
.. math::
\frac{1}{\sqrt{f_d}} = -2\log\left\{\frac{\epsilon}{3.7065D}\times
\frac{5.0272}{Re}\times\log\left[\frac{\epsilon}{3.827D} -
\frac{4.567}{Re}\times\log\lef... | r'''Calculates Darcy friction factor using the method in Romeo (2002)
[2]_ as shown in [1]_.
.. math::
\frac{1}{\sqrt{f_d}} = -2\log\left\{\frac{\epsilon}{3.7065D}\times
\frac{5.0272}{Re}\times\log\left[\frac{\epsilon}{3.827D} -
\frac{4.567}{Re}\times\log\left(\frac{\epsilon}{7.7918D}^{... |
def get_source(self):
"""returns self._source"""
if self._source is None:
self.emit("}\n")
self._source = "\n".join(self.lines)
del self.lines
return self._source | returns self._source |
def blake2b(data, digest_size=BLAKE2B_BYTES, key=b'',
salt=b'', person=b'',
encoder=nacl.encoding.HexEncoder):
"""
Hashes ``data`` with blake2b.
:param data: the digest input byte sequence
:type data: bytes
:param digest_size: the requested digest size; must be at most
... | Hashes ``data`` with blake2b.
:param data: the digest input byte sequence
:type data: bytes
:param digest_size: the requested digest size; must be at most
:const:`BLAKE2B_BYTES_MAX`;
the default digest size is
:const:`BLAKE2B_BYTES`
... |
def strframe(obj, extended=False):
"""
Return a string with a frame record pretty-formatted.
The record is typically an item in a list generated by `inspect.stack()
<https://docs.python.org/3/library/inspect.html#inspect.stack>`_).
:param obj: Frame record
:type obj: tuple
:param extende... | Return a string with a frame record pretty-formatted.
The record is typically an item in a list generated by `inspect.stack()
<https://docs.python.org/3/library/inspect.html#inspect.stack>`_).
:param obj: Frame record
:type obj: tuple
:param extended: Flag that indicates whether contents of the ... |
def get_corrections_dict(self, entry):
"""
Returns the corrections applied to a particular entry.
Args:
entry: A ComputedEntry object.
Returns:
({correction_name: value})
"""
corrections = {}
for c in self.corrections:
val = c... | Returns the corrections applied to a particular entry.
Args:
entry: A ComputedEntry object.
Returns:
({correction_name: value}) |
def install_all_labels(stdout=None):
"""
Discover all subclasses of StructuredNode in your application and execute install_labels on each.
Note: code most be loaded (imported) in order for a class to be discovered.
:param stdout: output stream
:return: None
"""
if not stdout:
stdou... | Discover all subclasses of StructuredNode in your application and execute install_labels on each.
Note: code most be loaded (imported) in order for a class to be discovered.
:param stdout: output stream
:return: None |
def bank_chisq_from_filters(tmplt_snr, tmplt_norm, bank_snrs, bank_norms,
tmplt_bank_matches, indices=None):
""" This function calculates and returns a TimeSeries object containing the
bank veto calculated over a segment.
Parameters
----------
tmplt_snr: TimeSeries
The SNR time seri... | This function calculates and returns a TimeSeries object containing the
bank veto calculated over a segment.
Parameters
----------
tmplt_snr: TimeSeries
The SNR time series from filtering the segment against the current
search template
tmplt_norm: float
The normalization fac... |
def set_slimits(self, row, column, min, max):
"""Set limits for the point sizes.
:param min: point size for the lowest value.
:param max: point size for the highest value.
"""
subplot = self.get_subplot_at(row, column)
subplot.set_slimits(min, max) | Set limits for the point sizes.
:param min: point size for the lowest value.
:param max: point size for the highest value. |
def read_telenor(incoming_cdr, outgoing_cdr, cell_towers, describe=True,
warnings=True):
"""
Load user records from a CSV file in *telenor* format, which is only
applicable for call records.
.. warning:: ``read_telenor`` has been deprecated in bandicoot 0.4.
Parameters
-------... | Load user records from a CSV file in *telenor* format, which is only
applicable for call records.
.. warning:: ``read_telenor`` has been deprecated in bandicoot 0.4.
Parameters
----------
incoming_cdr : str
Path to the CSV file containing incoming records, using the following
schem... |
def _strip_colors(self, message: str) -> str:
""" Remove all of the color tags from this message. """
for c in self.COLORS:
message = message.replace(c, "")
return message | Remove all of the color tags from this message. |
def _apply_bias(inputs, outputs, channel_index, data_format, output_channels,
initializers, partitioners, regularizers):
"""Initialize and apply a bias to the outputs.
Figures out the shape of the bias vector, initialize it, and applies it.
Args:
inputs: A Tensor of shape `data_format`.
... | Initialize and apply a bias to the outputs.
Figures out the shape of the bias vector, initialize it, and applies it.
Args:
inputs: A Tensor of shape `data_format`.
outputs: A Tensor of shape `data_format`.
channel_index: The index of the channel dimension in `inputs`.
data_format: Format of `input... |
def lerfcc(x):
"""
Returns the complementary error function erfc(x) with fractional
error everywhere less than 1.2e-7. Adapted from Numerical Recipies.
Usage: lerfcc(x)
"""
z = abs(x)
t = 1.0 / (1.0+0.5*z)
ans = t * math.exp(-z*z-1.26551223 + t*(1.00002368+t*(0.37409196+t*(0.09678418+t*(-0.18628806+... | Returns the complementary error function erfc(x) with fractional
error everywhere less than 1.2e-7. Adapted from Numerical Recipies.
Usage: lerfcc(x) |
def parse_authn_request_response(self, xmlstr, binding, outstanding=None,
outstanding_certs=None, conv_info=None):
""" Deal with an AuthnResponse
:param xmlstr: The reply as a xml string
:param binding: Which binding that was used for the transport
:... | Deal with an AuthnResponse
:param xmlstr: The reply as a xml string
:param binding: Which binding that was used for the transport
:param outstanding: A dictionary with session IDs as keys and
the original web request from the user before redirection
as values.
:p... |
def calc_smoothpar_logistic2(metapar):
"""Return the smoothing parameter corresponding to the given meta
parameter when using |smooth_logistic2|.
Calculate the smoothing parameter value corresponding the meta parameter
value 2.5:
>>> from hydpy.auxs.smoothtools import calc_smoothpar_logistic2
... | Return the smoothing parameter corresponding to the given meta
parameter when using |smooth_logistic2|.
Calculate the smoothing parameter value corresponding the meta parameter
value 2.5:
>>> from hydpy.auxs.smoothtools import calc_smoothpar_logistic2
>>> smoothpar = calc_smoothpar_logistic2(2.5)
... |
def _init(frame, log_level=ERROR):
'''
Enables explicit relative import in sub-modules when ran as __main__
:param log_level: module's inner logger level (equivalent to logging pkg)
'''
global _log_level
_log_level = log_level
# now we have access to the module globals
main_globals = fra... | Enables explicit relative import in sub-modules when ran as __main__
:param log_level: module's inner logger level (equivalent to logging pkg) |
def is_frameshift_len(mut_df):
"""Simply returns a series indicating whether each corresponding mutation
is a frameshift.
This is based on the length of the indel. Thus may be fooled by frameshifts
at exon-intron boundaries or other odd cases.
Parameters
----------
mut_df : pd.DataFrame
... | Simply returns a series indicating whether each corresponding mutation
is a frameshift.
This is based on the length of the indel. Thus may be fooled by frameshifts
at exon-intron boundaries or other odd cases.
Parameters
----------
mut_df : pd.DataFrame
mutation input file as a datafra... |
def network_details():
"""
Returns details about the network links
"""
# Get IPv4 details
ipv4_addresses = [
info[4][0]
for info in socket.getaddrinfo(
socket.gethostname(), None, socket.AF_INET
)
]
# Add localh... | Returns details about the network links |
def is_image(self, key):
    """Return True if the variable stored under *key* is a PIL.Image image."""
    variables = self.model.get_data()
    value = variables[key]
    return isinstance(value, Image)
def apply_heuristic(self, node_a, node_b, heuristic=None):
    """
    Apply a heuristic function to the absolute coordinate deltas of two
    nodes, falling back to self.heuristic when none is supplied.
    """
    chosen = heuristic or self.heuristic
    dx = abs(node_a.x - node_b.x)
    dy = abs(node_a.y - node_b.y)
    return chosen(dx, dy)
def formatargvalues(args, varargs, varkw, locals,
formatarg=str,
formatvarargs=lambda name: '*' + name,
formatvarkw=lambda name: '**' + name,
formatvalue=lambda value: '=' + repr(value),
join=joinseq):
"""Format an a... | Format an argument spec from the 4 values returned by getargvalues.
The first four arguments are (args, varargs, varkw, locals). The
next four arguments are the corresponding optional formatting functions
that are called to turn names and values into strings. The ninth
argument is an optional functio... |
def stitch_block_rows(block_list):
'''
Stitches blocks together into a single block rowwise. These blocks are 2D tables usually
generated from tableproc. The final block will be of dimensions (sum(num_rows), max(num_cols)).
'''
stitched = list(itertools.chain(*block_list))
max_length = max... | Stitches blocks together into a single block rowwise. These blocks are 2D tables usually
generated from tableproc. The final block will be of dimensions (sum(num_rows), max(num_cols)). |
def von_mises_strain(self):
    """
    Equivalent strain to Von Mises Stress
    """
    # Deviatoric part: subtract the volumetric (mean) strain from the tensor.
    mean_scale = 1/3 * np.trace(self)
    deviatoric = self - mean_scale * np.identity(3)
    return np.sqrt(np.sum(deviatoric * deviatoric) * 2/3)
def is_diacritic(char, strict=True):
"""
Check whether the character is a diacritic (as opposed to a letter or a
suprasegmental).
In strict mode return True only if the diacritic is part of the IPA spec.
"""
if char in chart.diacritics:
return True
if not strict:
return (unicodedata.category(char) in ['Lm'... | Check whether the character is a diacritic (as opposed to a letter or a
suprasegmental).
In strict mode return True only if the diacritic is part of the IPA spec. |
def open(self, filename):
# type: (str) -> None
'''
Open up an existing ISO for inspection and modification.
Parameters:
filename - The filename containing the ISO to open up.
Returns:
Nothing.
'''
if self._initialized:
raise pycdlib... | Open up an existing ISO for inspection and modification.
Parameters:
filename - The filename containing the ISO to open up.
Returns:
Nothing. |
def highlight(string, keywords, cls_name='highlighted'):
""" Given an list of words, this function highlights the matched text in the given string. """
if not keywords:
return string
if not string:
return ''
include, exclude = get_text_tokenizer(keywords)
highlighted = highlight_tex... | Given an list of words, this function highlights the matched text in the given string. |
def finish_operation(self, conn_or_internal_id, success, *args):
"""Finish an operation on a connection.
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
success (bool): Whether the operation was successful
... | Finish an operation on a connection.
Args:
conn_or_internal_id (string, int): Either an integer connection id or a string
internal_id
success (bool): Whether the operation was successful
failure_reason (string): Optional reason why the operation failed
... |
def load(self):
    """ Fetch & return the matching models from the session store """
    store = goldman.sess.store
    query = [Filter(self.field, 'eq', self.rid)]
    self._is_loaded = True
    self.models = store.search(self.rtype, filters=query)
    return self.models
def mark_deactivated(self,request,queryset):
"""An admin action for marking several cages as inactive.
This action sets the selected cages as Active=False and Death=today.
This admin action also shows as the output the number of mice sacrificed."""
rows_updated = queryset.update(Activ... | An admin action for marking several cages as inactive.
This action sets the selected cages as Active=False and Death=today.
This admin action also shows as the output the number of mice sacrificed. |
def extract_date(value):
"""
Convert timestamp to datetime and set everything to zero except a date
"""
dtime = value.to_datetime()
dtime = (dtime - timedelta(hours=dtime.hour) - timedelta(minutes=dtime.minute) -
timedelta(seconds=dtime.second) - timedelta(microseconds=dtime.microsecond... | Convert timestamp to datetime and set everything to zero except a date |
def get_metabolite_compartments(self):
    """Return the set of compartments used by this model's metabolites."""
    warn('use Model.compartments instead', DeprecationWarning)
    compartments = set()
    for metabolite in self.metabolites:
        # Metabolites without an assigned compartment are ignored.
        if metabolite.compartment is not None:
            compartments.add(metabolite.compartment)
    return compartments
def read_configuration(self):
"""Load configuration from Django settings."""
self.configured = True
# Default backend needs to be the database backend for backward
# compatibility.
backend = (getattr(settings, 'CELERY_RESULT_BACKEND', None) or
getattr(settings,... | Load configuration from Django settings. |
def retyped(self, new_type):
"""Returns a new node with the same contents as self, but with a new node_type."""
return ParseNode(new_type,
children=list(self.children),
consumed=self.consumed,
position=self.position,
ignored... | Returns a new node with the same contents as self, but with a new node_type. |
def dump_connection_info(engine: Engine, fileobj: TextIO = sys.stdout) -> None:
"""
Dumps some connection info, as an SQL comment. Obscures passwords.
Args:
engine: the SQLAlchemy :class:`Engine` to dump metadata information
from
fileobj: the file-like object (default ``sys.stdo... | Dumps some connection info, as an SQL comment. Obscures passwords.
Args:
engine: the SQLAlchemy :class:`Engine` to dump metadata information
from
fileobj: the file-like object (default ``sys.stdout``) to write
information to |
def _is_dirty(self, xblock):
"""
Return whether this field should be saved when xblock.save() is called
"""
# pylint: disable=protected-access
if self not in xblock._dirty_fields:
return False
baseline = xblock._dirty_fields[self]
return baseline is E... | Return whether this field should be saved when xblock.save() is called |
def check_ressources(sess):
"""
check the Ressources of the Fortinet Controller
all thresholds are currently hard coded. should be fine.
"""
# get the data
cpu_value = get_data(sess, cpu_oid, helper)
memory_value = get_data(sess, memory_oid, helper)
fil... | check the Ressources of the Fortinet Controller
all thresholds are currently hard coded. should be fine. |
def delete(self, image_file, delete_thumbnails=True):
"""
Deletes the reference to the ``image_file`` and deletes the references
to thumbnails as well as thumbnail files if ``delete_thumbnails`` is
`True``. Does not delete the ``image_file`` is self.
"""
if delete_thumbna... | Deletes the reference to the ``image_file`` and deletes the references
to thumbnails as well as thumbnail files if ``delete_thumbnails`` is
`True``. Does not delete the ``image_file`` is self. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.