code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _expand_tasks(self, scopes):
"""Add all tasks in any requested goals.
Returns the requested scopes, plus the added tasks, sorted by scope name.
"""
expanded_scopes = set(scopes)
for scope, info in self._scope_to_info.items():
if info.category == ScopeInfo.TASK:
outer = enclosing_s... | Add all tasks in any requested goals.
Returns the requested scopes, plus the added tasks, sorted by scope name. |
def bulkImport_json(self, filename, onDuplicate="error", formatType="auto", **params) :
"""bulk import from a file repecting arango's key/value format"""
url = "%s/import" % self.database.URL
params["onDuplicate"] = onDuplicate
params["collection"] = self.name
params["type"] = f... | bulk import from a file respecting arango's key/value format |
def printArchive(fileName):
""" Prints content of combine archive
:param fileName: path of archive
:return: None
"""
archive = CombineArchive()
if archive.initializeFromArchive(fileName) is None:
print("Invalid Combine Archive")
return None
print('*'*80)
print('Print ar... | Prints content of combine archive
:param fileName: path of archive
:return: None |
def gameValue(self):
"""identify the correpsonding internal SC2 game value for self.type's value"""
allowed = type(self).ALLOWED_TYPES
try:
if isinstance(allowed, dict): # if ALLOWED_TYPES is not a dict, there is no-internal game value mapping defined
return allowed.g... | identify the corresponding internal SC2 game value for self.type's value |
def sanitize_git_path(self, uri, ref=None):
"""Take a git URI and ref and converts it to a directory safe path.
Args:
uri (string): git URI
(e.g. git@github.com:foo/bar.git)
ref (string): optional git ref to be appended to the path
Returns:
... | Take a git URI and ref and converts it to a directory safe path.
Args:
uri (string): git URI
(e.g. git@github.com:foo/bar.git)
ref (string): optional git ref to be appended to the path
Returns:
str: Directory name for the supplied uri |
def cublasDtpmv(handle, uplo, trans, diag, n, AP, x, incx):
"""
Matrix-vector product for real triangular-packed matrix.
"""
status = _libcublas.cublasDtpmv_v2(handle,
_CUBLAS_FILL_MODE[uplo],
_CUBLAS_OP[trans],
... | Matrix-vector product for real triangular-packed matrix. |
def _trim_zeros_complex(str_complexes, na_rep='NaN'):
"""
Separates the real and imaginary parts from the complex number, and
executes the _trim_zeros_float method on each of those.
"""
def separate_and_trim(str_complex, na_rep):
num_arr = str_complex.split('+')
return (_trim_zeros_f... | Separates the real and imaginary parts from the complex number, and
executes the _trim_zeros_float method on each of those. |
def get_list_attribute(self, attribute):
"""
:return: attribute value as Python list.
"""
list_attribute = self.api.getListAttribute(self.obj_ref(), attribute)
# IXN returns '::ixNet::OK' for invalid attributes. We want error.
if list_attribute == ['::ixNet::OK']:
... | :return: attribute value as Python list. |
def add(self, *args, **kwargs):
"""Add the instance tied to the field to all the indexes
For the parameters, seen BaseIndex.add
"""
check_uniqueness = kwargs.pop('check_uniqueness', False)
args = self.prepare_args(args)
for index in self._indexes:
index.ad... | Add the instance tied to the field to all the indexes
For the parameters, seen BaseIndex.add |
def check_dimensions(self, dataset):
'''
Checks that the feature types of this dataset are consitent with a point dataset
'''
required_ctx = TestCtx(BaseCheck.HIGH, 'All geophysical variables are point feature types')
t = util.get_time_variable(dataset)
# Exit prematurel... | Checks that the feature types of this dataset are consistent with a point dataset |
def _set_qsfpp(self, v, load=False):
"""
Setter method for qsfpp, mapped from YANG variable /brocade_interface_ext_rpc/get_media_detail/output/interface/qsfpp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_qsfpp is considered as a private
method. Backend... | Setter method for qsfpp, mapped from YANG variable /brocade_interface_ext_rpc/get_media_detail/output/interface/qsfpp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_qsfpp is considered as a private
method. Backends looking to populate this variable should
do... |
def command(state, args):
"""Search AniDB."""
args = parser.parse_args(args[1:])
if not args.query:
print('Must supply query.')
return
search_query = _compile_re_query(args.query)
results = state.titles.search(search_query)
results = [(anime.aid, anime.main_title) for anime in re... | Search AniDB. |
def ReferenceResults(self, field, allow_edit=False):
"""Render Reference Results Table
"""
instance = getattr(self, "instance", field.aq_parent)
table = api.get_view("table_reference_results",
context=instance,
request=self.REQUES... | Render Reference Results Table |
def iterfollow(self):
    """Generator wrapper for ``self.follow()``.

    Yields the result of :meth:`follow` while a ``"next"`` link is
    present; yields nothing when there are no links at all or when the
    ``"next"`` link is absent.
    """
    # Use the same criterion as self.follow().
    if self.links is None:
        return
    if self.links.get("next"):
        yield self.follow()
    else:
        # PEP 479: raising StopIteration inside a generator body is
        # converted to RuntimeError on Python 3.7+. A bare return is the
        # correct way to finish the generator.
        return
def get_uservar(self, user, name):
"""Get a variable about a user.
:param str user: The user ID to look up a variable for.
:param str name: The name of the variable to get.
:return: The user variable, or ``None`` or ``"undefined"``:
* If the user has no data at all, this r... | Get a variable about a user.
:param str user: The user ID to look up a variable for.
:param str name: The name of the variable to get.
:return: The user variable, or ``None`` or ``"undefined"``:
* If the user has no data at all, this returns ``None``.
* If the user doe... |
def get_archive(self, container, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
"""
Retrieve a file or folder from a container in the form of a tar
archive.
Args:
container (str): The container where the file is located
path (str): Path to the file or folder to retri... | Retrieve a file or folder from a container in the form of a tar
archive.
Args:
container (str): The container where the file is located
path (str): Path to the file or folder to retrieve
chunk_size (int): The number of bytes returned by each iteration
... |
def filter(self, table, vg_snapshots, filter_string):
    """Naive case-insensitive search."""
    needle = filter_string.lower()
    matches = []
    for snapshot in vg_snapshots:
        # Match anywhere in the snapshot name, ignoring case.
        if needle in snapshot.name.lower():
            matches.append(snapshot)
    return matches
def is_all_field_none(self):
"""
:rtype: bool
"""
if self._id_ is not None:
return False
if self._time_responded is not None:
return False
if self._time_expiry is not None:
return False
if self._monetary_account_id is not No... | :rtype: bool |
def extract_version(filepath='jeni.py', name='__version__'):
"""Parse __version__ out of given Python file.
Given jeni.py has dependencies, `from jeni import __version__` will fail.
"""
context = {}
for line in open(filepath):
if name in line:
exec(line, context)
bre... | Parse __version__ out of given Python file.
Given jeni.py has dependencies, `from jeni import __version__` will fail. |
def extract(binary):
'''
Extract a code object from a binary pyc file.
:param binary: a sequence of bytes from a pyc file.
'''
if len(binary) <= 8:
raise Exception("Binary pyc must be greater than 8 bytes (got %i)" % len(binary))
magic = binary[:4]
MAGIC = get_magic()
... | Extract a code object from a binary pyc file.
:param binary: a sequence of bytes from a pyc file. |
def array_split(
ary,
indices_or_sections=None,
axis=None,
tile_shape=None,
max_tile_bytes=None,
max_tile_shape=None,
sub_tile_shape=None,
halo=None
):
"To be replaced."
return [
ary[slyce]
for slyce in
shape_split(
array_shape=ary.shape,
... | To be replaced. |
def add(name, gid=None, **kwargs):
'''
Add the specified group
CLI Example:
.. code-block:: bash
salt '*' group.add foo 3456
'''
### NOTE: **kwargs isn't used here but needs to be included in this
### function for compatibility with the group.present state
if info(name):
... | Add the specified group
CLI Example:
.. code-block:: bash
salt '*' group.add foo 3456 |
def firmware_autoupgrade_params_username(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
firmware = ET.SubElement(config, "firmware", xmlns="urn:brocade.com:mgmt:brocade-firmware")
autoupgrade_params = ET.SubElement(firmware, "autoupgrade-params")
... | Auto Generated Code |
def tag(self, *tags):
"""
Tags the job with one or more unique indentifiers.
Tags must be hashable. Duplicate tags are discarded.
:param tags: A unique list of ``Hashable`` tags.
:return: The invoked job instance
"""
if any([not isinstance(tag, collections.Hasha... | Tags the job with one or more unique identifiers.
Tags must be hashable. Duplicate tags are discarded.
:param tags: A unique list of ``Hashable`` tags.
:return: The invoked job instance |
def state_likelihood(self, beta, alpha):
""" Returns likelihood of the states given the variance latent variables
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
alpha : np.array
State matrix
... | Returns likelihood of the states given the variance latent variables
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
alpha : np.array
State matrix
Returns
----------
State likeliho... |
def all_subclasses(cls):
    """Recursively yield every direct and indirect subclass of *cls*."""
    for child in cls.__subclasses__():
        yield child
        # Depth-first into each child's own subclass tree.
        yield from all_subclasses(child)
def get_jwt_decrypt_keys(self, jwt, **kwargs):
"""
Get decryption keys from this keyjar based on information carried
in a JWE. These keys should be usable to decrypt an encrypted JWT.
:param jwt: A cryptojwt.jwt.JWT instance
:param kwargs: Other key word arguments
:retur... | Get decryption keys from this keyjar based on information carried
in a JWE. These keys should be usable to decrypt an encrypted JWT.
:param jwt: A cryptojwt.jwt.JWT instance
:param kwargs: Other key word arguments
:return: list of usable keys |
def getSiblings(self, retracted=False):
"""
Returns the list of analyses of the Analysis Request to which this
analysis belongs to, but with the current analysis excluded.
:param retracted: If false, retracted/rejected siblings are dismissed
:type retracted: bool
:return:... | Returns the list of analyses of the Analysis Request to which this
analysis belongs to, but with the current analysis excluded.
:param retracted: If false, retracted/rejected siblings are dismissed
:type retracted: bool
:return: list of siblings for this analysis
:rtype: list of ... |
def conditional_http_tween_factory(handler, registry):
"""
Tween that adds ETag headers and tells Pyramid to enable
conditional responses where appropriate.
"""
settings = registry.settings if hasattr(registry, 'settings') else {}
not_cacheble_list = []
if 'not.cachable.list' in settings:
... | Tween that adds ETag headers and tells Pyramid to enable
conditional responses where appropriate. |
def isempty(result):
''' Finds out if a scraping result should be considered empty. '''
if isinstance(result, list):
for element in result:
if isinstance(element, list):
if not isempty(element):
return False
else:
if element is not None:
return False
else:
if result is not None:
retur... | Finds out if a scraping result should be considered empty. |
def make(self, selection):
"""
Scopes and selectors are tested in this order:
* is this a CSS selector with an appended @something attribute?
* is this a regular CSS selector?
* is this an XPath expression?
XPath expression can also use EXSLT functions (as long as they a... | Scopes and selectors are tested in this order:
* is this a CSS selector with an appended @something attribute?
* is this a regular CSS selector?
* is this an XPath expression?
XPath expression can also use EXSLT functions (as long as they are
understood by libxslt) |
def log_debug(msg, logger="TaskLogger"):
"""Log a DEBUG message
Convenience function to log a message to the default Logger
Parameters
----------
msg : str
Message to be logged
logger : str, optional (default: "TaskLogger")
Unique name of the logger to retrieve
Returns
... | Log a DEBUG message
Convenience function to log a message to the default Logger
Parameters
----------
msg : str
Message to be logged
logger : str, optional (default: "TaskLogger")
Unique name of the logger to retrieve
Returns
-------
logger : TaskLogger |
def read_nonblocking(self, size=1, timeout=-1):
'''This reads at most size characters from the child application. It
includes a timeout. If the read does not complete within the timeout
period then a TIMEOUT exception is raised. If the end of file is read
then an EOF exception will be ra... | This reads at most size characters from the child application. It
includes a timeout. If the read does not complete within the timeout
period then a TIMEOUT exception is raised. If the end of file is read
then an EOF exception will be raised. If a logfile is specified, a
copy is written... |
def read(self, size=None):
"""Reads a byte string from the file-like object at the current offset.
The function will read a byte string of the specified size or
all of the remaining data if no size was specified.
Args:
size (Optional[int]): number of bytes to read, where None is all
re... | Reads a byte string from the file-like object at the current offset.
The function will read a byte string of the specified size or
all of the remaining data if no size was specified.
Args:
size (Optional[int]): number of bytes to read, where None is all
remaining data.
Returns:
... |
def assign_account_entitlement_for_user(self, body, user_id, dont_notify_user=None, origin=None):
"""AssignAccountEntitlementForUser.
[Preview API] Assign an explicit account entitlement
:param :class:`<AccountEntitlementUpdateModel> <azure.devops.v5_0.licensing.models.AccountEntitlementUpdateMo... | AssignAccountEntitlementForUser.
[Preview API] Assign an explicit account entitlement
:param :class:`<AccountEntitlementUpdateModel> <azure.devops.v5_0.licensing.models.AccountEntitlementUpdateModel>` body: The update model for the entitlement
:param str user_id: The id of the user
:para... |
def parse(self, generator):
"""Parse an iterable source of strings into a generator"""
gen = iter(generator)
for line in gen:
block = {}
for rule in self.rules:
if rule[0](line):
block = rule[1](line, gen)
break
... | Parse an iterable source of strings into a generator |
def from_size(value):
'''
Convert zfs size (human readble) to python int (bytes)
'''
match_size = re_zfs_size.match(str(value))
if match_size:
v_unit = match_size.group(2).upper()[0]
v_size = float(match_size.group(1))
v_multiplier = math.pow(1024, zfs_size.index(v_unit) + 1)... | Convert zfs size (human readble) to python int (bytes) |
def _probs(density_matrix: np.ndarray, indices: List[int],
num_qubits: int) -> List[float]:
"""Returns the probabilities for a measurement on the given indices."""
# Only diagonal elements matter.
all_probs = np.diagonal(
np.reshape(density_matrix, (2 ** num_qubits, 2 ** num_qubits)))
# Shap... | Returns the probabilities for a measurement on the given indices. |
def run(self):
    """Spawn a daemon background thread that periodically reports queued spans."""
    reporter = t.Thread(target=self.report_spans)
    reporter.daemon = True
    reporter.name = "Instana Span Reporting"
    # Publish the thread on the instance before starting it.
    self.timer = reporter
    reporter.start()
def parentItem(self, value):
    """Set the parent item, then rebuild and propagate this node's path."""
    self._parentItem = value
    # Path depends on the parent, so recompute it after the assignment.
    refreshed_path = self._constructNodePath()
    self._recursiveSetNodePath(refreshed_path)
def read_stats(self):
""" Read current statistics from chassis.
:return: dictionary {tpld full index {group name {stat name: stat value}}}
"""
self.statistics = TgnObjectsDict()
for port in self.session.ports.values():
for tpld in port.tplds.values():
... | Read current statistics from chassis.
:return: dictionary {tpld full index {group name {stat name: stat value}}} |
def fetch_batch(self, formatter=TableFormat):
"""
Fetch a batch of logs and return using the specified formatter.
Formatter is class type defined in :py:mod:`smc_monitoring.models.formatters`.
This fetch type will be a single shot fetch (this method forces
``fetch_type='stored'`... | Fetch a batch of logs and return using the specified formatter.
Formatter is class type defined in :py:mod:`smc_monitoring.models.formatters`.
This fetch type will be a single shot fetch (this method forces
``fetch_type='stored'``). If ``fetch_size`` is not already set on the
query, the... |
def _handle_request(self, request):
"""Finds the resource to which a request maps and then calls it.
Instantiates, fills and returns a :class:`webob.Response` object. If
no resource matches the request, a 404 status is set on the response
object.
:param request: Object represent... | Finds the resource to which a request maps and then calls it.
Instantiates, fills and returns a :class:`webob.Response` object. If
no resource matches the request, a 404 status is set on the response
object.
:param request: Object representing the current request.
:type request:... |
def remove(self, key):
"""
Transactional implementation of :func:`Map.remove(key) <hazelcast.proxy.map.Map.remove>`
The object to be removed will be removed from only the current transaction context until the transaction is
committed.
:param key: (object), key of the mapping to... | Transactional implementation of :func:`Map.remove(key) <hazelcast.proxy.map.Map.remove>`
The object to be removed will be removed from only the current transaction context until the transaction is
committed.
:param key: (object), key of the mapping to be deleted.
:return: (object), the... |
def _write_marker(self, indent_string, depth, entry, comment):
"""Write a section marker line"""
return '%s%s%s%s%s' % (indent_string,
self._a_to_u('[' * depth),
self._quote(self._decode_element(entry), multiline=False),
... | Write a section marker line |
def xpathNextParent(self, cur):
"""Traversal function for the "parent" direction The parent
axis contains the parent of the context node, if there is
one. """
if cur is None: cur__o = None
else: cur__o = cur._o
ret = libxml2mod.xmlXPathNextParent(self._o, cur__o)
... | Traversal function for the "parent" direction The parent
axis contains the parent of the context node, if there is
one. |
def _set_character_restriction(self, v, load=False):
"""
Setter method for character_restriction, mapped from YANG variable /password_attributes/character_restriction (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_character_restriction is considered as a pri... | Setter method for character_restriction, mapped from YANG variable /password_attributes/character_restriction (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_character_restriction is considered as a private
method. Backends looking to populate this variable shoul... |
def import_class(path):
    """Import and return a class given its dotted ``module.ClassName`` path string."""
    module_path, _, class_name = path.rpartition(".")
    # fromlist makes __import__ return the leaf module rather than the top package.
    mod = __import__(module_path, fromlist=[native_str(class_name)])
    return getattr(mod, native_str(class_name))
def decode_data(self, encoded):
'''
Decode sensor data.
Returns:
dict: Sensor values
'''
try:
identifier = None
data_format = 2
if len(encoded) > 8:
data_format = 4
identifier = encoded[8:]
... | Decode sensor data.
Returns:
dict: Sensor values |
def extern_equals(self, context_handle, val1, val2):
    """Return true if the objects behind the two given Handles are __eq__."""
    lhs = self._ffi.from_handle(val1[0])
    rhs = self._ffi.from_handle(val2[0])
    return lhs == rhs
def _wrap_thing(self, thing, kind):
"""Mimic praw.Submission and praw.Comment API"""
thing['created'] = self._epoch_utc_to_local(thing['created_utc'])
thing['d_'] = copy.deepcopy(thing)
ThingType = namedtuple(kind, thing.keys())
thing = ThingType(**thing)
return thing | Mimic praw.Submission and praw.Comment API |
def accept_vpc_peering_connection(name=None, conn_id=None, conn_name=None,
region=None, key=None, keyid=None, profile=None):
'''
Accept a VPC pending requested peering connection between two VPCs.
name
Name of this state
conn_id
The connection ID to ac... | Accept a VPC pending requested peering connection between two VPCs.
name
Name of this state
conn_id
The connection ID to accept. Exclusive with conn_name. String type.
conn_name
The name of the VPC peering connection to accept. Exclusive with conn_id. String type.
region
... |
def plugin(name, module=''):
"""
Returns the plugin for the given name. By default, the
base Builder instance will be returned.
:param name | <str>
"""
if module:
mod = projex.importfile(module)
if mod:
return getattr... | Returns the plugin for the given name. By default, the
base Builder instance will be returned.
:param name | <str> |
def _map(self, from_pos, to_pos, pos, base):
"""Map position between aligned sequences
Positions in this function are 0-based.
"""
pos_i = -1
while pos_i < len(self.cigar_op) and pos >= from_pos[pos_i + 1]:
pos_i += 1
if pos_i == -1 or pos_i == len(self.ciga... | Map position between aligned sequences
Positions in this function are 0-based. |
def get_if_addr6(iff):
    """
    Return the main global unicast address associated with the provided
    interface, in human readable form. If no global address is found,
    None is returned.
    """
    # in6_getifaddr() yields (address, scope, interface) tuples.
    for addr, scope, iface in in6_getifaddr():
        if iface == iff and scope == IPV6_ADDR_GLOBAL:
            return addr
    return None
def parse(self, content):
"""
Parse the fetched feed content
Feedparser returned dict contain a 'bozo' key which can be '1' if the feed
is malformed.
Return None if the feed is malformed and 'bozo_accept'
is 'False', else return the feed content dict.
... | Parse the fetched feed content
Feedparser returned dict contain a 'bozo' key which can be '1' if the feed
is malformed.
Return None if the feed is malformed and 'bozo_accept'
is 'False', else return the feed content dict.
If the feed is malformed but ... |
def diag(A, k=0):
    """Extract or construct a diagonal polynomial array."""
    if not isinstance(A, Poly):
        # Plain arrays delegate straight to numpy.
        return numpy.diag(A, k)
    # Apply numpy.diag key-wise over the polynomial's coefficient cores.
    rebuilt = {key: numpy.diag(A.A[key], k) for key in A.keys}
    return Poly(rebuilt, A.dim, None, A.dtype)
def get_instructions(self, cm, size, insn, idx):
"""
:param cm: a ClassManager object
:type cm: :class:`ClassManager` object
:param size: the total size of the buffer
:type size: int
:param insn: a raw buffer where are the instructions
:typ... | :param cm: a ClassManager object
:type cm: :class:`ClassManager` object
:param size: the total size of the buffer
:type size: int
:param insn: a raw buffer where are the instructions
:type insn: string
:param idx: a start address in the buffer
... |
def lookup(self, hostname):
"""
Find a hostkey entry for a given hostname or IP. If no entry is found,
``None`` is returned. Otherwise a dictionary of keytype to key is
returned. The keytype will be either ``"ssh-rsa"`` or ``"ssh-dss"``.
:param str hostname: the hostname (or ... | Find a hostkey entry for a given hostname or IP. If no entry is found,
``None`` is returned. Otherwise a dictionary of keytype to key is
returned. The keytype will be either ``"ssh-rsa"`` or ``"ssh-dss"``.
:param str hostname: the hostname (or IP) to lookup
:return: dict of `str` -> ... |
def create_index_list(self, table_name, attr_names):
"""
:param str table_name: Table name that exists attribute.
:param list attr_names:
List of attribute names to create indices.
Ignore attributes that are not existing in the table.
.. seealso:: :py:meth:`.crea... | :param str table_name: Table name that exists attribute.
:param list attr_names:
List of attribute names to create indices.
Ignore attributes that are not existing in the table.
.. seealso:: :py:meth:`.create_index` |
def create_tags(filesystemid,
tags,
keyid=None,
key=None,
profile=None,
region=None,
**kwargs):
'''
Creates or overwrites tags associated with a file system.
Each tag is a key-value pair. If a tag key specified i... | Creates or overwrites tags associated with a file system.
Each tag is a key-value pair. If a tag key specified in the request
already exists on the file system, this operation overwrites
its value with the value provided in the request.
filesystemid
(string) - ID of the file system for whose ta... |
def prefetch_urls(self, urls):
"""
预取文件列表,文档 http://developer.qiniu.com/article/fusion/api/prefetch.html
Args:
urls: 待预取的文件外链列表
Returns:
一个dict变量和一个ResponseInfo对象
参考代码 examples/cdn_manager.py
"""
req = {}
req.update({"urls": urls... | 预取文件列表,文档 http://developer.qiniu.com/article/fusion/api/prefetch.html
Args:
urls: 待预取的文件外链列表
Returns:
一个dict变量和一个ResponseInfo对象
参考代码 examples/cdn_manager.py |
def get_attrs(cls):
"""
Get all class attributes ordered by definition
"""
ignore = dir(type('dummy', (object,), {})) + ['__metaclass__']
attrs = [
item for item in inspect.getmembers(cls) if item[0] not in ignore
and not isinstance(
item[1... | Get all class attributes ordered by definition |
def backbone_bond_lengths(self):
"""Dictionary containing backbone bond lengths as lists of floats.
Returns
-------
bond_lengths : dict
Keys are `n_ca`, `ca_c`, `c_o` and `c_n`, referring to the
N-CA, CA-C, C=O and C-N bonds respectively. Values are
l... | Dictionary containing backbone bond lengths as lists of floats.
Returns
-------
bond_lengths : dict
Keys are `n_ca`, `ca_c`, `c_o` and `c_n`, referring to the
N-CA, CA-C, C=O and C-N bonds respectively. Values are
lists of floats : the bond lengths in Angstro... |
def get_unresolved_properties_by_inheritance(self, timeperiod):
"""
Fill full properties with template if needed for the
unresolved values (example: sunday ETCETC)
:return: None
"""
# Ok, I do not have prop, Maybe my templates do?
# Same story for plus
for... | Fill full properties with template if needed for the
unresolved values (example: sunday ETCETC)
:return: None |
def get_disease(self, disease_name=None, disease_id=None, definition=None, parent_ids=None, tree_numbers=None,
parent_tree_numbers=None, slim_mapping=None, synonym=None, alt_disease_id=None, limit=None,
as_df=False):
"""
Get diseases
:param bool as_df: if... | Get diseases
:param bool as_df: if set to True result returns as `pandas.DataFrame`
:param int limit: maximum number of results
:param str disease_name: disease name
:param str disease_id: disease identifier
:param str definition: definition of disease
:param str parent_... |
def spin(self):
"""Flush any registration notifications and execution results
waiting in the ZMQ queue.
"""
if self._notification_socket:
self._flush_notifications()
if self._iopub_socket:
self._flush_iopub(self._iopub_socket)
if self._mux_socket:
... | Flush any registration notifications and execution results
waiting in the ZMQ queue. |
async def iter_all(
self,
direction: msg.StreamDirection = msg.StreamDirection.Forward,
from_position: Optional[Union[msg.Position, msg._PositionSentinel]] = None,
batch_size: int = 100,
resolve_links: bool = True,
require_master: bool = False,
correlation_id: Opt... | Read through all the events in the database.
Args:
direction (optional): Controls whether to read forward or backward
through the events. Defaults to StreamDirection.Forward
from_position (optional): The position to start reading from.
Defaults to photonpump... |
def __cleanup_breakpoint(self, event, bp):
"Auxiliary method."
try:
process = event.get_process()
thread = event.get_thread()
bp.disable(process, thread) # clear the debug regs / trap flag
except Exception:
pass
bp.set_condition(True) # b... | Auxiliary method. |
def word(self):
"""Property of the DigitWord returning (or setting) the DigitWord as a list of integers (or
string representations) of DigitModel. The property is called during instantiation as the
property validates the value passed and ensures that all digits are valid."""
if self.wor... | Property of the DigitWord returning (or setting) the DigitWord as a list of integers (or
string representations) of DigitModel. The property is called during instantiation as the
property validates the value passed and ensures that all digits are valid. |
def main():
'''the main entry point for the HelpMe Command line application. Currently,
the user can request help or set config values for a particular helper.
'''
# Customize parser
parser = get_parser()
subparsers = get_subparsers(parser)
def help(return_code=0):
'''print hel... | the main entry point for the HelpMe Command line application. Currently,
the user can request help or set config values for a particular helper. |
def from_dict(dic):
    """Recursively convert a plain dict into an ODict (dict object).

    :param dic: source mapping
    :return: ODict with nested dicts converted as well
    """
    converted = ((key, ODict.convert_ifdic(value)) for key, value in dic.items())
    return ODict(converted)
def save(self, fname):
"""Save figure to SVG file.
Parameters
----------
fname : str
Full path to file.
"""
element = _transform.SVGFigure(self.width, self.height)
element.append(self)
element.save(os.path.join(CONFIG['figure.save_path'], fnam... | Save figure to SVG file.
Parameters
----------
fname : str
Full path to file. |
def specific_gains(string):
    """Convert string with gains of individual amplification elements to dict"""
    if not string:
        return {}
    # Entries look like "NAME=VALUE", separated by commas.
    return {
        name.strip(): float(value.strip())
        for name, value in (item.split('=') for item in string.split(','))
    }
def tempfile_writer(target):
'''write cache data to a temporary location. when writing is
complete, rename the file to the actual location. delete
the temporary file on any error'''
tmp = target.parent / ('_%s' % target.name)
try:
with tmp.open('wb') as fd:
yield fd
except:... | write cache data to a temporary location. when writing is
complete, rename the file to the actual location. delete
the temporary file on any error |
def get_renderer(self, with_layout=True):
    """Get the default renderer."""
    if not with_layout:
        return self._default_renderer_nolayout
    # With a layout, LTI sessions get their dedicated renderer.
    if self.is_lti():
        return self._default_renderer_lti
    return self._default_renderer
def get_objective_requisite_assignment_session_for_objective_bank(self, objective_bank_id, proxy, *args, **kwargs):
"""Gets the ``OsidSession`` associated with the objective sequencing service for the given objective bank.
:param objective_bank_id: the ``Id`` of the objective bank
:type objecti... | Gets the ``OsidSession`` associated with the objective sequencing service for the given objective bank.
:param objective_bank_id: the ``Id`` of the objective bank
:type objective_bank_id: ``osid.id.Id``
:param proxy: a proxy
:type proxy: ``osid.proxy.Proxy``
:return: an ``Object... |
def irreg(self, i):
""" Renvoie la forme irrégulière de morpho i. excl devient True si elle est exclusive, sinon.
:return: Forme irrégulière de morpho i, Exclusivité
:rtype: tuple.<str, bool>
"""
excl = False
for ir in self._irregs:
if i in ir.morphos():
... | Renvoie la forme irrégulière de morpho i. excl devient True si elle est exclusive, sinon.
:return: Forme irrégulière de morpho i, Exclusivité
:rtype: tuple.<str, bool> |
def create_free_space_request_content():
"""Creates an XML for requesting of free space on remote WebDAV server.
:return: the XML string of request content.
"""
root = etree.Element('propfind', xmlns='DAV:')
prop = etree.SubElement(root, 'prop')
etree.SubElement(prop, 'q... | Creates an XML for requesting of free space on remote WebDAV server.
:return: the XML string of request content. |
def reset_password_view(self, token):
""" Verify the password reset token, Prompt for new password, and set the user's password."""
# Verify token
if self.call_or_get(current_user.is_authenticated):
logout_user()
data_items = self.token_manager.verify_token(
tok... | Verify the password reset token, Prompt for new password, and set the user's password. |
def location_path(self):
"""
Return the Location-Path of the response.
:rtype : String
:return: the Location-Path option
"""
value = []
for option in self.options:
if option.number == defines.OptionRegistry.LOCATION_PATH.number:
value.... | Return the Location-Path of the response.
:rtype : String
:return: the Location-Path option |
def _get_translations_multi_paths():
"""Return the correct gettext translations that should be used for this
request.
This will never fail and return a dummy translation object if used
outside of the request or if a translation cannot be found.
"""
ctx = _request_ctx_stack.top
if ctx is Non... | Return the correct gettext translations that should be used for this
request.
This will never fail and return a dummy translation object if used
outside of the request or if a translation cannot be found. |
def map2slim(subjects, slim, **kwargs):
"""
Maps a set of subjects (e.g. genes) to a set of slims
Result is a list of unique subject-class pairs, with
a list of source associations
"""
logging.info("SLIM SUBJECTS:{} SLIM:{} CAT:{}".format(subjects,slim,kwargs.get('category')))
searchresult =... | Maps a set of subjects (e.g. genes) to a set of slims
Result is a list of unique subject-class pairs, with
a list of source associations
def append_partition_by_name(self, db_name, tbl_name, part_name):
    """Synchronous RPC wrapper: send the append_partition_by_name request
    and block until the server's reply arrives, returning it.

    Parameters:
     - db_name
     - tbl_name
     - part_name
    """
    self.send_append_partition_by_name(db_name, tbl_name, part_name)
    reply = self.recv_append_partition_by_name()
    return reply
- db_name
- tbl_name
- part_name |
def get_many(self, keys):
"""
Fetch a bunch of keys from the cache. For certain backends (memcached,
pgsql) this can be *much* faster when fetching multiple values.
Return a dict mapping each key in keys to its value. If the given
key is missing, it will be missing from the respo... | Fetch a bunch of keys from the cache. For certain backends (memcached,
pgsql) this can be *much* faster when fetching multiple values.
Return a dict mapping each key in keys to its value. If the given
key is missing, it will be missing from the response dict. |
def DoRarExtraction(rarArchive, targetFile, dstDir):
"""
RAR extraction with exception catching
Parameters
----------
rarArchive : RarFile object
RarFile object to extract.
targetFile : string
Target file name.
dstDir : string
Target directory.
Returns
----------
boolea... | RAR extraction with exception catching
Parameters
----------
rarArchive : RarFile object
RarFile object to extract.
targetFile : string
Target file name.
dstDir : string
Target directory.
Returns
----------
boolean
False if rar extraction failed, otherwise True. |
def get_meta_regex(schema='mona'):
""" Create a dictionary of regex for extracting the meta data for the spectra
"""
# NOTE: will just ignore cases, to avoid repetition here
meta_parse = collections.OrderedDict()
if schema == 'mona':
meta_parse['collision_energy'] = ['^collision energy(?:=|... | Create a dictionary of regex for extracting the meta data for the spectra |
def in_dir(
config_dir=os.path.expanduser('~/.tmuxp'), extensions=['.yml', '.yaml', '.json']
):
"""
Return a list of configs in ``config_dir``.
Parameters
----------
config_dir : str
directory to search
extensions : list
filetypes to check (e.g. ``['.yaml', '.json']``).
... | Return a list of configs in ``config_dir``.
Parameters
----------
config_dir : str
directory to search
extensions : list
filetypes to check (e.g. ``['.yaml', '.json']``).
Returns
-------
list |
def __realized_bbox(self, requested_bbox):
"""
The requested bbox might not be aligned to the underlying chunk grid
or even outside the bounds of the dataset. Convert the request into
a bbox representing something that can be actually downloaded.
Returns: Bbox
"""
realized_bbox = requested... | The requested bbox might not be aligned to the underlying chunk grid
or even outside the bounds of the dataset. Convert the request into
a bbox representing something that can be actually downloaded.
Returns: Bbox |
def _energy_minimize_openmm(
self,
tmp_dir,
forcefield_files=None,
forcefield_name=None,
steps=1000,
scale_bonds=1,
scale_angles=1,
scale_torsions=1,
scale_nonbonded=1):
""" Perform energy minimization us... | Perform energy minimization using OpenMM
Converts an mBuild Compound to a Parmed Structure,
applies a forcefield using Foyer, and creates an OpenMM System.
Parameters
----------
forcefield_files : str or list of str, optional, default=None
Forcefield files to load
... |
def add_search_path(*path_tokens):
"""
Adds a new search path from where modules can be loaded.
This function is provided for test applications to add locations to the search path, so any required functionality
can be loaded. It helps keeping the step implementation modules simple by placing the b... | Adds a new search path from where modules can be loaded.
This function is provided for test applications to add locations to the search path, so any required functionality
can be loaded. It helps keeping the step implementation modules simple by placing the bulk of the implementation in
separate utili... |
def uint32_gt(a: int, b: int) -> bool:
    """
    Return a > b, comparing the two values as unsigned 32-bit numbers on
    a circle: a value is "greater" only if it sits strictly less than
    half the 32-bit range ahead of the other, so the comparison still
    works across a wraparound. Ties (including a distance of exactly
    half the range) are False.
    """
    half_mod = 0x80000000
    if a == b:
        return False
    if a > b:
        # No wraparound between them: a wins only if it is close ahead.
        return (a - b) < half_mod
    # a < b numerically: a wins only if b is more than half the range
    # behind a after wrapping.
    return (b - a) > half_mod
def transform(self, v3):
"""
Calculates the vector transformed by this quaternion
:param v3: Vector3 to be transformed
:returns: transformed vector
"""
if isinstance(v3, Vector3):
t = super(Quaternion, self).transform([v3.x, v3.y, v3.z])
return Vec... | Calculates the vector transformed by this quaternion
:param v3: Vector3 to be transformed
:returns: transformed vector |
def set_widths(self, estimation, widths):
"""Set estimation on widths
Parameters
----------
estimation : 1D array
Either prior or posterior estimation
widths : 2D array, in shape [K, 1]
Estimation on widths
"""
estimation[self.map_offse... | Set estimation on widths
Parameters
----------
estimation : 1D array
Either prior or posterior estimation
widths : 2D array, in shape [K, 1]
Estimation on widths |
async def check_passwd(self,
identity: str,
passwd: str
) -> SessionIdentity :
""" 通过密码检查身份 """
assert identity
value, _ = await self._client.get(f"{self._prefix_identity}/{identity}")
if value is None:
logger.debug(f'Not found identity: {identity}')
... | 通过密码检查身份 |
def cleanup(self):
    """ Do cleanup: stop every watchdog recorded in the "done" registry,
    empty that registry, and clear the cleanup event flag.

    :return: None
    """
    registry = self.__done_registry
    for finished_task in registry:
        finished_task.stop()
    registry.clear()
    self.cleanup_event().clear()
:return: None |
def postadressen(self):
    '''
    Returns the postadressen for this Perceel.

    Only huisnummers whose status id is ``'3'`` (`inGebruik`) are taken
    into account.

    :rtype: list
    '''
    adressen = []
    for huisnummer in self.huisnummers:
        # status id '3' corresponds to `inGebruik`
        if huisnummer.status.id == '3':
            adressen.append(huisnummer.postadres)
    return adressen
Will only take the huisnummers with status `inGebruik` into account.
:rtype: list |
def active_trail_nodes(self, variables, observed=None):
"""
Returns a dictionary with the given variables as keys and all the nodes reachable
from that respective variable as values.
Parameters
----------
variables: str or array like
variables whose active tra... | Returns a dictionary with the given variables as keys and all the nodes reachable
from that respective variable as values.
Parameters
----------
variables: str or array like
variables whose active trails are to be found.
observed : List of nodes (optional)
... |
def set_feature_flag_courses(self, feature, course_id, state=None):
"""
Set feature flag.
Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets
a feature flag for the same feature in any state other than "allowed".
"""
... | Set feature flag.
Set a feature flag for a given Account, Course, or User. This call will fail if a parent account sets
a feature flag for the same feature in any state other than "allowed". |
def _cromwell_move_outputs(metadata, final_dir):
"""Move Cromwell outputs to the final upload directory.
"""
sample_key = [k for k in metadata["outputs"].keys() if k.endswith(("rgnames__sample", "rgnames__sample_out"))][0]
project_dir = utils.safe_makedir(os.path.join(final_dir, "project"))
samples ... | Move Cromwell outputs to the final upload directory. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.