code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def is_visit_primitive(obj):
'''Returns true if properly visiting the object returns only the object itself.'''
from .base import visit
if (isinstance(obj, tuple(PRIMITIVE_TYPES)) and not isinstance(obj, STR)
and not isinstance(obj, bytes)):
return True
if (isinstance(obj, CONTAINERS) an... | Returns true if properly visiting the object returns only the object itself. |
def _handle_inotify_event(self, wd):
"""Handle a series of events coming-in from inotify."""
b = os.read(wd, 1024)
if not b:
return
self.__buffer += b
while 1:
length = len(self.__buffer)
if length < _STRUCT_HEADER_LENGTH:
_... | Handle a series of events coming-in from inotify. |
def part(self):
"""Retrieve the part that holds this Property.
:returns: The :class:`Part` associated to this property
:raises APIError: if the `Part` is not found
"""
part_id = self._json_data['part']
return self._client.part(pk=part_id, category=self._json_data['categ... | Retrieve the part that holds this Property.
:returns: The :class:`Part` associated to this property
:raises APIError: if the `Part` is not found |
def _readResponse(self):
"""
Yield each row of response untill !done is received.
:throws TrapError: If one !trap is received.
:throws MultiTrapError: If > 1 !trap is received.
"""
traps = []
reply_word = None
while reply_word != '!done':
repl... | Yield each row of response untill !done is received.
:throws TrapError: If one !trap is received.
:throws MultiTrapError: If > 1 !trap is received. |
def responds(self):
"""
:returns: The frequency with which the user associated with this profile
responds to messages.
"""
contacted_text = self._contacted_xpb.\
get_text_(self.profile_tree).lower()
if 'contacted' not in contacted_text:
... | :returns: The frequency with which the user associated with this profile
responds to messages. |
def save(self, save_json=True, save_xml=True):
"""
Saves the metadata json and/or xml to a file or DB.
:param save_json: flag to save json
:type save_json: bool
:param save_xml: flag to save xml
:type save_xml: bool
"""
if self.layer_is_file_based:
... | Saves the metadata json and/or xml to a file or DB.
:param save_json: flag to save json
:type save_json: bool
:param save_xml: flag to save xml
:type save_xml: bool |
def reset_env(exclude=[]):
"""Remove environment variables, used in Jupyter notebooks"""
if os.getenv(env.INITED):
wandb_keys = [key for key in os.environ.keys() if key.startswith(
'WANDB_') and key not in exclude]
for key in wandb_keys:
del os.environ[key]
return... | Remove environment variables, used in Jupyter notebooks |
def str_replace(x, pat, repl, n=-1, flags=0, regex=False):
"""Replace occurences of a pattern/regex in a column with some other string.
:param str pattern: string or a regex pattern
:param str replace: a replacement string
:param int n: number of replacements to be made from the start. If -1 make all r... | Replace occurences of a pattern/regex in a column with some other string.
:param str pattern: string or a regex pattern
:param str replace: a replacement string
:param int n: number of replacements to be made from the start. If -1 make all replacements.
:param int flags: ??
:param bool regex: If Tr... |
def updatePassword(self,
user,
currentPassword,
newPassword):
"""Change the password of a user."""
return self.__post('/api/updatePassword',
data={
'user': user,
... | Change the password of a user. |
def _loadData(self, data):
""" Load attribute values from Plex XML response. """
self._data = data
for elem in data:
id = utils.lowerFirst(elem.attrib['id'])
if id in self._settings:
self._settings[id]._loadData(elem)
continue
s... | Load attribute values from Plex XML response. |
def ghuser_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
"""Link to a GitHub user.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:par... | Link to a GitHub user.
Returns 2 part tuple containing list of nodes to insert into the
document and a list of system messages. Both are allowed to be
empty.
:param name: The role name used in the document.
:param rawtext: The entire markup snippet, with role.
:param text: The text marked wit... |
def OnShowFindReplace(self, event):
"""Calls the find-replace dialog"""
data = wx.FindReplaceData(wx.FR_DOWN)
dlg = wx.FindReplaceDialog(self.grid, data, "Find & Replace",
wx.FR_REPLACEDIALOG)
dlg.data = data # save a reference to data
dlg.Sho... | Calls the find-replace dialog |
def set_content(self, data):
"""
handle the content from the data
:param data: contains the data from the provider
:type data: dict
:rtype: string
"""
content = self._get_content(data, 'content')
if content == '':
content = sel... | handle the content from the data
:param data: contains the data from the provider
:type data: dict
:rtype: string |
def SetSerializersProfiler(self, serializers_profiler):
"""Sets the serializers profiler.
Args:
serializers_profiler (SerializersProfiler): serializers profiler.
"""
self._serializers_profiler = serializers_profiler
if self._storage_file:
self._storage_file.SetSerializersProfiler(serial... | Sets the serializers profiler.
Args:
serializers_profiler (SerializersProfiler): serializers profiler. |
def _filtdim(items, shape, dim, nsl):
"""Return items, shape filtered by a dimension slice."""
normshape = tuple(stop - start for start, stop in shape)
nsl_type = type(nsl)
newitems = list()
# Number of groups
num = reduce(operator.mul, normshape[:dim+1])
# Size of each group
size = len(... | Return items, shape filtered by a dimension slice. |
def put_encryption_materials(self, cache_key, encryption_materials, plaintext_length, entry_hints=None):
"""Does not add encryption materials to the cache since there is no cache to which to add them.
:param bytes cache_key: Identifier for entries in cache
:param encryption_materials: Encryptio... | Does not add encryption materials to the cache since there is no cache to which to add them.
:param bytes cache_key: Identifier for entries in cache
:param encryption_materials: Encryption materials to add to cache
:type encryption_materials: aws_encryption_sdk.materials_managers.EncryptionMate... |
def _process_genes(self, limit=None):
"""
This table provides the ZFIN gene id, the SO type of the gene,
the gene symbol, and the NCBI Gene ID.
Triples created:
<gene id> a class
<gene id> rdfs:label gene_symbol
<gene id> equivalent class <ncbi_gene_id>
:... | This table provides the ZFIN gene id, the SO type of the gene,
the gene symbol, and the NCBI Gene ID.
Triples created:
<gene id> a class
<gene id> rdfs:label gene_symbol
<gene id> equivalent class <ncbi_gene_id>
:param limit:
:return: |
def setup(app):
    """Entry point when this module is used as a Sphinx extension.

    Registers the ``no_underscore_emphasis`` config value, the Markdown
    source parser and the ``mdinclude`` directive on the application.

    :param app: the Sphinx application object
    """
    global _is_sphinx
    # Remember that we are running under Sphinx so other code paths can adapt.
    _is_sphinx = True
    app.add_config_value('no_underscore_emphasis', False, 'env')
    # NOTE(review): the (suffix, parser) form of add_source_parser is
    # deprecated in newer Sphinx releases -- confirm the targeted version.
    app.add_source_parser('.md', M2RParser)
    app.add_directive('mdinclude', MdInclude)
def add_handler( # noqa: F811
self, fd: Union[int, _Selectable], handler: Callable[..., None], events: int
) -> None:
"""Registers the given handler to receive the given events for ``fd``.
The ``fd`` argument may either be an integer file descriptor or
a file-like object with a ``f... | Registers the given handler to receive the given events for ``fd``.
The ``fd`` argument may either be an integer file descriptor or
a file-like object with a ``fileno()`` and ``close()`` method.
The ``events`` argument is a bitwise or of the constants
``IOLoop.READ``, ``IOLoop.WRITE``,... |
def get_self_host(request_data):
"""
Returns the current host.
:param request_data: The request as a dict
:type: dict
:return: The current host
:rtype: string
"""
if 'http_host' in request_data:
current_host = request_data['http_host']
... | Returns the current host.
:param request_data: The request as a dict
:type: dict
:return: The current host
:rtype: string |
def list_provincies(self, gewest=2):
'''
List all `provincies` in a `gewest`.
:param gewest: The :class:`Gewest` for which the \
`provincies` are wanted.
:param integer sort: What field to sort on.
:rtype: A :class:`list` of :class:`Provincie`.
'''
tr... | List all `provincies` in a `gewest`.
:param gewest: The :class:`Gewest` for which the \
`provincies` are wanted.
:param integer sort: What field to sort on.
:rtype: A :class:`list` of :class:`Provincie`. |
def match(self, objects: List[Any]) -> bool:
    """
    Return True if the list of objects matches the expression.
    """
    encoded = self._make_string(objects)
    return self._compiled_expression.match(encoded) is not None
def rmdir_p(self):
""" Like :meth:`rmdir`, but does not raise an exception if the
directory is not empty or does not exist. """
try:
self.rmdir()
except OSError:
_, e, _ = sys.exc_info()
if e.errno != errno.ENOTEMPTY and e.errno != errno.EEXIST:
... | Like :meth:`rmdir`, but does not raise an exception if the
directory is not empty or does not exist. |
def make_back_notes(self, body):
"""
The notes element in PLoS articles can be employed for posting notices
of corrections or adjustments in proof. The <notes> element has a very
diverse content model, but PLoS practice appears to be fairly
consistent: a single <sec> containing a... | The notes element in PLoS articles can be employed for posting notices
of corrections or adjustments in proof. The <notes> element has a very
diverse content model, but PLoS practice appears to be fairly
consistent: a single <sec> containing a <title> and a <p> |
def detect(self):
"""Detect and return the IP address."""
if PY3: # py23
import subprocess # noqa: S404 @UnresolvedImport pylint: disable=import-error
else:
import commands as subprocess # @UnresolvedImport pylint: disable=import-error
try:
theip = ... | Detect and return the IP address. |
def error_msg_wx(msg, parent=None):
"""
Signal an error condition -- in a GUI, popup a error dialog
"""
dialog =wx.MessageDialog(parent = parent,
message = msg,
caption = 'Matplotlib backend_wx error',
style=wx.OK | ... | Signal an error condition -- in a GUI, popup a error dialog |
def hsepd_pdf(sigma1, sigma2, xi, beta,
sim=None, obs=None, node=None, skip_nan=False):
"""Calculate the probability densities based on the
heteroskedastic skewed exponential power distribution.
For convenience, the required parameters of the probability density
function as well as the si... | Calculate the probability densities based on the
heteroskedastic skewed exponential power distribution.
For convenience, the required parameters of the probability density
function as well as the simulated and observed values are stored
in a dictonary:
>>> import numpy
>>> from hydpy import ro... |
def from_country(cls, country):
"""Retrieve the first datacenter id associated to a country."""
result = cls.list({'sort_by': 'id ASC'})
dc_countries = {}
for dc in result:
if dc['country'] not in dc_countries:
dc_countries[dc['country']] = dc['id']
r... | Retrieve the first datacenter id associated to a country. |
def get_info(self):
'''
Get information about the counter
.. note::
GetCounterInfo sometimes crashes in the wrapper code. Fewer crashes
if this is called after sampling data.
'''
if not self.info:
ci = win32pdh.GetCounterInfo(self.handle, 0)
... | Get information about the counter
.. note::
GetCounterInfo sometimes crashes in the wrapper code. Fewer crashes
if this is called after sampling data. |
def analyze(problem, Y, M=4, print_to_console=False, seed=None):
"""Performs the Fourier Amplitude Sensitivity Test (FAST) on model outputs.
Returns a dictionary with keys 'S1' and 'ST', where each entry is a list of
size D (the number of parameters) containing the indices in the same order
as the... | Performs the Fourier Amplitude Sensitivity Test (FAST) on model outputs.
Returns a dictionary with keys 'S1' and 'ST', where each entry is a list of
size D (the number of parameters) containing the indices in the same order
as the parameter file.
Parameters
----------
problem : dict
... |
def global_matches(self, text):
"""Compute matches when text is a simple name.
Return a list of all keywords, built-in functions and names currently
defined in self.namespace or self.global_namespace that match.
"""
#print 'Completer->global_matches, txt=%r' % text # dbg
... | Compute matches when text is a simple name.
Return a list of all keywords, built-in functions and names currently
defined in self.namespace or self.global_namespace that match. |
def compute(self, runner_results, setup=False, poll=False, ignore_errors=False):
''' walk through all results and increment stats '''
for (host, value) in runner_results.get('contacted', {}).iteritems():
if not ignore_errors and (('failed' in value and bool(value['failed'])) or
... | walk through all results and increment stats |
def debug_variable_node_render(self, context):
"""
Like DebugVariableNode.render, but doesn't catch UnicodeDecodeError.
"""
try:
output = self.filter_expression.resolve(context)
output = template_localtime(output, use_tz=context.use_tz)
output = localize(output, use_l10n=context.... | Like DebugVariableNode.render, but doesn't catch UnicodeDecodeError. |
def fan_speed(self, speed: int = None) -> bool:
"""Adjust Fan Speed by Specifying 1,2,3 as argument or cycle
through speeds increasing by one"""
body = helpers.req_body(self.manager, 'devicestatus')
body['uuid'] = self.uuid
head = helpers.req_headers(self.manager)
if ... | Adjust Fan Speed by Specifying 1,2,3 as argument or cycle
through speeds increasing by one |
def get_connection(self, command_name, *keys, **options):
"Get a connection from the pool"
self._checkpid()
try:
connection = self._available_connections.pop()
except IndexError:
connection = self.make_connection()
self._in_use_connections.add(connection)
... | Get a connection from the pool |
def headerData(self, section, orientation, role):
"""Get the Header for the columns in the table
Required by view, see :qtdoc:`subclassing<qabstractitemmodel.subclassing>`
:param section: column of header to return
:type section: int
"""
if role == QtCore.Qt.DisplayRole... | Get the Header for the columns in the table
Required by view, see :qtdoc:`subclassing<qabstractitemmodel.subclassing>`
:param section: column of header to return
:type section: int |
def get_total_size_trans(self, entries):
"""
Returns the total size of a collection of entries - transferred.
NOTE: use with har file generated with chrome-har-capturer
:param entries: ``list`` of entries to calculate the total size of.
"""
size = 0
for entry in... | Returns the total size of a collection of entries - transferred.
NOTE: use with har file generated with chrome-har-capturer
:param entries: ``list`` of entries to calculate the total size of. |
def version_router(self, request, response, api_version=None, versions={}, not_found=None, **kwargs):
"""Intelligently routes a request to the correct handler based on the version being requested"""
request_version = self.determine_version(request, api_version)
if request_version:
re... | Intelligently routes a request to the correct handler based on the version being requested |
def is_valid(self, request_data, request_id=None, raise_exceptions=False):
"""
Validates the response object.
:param request_data: Request Data
:type request_data: dict
:param request_id: Optional argument. The ID of the AuthNRequest sent by this SP to the IdP
:type req... | Validates the response object.
:param request_data: Request Data
:type request_data: dict
:param request_id: Optional argument. The ID of the AuthNRequest sent by this SP to the IdP
:type request_id: string
:param raise_exceptions: Whether to return false on failure or raise a... |
def mk_set_headers(self, data, columns):
""" figure out sizes and create header fmt """
columns = tuple(columns)
lens = []
for key in columns:
value_len = max(len(str(each.get(key, ''))) for each in data)
# account for header lengths
lens.append(max(v... | figure out sizes and create header fmt |
def _root(path, root):
'''
Relocate an absolute path to a new root directory.
'''
if root:
return os.path.join(root, os.path.relpath(path, os.path.sep))
else:
return path | Relocate an absolute path to a new root directory. |
def __generate_cluster_centers(self, width):
"""!
@brief Generates centers (means in statistical term) for clusters.
@param[in] width (list): Width of generated clusters.
@return (list) Generated centers in line with normal distribution.
"""
centers = []
... | !
@brief Generates centers (means in statistical term) for clusters.
@param[in] width (list): Width of generated clusters.
@return (list) Generated centers in line with normal distribution. |
def generate_id(self):
    """Generate a fresh id.

    When ``use_repeatable_ids`` is set, ids come from a deterministic
    counter; otherwise a random UUID string is produced.
    """
    if not self.use_repeatable_ids:
        return str(uuid4())
    self.repeatable_id_counter += 1
    return 'autobaked-{}'.format(self.repeatable_id_counter)
def _find_keep_files(root, keep):
'''
Compile a list of valid keep files (and directories).
Used by _clean_dir()
'''
real_keep = set()
real_keep.add(root)
if isinstance(keep, list):
for fn_ in keep:
if not os.path.isabs(fn_):
continue
fn_ = os.... | Compile a list of valid keep files (and directories).
Used by _clean_dir() |
def value(self):
    """ Trading-volume series, in thousand-share lots.

    :rtype: list
    """
    raw = self.__serial_price(1)
    return [round(v / 1000, 3) for v in raw]
def either(self):
"""Transform pattern into an equivalent, with only top-level Either."""
# Currently the pattern will not be equivalent, but more "narrow",
# although good enough to reason about list arguments.
if not hasattr(self, 'children'):
return Either(Required(self))
... | Transform pattern into an equivalent, with only top-level Either. |
def f_delete_links(self, iterator_of_links, remove_from_trajectory=False):
"""Deletes several links from the hard disk.
Links can be passed as a string ``'groupA.groupB.linkA'``
or as a tuple containing the node from which the link should be removed and the
name of the link ``(groupWit... | Deletes several links from the hard disk.
Links can be passed as a string ``'groupA.groupB.linkA'``
or as a tuple containing the node from which the link should be removed and the
name of the link ``(groupWithLink, 'linkA')``. |
def check_labels(self):
    """ Verifies that every recorded label has been declared.
    """
    for label_entry in self.labels:
        self.check_is_declared(label_entry.name, label_entry.lineno, CLASS.label)
def supports(cls, template_file=None):
"""
:return: Whether the engine can process given template file or not.
"""
if anytemplate.compat.IS_PYTHON_3:
cls._priority = 99
return False # Always as it's not ported to python 3.
return super(Engine, cls).suppo... | :return: Whether the engine can process given template file or not. |
def threshold_monitor_hidden_threshold_monitor_sfp_policy_area_threshold_high_threshold(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
threshold_monitor_hidden = ET.SubElement(config, "threshold-monitor-hidden", xmlns="urn:brocade.com:mgmt:brocade-threshold-mon... | Auto Generated Code |
def log_normalize(a, axis=None):
"""Normalizes the input array so that the exponent of the sum is 1.
Parameters
----------
a : array
Non-normalized input data.
axis : int
Dimension along which normalization is performed.
Notes
-----
Modifies the input **inplace**.
... | Normalizes the input array so that the exponent of the sum is 1.
Parameters
----------
a : array
Non-normalized input data.
axis : int
Dimension along which normalization is performed.
Notes
-----
Modifies the input **inplace**. |
def _save_model(self, steps=0):
"""
Saves current model to checkpoint folder.
:param steps: Current number of steps in training process.
:param saver: Tensorflow saver for session.
"""
for brain_name in self.trainers.keys():
self.trainers[brain_name].save_mode... | Saves current model to checkpoint folder.
:param steps: Current number of steps in training process.
:param saver: Tensorflow saver for session. |
def compress_table(condition, tbl, axis=None, out=None, blen=None, storage=None,
create='table', **kwargs):
"""Return selected rows of a table."""
# setup
if axis is not None and axis != 0:
raise NotImplementedError('only axis 0 is supported')
if out is not None:
# ar... | Return selected rows of a table. |
def read_plain_int64(file_obj, count):
    """Read `count` 64-bit ints using the plain encoding.

    :param file_obj: binary file-like object positioned at the data to read
    :param count: number of little-endian signed 64-bit integers to read
    :return: tuple of `count` ints
    """
    # struct.unpack accepts a str format directly; the previous
    # .encode("utf-8") round-trip was an unnecessary bytes conversion.
    return struct.unpack("<{}q".format(count), file_obj.read(8 * count))
def _is_duplicate_record(self, rtype, name, content):
"""Check if DNS entry already exists."""
records = self._list_records(rtype, name, content)
is_duplicate = len(records) >= 1
if is_duplicate:
LOGGER.info('Duplicate record %s %s %s, NOOP', rtype, name, content)
ret... | Check if DNS entry already exists. |
def estimate_parameters(self, max_dist_kb, size_bin_kb, display_graph):
"""
estimation by least square optimization of Rippe parameters on the
experimental data
:param max_dist_kb:
:param size_bin_kb:
"""
logger.info("estimation of the parameters of the model")
... | estimation by least square optimization of Rippe parameters on the
experimental data
:param max_dist_kb:
:param size_bin_kb: |
def reverse_mapping(mapping):
    """
    For every key, value pair, return the mapping for the
    equivalent value, key pair

    >>> reverse_mapping({'a': 'b'}) == {'b': 'a'}
    True
    """
    # A dict comprehension handles the empty mapping; the original
    # ``keys, values = zip(*mapping.items())`` raised ValueError on {}.
    return {value: key for key, value in mapping.items()}
def set_itunes_element(self):
"""Set each of the itunes elements."""
self.set_itunes_author_name()
self.set_itunes_block()
self.set_itunes_closed_captioned()
self.set_itunes_duration()
self.set_itunes_explicit()
self.set_itune_image()
self.set_itunes_order... | Set each of the itunes elements. |
def install_plugin(self, dir, entry_script=None):
"""
Install *Vim* plugin.
:param string dir: the root directory contains *Vim* script
:param string entry_script: path to the initializing script
"""
self.runtimepath.append(dir)
if entry_script is not None:
... | Install *Vim* plugin.
:param string dir: the root directory contains *Vim* script
:param string entry_script: path to the initializing script |
def active_brokers(self):
    """Set of brokers that are not inactive or decommissioned."""
    result = set()
    for broker in six.itervalues(self.brokers):
        if broker.inactive or broker.decommissioned:
            continue
        result.add(broker)
    return result
def is_equal(self, other):
    """
    If two intervals are the same
    """
    candidate = IntervalCell.coerce(other)
    return (candidate.low, candidate.high) == (self.low, self.high)
def read_data(self, **kwargs):
"""
get the data from the service
as the pocket service does not have any date
in its API linked to the note,
add the triggered date to the dict data
thus the service will be triggered when data will be found
... | get the data from the service
as the pocket service does not have any date
in its API linked to the note,
add the triggered date to the dict data
thus the service will be triggered when data will be found
:param kwargs: contain keyword args : trigger_id at le... |
def honeypot_exempt(view_func):
"""
Mark view as exempt from honeypot validation
"""
# borrowing liberally from django's csrf_exempt
def wrapped(*args, **kwargs):
return view_func(*args, **kwargs)
wrapped.honeypot_exempt = True
return wraps(view_func, assigned=available_attrs(vie... | Mark view as exempt from honeypot validation |
def get_feature_state_for_scope(self, feature_id, user_scope, scope_name, scope_value):
"""GetFeatureStateForScope.
[Preview API] Get the state of the specified feature for the given named scope
:param str feature_id: Contribution id of the feature
:param str user_scope: User-Scope at wh... | GetFeatureStateForScope.
[Preview API] Get the state of the specified feature for the given named scope
:param str feature_id: Contribution id of the feature
:param str user_scope: User-Scope at which to get the value. Should be "me" for the current user or "host" for all users.
:param s... |
def is_child_of_bin(self, id_, bin_id):
"""Tests if a bin is a direct child of another.
arg: id (osid.id.Id): an ``Id``
arg: bin_id (osid.id.Id): the ``Id`` of a bin
return: (boolean) - ``true`` if the ``id`` is a child of
``bin_id,`` ``false`` otherwise
r... | Tests if a bin is a direct child of another.
arg: id (osid.id.Id): an ``Id``
arg: bin_id (osid.id.Id): the ``Id`` of a bin
return: (boolean) - ``true`` if the ``id`` is a child of
``bin_id,`` ``false`` otherwise
raise: NotFound - ``bin_id`` is not found
r... |
def create_translation_field(translated_field, language):
"""
Takes the original field, a given language, a decider model and return a
Field class for model.
"""
cls_name = translated_field.__class__.__name__
if not isinstance(translated_field, tuple(SUPPORTED_FIELDS.keys())):
raise Imp... | Takes the original field, a given language, a decider model and return a
Field class for model. |
def deepish_copy(org):
"""Improved speed deep copy for dictionaries of simple python types.
Thanks to Gregg Lind:
http://writeonly.wordpress.com/2009/05/07/deepcopy-is-a-pig-for-simple-data/
"""
out = dict().fromkeys(org)
for k, v in org.items():
if isinstance(v, dict):
out[... | Improved speed deep copy for dictionaries of simple python types.
Thanks to Gregg Lind:
http://writeonly.wordpress.com/2009/05/07/deepcopy-is-a-pig-for-simple-data/ |
def _generate_token(self, length=32):
'''
_generate_token - internal function for generating randomized alphanumberic
strings of a given length
'''
return ''.join(choice(ascii_letters + digits) for x in range(length)) | _generate_token - internal function for generating randomized alphanumberic
strings of a given length |
def bulk_exports(self):
    """
    :returns: Version bulk_exports of preview
    :rtype: twilio.rest.preview.bulk_exports.BulkExports
    """
    cached = self._bulk_exports
    if cached is None:
        cached = BulkExports(self)
        self._bulk_exports = cached
    return cached
def unmajority(p, a, b, c):
    """Apply the un-majority gate sequence (CCX, then two CX) to circuit ``p``."""
    gate_sequence = (
        (p.ccx, (a, b, c)),
        (p.cx, (c, a)),
        (p.cx, (a, b)),
    )
    for gate, qubits in gate_sequence:
        gate(*qubits)
def to_xml(self, opts = defaultdict(lambda: None)):
'''
Generate XML from the current settings.
'''
if not self.launch_url or not self.secure_launch_url:
raise InvalidLTIConfigError('Invalid LTI configuration')
root = etree.Element('cartridge_basiclti_link', attrib ... | Generate XML from the current settings. |
def git_list_refs(repo_dir):
"""List references available in the local repo with commit ids.
This is similar to ls-remote, but shows the *local* refs.
Return format:
.. code-block:: python
{<ref1>: <commit_hash1>,
<ref2>: <commit_hash2>,
...,
<refN>: <commit_hashN>... | List references available in the local repo with commit ids.
This is similar to ls-remote, but shows the *local* refs.
Return format:
.. code-block:: python
{<ref1>: <commit_hash1>,
<ref2>: <commit_hash2>,
...,
<refN>: <commit_hashN>,
} |
def iter(self, count=0, func=sum):
    '''Endless generator of dice-roll results.

    :param count: [0] Return list of ``count`` sums
    :param func: [sum] Apply func to list of individual die rolls func([])
    '''
    while True:
        result = self.roll(count, func)
        yield result
def add(self, field, data_type=None, nullable=True, metadata=None):
"""
Construct a StructType by adding new elements to it to define the schema. The method accepts
either:
a) A single parameter which is a StructField object.
b) Between 2 and 4 parameters as (name, data_... | Construct a StructType by adding new elements to it to define the schema. The method accepts
either:
a) A single parameter which is a StructField object.
b) Between 2 and 4 parameters as (name, data_type, nullable (optional),
metadata(optional). The data_type parameter ma... |
def _scheduleUpgrade(self,
ev_data: UpgradeLogData,
failTimeout) -> None:
"""
Schedules node upgrade to a newer version
:param ev_data: upgrade event parameters
"""
logger.info(
"{}'s upgrader processing upgrade for v... | Schedules node upgrade to a newer version
:param ev_data: upgrade event parameters |
def clean_gff(gff, cleaned, add_chr=False, chroms_to_ignore=None,
featuretypes_to_ignore=None):
"""
Cleans a GFF file by removing features on unwanted chromosomes and of
unwanted featuretypes. Optionally adds "chr" to chrom names.
"""
logger.info("Cleaning GFF")
chroms_to_ignore =... | Cleans a GFF file by removing features on unwanted chromosomes and of
unwanted featuretypes. Optionally adds "chr" to chrom names. |
def get_params(url, ignore_empty=False):
"""
Static method that parses a given `url` and retrieves `url`'s parameters. Could also ignore empty value parameters.
Handles parameters-only urls as `q=banana&peel=false`.
:param str url: url to parse
:param bool ignore_empty: ignore e... | Static method that parses a given `url` and retrieves `url`'s parameters. Could also ignore empty value parameters.
Handles parameters-only urls as `q=banana&peel=false`.
:param str url: url to parse
:param bool ignore_empty: ignore empty value parameter or not
:return: dictionary of pa... |
def add_term_facet(self, *args, **kwargs):
    """Build a ``TermFacet`` from the given arguments and append it to
    ``self.facets``."""
    facet = TermFacet(*args, **kwargs)
    self.facets.append(facet)
def wait_for_servers(session, servers):
"""Wait for the servers to be ready.
Note(msimonin): we don't garantee the SSH connection to be ready.
"""
nclient = nova.Client(NOVA_VERSION, session=session,
region_name=os.environ['OS_REGION_NAME'])
while True:
deployed = ... | Wait for the servers to be ready.
Note(msimonin): we don't garantee the SSH connection to be ready. |
def to_html(self):
"""Render a Paragraph MessageElement as html
:returns: The html representation of the Paragraph MessageElement
"""
if self.text is None:
return
else:
return '<p%s>%s%s</p>' % (
self.html_attributes(), self.html_icon(), ... | Render a Paragraph MessageElement as html
:returns: The html representation of the Paragraph MessageElement |
def _migrate_db_pre010(self, dbname, newslab):
'''
Check for any pre-010 entries in 'dbname' in my slab and migrate those to the new slab.
Once complete, drop the database from me with the name 'dbname'
Returns (bool): True if a migration occurred, else False
'''
doneke... | Check for any pre-010 entries in 'dbname' in my slab and migrate those to the new slab.
Once complete, drop the database from me with the name 'dbname'
Returns (bool): True if a migration occurred, else False |
def get_version_info():
"""
Return astropy and photutils versions.
Returns
-------
result : str
The astropy and photutils versions.
"""
from astropy import __version__
astropy_version = __version__
from photutils import __version__
photutils_version = __version__
... | Return astropy and photutils versions.
Returns
-------
result : str
The astropy and photutils versions. |
def datetime_to_ns(then):
"""Transform a :any:`datetime.datetime` into a NationStates-style
string.
For example "6 days ago", "105 minutes ago", etc.
"""
if then == datetime(1970, 1, 1, 0, 0):
return 'Antiquity'
now = datetime.utcnow()
delta = now - then
seconds = delta.total_s... | Transform a :any:`datetime.datetime` into a NationStates-style
string.
For example "6 days ago", "105 minutes ago", etc. |
def _apply_dvs_config(config_spec, config_dict):
'''
Applies the values of the config dict dictionary to a config spec
(vim.VMwareDVSConfigSpec)
'''
if config_dict.get('name'):
config_spec.name = config_dict['name']
if config_dict.get('contact_email') or config_dict.get('contact_name'):
... | Applies the values of the config dict dictionary to a config spec
(vim.VMwareDVSConfigSpec) |
def validate_enum_attribute(fully_qualified_name: str, spec: Dict[str, Any], attribute: str,
candidates: Set[Union[str, int, float]]) -> Optional[InvalidValueError]:
""" Validates to ensure that the value of an attribute lies within an allowed set of candidates """
if attribute not ... | Validates to ensure that the value of an attribute lies within an allowed set of candidates |
def _qmed_from_pot_records(self):
"""
Return QMED estimate based on peaks-over-threshold (POT) records.
Methodology source: FEH, Vol. 3, pp. 77-78
:return: QMED in m³/s
:rtype: float
"""
pot_dataset = self.catchment.pot_dataset
if not pot_dataset:
... | Return QMED estimate based on peaks-over-threshold (POT) records.
Methodology source: FEH, Vol. 3, pp. 77-78
:return: QMED in m³/s
:rtype: float |
def _vowelinstem(self, stem):
"""vowelinstem(stem) is TRUE <=> stem contains a vowel"""
for i in range(len(stem)):
if not self._cons(stem, i):
return True
return False | vowelinstem(stem) is TRUE <=> stem contains a vowel |
def _write_adminfile(kwargs):
'''
Create a temporary adminfile based on the keyword arguments passed to
pkg.install.
'''
# Set the adminfile default variables
email = kwargs.get('email', '')
instance = kwargs.get('instance', 'quit')
partial = kwargs.get('partial', 'nocheck')
runlevel... | Create a temporary adminfile based on the keyword arguments passed to
pkg.install. |
def job_get_log(object_id, input_params={}, always_retry=False, **kwargs):
"""
Invokes the /job-xxxx/getLog API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FgetLog
"""
return DXHTTPRequest('/%s/getLog' % object_i... | Invokes the /job-xxxx/getLog API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Applets-and-Entry-Points#API-method%3A-%2Fjob-xxxx%2FgetLog |
def visit_Dict(self, node: AST, dfltChaining: bool = True) -> str:
    """Return dict representation of `node`s elements.

    Each key/value pair is rendered by visiting both children and joining
    them with ``': '``; the pairs are comma-separated inside braces.
    """
    rendered = [
        f"{self.visit(k)}: {self.visit(v)}"
        for k, v in zip(node.keys, node.values)
    ]
    return "{" + ", ".join(rendered) + "}"
def locality_preserving_projections(self, coordinates, num_dims=None):
'''Locality Preserving Projections (LPP, linearized Laplacian Eigenmaps).'''
X = np.atleast_2d(coordinates) # n x d
L = self.laplacian(normed=True) # n x n
u,s,_ = np.linalg.svd(X.T.dot(X))
Fplus = np.linalg.pinv(u * np.sqrt(s)... | Locality Preserving Projections (LPP, linearized Laplacian Eigenmaps). |
def _init_go2ntpresent(go_ntsets, go_all, gosubdag):
"""Mark all GO IDs with an X if present in the user GO list."""
go2ntpresent = {}
ntobj = namedtuple('NtPresent', " ".join(nt.hdr for nt in go_ntsets))
# Get present marks for GO sources
for goid_all in go_all:
pres... | Mark all GO IDs with an X if present in the user GO list. |
def _create_data_files_directory(symlink=False):
"""Install data_files in the /etc directory."""
current_directory = os.path.abspath(os.path.dirname(__file__))
etc_kytos = os.path.join(BASE_ENV, ETC_KYTOS)
if not os.path.exists(etc_kytos):
os.makedirs(etc_kytos)
sr... | Install data_files in the /etc directory. |
def get_jids():
'''
Return a list of all job ids
'''
with _get_serv(ret=None, commit=True) as cur:
sql = '''SELECT jid, load
FROM jids'''
cur.execute(sql)
data = cur.fetchall()
ret = {}
for jid, load in data:
ret[jid] = salt.utils.jid... | Return a list of all job ids |
def AddBlob(self, blob_hash, length, chunk_number):
"""Add another blob to this image using its hash."""
if len(blob_hash.AsBytes()) != self._HASH_SIZE:
raise ValueError("Hash '%s' doesn't have correct length (%d)." %
(blob_hash, self._HASH_SIZE))
# If we're adding a new blob, ... | Add another blob to this image using its hash. |
def change_tz(cal, new_timezone, default, utc_only=False, utc_tz=icalendar.utc):
"""
Change the timezone of the specified component.
Args:
cal (Component): the component to change
new_timezone (tzinfo): the timezone to change to
default (tzinfo): a timezone to assume if the dtstart ... | Change the timezone of the specified component.
Args:
cal (Component): the component to change
new_timezone (tzinfo): the timezone to change to
default (tzinfo): a timezone to assume if the dtstart or dtend in cal
doesn't have an existing timezone
utc_only (bool): only ... |
def determine_band_channel(kal_out):
"""Return band, channel, target frequency from kal output."""
band = ""
channel = ""
tgt_freq = ""
while band == "":
for line in kal_out.splitlines():
if "Using " in line and " channel " in line:
band = str(line.split()[1])
... | Return band, channel, target frequency from kal output. |
def rolling_window(array, axis, window, center, fill_value):
"""
Make an ndarray with a rolling window of axis-th dimension.
The rolling dimension will be placed at the last dimension.
"""
if isinstance(array, dask_array_type):
return dask_array_ops.rolling_window(
array, axis, w... | Make an ndarray with a rolling window of axis-th dimension.
The rolling dimension will be placed at the last dimension. |
def _ensure_programmer_executable():
""" Find the lpc21isp executable and ensure it is executable
"""
# Find the lpc21isp executable, explicitly allowing the case where it
# is not executable (since that’s exactly what we’re trying to fix)
updater_executable = shutil.which('lpc21isp',
... | Find the lpc21isp executable and ensure it is executable |
def tarball_files(work_dir, tar_name, uuid=None, files=None):
"""
Tars a group of files together into a tarball
work_dir: str Current Working Directory
tar_name: str Name of tarball
uuid: str UUID to stamp files with
files: str(s) List of filenames to place in the ta... | Tars a group of files together into a tarball
work_dir: str Current Working Directory
tar_name: str Name of tarball
uuid: str UUID to stamp files with
files: str(s) List of filenames to place in the tarball from working directory |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.