code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _conv(self, name, x, filter_size, in_filters, out_filters, strides):
"""Convolution."""
with tf.variable_scope(name):
n = filter_size * filter_size * out_filters
kernel = tf.get_variable(
"DW", [filter_size, filter_size, in_filters, out_filters],
... | Convolution. |
def get_comment_ancestors(comID, depth=None):
"""
Returns the list of ancestors of the given comment, ordered from
oldest to newest ("top-down": direct parent of comID is at last position),
up to given depth
:param comID: the ID of the comment for which we want to retrieve ancestors
:type comID... | Returns the list of ancestors of the given comment, ordered from
oldest to newest ("top-down": direct parent of comID is at last position),
up to given depth
:param comID: the ID of the comment for which we want to retrieve ancestors
:type comID: int
:param depth: the maximum of levels up from the ... |
def run(self):
'''Start scheduler loop'''
logger.info("scheduler starting...")
while not self._quit:
try:
time.sleep(self.LOOP_INTERVAL)
self.run_once()
self._exceptions = 0
except KeyboardInterrupt:
break
... | Start scheduler loop |
async def sentinel_monitor(self, name, ip, port, quorum):
    """Register a new master with Sentinel so it starts monitoring it."""
    command_args = ('SENTINEL MONITOR', name, ip, port, quorum)
    return await self.execute_command(*command_args)
def get_xy_from_linecol(self, line, col, offsets, factors):
"""Get the intermediate coordinates from line & col.
Intermediate coordinates are actually the instruments scanning angles.
"""
loff, coff = offsets
lfac, cfac = factors
x__ = (col - coff) / cfac * 2**16
... | Get the intermediate coordinates from line & col.
Intermediate coordinates are actually the instruments scanning angles. |
def del_repo(repo, **kwargs):
'''
Delete a repo from the sources.list / sources.list.d
If the .list file is in the sources.list.d directory
and the file that the repo exists in does not contain any other
repo configuration, the file itself will be deleted.
The repo passed in must be a fully fo... | Delete a repo from the sources.list / sources.list.d
If the .list file is in the sources.list.d directory
and the file that the repo exists in does not contain any other
repo configuration, the file itself will be deleted.
The repo passed in must be a fully formed repository definition
string.
... |
def get_volumes_for_sdc(self, sdcObj):
"""
:param sdcObj: SDC object
:return: list of Volumes attached to SDC
:rtyoe: ScaleIO Volume object
"""
self.conn.connection._check_login()
all_volumes = []
response = self.conn.connection._do_get("{}/{}{}/{}".format... | :param sdcObj: SDC object
:return: list of Volumes attached to SDC
:rtype: ScaleIO Volume object |
def _client_tagged(self, tags):
'''ensure that the client name is included in a list of tags. This is
important for matching builders to the correct client. We exit
on fail.
Parameters
==========
tags: a list of tags to look for client name in
... | ensure that the client name is included in a list of tags. This is
important for matching builders to the correct client. We exit
on fail.
Parameters
==========
tags: a list of tags to look for client name in |
def _get_phantom_root_catalog(self, cat_name, cat_class):
"""Get's the catalog id corresponding to the root of all implementation catalogs."""
catalog_map = make_catalog_map(cat_name, identifier=PHANTOM_ROOT_IDENTIFIER)
return cat_class(osid_object_map=catalog_map, runtime=self._runtime, proxy=s... | Get's the catalog id corresponding to the root of all implementation catalogs. |
def _validate_namespace(self, namespace):
"""Validates a namespace, raising a ResponseFailed error if invalid.
Args:
state_root (str): The state_root to validate
Raises:
ResponseFailed: The state_root was invalid, and a status of
INVALID_ROOT will be sen... | Validates a namespace, raising a ResponseFailed error if invalid.
Args:
state_root (str): The state_root to validate
Raises:
ResponseFailed: The state_root was invalid, and a status of
INVALID_ROOT will be sent with the response. |
def set_env(envName, envValue):
    """
    Append a value to an environment variable (PATH-style, colon-separated).

    If the variable is not set yet, it is created with ``envValue`` alone;
    the original implementation raised ``KeyError`` in that case.

    :params envName: name of the environment variable
    :params envValue: value to append
    """
    current = os.environ.get(envName)
    if current is None:
        os.environ[envName] = envValue
    else:
        os.environ[envName] = current + ':' + envValue
:params envName: envๅๅญ
:params envValue: ๅผ |
def get_neg_one_task_agent(generators, market, nOffer, maxSteps):
""" Returns a task-agent tuple whose action is always minus one.
"""
env = pyreto.discrete.MarketEnvironment(generators, market, nOffer)
task = pyreto.discrete.ProfitTask(env, maxSteps=maxSteps)
agent = pyreto.util.NegOneAgent(env.out... | Returns a task-agent tuple whose action is always minus one. |
def upload_job_chunk_list(self, upload_job_id, **kwargs): # noqa: E501
"""List all metadata for uploaded chunks # noqa: E501
List all metadata for uploaded chunks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass a... | List all metadata for uploaded chunks # noqa: E501
List all metadata for uploaded chunks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.upload_job_chunk_list(upload_job_id, asyn... |
def get_session():
"""Build the session object."""
# NOTE(msimonin): We provide only a basic support which focus
# Chameleon cloud and its rc files
if os.environ.get("OS_IDENTITY_API_VERSION") == "3":
logging.info("Creating a v3 Keystone Session")
auth = v3.Password(
auth_url... | Build the session object. |
def class_statistics(TP, TN, FP, FN, classes, table):
"""
Return all class statistics.
:param TP: true positive dict for all classes
:type TP : dict
:param TN: true negative dict for all classes
:type TN : dict
:param FP: false positive dict for all classes
:type FP : dict
:param FN... | Return all class statistics.
:param TP: true positive dict for all classes
:type TP : dict
:param TN: true negative dict for all classes
:type TN : dict
:param FP: false positive dict for all classes
:type FP : dict
:param FN: false negative dict for all classes
:type FN : dict
:par... |
def set_settings_file_path(self, settings_file_path):
"""Currently, it is an error to change this property on any machine.
Later this will allow setting a new path for the settings file, with
automatic relocation of all files (including snapshots and disk images)
which are inside the bas... | Currently, it is an error to change this property on any machine.
Later this will allow setting a new path for the settings file, with
automatic relocation of all files (including snapshots and disk images)
which are inside the base directory. This operation is only allowed
when there ar... |
def define_options(default_conf):
"""
Define the options from default.conf dynamically
"""
default = {}
with open(default_conf, 'rb') as f:
exec_in(native_str(f.read()), {}, default)
for name, value in default.iteritems():
# if the option is already defined by tornado
# ... | Define the options from default.conf dynamically |
def get_station_by_name(self,
station_name,
num_minutes=None,
direction=None,
destination=None,
stops_at=None):
"""Returns all trains due to serve station `station_name`.
... | Returns all trains due to serve station `station_name`.
@param station_code
@param num_minutes. Only trains within this time. Between 5 and 90
@param direction Filter by direction. Northbound or Southbound
@param destination Filter by name of the destination stations
@param stops... |
def set(self):
"""set the event to triggered
after calling this method, all greenlets waiting on the event will be
rescheduled, and calling :meth:`wait` will not block until
:meth:`clear` has been called
"""
self._is_set = True
scheduler.state.awoken_from_events.... | set the event to triggered
after calling this method, all greenlets waiting on the event will be
rescheduled, and calling :meth:`wait` will not block until
:meth:`clear` has been called |
def get_experiments(self, workspace_id):
    """Retrieve the list of experiments in a workspace via an HTTP GET request."""
    return self._send_get_req(self.EXPERIMENTS_URI_FMT.format(workspace_id))
def remove_interface_router(self, router, body=None):
    """Removes an internal network interface from the specified router."""
    url = (self.router_path % router) + "/remove_router_interface"
    return self.put(url, body=body)
def index(self, block_start, block_end):
"""
Entry point for indexing:
* scan the blockchain from start_block to end_block and make sure we're up-to-date
* process any newly-arrived zone files and re-index the affected subdomains
"""
log.debug("BEGIN Processing zonefiles ... | Entry point for indexing:
* scan the blockchain from start_block to end_block and make sure we're up-to-date
* process any newly-arrived zone files and re-index the affected subdomains |
def cal_dist_between_2_coord_frame_aligned_boxes(box1_pos, box1_size, box2_pos, box2_size):
""" Calculate Euclidean distance between two boxes those edges are parallel to the coordinate axis
The function decides condition based which corner to corner or edge to edge distance needs to be calculated.
:param... | Calculate Euclidean distance between two boxes those edges are parallel to the coordinate axis
The function decides condition based which corner to corner or edge to edge distance needs to be calculated.
:param tuple box1_pos: x and y position of box 1
:param tuple box1_size: x and y size of box 1
:pa... |
def p_int(self, tree):
    ''' V ::= INTEGER '''
    # NOTE: the docstring above is presumably consumed as a grammar
    # production by a SPARK-style parser generator -- do not edit it.
    raw = tree.attr
    tree.svalue = raw
    tree.value = int(raw)
def get_object(self, id, **args):
    """Fetches the given object from the graph."""
    path = "{0}/{1}".format(self.version, id)
    return self.request(path, args)
def update_firewall_rule(self, firewall_rule, protocol=None, action=None,
name=None, description=None, ip_version=None,
source_ip_address=None, destination_ip_address=None, source_port=None,
destination_port=None, shared=None, enable... | Update a firewall rule |
def multi_label_morphology(image, operation, radius, dilation_mask=None, label_list=None, force=False):
"""
Morphology on multi label images.
Wraps calls to iMath binary morphology. Additionally, dilation and closing operations preserve
pre-existing labels. The choices of operation are:
Di... | Morphology on multi label images.
Wraps calls to iMath binary morphology. Additionally, dilation and closing operations preserve
pre-existing labels. The choices of operation are:
Dilation: dilates all labels sequentially, but does not overwrite original labels.
This reduces dependence on the ... |
def _compute_missing_deps(self, src_tgt, actual_deps):
"""Computes deps that are used by the compiler but not specified in a BUILD file.
These deps are bugs waiting to happen: the code may happen to compile because the dep was
brought in some other way (e.g., by some other root target), but that is obvious... | Computes deps that are used by the compiler but not specified in a BUILD file.
These deps are bugs waiting to happen: the code may happen to compile because the dep was
brought in some other way (e.g., by some other root target), but that is obviously fragile.
Note that in practice we're OK with reliance ... |
def untranslated_policy(self, default):
    '''Look up the configured policy for untranslated content, falling
    back to `default` when no policy setting is present.'''
    policy_key = self.info.get('policy', None)
    return self.generator.settings.get(policy_key, default)
def servicegroup_server_enable(sg_name, s_name, s_port, **connection_args):
'''
Enable a server:port member of a servicegroup
CLI Example:
.. code-block:: bash
salt '*' netscaler.servicegroup_server_enable 'serviceGroupName' 'serverName' 'serverPort'
'''
ret = True
server = _servi... | Enable a server:port member of a servicegroup
CLI Example:
.. code-block:: bash
salt '*' netscaler.servicegroup_server_enable 'serviceGroupName' 'serverName' 'serverPort' |
def as_fstring(text):
"""expansion with python f-string, usually ok, but with the case
of ' inside expressions, adding repr will add backslash to it
and cause trouble.
"""
for quote in ('"""', "'''"):
# although script-format always ends with \n, direct use of this function might
... | expansion with python f-string, usually ok, but with the case
of ' inside expressions, adding repr will add backslash to it
and cause trouble. |
def getCitiesDrawingXML(points):
''' Build an XML string that contains a square for each city'''
xml = ""
for p in points:
x = str(p.x)
z = str(p.y)
xml += '<DrawBlock x="' + x + '" y="7" z="' + z + '" type="beacon"/>'
xml += '<DrawItem x="' + x + '" y="10" z="' + z + '" type... | Build an XML string that contains a square for each city |
def get_legal_params(self, method):
'''Given a API name, list all legal parameters using boto3 service model.'''
if method not in self.client.meta.method_to_api_mapping:
# Injected methods. Ignore.
return []
api = self.client.meta.method_to_api_mapping[method]
shape = self.client.meta.servic... | Given a API name, list all legal parameters using boto3 service model. |
def wait(self):
""" Block for user input """
text = input(
"Press return for next %d result%s (or type 'all'):"
% (self.pagesize, plural(self.pagesize))
)
if text:
if text.lower() in ["a", "all"]:
self._pagesize = 0
elif tex... | Block for user input |
def get_name_cost( db, name ):
"""
Get the cost of a name, given the fully-qualified name.
Do so by finding the namespace it belongs to (even if the namespace is being imported).
Return {'amount': ..., 'units': ...} on success
Return None if the namespace has not been declared
"""
lastblock... | Get the cost of a name, given the fully-qualified name.
Do so by finding the namespace it belongs to (even if the namespace is being imported).
Return {'amount': ..., 'units': ...} on success
Return None if the namespace has not been declared |
def create_role_policy(role_name, policy_name, policy, region=None, key=None,
keyid=None, profile=None):
'''
Create or modify a role policy.
CLI Example:
.. code-block:: bash
salt myminion boto_iam.create_role_policy myirole mypolicy '{"MyPolicy": "Statement": [{"Action... | Create or modify a role policy.
CLI Example:
.. code-block:: bash
salt myminion boto_iam.create_role_policy myirole mypolicy '{"MyPolicy": "Statement": [{"Action": ["sqs:*"], "Effect": "Allow", "Resource": ["arn:aws:sqs:*:*:*"], "Sid": "MyPolicySqs1"}]}' |
def paintEvent(self, event):
""" Reimplemented to paint the background panel.
"""
painter = QtGui.QStylePainter(self)
option = QtGui.QStyleOptionFrame()
option.initFrom(self)
painter.drawPrimitive(QtGui.QStyle.PE_PanelTipLabel, option)
painter.end()
super... | Reimplemented to paint the background panel. |
def fixed_crop(src, x0, y0, w, h, size=None, interp=2):
"""Crop src at fixed location, and (optionally) resize it to size.
Parameters
----------
src : NDArray
Input image
x0 : int
Left boundary of the cropping area
y0 : int
Top boundary of the cropping area
w : int
... | Crop src at fixed location, and (optionally) resize it to size.
Parameters
----------
src : NDArray
Input image
x0 : int
Left boundary of the cropping area
y0 : int
Top boundary of the cropping area
w : int
Width of the cropping area
h : int
Height of... |
def autocomplete():
"""Command and option completion for the main option parser (and options)
and its subcommands (and options).
Enable by sourcing one of the completion shell scripts (bash or zsh).
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'PIP_AUTO_COMPLETE' not in ... | Command and option completion for the main option parser (and options)
and its subcommands (and options).
Enable by sourcing one of the completion shell scripts (bash or zsh). |
def write(self, output):
"""Passthrough for pyserial Serial.write().
Args:
output (str): Block to write to port
"""
view_str = output.encode('ascii', 'ignore')
if (len(view_str) > 0):
self.m_ser.write(view_str)
self.m_ser.flush()
s... | Passthrough for pyserial Serial.write().
Args:
output (str): Block to write to port |
def _parse_urls(self, match):
'''Parse URLs.'''
mat = match.group(0)
# Fix a bug in the regex concerning www...com and www.-foo.com domains
# TODO fix this in the regex instead of working around it here
domain = match.group(5)
if domain[0] in '.-':
return ma... | Parse URLs. |
def via_upnp():
""" Use SSDP as described by the Philips guide """
ssdp_list = ssdp_discover("ssdp:all", timeout=5)
#import pickle
#with open("ssdp.pickle", "wb") as f:
#pickle.dump(ssdp_list,f)
bridges_from_ssdp = [u for u in ssdp_list if 'IpBridge' in u.server]
logger.info('SSDP return... | Use SSDP as described by the Philips guide |
def create_model(cls, data: dict, fields=None):
'''
Creates model instance from data (dict).
'''
if fields is None:
fields = set(cls._fields.keys())
else:
if not isinstance(fields, set):
fields = set(fields)
new_keys = set(data.keys... | Creates model instance from data (dict). |
def get_primitives_paths():
"""Get the list of folders where the primitives will be looked for.
This list will include the value of any `entry_point` named `jsons_path` published under
the name `mlprimitives`.
An example of such an entry point would be::
entry_points = {
'mlprimit... | Get the list of folders where the primitives will be looked for.
This list will include the value of any `entry_point` named `jsons_path` published under
the name `mlprimitives`.
An example of such an entry point would be::
entry_points = {
'mlprimitives': [
'jsons_pat... |
def build_beta_part(ruleset, alpha_terminals):
"""
Given a set of already adapted rules, and a dictionary of
patterns and alpha_nodes, wire up the beta part of the RETE
network.
"""
for rule in ruleset:
if isinstance(rule[0], OR):
for subrule ... | Given a set of already adapted rules, and a dictionary of
patterns and alpha_nodes, wire up the beta part of the RETE
network. |
def vector_normalize(vector_in, decimals=18):
""" Generates a unit vector from the input.
:param vector_in: vector to be normalized
:type vector_in: list, tuple
:param decimals: number of significands
:type decimals: int
:return: the normalized vector (i.e. the unit vector)
:rtype: list
... | Generates a unit vector from the input.
:param vector_in: vector to be normalized
:type vector_in: list, tuple
:param decimals: number of significands
:type decimals: int
:return: the normalized vector (i.e. the unit vector)
:rtype: list |
def submit(self, pixels, queue=None, debug=False, configfile=None):
"""
Submit the likelihood job for the given pixel(s).
"""
# For backwards compatibility
batch = self.config['scan'].get('batch',self.config['batch'])
queue = batch['cluster'] if queue is None else queue
... | Submit the likelihood job for the given pixel(s). |
def includeme(config):
""" The callable makes it possible to include rpcinterface
in a Pyramid application.
Calling ``config.include(twitcher.rpcinterface)`` will result in this
callable being called.
Arguments:
* ``config``: the ``pyramid.config.Configurator`` object.
"""
settings = ... | The callable makes it possible to include rpcinterface
in a Pyramid application.
Calling ``config.include(twitcher.rpcinterface)`` will result in this
callable being called.
Arguments:
* ``config``: the ``pyramid.config.Configurator`` object. |
def assignmentComplete():
    """ASSIGNMENT COMPLETE Section 9.1.3"""
    # Layer the packet: protocol discriminator / message type / RR cause.
    # mesType 0x29 == 0b00101001.
    return TpPd(pd=0x6) / MessageType(mesType=0x29) / RrCause()
def upload_jterator_project_files(self, directory):
'''Uploads the *jterator* project description from files on disk in
YAML format. It expects a ``pipeline.yaml`` file in `directory` and
optionally ``*handles.yaml`` files in a ``handles`` subfolder of
`directory`.
Parameters
... | Uploads the *jterator* project description from files on disk in
YAML format. It expects a ``pipeline.yaml`` file in `directory` and
optionally ``*handles.yaml`` files in a ``handles`` subfolder of
`directory`.
Parameters
----------
directory: str
path to the... |
def get_model(name):
    """
    Look up a model class by its verbose name (case-insensitive), so that
    steps can refer to models by their human-readable names.
    """
    model = MODELS.get(name.lower())
    assert model, "Could not locate model by name '%s'" % name
    return model
use the models verbose name in steps. |
def crop(im, r, c, sz):
    '''
    Return the sz-by-sz square region of image `im` whose top-left
    corner is at row `r`, column `c`.
    '''
    rows = slice(r, r + sz)
    cols = slice(c, c + sz)
    return im[rows, cols]
def _check_pip_installed():
"""
Invoke `pip --version` and make sure it doesn't error.
Use check_output to capture stdout and stderr
Invokes pip by the same manner that we plan to in _call_pip()
Don't bother trying to reuse _call_pip to do this... Finnicky and not worth
the effort.
"""
... | Invoke `pip --version` and make sure it doesn't error.
Use check_output to capture stdout and stderr
Invokes pip by the same manner that we plan to in _call_pip()
Don't bother trying to reuse _call_pip to do this... Finnicky and not worth
the effort. |
def _f_cash_root(x, counts, bkg, model):
"""
Function to find root of. Described in Appendix A, Stewart (2009).
Parameters
----------
x : float
Model amplitude.
counts : `~numpy.ndarray`
Count map slice, where model is defined.
bkg : `~numpy.ndarray`
Background map s... | Function to find root of. Described in Appendix A, Stewart (2009).
Parameters
----------
x : float
Model amplitude.
counts : `~numpy.ndarray`
Count map slice, where model is defined.
bkg : `~numpy.ndarray`
Background map slice, where model is defined.
model : `~numpy.nda... |
def get_parser():
"""Get parser for mpu."""
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
parser = ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('--version',
action='version'... | Get parser for mpu. |
def _freeze(self, final_text, err=False):
"""Stop spinner, compose last frame and 'freeze' it."""
if not final_text:
final_text = ""
target = self.stderr if err else self.stdout
if target.closed:
target = sys.stderr if err else sys.stdout
text = to_text(fi... | Stop spinner, compose last frame and 'freeze' it. |
def deserialize(cls, serializer, wf_spec, s_state, **kwargs):
    """
    Reconstruct a trigger from its serialized state.

    Delegates to the serializer's ``deserialize_trigger`` hook.
    """
    return serializer.deserialize_trigger(wf_spec, s_state, **kwargs)
def load_classes(cls, fail_silently=True):
"""Load all the classes for a plugin.
Produces a sequence containing the identifiers and their corresponding
classes for all of the available instances of this plugin.
fail_silently causes the code to simply log warnings if a
plugin ca... | Load all the classes for a plugin.
Produces a sequence containing the identifiers and their corresponding
classes for all of the available instances of this plugin.
fail_silently causes the code to simply log warnings if a
plugin cannot import. The goal is to be able to use part of
... |
def replace(self, **kwargs):
"""
Return: a new :class:`AsideUsageKeyV2` with ``KEY_FIELDS`` specified in ``kwargs`` replaced
with their corresponding values. Deprecation value is also preserved.
"""
if 'usage_key' in kwargs:
for attr in self.USAGE_KEY_ATTRS:
... | Return: a new :class:`AsideUsageKeyV2` with ``KEY_FIELDS`` specified in ``kwargs`` replaced
with their corresponding values. Deprecation value is also preserved. |
def by_chat_command(prefix=('/',), separator=' ', pass_args=False):
"""
:param prefix:
a list of special characters expected to indicate the head of a command.
:param separator:
a command may be followed by arguments separated by ``separator``.
:type pass_args: bool
:param pass_arg... | :param prefix:
a list of special characters expected to indicate the head of a command.
:param separator:
a command may be followed by arguments separated by ``separator``.
:type pass_args: bool
:param pass_args:
If ``True``, arguments following a command will be passed to the hand... |
def transform(self, *axes, verbose=True):
"""Transform the data.
Parameters
----------
axes : strings
Expressions for the new set of axes.
verbose : boolean (optional)
Toggle talkback. Default is True
See Also
--------
set_constan... | Transform the data.
Parameters
----------
axes : strings
Expressions for the new set of axes.
verbose : boolean (optional)
Toggle talkback. Default is True
See Also
--------
set_constants
Similar method except for constants |
def progress(self, *restrictions, display=True):
"""
report progress of populating the table
:return: remaining, total -- tuples to be populated
"""
todo = self._jobs_to_do(restrictions)
total = len(todo)
remaining = len(todo - self.target)
if display:
... | report progress of populating the table
:return: remaining, total -- tuples to be populated |
def parse_epcr(self):
"""
Parse the ePCR output file. Populate dictionary of resutls. For alleles, find the best result based on the
number of mismatches before populating dictionary
"""
# Use the metadata object from the vtyper_object
for sample in self.vtyper_object.met... | Parse the ePCR output file. Populate dictionary of resutls. For alleles, find the best result based on the
number of mismatches before populating dictionary |
def interleave(infile_1, infile_2, outfile, suffix1=None, suffix2=None):
'''Makes interleaved file from two sequence files. If used, will append suffix1 onto end
of every sequence name in infile_1, unless it already ends with suffix1. Similar for sufffix2.'''
seq_reader_1 = sequences.file_reader(infile_1)
... | Makes interleaved file from two sequence files. If used, will append suffix1 onto end
of every sequence name in infile_1, unless it already ends with suffix1. Similar for sufffix2. |
def make_middleware(app=None, *args, **kw):
    """Wrap ``app`` in RaptorizeMiddleware and return the wrapped app."""
    return RaptorizeMiddleware(app, *args, **kw)
def validate(cls, event_info):
"""Validate that provided event information is valid."""
assert 'routing_key' in event_info
assert isinstance(event_info['routing_key'], six.string_types)
assert 'event_action' in event_info
assert event_info['event_action'] in cls.EVENT_TYPES
... | Validate that provided event information is valid. |
def transformer_encoder_layers(inputs,
num_layers,
hparams,
attention_type=AttentionType.GLOBAL,
self_attention_bias=None,
q_padding="VALID",
... | Multi layer transformer encoder. |
def get_last_modified_date(
self,
bucket: str,
key: str,
) -> datetime:
"""
Retrieves last modified date for a given key in a given bucket.
:param bucket: the bucket the object resides in.
:param key: the key of the object for which the last modifi... | Retrieves last modified date for a given key in a given bucket.
:param bucket: the bucket the object resides in.
:param key: the key of the object for which the last modified date is being retrieved.
:return: the last modified date |
def drop_collection(request, database_name, collection_name):
"""Drop Collection"""
name = """Retype "%s" to drop the collection""" % (collection_name)
if request.method == 'POST':
form = ConfirmDropForm(request.POST)
if form.is_valid():
name = form.cleaned_data['name']
... | Drop Collection |
def _readmodule(module, path, inpackage=None):
'''Do the hard work for readmodule[_ex].
If INPACKAGE is given, it must be the dotted name of the package in
which we are searching for a submodule, and then PATH must be the
package search path; otherwise, we are searching for a top-level
module, and ... | Do the hard work for readmodule[_ex].
If INPACKAGE is given, it must be the dotted name of the package in
which we are searching for a submodule, and then PATH must be the
package search path; otherwise, we are searching for a top-level
module, and PATH is combined with sys.path. |
def resolve(cls, propname, objcls=None):
'''
resolve type of the class property for the class. If objcls is not set then
assume that cls argument (class that the function is called from) is the class
we are trying to resolve the type for
:param cls:
:param ... | resolve type of the class property for the class. If objcls is not set then
assume that cls argument (class that the function is called from) is the class
we are trying to resolve the type for
:param cls:
:param objcls:
:param propname: |
def _determine_filtered_package_requirements(self):
"""
Parse the configuration file for [blacklist]packages
Returns
-------
list of packaging.requirements.Requirement
For all PEP440 package specifiers
"""
filtered_requirements = set()
try:
... | Parse the configuration file for [blacklist]packages
Returns
-------
list of packaging.requirements.Requirement
For all PEP440 package specifiers |
def create_replication(self, source_db=None, target_db=None,
repl_id=None, **kwargs):
"""
Creates a new replication task.
:param source_db: Database object to replicate from. Can be either a
``CouchDatabase`` or ``CloudantDatabase`` instance.
:par... | Creates a new replication task.
:param source_db: Database object to replicate from. Can be either a
``CouchDatabase`` or ``CloudantDatabase`` instance.
:param target_db: Database object to replicate to. Can be either a
``CouchDatabase`` or ``CloudantDatabase`` instance.
... |
def stop(self, stopSparkContext=True, stopGraceFully=False):
"""Stop processing streams.
:param stopSparkContext: stop the SparkContext (NOT IMPLEMENTED)
:param stopGracefully: stop gracefully (NOT IMPLEMENTED)
"""
while self._on_stop_cb:
cb = self._on_stop_cb.pop()
... | Stop processing streams.
:param stopSparkContext: stop the SparkContext (NOT IMPLEMENTED)
:param stopGracefully: stop gracefully (NOT IMPLEMENTED) |
def ComputeFortranSuffixes(suffixes, ppsuffixes):
"""suffixes are fortran source files, and ppsuffixes the ones to be
pre-processed. Both should be sequences, not strings."""
assert len(suffixes) > 0
s = suffixes[0]
sup = s.upper()
upper_suffixes = [_.upper() for _ in suffixes]
if SCons.Util... | suffixes are fortran source files, and ppsuffixes the ones to be
pre-processed. Both should be sequences, not strings. |
def permute(self, qubits: Qubits) -> 'Gate':
    """Return a new Gate with its qubits reordered to `qubits`."""
    permuted = self.vec.permute(qubits)
    return Gate(permuted.tensor, qubits=permuted.qubits)
def range_type_to_dtype(range_type: str) -> Optional[tf.DType]:
    '''Maps RDDL range types to TensorFlow dtypes.

    Returns None for an unknown range type, honoring the ``Optional``
    return annotation (the previous dict lookup raised KeyError instead).
    '''
    range2dtype = {
        'real': tf.float32,
        'int': tf.int32,
        'bool': tf.bool,
    }
    return range2dtype.get(range_type)
def import_attr(path):
    """
    Resolve a dotted Python path ("package.module.attr"): import the
    module part and return the named attribute from it.
    """
    module_path, attr_name = path.rsplit(".", 1)
    module = import_module(module_path)
    return getattr(module, attr_name)
imports the module and returns the variable in it. |
def fix_config(self, options):
"""
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict
"""
options = super(Eva... | Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict |
def put_summary(self, summary):
"""
Put a `tf.Summary`.
"""
if isinstance(summary, six.binary_type):
summary = tf.Summary.FromString(summary)
assert isinstance(summary, tf.Summary), type(summary)
# TODO other types
for val in summary.value:
... | Put a `tf.Summary`. |
def stations(self, *stns):
"""Specify one or more stations for the query.
This modifies the query in-place, but returns `self` so that multiple
queries can be chained together on one line.
This replaces any existing spatial queries that have been set.
Parameters
------... | Specify one or more stations for the query.
This modifies the query in-place, but returns `self` so that multiple
queries can be chained together on one line.
This replaces any existing spatial queries that have been set.
Parameters
----------
stns : one or more string... |
def _register_endpoints(self, providers):
"""
Register methods to endpoints
:type providers: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))]
:param providers: A list of backend providers
:return: A list of endpoint/method pa... | Register methods to endpoints
:type providers: list[str]
:rtype: list[(str, ((satosa.context.Context, Any) -> satosa.response.Response, Any))]
:param providers: A list of backend providers
:return: A list of endpoint/method pairs |
def _ensure_exists(name, path=None):
    '''
    Raise a CommandExecutionError when the named container is absent
    '''
    if exists(name, path=path):
        return
    raise CommandExecutionError(
        'Container \'{0}\' does not exist'.format(name)
    )
def encrypt_key(key, password):
"""Encrypt the password with the public key and return an ASCII representation.
The public key retrieved from the Travis API is loaded as an RSAPublicKey
object using Cryptography's default backend. Then the given password
is encrypted with the encrypt() method of RSAPub... | Encrypt the password with the public key and return an ASCII representation.
The public key retrieved from the Travis API is loaded as an RSAPublicKey
object using Cryptography's default backend. Then the given password
is encrypted with the encrypt() method of RSAPublicKey. The encrypted
password is t... |
def port_channel_vlag_ignore_split(self, **kwargs):
"""Ignore VLAG Split.
Args:
name (str): Port-channel number. (1, 5, etc)
enabled (bool): Is ignore split enabled? (True, False)
callback (function): A function executed upon completion of the
method.... | Ignore VLAG Split.
Args:
name (str): Port-channel number. (1, 5, etc)
enabled (bool): Is ignore split enabled? (True, False)
callback (function): A function executed upon completion of the
method. The only parameter passed to `callback` will be the
... |
def newest(cls, session):
"""Fetches the latest media added to MAL.
:type session: :class:`myanimelist.session.Session`
:param session: A valid MAL session
:rtype: :class:`.Media`
:return: the newest media on MAL
:raises: :class:`.MalformedMediaPageError`
"""
media_type = cls.__name_... | Fetches the latest media added to MAL.
:type session: :class:`myanimelist.session.Session`
:param session: A valid MAL session
:rtype: :class:`.Media`
:return: the newest media on MAL
:raises: :class:`.MalformedMediaPageError` |
def _contextualize(contextFactory, contextReceiver):
"""
Invoke a callable with an argument derived from the current execution
context (L{twisted.python.context}), or automatically created if none is
yet present in the current context.
This function, with a better name and documentation, should pro... | Invoke a callable with an argument derived from the current execution
context (L{twisted.python.context}), or automatically created if none is
yet present in the current context.
This function, with a better name and documentation, should probably be
somewhere in L{twisted.python.context}. Calling con... |
def delete_agent_cloud(self, agent_cloud_id):
"""DeleteAgentCloud.
[Preview API]
:param int agent_cloud_id:
:rtype: :class:`<TaskAgentCloud> <azure.devops.v5_1.task-agent.models.TaskAgentCloud>`
"""
route_values = {}
if agent_cloud_id is not None:
rout... | DeleteAgentCloud.
[Preview API]
:param int agent_cloud_id:
:rtype: :class:`<TaskAgentCloud> <azure.devops.v5_1.task-agent.models.TaskAgentCloud>` |
def bench_report(results):
"""Print a report for given benchmark results to the console."""
table = Table(names=['function', 'nest', 'nside', 'size',
'time_healpy', 'time_self', 'ratio'],
dtype=['S20', bool, int, int, float, float, float], masked=True)
for row in ... | Print a report for given benchmark results to the console. |
def download(data_dir):
"""Download census data if it is not already present."""
tf.gfile.MakeDirs(data_dir)
training_file_path = os.path.join(data_dir, TRAINING_FILE)
if not tf.gfile.Exists(training_file_path):
_download_and_clean_file(training_file_path, TRAINING_URL)
eval_file_path = os.path.join(dat... | Download census data if it is not already present. |
def generate_pdfa(
pdf_pages,
output_file,
compression,
log,
threads=1,
pdf_version='1.5',
pdfa_part='2',
):
"""Generate a PDF/A.
The pdf_pages, a list files, will be merged into output_file. One or more
PDF files may be merged. One of the files in this list must be a pdfmark
... | Generate a PDF/A.
The pdf_pages, a list files, will be merged into output_file. One or more
PDF files may be merged. One of the files in this list must be a pdfmark
file that provides Ghostscript with details on how to perform the PDF/A
conversion. By default with we pick PDF/A-2b, but this works for 1... |
def add_alias(self, name, *alt_names):
"""
Add some duplicate names for a given function. The original function's implementation must already be
registered.
:param name: The name of the function for which an implementation is already present
:param alt_names: Any number... | Add some duplicate names for a given function. The original function's implementation must already be
registered.
:param name: The name of the function for which an implementation is already present
:param alt_names: Any number of alternate names may be passed as varargs |
def _search(problem, fringe, graph_search=False, depth_limit=None,
node_factory=SearchNode, graph_replace_when_better=False,
viewer=None):
'''
Basic search algorithm, base of all the other search algorithms.
'''
if viewer:
viewer.event('started')
memory = set()
i... | Basic search algorithm, base of all the other search algorithms. |
def _process_data(*kwarg_names):
"""Helper function to handle data keyword argument
"""
def _data_decorator(func):
@functools.wraps(func)
def _mark_with_data(*args, **kwargs):
data = kwargs.pop('data', None)
if data is None:
return func(*args, **kwargs... | Helper function to handle data keyword argument |
def percent_encode_host(url):
""" Convert the host of uri formatted with to_uri()
to have a %-encoded host instead of punycode host
The rest of url should be unchanged
"""
# only continue if punycode encoded
if 'xn--' not in url:
return url
parts = u... | Convert the host of uri formatted with to_uri()
to have a %-encoded host instead of punycode host
The rest of url should be unchanged |
def sql(self, sql: str, *qmark_params, **named_params):
    """
    :deprecated: use self.statement to execute properly-formatted sql statements
    """
    wrapped = SingleSqlStatement(sql)
    executor = self.statement(wrapped)
    return executor.execute(*qmark_params, **named_params)
def _get_span_name(servicer_context):
"""Generates a span name based off of the gRPC server rpc_request_info"""
method_name = servicer_context._rpc_event.call_details.method[1:]
if isinstance(method_name, bytes):
method_name = method_name.decode('utf-8')
method_name = method_name.replace('/', '.... | Generates a span name based off of the gRPC server rpc_request_info |
def api_retrieve(self, api_key=None):
"""
Call the stripe API's retrieve operation for this model.
:param api_key: The api key to use for this request. Defaults to settings.STRIPE_SECRET_KEY.
:type api_key: string
"""
api_key = api_key or self.default_api_key
return self.stripe_class.retrieve(
id=sel... | Call the stripe API's retrieve operation for this model.
:param api_key: The api key to use for this request. Defaults to settings.STRIPE_SECRET_KEY.
:type api_key: string |
def addCondition(self, *fns, **kwargs):
"""Add a boolean predicate function to expression's list of parse actions. See
:class:`setParseAction` for function call signatures. Unlike ``setParseAction``,
functions passed to ``addCondition`` need to return boolean success/fail of the condition.
... | Add a boolean predicate function to expression's list of parse actions. See
:class:`setParseAction` for function call signatures. Unlike ``setParseAction``,
functions passed to ``addCondition`` need to return boolean success/fail of the condition.
Optional keyword arguments:
- message =... |
def get_next_occurrence(tx: ScheduledTransaction) -> date:
""" Calculates the next occurrence date for scheduled transaction.
Mimics the recurrenceNextInstance() function from GnuCash.
Still not fully complete but handles the main cases I use. """
# Reference documentation:
# https://github.com/Mist... | Calculates the next occurrence date for scheduled transaction.
Mimics the recurrenceNextInstance() function from GnuCash.
Still not fully complete but handles the main cases I use. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.