code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def get_line_numbers(self, buffer):
"""
Return a (start_line, end_line) pair.
"""
# Get absolute cursor positions from the text object.
from_, to = self.operator_range(buffer.document)
from_ += buffer.cursor_position
to += buffer.cursor_position
# Take th... | Return a (start_line, end_line) pair. |
def solve_gfl(data, edges=None, weights=None,
minlam=0.2, maxlam=1000.0, numlam=30,
alpha=0.2, inflate=2., converge=1e-6,
maxsteps=1000000, lam=None, verbose=0,
missing_val=None, full_path=False,
loss='normal'):
'''A very easy-to-use version of G... | A very easy-to-use version of GFL solver that just requires the data and
the edges. |
def __find_block_neighbors(self, block, level_blocks, unhandled_block_indexes):
"""!
@brief Search block neighbors that are parts of new clusters (density is greater than threshold and that are
not cluster members yet), other neighbors are ignored.
@param[in] block (bang_bl... | !
@brief Search block neighbors that are parts of new clusters (density is greater than threshold and that are
not cluster members yet), other neighbors are ignored.
@param[in] block (bang_block): BANG-block for which neighbors should be found (which can be part of cluster).
... |
def call_function(self, command, response_length=0, params=[], timeout_sec=1):
"""Send specified command to the PN532 and expect up to response_length
bytes back in a response. Note that less than the expected bytes might
be returned! Params can optionally specify an array of bytes to send as
... | Send specified command to the PN532 and expect up to response_length
bytes back in a response. Note that less than the expected bytes might
be returned! Params can optionally specify an array of bytes to send as
parameters to the function call. Will wait up to timeout_secs seconds
for... |
def read_altitude(self, sealevel_pa=101325.0):
"""Calculates the altitude in meters."""
# Calculation taken straight from section 3.6 of the datasheet.
pressure = float(self.read_pressure())
altitude = 44330.0 * (1.0 - pow(pressure / sealevel_pa, (1.0/5.255)))
self.logger.debug('... | Calculates the altitude in meters. |
def update_roles_gce(use_cache=True, cache_expiration=86400, cache_path="~/.gcetools/instances", group_name=None, region=None, zone=None):
"""
Dynamically update fabric's roles by using assigning the tags associated with
each machine in Google Compute Engine.
use_cache - will store a local cache in ~/.... | Dynamically update fabric's roles by using assigning the tags associated with
each machine in Google Compute Engine.
use_cache - will store a local cache in ~/.gcetools/
cache_expiration - cache expiration in seconds (default: 1 day)
cache_path - the path to store instances data (default: ~/.gcetools/i... |
def get_disconnect_message(self, code: int):
'''
http://channels.readthedocs.io/en/stable/asgi/www.html#disconnection
'''
self.order += 1
return {
'channel': 'websocket.disconnect',
'reply_channel': None,
'path': self.path,
'order':... | http://channels.readthedocs.io/en/stable/asgi/www.html#disconnection |
def rgb2ansi(r, g, b):
"""
Convert an RGB color to 256 ansi graphics.
"""
# Thanks to
# https://github.com/tehmaze/ansi/blob/master/ansi/colour/rgb.py
grayscale = False
poss = True
step = 2.5
while poss:
if min(r, g, b) < step:
grayscale = max(r, g, b) < step
... | Convert an RGB color to 256 ansi graphics. |
def _login_request(self, username=None, secret=None):
"""Send a login request with paramerters."""
url = 'http://' + self._host + '/login_sid.lua'
params = {}
if username:
params['username'] = username
if secret:
params['response'] = secret
plain ... | Send a login request with paramerters. |
def _run(self):
""" Execution body
:return: Execution result
:rtype: kser.result.Result
"""
if KSER_METRICS_ENABLED == "yes":
KSER_TASK_COUNT.inc()
logger.debug(
"{}.Run: {}[{}]".format(
self.__class__.__name__, self.__class__.pat... | Execution body
:return: Execution result
:rtype: kser.result.Result |
def get_last_api_metadata(self):
"""Get meta data for the last Mbed Cloud API call.
:returns: meta data of the last Mbed Cloud API call
:rtype: ApiMetadata
"""
last_metadata = None
for key, api in iteritems(self.apis):
api_client = api.api_client
... | Get meta data for the last Mbed Cloud API call.
:returns: meta data of the last Mbed Cloud API call
:rtype: ApiMetadata |
def index_humansorted(seq, key=None, reverse=False, alg=ns.DEFAULT):
"""
This is a wrapper around ``index_natsorted(seq, alg=ns.LOCALE)``.
Parameters
----------
seq: iterable
The input to sort.
key: callable, optional
A key used to determine how to sort each element of the sequ... | This is a wrapper around ``index_natsorted(seq, alg=ns.LOCALE)``.
Parameters
----------
seq: iterable
The input to sort.
key: callable, optional
A key used to determine how to sort each element of the sequence.
It is **not** applied recursively.
It should accept a singl... |
def save_to_file(self, path):
"""
Dump all cookies to file.
Cookies are dumped as JSON-serialized dict of keys and values.
"""
with open(path, 'w') as out:
out.write(json.dumps(self.get_dict())) | Dump all cookies to file.
Cookies are dumped as JSON-serialized dict of keys and values. |
def tAx(mt, x, t):
""" n/Ax : Returns the EPV (net single premium) of a deferred whole life insurance. """
return mt.Mx[x + t] / mt.Dx[x] | n/Ax : Returns the EPV (net single premium) of a deferred whole life insurance. |
def get_notificant(self, id, **kwargs): # noqa: E501
"""Get a specific notification target # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_notificant(id, ... | Get a specific notification target # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_notificant(id, async_req=True)
>>> result = thread.get()
:param... |
def run(self):
"""The main routine for a thread's work.
The thread pulls tasks from the task queue and executes them until it
encounters a death token. The death token is a tuple of two Nones.
"""
try:
quit_request_detected = False
while True:
... | The main routine for a thread's work.
The thread pulls tasks from the task queue and executes them until it
encounters a death token. The death token is a tuple of two Nones. |
def zSetSurfaceData(self, surfNum, radius=None, thick=None, material=None, semidia=None,
conic=None, comment=None):
"""Sets surface data"""
if self.pMode == 0: # Sequential mode
surf = self.pLDE.GetSurfaceAt(surfNum)
if radius is not None:
... | Sets surface data |
def get_conditional_instance(self, parameter_names):
""" get a new Schur instance that includes conditional update from
some parameters becoming known perfectly
Parameters
----------
parameter_names : list
parameters that are to be treated as notionally perfectly
... | get a new Schur instance that includes conditional update from
some parameters becoming known perfectly
Parameters
----------
parameter_names : list
parameters that are to be treated as notionally perfectly
known
Returns
-------
la_cond :... |
def global_include(self, pattern):
"""
Include all files anywhere in the current directory that match the
pattern. This is very inefficient on large file trees.
"""
if self.allfiles is None:
self.findall()
match = translate_pattern(os.path.join('**', pattern))... | Include all files anywhere in the current directory that match the
pattern. This is very inefficient on large file trees. |
def disconnect(self):
"""Gracefully close connection to stomp server."""
if self._connected:
self._connected = False
self._conn.disconnect() | Gracefully close connection to stomp server. |
def extend_left_to(self, window, max_size):
"""Adjust the offset to start where the given window on our left ends if possible,
but don't make yourself larger than max_size.
The resize will assure that the new window still contains the old window area"""
rofs = self.ofs - window.ofs_end()... | Adjust the offset to start where the given window on our left ends if possible,
but don't make yourself larger than max_size.
The resize will assure that the new window still contains the old window area |
def list_numbers(self, **kwargs): # noqa: E501
"""Get your numbers # noqa: E501
List all your purchased numbers # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_numbers(asy... | Get your numbers # noqa: E501
List all your purchased numbers # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.list_numbers(async=True)
>>> result = thread.get()
:param ... |
def _serialize_list(cls, list_):
"""
:type list_: list
:rtype: list
"""
list_serialized = []
for item in list_:
item_serialized = cls.serialize(item)
list_serialized.append(item_serialized)
return list_serialized | :type list_: list
:rtype: list |
def process_pybel_graph(graph):
"""Return a PybelProcessor by processing a PyBEL graph.
Parameters
----------
graph : pybel.struct.BELGraph
A PyBEL graph to process
Returns
-------
bp : PybelProcessor
A PybelProcessor object which contains INDRA Statements in
bp.sta... | Return a PybelProcessor by processing a PyBEL graph.
Parameters
----------
graph : pybel.struct.BELGraph
A PyBEL graph to process
Returns
-------
bp : PybelProcessor
A PybelProcessor object which contains INDRA Statements in
bp.statements. |
def run_gatk_germline_pipeline(job, samples, config):
"""
Downloads shared files and calls the GATK best practices germline pipeline for a cohort of samples
:param JobFunctionWrappingJob job: passed automatically by Toil
:param list[GermlineSample] samples: List of GermlineSample namedtuples
:param... | Downloads shared files and calls the GATK best practices germline pipeline for a cohort of samples
:param JobFunctionWrappingJob job: passed automatically by Toil
:param list[GermlineSample] samples: List of GermlineSample namedtuples
:param Namespace config: Configuration options for pipeline
Requ... |
def _calc_checksum(self, secret):
"""Calculate string.
:param secret: The secret key.
:returns: The checksum.
"""
return str_to_uascii(
hashlib.sha256(mysql_aes_encrypt(self.salt, secret)).hexdigest()
) | Calculate string.
:param secret: The secret key.
:returns: The checksum. |
def setup(self):
"""
Initialize the crochet library.
This starts the reactor in a thread, and connect's Twisted's logs to
Python's standard library logging module.
This must be called at least once before the library can be used, and
can be called multiple times.
... | Initialize the crochet library.
This starts the reactor in a thread, and connect's Twisted's logs to
Python's standard library logging module.
This must be called at least once before the library can be used, and
can be called multiple times. |
def deserialize_upload(value, url):
"""
Restore file and name and storage from serialized value and the upload url.
"""
result = {'name': None, 'storage': None}
try:
result = signing.loads(value, salt=url)
except signing.BadSignature:
# TODO: Log invalid signature
pass
... | Restore file and name and storage from serialized value and the upload url. |
def ordersku_update(self, oid, sku_id=None, sku_props=None):
'''taobao.trade.ordersku.update 更新交易订单的销售属性
需要商家或以上权限才可调用此接口,可重复调用本接口更新交易备注,本接口同时具有添加备注的功能'''
request = TOPRequest('taobao.trade.ordersku.update')
request['oid'] = oid
if sku_id!=None: request['sku_id'] = sku_i... | taobao.trade.ordersku.update 更新交易订单的销售属性
需要商家或以上权限才可调用此接口,可重复调用本接口更新交易备注,本接口同时具有添加备注的功能 |
def decode_offset_fetch_response(cls, response):
"""
Decode OffsetFetchResponse to OffsetFetchResponsePayloads
Arguments:
response: OffsetFetchResponse
"""
return [
kafka.structs.OffsetFetchResponsePayload(
topic, partition, offset, metada... | Decode OffsetFetchResponse to OffsetFetchResponsePayloads
Arguments:
response: OffsetFetchResponse |
def window(data, param):
"""
MAYBE WE CAN DO THIS WITH NUMPY (no, the edges of windows are not graceful with numpy)
data - list of records
"""
name = param.name # column to assign window function result
edges = param.edges # columns to gourp by
where = param.where # DO NOT CONSIDER THESE ... | MAYBE WE CAN DO THIS WITH NUMPY (no, the edges of windows are not graceful with numpy)
data - list of records |
def post_structure(entry, site):
"""
A post structure with extensions.
"""
author = entry.authors.all()[0]
return {'title': entry.title,
'description': six.text_type(entry.html_content),
'link': '%s://%s%s' % (PROTOCOL, site.domain,
entry.ge... | A post structure with extensions. |
def init_autoindex(self, auto_interval):
"""Initialize and start the auto-indexing of the collections. If auto_interval is None this is a no op.
:param str|int auto_interval: The auto-indexing interval from the configuration file or CLI argument
"""
if not auto_interval:
ret... | Initialize and start the auto-indexing of the collections. If auto_interval is None this is a no op.
:param str|int auto_interval: The auto-indexing interval from the configuration file or CLI argument |
def all(self, list_id, subscriber_hash, **queryparams):
"""
Get the last 50 events of a member’s activity on a specific list,
including opens, clicks, and unsubscribes.
:param list_id: The unique id for the list.
:type list_id: :py:class:`str`
:param subscriber_hash: The... | Get the last 50 events of a member’s activity on a specific list,
including opens, clicks, and unsubscribes.
:param list_id: The unique id for the list.
:type list_id: :py:class:`str`
:param subscriber_hash: The MD5 hash of the lowercase version of the
list member’s email addr... |
def _register_factory(self, factory_name, factory, override):
# type: (str, type, bool) -> None
"""
Registers a component factory
:param factory_name: The name of the factory
:param factory: The factory class object
:param override: If true, previous factory is overridde... | Registers a component factory
:param factory_name: The name of the factory
:param factory: The factory class object
:param override: If true, previous factory is overridden, else an
exception is risen if a previous factory with that
name already... |
def update(self, parent=None):
"""
Updates the resource. This will trigger an api PATCH request.
:param parent ResourceBase: the parent of the resource - used for nesting the request url, optional
:raises ResourceError: if the resource does not have an id (does not exist yet)
:re... | Updates the resource. This will trigger an api PATCH request.
:param parent ResourceBase: the parent of the resource - used for nesting the request url, optional
:raises ResourceError: if the resource does not have an id (does not exist yet)
:returns: the resource itself |
def make_vcard_data(name, displayname, email=None, phone=None, fax=None,
videophone=None, memo=None, nickname=None, birthday=None,
url=None, pobox=None, street=None, city=None, region=None,
zipcode=None, country=None, org=None, lat=None, lng=None,
... | \
Creates a string encoding the contact information as vCard 3.0.
Only a subset of available vCard properties is supported.
:param str name: The name. If it contains a semicolon, , the first part
is treated as lastname and the second part is treated as forename.
:param str displayname: Com... |
def detach_zone(organization_id_or_slug):
'''Detach the zone of a given <organization>.'''
organization = Organization.objects.get_by_id_or_slug(
organization_id_or_slug)
if not organization:
exit_with_error(
'No organization found for {0}'.format(organization_id_or_slug)
... | Detach the zone of a given <organization>. |
def deleteoutputfile(project, filename, credentials=None):
"""Delete an output file"""
user, oauth_access_token = parsecredentials(credentials) #pylint: disable=unused-variable
if filename: filename = filename.replace("..","") #Simple security
if not filename or len(filename) == 0:
... | Delete an output file |
def PackageVariable(key, help, default, searchfunc=None):
# NB: searchfunc is currently undocumented and unsupported
"""
The input parameters describe a 'package list' option, thus they
are returned with the correct converter and validator appended. The
result is usable for input to opts.Add() .
... | The input parameters describe a 'package list' option, thus they
are returned with the correct converter and validator appended. The
result is usable for input to opts.Add() .
A 'package list' option may either be 'all', 'none' or a list of
package names (separated by space). |
def score(self):
""" Returns the sum of the accidental dignities
score.
"""
if not self.scoreProperties:
self.scoreProperties = self.getScoreProperties()
return sum(self.scoreProperties.values()) | Returns the sum of the accidental dignities
score. |
def parse(self, filename, verbose=0):
"""
Parse the given file. Return :class:`EventReport`.
"""
run_completed, start_datetime, end_datetime = False, None, None
filename = os.path.abspath(filename)
report = EventReport(filename)
w = WildCard("*Error|*Warning|*Com... | Parse the given file. Return :class:`EventReport`. |
def delete_namespaced_custom_object(self, group, version, namespace, plural, name, body, **kwargs):
"""
Deletes the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>... | Deletes the specified namespace scoped custom object
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_custom_object(group, version, namespace, plural, name, body, async_req=True)
>... |
def run_iter(self, mine=False, jid=None):
'''
Execute and yield returns as they come in, do not print to the display
mine
The Single objects will use mine_functions defined in the roster,
pillar, or master config (they will be checked in that order) and
will ... | Execute and yield returns as they come in, do not print to the display
mine
The Single objects will use mine_functions defined in the roster,
pillar, or master config (they will be checked in that order) and
will modify the argv with the arguments from mine_functions |
def real_sound_match_abstract_sound(self, abstract_pos: AbstractPosition) -> bool:
"""
If an observed position
:param abstract_pos:
:return:
"""
assert isinstance(abstract_pos, AbstractPosition)
if self.before is not None and self.after is not None:
re... | If an observed position
:param abstract_pos:
:return: |
def add(self, child):
"""
Adds a typed child object to the component type.
@param child: Child object to be added.
"""
if isinstance(child, Parameter):
self.add_parameter(child)
elif isinstance(child, Property):
self.add_property(child)
e... | Adds a typed child object to the component type.
@param child: Child object to be added. |
def _reportFutures(self):
"""Sends futures status updates to broker at intervals of
scoop.TIME_BETWEEN_STATUS_REPORTS seconds. Is intended to be run by a
separate thread."""
try:
while True:
time.sleep(scoop.TIME_BETWEEN_STATUS_REPORTS)
fids = ... | Sends futures status updates to broker at intervals of
scoop.TIME_BETWEEN_STATUS_REPORTS seconds. Is intended to be run by a
separate thread. |
def thumbnail(self):
"""
This method returns a thumbnail representation of the file if the data is a supported graphics format.
Input:
* None
Output:
* A byte stream representing a thumbnail of a support graphics file
Example::
file = clien... | This method returns a thumbnail representation of the file if the data is a supported graphics format.
Input:
* None
Output:
* A byte stream representing a thumbnail of a support graphics file
Example::
file = client.get_file("4ddfds", 0)
open(... |
def ui_device_label(self):
"""UI string identifying the device (drive) if toplevel."""
return ': '.join(filter(None, [
self.ui_device_presentation,
self.loop_file or
self.drive_label or self.ui_id_label or self.ui_id_uuid
])) | UI string identifying the device (drive) if toplevel. |
def detect(self):
"""
Try to contact a remote webservice and parse the returned output.
Determine the IP address from the parsed output and return.
"""
if self.opts_url and self.opts_parser:
url = self.opts_url
parser = self.opts_parser
else:
... | Try to contact a remote webservice and parse the returned output.
Determine the IP address from the parsed output and return. |
def filter_select_columns_intensity(df, prefix, columns):
"""
Filter dataframe to include specified columns, retaining any Intensity columns.
"""
# Note: I use %s.+ (not %s.*) so it forces a match with the prefix string, ONLY if it is followed by something.
return df.filter(regex='^(%s.+|%s)$' % (pr... | Filter dataframe to include specified columns, retaining any Intensity columns. |
def query(url, **kwargs):
'''
Query a resource, and decode the return data
Passes through all the parameters described in the
:py:func:`utils.http.query function <salt.utils.http.query>`:
.. autofunction:: salt.utils.http.query
CLI Example:
.. code-block:: bash
salt '*' http.que... | Query a resource, and decode the return data
Passes through all the parameters described in the
:py:func:`utils.http.query function <salt.utils.http.query>`:
.. autofunction:: salt.utils.http.query
CLI Example:
.. code-block:: bash
salt '*' http.query http://somelink.com/
salt '... |
def errdp(marker, number):
"""
Substitute a double precision number for the first occurrence of
a marker found in the current long error message.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/errdp_c.html
:param marker: A substring of the error message to be replaced.
:type marker: s... | Substitute a double precision number for the first occurrence of
a marker found in the current long error message.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/errdp_c.html
:param marker: A substring of the error message to be replaced.
:type marker: str
:param number: The d.p. number t... |
def create(image_data):
"""
:param image_data: ImageMetadata
:return: V1Pod,
https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1Pod.md
"""
# convert environment variables to Kubernetes objects
env_variables = []
for key, value i... | :param image_data: ImageMetadata
:return: V1Pod,
https://github.com/kubernetes-client/python/blob/master/kubernetes/docs/V1Pod.md |
def _decode_names(self):
"""Decode names (hopefully ASCII or UTF-8) into Unicode.
"""
if self.subject_name is not None:
subject_name = []
for part in self.subject_name:
new_part = []
for name, value in part:
try:
... | Decode names (hopefully ASCII or UTF-8) into Unicode. |
def get_lock_behaviour(triggers, all_data, lock):
"""Binary state lock protects from version increments if set"""
updates = {}
lock_key = config._forward_aliases.get(Constants.VERSION_LOCK_FIELD)
# if we are explicitly setting or locking the version, then set the lock field True anyway
if lock:
... | Binary state lock protects from version increments if set |
def _subprocessor(self, disabled_qubits):
"""Create a subprocessor by deleting a set of qubits. We assume
this removes all evil edges, and return an :class:`eden_processor`
instance.
"""
edgelist = [(p, q) for p, q in self._edgelist if
p not in disabled_qubit... | Create a subprocessor by deleting a set of qubits. We assume
this removes all evil edges, and return an :class:`eden_processor`
instance. |
def fast_cov(x, y=None, destination=None):
"""calculate the covariance matrix for the columns of x (MxN), or optionally, the covariance matrix between the
columns of x and and the columns of y (MxP). (In the language of statistics, the columns are variables, the rows
are observations).
Args:
x... | calculate the covariance matrix for the columns of x (MxN), or optionally, the covariance matrix between the
columns of x and and the columns of y (MxP). (In the language of statistics, the columns are variables, the rows
are observations).
Args:
x (numpy array-like) MxN in shape
y (numpy ... |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'consumption_preference_category_id'
) and self.consumption_preference_category_id is not None:
_dict[
'consumption_preference_category_id'] = sel... | Return a json dictionary representing this model. |
def CELERY_RESULT_BACKEND(self):
"""Redis result backend config"""
# allow specify directly
configured = get('CELERY_RESULT_BACKEND', None)
if configured:
return configured
if not self._redis_available():
return None
host, port = self.REDIS_HOST... | Redis result backend config |
def main():
"""
Start the GUI
:return:
"""
app = QApplication(sys.argv)
rlbot_icon = QtGui.QIcon(os.path.join(get_rlbot_directory(), 'img', 'rlbot_icon.png'))
app.setWindowIcon(rlbot_icon)
window = RLBotQTGui()
window.show()
app.exec_() | Start the GUI
:return: |
def fc(inputs,
num_units_out,
activation=tf.nn.relu,
stddev=0.01,
bias=0.0,
weight_decay=0,
batch_norm_params=None,
is_training=True,
trainable=True,
restore=True,
scope=None,
reuse=None):
"""Adds a fully connected layer followed by an optio... | Adds a fully connected layer followed by an optional batch_norm layer.
FC creates a variable called 'weights', representing the fully connected
weight matrix, that is multiplied by the input. If `batch_norm` is None, a
second variable called 'biases' is added to the result of the initial
vector-matrix multipli... |
async def delete_tag(self, tag):
"""
DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext
"""
path = '/api/tags/{tag}.{ext}'.format(tag=tag, ext=self.format)
params = {'ac... | DELETE /api/tags/{tag}.{_format}
Permanently remove one tag from every entry
:param tag: string The Tag
:return data related to the ext |
def all_host_infos():
'''
Summarize all host information.
'''
output = []
output.append(["Operating system", os()])
output.append(["CPUID information", cpu()])
output.append(["CC information", compiler()])
output.append(["JDK information", from_cmd("java -version")])
output.appen... | Summarize all host information. |
def invalidate(self, comparison: Comparison[Entity, Entity]) -> None:
"""
Invalidate paths in a zone. See https://api.cloudflare.com
/#zone-purge-individual-files-by-url-and-cache-tags
:param comparison: The comparison whose changes to invalidate.
:raises requests.exceptions.Req... | Invalidate paths in a zone. See https://api.cloudflare.com
/#zone-purge-individual-files-by-url-and-cache-tags
:param comparison: The comparison whose changes to invalidate.
:raises requests.exceptions.RequestException: On request failure.
:raises RuntimeError: If the request succeeded ... |
def max_pool(inputs, kernel_size, stride=2, padding='VALID', scope=None):
"""Adds a Max Pooling layer.
It is assumed by the wrapper that the pooling is only done per image and not
in depth or batch.
Args:
inputs: a tensor of size [batch_size, height, width, depth].
kernel_size: a list of length 2: [ke... | Adds a Max Pooling layer.
It is assumed by the wrapper that the pooling is only done per image and not
in depth or batch.
Args:
inputs: a tensor of size [batch_size, height, width, depth].
kernel_size: a list of length 2: [kernel_height, kernel_width] of the
pooling kernel over which the op is com... |
def colorize(text, color=None, **kwargs):
"""
Colorize the text
kwargs arguments:
style=, bg=
"""
style = None
bg = None
# ================ #
# Keyword checking #
# ================ #
if 'style' in kwargs:
if kwargs['style'] not in STYLE:
raise WrongStyle(... | Colorize the text
kwargs arguments:
style=, bg= |
def set_wts_get_npred_wt(gta, maskname):
"""Set a weights file and get the weighted npred for all the sources
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
maskname : str
The path to the file with the mask
Returns
-------
odict : dict
... | Set a weights file and get the weighted npred for all the sources
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
maskname : str
The path to the file with the mask
Returns
-------
odict : dict
Dictionary mapping from source name to weighted ... |
def rule_low_registers(self, arg):
"""Low registers are R0 - R7"""
r_num = self.check_register(arg)
if r_num > 7:
raise iarm.exceptions.RuleError(
"Register {} is not a low register".format(arg)) | Low registers are R0 - R7 |
def _set_data(self, **kwargs):
"""Sets data from given parameters
Old values are deleted.
If a paremeter is not given, nothing is changed.
Parameters
----------
shape: 3-tuple of Integer
\tGrid shape
grid: Dict of 3-tuples to strings
\tCell cont... | Sets data from given parameters
Old values are deleted.
If a paremeter is not given, nothing is changed.
Parameters
----------
shape: 3-tuple of Integer
\tGrid shape
grid: Dict of 3-tuples to strings
\tCell content
attributes: List of 3-tuples
... |
def _extract_optimizer_param_name_and_group(optimizer_name, param):
"""Extract param group and param name from the given parameter name.
Raises an error if the param name doesn't match one of
- ``optimizer__param_groups__<group>__<name>``
- ``optimizer__<name>``
In the second case group defaults to ... | Extract param group and param name from the given parameter name.
Raises an error if the param name doesn't match one of
- ``optimizer__param_groups__<group>__<name>``
- ``optimizer__<name>``
In the second case group defaults to 'all'.
The second case explicitly forbids ``optimizer__foo__bar``
s... |
def get_knowledge_category(self):
"""Gets the grade associated with the knowledge dimension.
return: (osid.grading.Grade) - the grade
raise: IllegalState - has_knowledge_category() is false
raise: OperationFailed - unable to complete request
compliance: mandatory - This method... | Gets the grade associated with the knowledge dimension.
return: (osid.grading.Grade) - the grade
raise: IllegalState - has_knowledge_category() is false
raise: OperationFailed - unable to complete request
compliance: mandatory - This method must be implemented. |
def string(self, *args, **kwargs):
"""Compare attributes of pairs with string algorithm.
Shortcut of :class:`recordlinkage.compare.String`::
from recordlinkage.compare import String
indexer = recordlinkage.Compare()
indexer.add(String())
"""
compar... | Compare attributes of pairs with string algorithm.
Shortcut of :class:`recordlinkage.compare.String`::
from recordlinkage.compare import String
indexer = recordlinkage.Compare()
indexer.add(String()) |
def autocomplete():
"""Entry Point for completion of main and subcommand options.
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'PIP_AUTO_COMPLETE' not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try... | Entry Point for completion of main and subcommand options. |
def set_attribute(self, code, value):
"""Set attribute for user"""
attr, _ = self.get_or_create(code=code)
attr.value = value
attr.save() | Set attribute for user |
def _cbc_encrypt(self, content, final_key):
"""This method encrypts the content."""
aes = AES.new(final_key, AES.MODE_CBC, self._enc_iv)
padding = (16 - len(content) % AES.block_size)
for _ in range(padding):
content += chr(padding).encode()
temp = bytes(content)
... | This method encrypts the content. |
def add_vrf(self, auth, attr):
""" Add a new VRF.
* `auth` [BaseAuth]
AAA options.
* `attr` [vrf_attr]
The news VRF's attributes.
Add a VRF based on the values stored in the `attr` dict.
Returns a dict describing the VRF which wa... | Add a new VRF.
* `auth` [BaseAuth]
AAA options.
* `attr` [vrf_attr]
The news VRF's attributes.
Add a VRF based on the values stored in the `attr` dict.
Returns a dict describing the VRF which was added.
This is the documenta... |
def _init_go_sources(self, go_sources_arg, go2obj_arg):
"""Return GO sources which are present in GODag."""
gos_user = set(go_sources_arg)
if 'children' in self.kws and self.kws['children']:
gos_user |= get_leaf_children(gos_user, go2obj_arg)
gos_godag = set(go2obj_arg)
... | Return GO sources which are present in GODag. |
def scroll(self, scroll_id=None, body=None, params=None):
"""
Scroll a search request created by specifying the scroll parameter.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html>`_
:arg scroll_id: The scroll ID
:arg body: The scroll ID... | Scroll a search request created by specifying the scroll parameter.
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/search-request-scroll.html>`_
:arg scroll_id: The scroll ID
:arg body: The scroll ID if not passed by URL or query parameter.
:arg scroll: Specify how lon... |
def target_lines(self):
"""The formatted target_type(...) lines for this target.
This is just a convenience method for extracting and re-injecting the changed
`dependency_lines` into the target text.
"""
target_lines = self._target_source_lines[:]
deps_begin, deps_end = self._dependencies_inter... | The formatted target_type(...) lines for this target.
This is just a convenience method for extracting and re-injecting the changed
`dependency_lines` into the target text. |
def _stream_helper(self, response, decode=False):
"""Generator for data coming from a chunked-encoded HTTP response."""
if response.raw._fp.chunked:
if decode:
for chunk in json_stream(self._stream_helper(response, False)):
yield chunk
else:
... | Generator for data coming from a chunked-encoded HTTP response. |
def powered_off(name, connection=None, username=None, password=None):
'''
Stops a VM by power off.
.. versionadded:: 2016.3.0
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.... | Stops a VM by power off.
.. versionadded:: 2016.3.0
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param password: password to connect with, overridin... |
def get_unknown_check_result_brok(cmd_line):
"""Create unknown check result brok and fill it with command data
:param cmd_line: command line to extract data
:type cmd_line: str
:return: unknown check result brok
:rtype: alignak.objects.brok.Brok
"""
match = re.ma... | Create unknown check result brok and fill it with command data
:param cmd_line: command line to extract data
:type cmd_line: str
:return: unknown check result brok
:rtype: alignak.objects.brok.Brok |
def extract_response(self, extractors):
""" extract value from requests.Response and store in OrderedDict.
Args:
extractors (list):
[
{"resp_status_code": "status_code"},
{"resp_headers_content_type": "headers.content-type"},
... | extract value from requests.Response and store in OrderedDict.
Args:
extractors (list):
[
{"resp_status_code": "status_code"},
{"resp_headers_content_type": "headers.content-type"},
{"resp_content": "content"},
... |
def center(self) -> Location:
"""
:return: a Point corresponding to the absolute position of the center
of the well relative to the deck (with the front-left corner of slot 1
as (0,0,0))
"""
top = self.top()
center_z = top.point.z - (self._depth / 2.0)
ret... | :return: a Point corresponding to the absolute position of the center
of the well relative to the deck (with the front-left corner of slot 1
as (0,0,0)) |
def get_url(self, action, obj=None, domain=True):
"""
Returns an RFC3987 IRI for a HTML representation of the given object, action.
If domain is true, the current site's domain will be added.
"""
if not obj:
url = reverse('actstream_detail', None, (action.pk,))
... | Returns an RFC3987 IRI for a HTML representation of the given object, action.
If domain is true, the current site's domain will be added. |
def project_activity(index, start, end):
"""Compute the metrics for the project activity section of the enriched
github pull requests index.
Returns a dictionary containing a "metric" key. This key contains the
metrics for this section.
:param index: index object
:param start: start date to ge... | Compute the metrics for the project activity section of the enriched
github pull requests index.
Returns a dictionary containing a "metric" key. This key contains the
metrics for this section.
:param index: index object
:param start: start date to get the data from
:param end: end date to get ... |
def idd2grouplist(fhandle):
    """Wrapper for ``iddtxt2grouplist``.

    Accepts either an open file-like object or a path: if *fhandle* has no
    ``read`` method, it is treated as a filename and opened locally.

    :param fhandle: file-like object or path of an IDD file
    :return: whatever ``iddtxt2grouplist`` produces for the file's text
    """
    try:
        txt = fhandle.read()
    except AttributeError:
        # *fhandle* is a path, not a file object. Open it ourselves and make
        # sure the handle is closed again (the original leaked it via a bare
        # open(...).read()).
        with open(fhandle, 'r') as fobj:
            txt = fobj.read()
    return iddtxt2grouplist(txt)
def get_jid(jid):
'''
Return the information returned when the specified job id was executed
'''
cb_ = _get_connection()
_verify_views()
ret = {}
for result in cb_.query(DESIGN_NAME, 'jid_returns', key=six.text_type(jid), include_docs=True):
ret[result.value] = result.doc.value
... | Return the information returned when the specified job id was executed |
def _is_small_molecule(pe):
"""Return True if the element is a small molecule"""
val = isinstance(pe, _bp('SmallMolecule')) or \
isinstance(pe, _bpimpl('SmallMolecule')) or \
isinstance(pe, _bp('SmallMoleculeReference')) or \
isinstance(pe, _bpimpl('SmallMoleculeReference'))
... | Return True if the element is a small molecule |
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: WorkflowContext for this WorkflowInstance
:rtype: twilio.rest.taskrouter.v1.workspace.workflow.Wo... | Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: WorkflowContext for this WorkflowInstance
:rtype: twilio.rest.taskrouter.v1.workspace.workflow.WorkflowContext |
def validate_capacity(capacity):
"""Validate ScalingConfiguration capacity for serverless DBCluster"""
if capacity not in VALID_SCALING_CONFIGURATION_CAPACITIES:
raise ValueError(
"ScalingConfiguration capacity must be one of: {}".format(
", ".join(map(
s... | Validate ScalingConfiguration capacity for serverless DBCluster |
def libvlc_media_get_duration(p_md):
'''Get duration (in ms) of media descriptor object item.
@param p_md: media descriptor object.
@return: duration of media item or -1 on error.
'''
f = _Cfunctions.get('libvlc_media_get_duration', None) or \
_Cfunction('libvlc_media_get_duration', ((1,),),... | Get duration (in ms) of media descriptor object item.
@param p_md: media descriptor object.
@return: duration of media item or -1 on error. |
def _chunk_filter(self, extensions):
""" Create a filter from the extensions and ignore files """
if isinstance(extensions, six.string_types):
extensions = extensions.split()
def _filter(chunk):
""" Exclusion filter """
name = chunk['name']
if ext... | Create a filter from the extensions and ignore files |
def _set_show_support_save_status(self, v, load=False):
"""
Setter method for show_support_save_status, mapped from YANG variable /brocade_ras_ext_rpc/show_support_save_status (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_support_save_status is considered as... | Setter method for show_support_save_status, mapped from YANG variable /brocade_ras_ext_rpc/show_support_save_status (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_show_support_save_status is considered as a private
method. Backends looking to populate this variable sh... |
def interpret_expenditure_entry(entry):
"""Interpret data fields within a CO-TRACER expediture report.
Interpret the expenditure amount, expenditure date, filed date, amended,
and amendment fields of the provided entry. All dates (expenditure and
filed) are interpreted together and, if any fails, all w... | Interpret data fields within a CO-TRACER expediture report.
Interpret the expenditure amount, expenditure date, filed date, amended,
and amendment fields of the provided entry. All dates (expenditure and
filed) are interpreted together and, if any fails, all will retain their
original value. Likewise, ... |
def get_partitioned_view_result(self, partition_key, ddoc_id, view_name,
raw_result=False, **kwargs):
"""
Retrieves the partitioned view result based on the design document and
view name.
See :func:`~cloudant.database.CouchDatabase.get_view_result` me... | Retrieves the partitioned view result based on the design document and
view name.
See :func:`~cloudant.database.CouchDatabase.get_view_result` method for
further details.
:param str partition_key: Partition key.
:param str ddoc_id: Design document id used to get result.
... |
def main():
''' main program loop '''
conn = symphony.Config('/etc/es-bot/es-bot.cfg')
# connect to pod
try:
agent, pod, symphony_sid = conn.connect()
print ('connected: %s' % (symphony_sid))
except Exception as err:
print ('failed to connect!: %s' % (err))
# main loop
... | main program loop |
async def install_sandboxed_update(filename, loop):
"""
Create a virtual environment and activate it, and then install an
update candidate (leaves virtual environment activated)
:return: a result dict and the path to python in the virtual environment
"""
log.debug("Creating virtual environment"... | Create a virtual environment and activate it, and then install an
update candidate (leaves virtual environment activated)
:return: a result dict and the path to python in the virtual environment |
def procs():
'''
Return the process data
.. versionchanged:: 2016.11.4
Added support for AIX
CLI Example:
.. code-block:: bash
salt '*' status.procs
'''
# Get the user, pid and cmd
ret = {}
uind = 0
pind = 0
cind = 0
plines = __salt__['cmd.run'](__grai... | Return the process data
.. versionchanged:: 2016.11.4
Added support for AIX
CLI Example:
.. code-block:: bash
salt '*' status.procs |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.