code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
async def restart_stream(self):
"""
Restart the stream on error
"""
await self.response.release()
await asyncio.sleep(self._error_timeout)
await self.connect()
logger.info("Reconnected to the stream")
self._reconnecting = False
return {'stream... | Restart the stream on error |
def ChunksExist(self, chunk_numbers):
"""Do we have this chunk in the index?"""
index_urns = {
self.urn.Add(self.CHUNK_ID_TEMPLATE % chunk_number): chunk_number
for chunk_number in chunk_numbers
}
res = {chunk_number: False for chunk_number in chunk_numbers}
for metadata in aff4.FA... | Do we have this chunk in the index? |
def keypress(self, size, key):
"""Handle keypresses for changing tabs."""
key = super().keypress(size, key)
num_tabs = len(self._widgets)
if key == self._keys['prev_tab']:
self._tab_index = (self._tab_index - 1) % num_tabs
self._update_tabs()
elif key == s... | Handle keypresses for changing tabs. |
def serialize_json_string(self, value):
"""
Tries to load an encoded json string back into an object
:param json_string:
:return:
"""
# Check if the value might be a json string
if not isinstance(value, six.string_types):
return value
# Make ... | Tries to load an encoded json string back into an object
:param json_string:
:return: |
def end(target):
"""schedule a greenlet to be stopped immediately
:param target: the greenlet to end
:type target: greenlet
"""
if not isinstance(target, compat.greenlet):
raise TypeError("argument must be a greenlet")
if not target.dead:
schedule(target)
state.to_raise[... | schedule a greenlet to be stopped immediately
:param target: the greenlet to end
:type target: greenlet |
def create_strategy(name=None):
"""
Create a strategy, or just returns it if it's already one.
:param name:
:return: Strategy
"""
import logging
from bonobo.execution.strategies.base import Strategy
if isinstance(name, Strategy):
return name
if name is None:
name ... | Create a strategy, or just returns it if it's already one.
:param name:
:return: Strategy |
def etree(self, data, root=None):
'''Convert data structure into a list of etree.Element'''
result = self.list() if root is None else root
if isinstance(data, (self.dict, dict)):
for key, value in data.items():
if isinstance(value, (self.dict, dict)):
... | Convert data structure into a list of etree.Element |
def replace_from_url(self, url, **kwds):
"""
Endpoint: /photo/<id>replace.json
Import a photo from the specified URL to replace this photo.
"""
result = self._client.photo.replace_from_url(self, url, **kwds)
self._replace_fields(result.get_fields()) | Endpoint: /photo/<id>replace.json
Import a photo from the specified URL to replace this photo. |
def map(self, arg, na_action=None):
"""
Map values of Series according to input correspondence.
Used for substituting each value in a Series with another value,
that may be derived from a function, a ``dict`` or
a :class:`Series`.
Parameters
----------
a... | Map values of Series according to input correspondence.
Used for substituting each value in a Series with another value,
that may be derived from a function, a ``dict`` or
a :class:`Series`.
Parameters
----------
arg : function, dict, or Series
Mapping corre... |
def huji_sample(orient_file, meths='FS-FD:SO-POM:SO-SUN', location_name='unknown',
samp_con="1", ignore_dip=True, data_model_num=3,
samp_file="samples.txt", site_file="sites.txt",
dir_path=".", input_dir_path=""):
"""
Convert HUJI sample file to MagIC file(s)
... | Convert HUJI sample file to MagIC file(s)
Parameters
----------
orient_file : str
input file name
meths : str
colon-delimited sampling methods, default FS-FD:SO-POM:SO-SUN
for more options, see info below
location : str
location name, default "unknown"
samp_con : s... |
def matchBlocks(self, blocks, threshold=.5, *args, **kwargs):
"""
Partitions blocked data and generates a sequence of clusters,
where each cluster is a tuple of record ids
Keyword arguments:
blocks -- Sequence of tuples of records, where each tuple is a
set of... | Partitions blocked data and generates a sequence of clusters,
where each cluster is a tuple of record ids
Keyword arguments:
blocks -- Sequence of tuples of records, where each tuple is a
set of records covered by a blocking predicate
threshold -- Number between 0 an... |
def pre_run_cell(self, cellno, code):
"""Executes before the user-entered code in `ipython` is run. This
intercepts loops and other problematic code that would produce lots of
database entries and streamlines it to produce only a single entry.
Args:
cellno (int): the cell nu... | Executes before the user-entered code in `ipython` is run. This
intercepts loops and other problematic code that would produce lots of
database entries and streamlines it to produce only a single entry.
Args:
cellno (int): the cell number that is about to be executed.
co... |
def _from_deprecated_string(cls, serialized):
"""
Return an instance of `cls` parsed from its deprecated `serialized` form.
This will be called only if :meth:`OpaqueKey.from_string` is unable to
parse a key out of `serialized`, and only if `set_deprecated_fallback` has
been call... | Return an instance of `cls` parsed from its deprecated `serialized` form.
This will be called only if :meth:`OpaqueKey.from_string` is unable to
parse a key out of `serialized`, and only if `set_deprecated_fallback` has
been called to register a fallback class.
Args:
cls: T... |
def remodel_run(self, c=None, **global_optargs):
"""
Passes a connection from the connection pool so that we can call .run()
on a query without an explicit connection
"""
if not c:
with remodel.connection.get_conn() as conn:
return run(self, conn, **global_optargs)
else:
... | Passes a connection from the connection pool so that we can call .run()
on a query without an explicit connection |
def check_variable_names(self, ds):
"""
Ensures all variables have a standard_name set.
"""
msgs = []
count = 0
for k, v in ds.variables.items():
if 'standard_name' in v.ncattrs():
count += 1
else:
msgs.append("Vari... | Ensures all variables have a standard_name set. |
def joinRes(lstPrfRes, varPar, idxPos, inFormat='1D'):
"""Join results from different processing units (here cores).
Parameters
----------
lstPrfRes : list
Output of results from parallelization.
varPar : integer, positive
Number of cores that were used during parallelization
id... | Join results from different processing units (here cores).
Parameters
----------
lstPrfRes : list
Output of results from parallelization.
varPar : integer, positive
Number of cores that were used during parallelization
idxPos : integer, positive
List position index that we e... |
def ASHRAE_k(ID):
r'''Returns thermal conductivity of a building or insulating material
from a table in [1]_. Thermal conductivity is independent of temperature
here. Many entries in the table are listed for varying densities, but the
appropriate ID from the table must be selected to account for that.
... | r'''Returns thermal conductivity of a building or insulating material
from a table in [1]_. Thermal conductivity is independent of temperature
here. Many entries in the table are listed for varying densities, but the
appropriate ID from the table must be selected to account for that.
Parameters
---... |
def add_localedir_translations(self, localedir):
"""Merge translations from localedir."""
global _localedirs
if localedir in self.localedirs:
return
self.localedirs.append(localedir)
full_localedir = os.path.join(localedir, 'locale')
if os.path.exists(full_loc... | Merge translations from localedir. |
def with_(self, *relations):
"""
Set the relationships that should be eager loaded.
:return: The current Builder instance
:rtype: Builder
"""
if not relations:
return self
eagers = self._parse_relations(list(relations))
self._eager_load.upda... | Set the relationships that should be eager loaded.
:return: The current Builder instance
:rtype: Builder |
def bridge_exists(br):
'''
Tests whether bridge exists as a real or fake bridge.
Returns:
True if Bridge exists, else False.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' openvswitch.bridge_exists br0
'''
cmd = 'ovs-vsctl br-exists {0}'.format... | Tests whether bridge exists as a real or fake bridge.
Returns:
True if Bridge exists, else False.
.. versionadded:: 2016.3.0
CLI Example:
.. code-block:: bash
salt '*' openvswitch.bridge_exists br0 |
def add_handler(self, message_type, handler):
"""Manage callbacks for message handlers."""
if message_type not in self._handlers:
self._handlers[message_type] = []
if handler not in self._handlers[message_type]:
self._handlers[message_type].append(handler) | Manage callbacks for message handlers. |
def register_resource(mod, view, **kwargs):
"""Register the resource on the resource name or a custom url"""
resource_name = view.__name__.lower()[:-8]
endpoint = kwargs.get('endpoint', "{}_api".format(resource_name))
plural_resource_name = inflect.engine().plural(resource_name)
path = kwargs.get('u... | Register the resource on the resource name or a custom url |
def _write_bin(self, data, stream, byte_order):
'''
Write data to a binary stream.
'''
(len_t, val_t) = self.list_dtype(byte_order)
data = _np.asarray(data, dtype=val_t).ravel()
_write_array(stream, _np.array(data.size, dtype=len_t))
_write_array(stream, data) | Write data to a binary stream. |
def _to_pypi(self, docs_base, release):
"""Upload to PyPI."""
url = None
with self._zipped(docs_base) as handle:
reply = requests.post(self.params['url'], auth=get_pypi_auth(), allow_redirects=False,
files=dict(content=(self.cfg.project.name + '.zip'... | Upload to PyPI. |
def create_BIP122_uri(
chain_id: str, resource_type: str, resource_identifier: str
) -> URI:
"""
See: https://github.com/bitcoin/bips/blob/master/bip-0122.mediawiki
"""
if resource_type != BLOCK:
raise ValueError("Invalid resource_type. Must be one of 'block'")
elif not is_block_or_tran... | See: https://github.com/bitcoin/bips/blob/master/bip-0122.mediawiki |
def story_node_add_arc_element_update_characters_locations(sender, instance, created, *args, **kwargs):
'''
If an arc element is added to a story element node, add any missing elements or locations.
'''
arc_node = ArcElementNode.objects.get(pk=instance.pk)
logger.debug('Scanning arc_node %s' % arc_n... | If an arc element is added to a story element node, add any missing elements or locations. |
def httprettified(test=None, allow_net_connect=True):
"""decorator for test functions
.. tip:: Also available under the alias :py:func:`httpretty.activate`
:param test: a callable
example usage with `nosetests <https://nose.readthedocs.io/en/latest/>`_
.. testcode::
import sure
f... | decorator for test functions
.. tip:: Also available under the alias :py:func:`httpretty.activate`
:param test: a callable
example usage with `nosetests <https://nose.readthedocs.io/en/latest/>`_
.. testcode::
import sure
from httpretty import httprettified
@httprettified
... |
def parse_questions(raw_page):
"""Parse a StackExchange API raw response.
The method parses the API response retrieving the
questions from the received items
:param items: items from where to parse the questions
:returns: a generator of questions
"""
raw_questi... | Parse a StackExchange API raw response.
The method parses the API response retrieving the
questions from the received items
:param items: items from where to parse the questions
:returns: a generator of questions |
def unroll_state_saver(input_layer, name, state_shapes, template, lengths=None):
"""Unrolls the given function with state taken from the state saver.
Args:
input_layer: The input sequence.
name: The name of this layer.
state_shapes: A list of shapes, one for each state variable.
template: A templat... | Unrolls the given function with state taken from the state saver.
Args:
input_layer: The input sequence.
name: The name of this layer.
state_shapes: A list of shapes, one for each state variable.
template: A template with unbound variables for input and states that
returns a RecurrentResult.
... |
def mito(args):
"""
%prog mito chrM.fa input.bam
Identify mitochondrial deletions.
"""
p = OptionParser(mito.__doc__)
p.set_aws_opts(store="hli-mv-data-science/htang/mito-deletions")
p.add_option("--realignonly", default=False, action="store_true",
help="Realign only")
... | %prog mito chrM.fa input.bam
Identify mitochondrial deletions. |
def getMeta(self, uri):
"""Return meta information about an action. Cache the result as specified by the server"""
action = urlparse(uri).path
mediaKey = self.cacheKey + '_meta_' + action
mediaKey = mediaKey.replace(' ', '__')
meta = cache.get(mediaKey, None)
# Nothin... | Return meta information about an action. Cache the result as specified by the server |
def get_api_root_view(self, api_urls=None):
"""
Return a basic root view.
"""
api_root_dict = OrderedDict()
list_name = self.routes[0].name
for prefix, viewset, basename in self.registry:
api_root_dict[prefix] = list_name.format(basename=basename)
cla... | Return a basic root view. |
def should_skip(filename, config, path='/'):
"""Returns True if the file should be skipped based on the passed in settings."""
for skip_path in config['skip']:
if posixpath.abspath(posixpath.join(path, filename)) == posixpath.abspath(skip_path.replace('\\', '/')):
return True
position =... | Returns True if the file should be skipped based on the passed in settings. |
def p_expr_list_assign(p):
'expr : LIST LPAREN assignment_list RPAREN EQUALS expr'
p[0] = ast.ListAssignment(p[3], p[6], lineno=p.lineno(1)) | expr : LIST LPAREN assignment_list RPAREN EQUALS expr |
def get_parent_id(chebi_id):
'''Returns parent id'''
if len(__PARENT_IDS) == 0:
__parse_compounds()
return __PARENT_IDS[chebi_id] if chebi_id in __PARENT_IDS else float('NaN') | Returns parent id |
def libvlc_video_get_size(p_mi, num):
'''Get the pixel dimensions of a video.
@param p_mi: media player.
@param num: number of the video (starting from, and most commonly 0).
@return: px pixel width, py pixel height.
'''
f = _Cfunctions.get('libvlc_video_get_size', None) or \
_Cfunction(... | Get the pixel dimensions of a video.
@param p_mi: media player.
@param num: number of the video (starting from, and most commonly 0).
@return: px pixel width, py pixel height. |
def option(self, key, value=None, **kwargs):
"""Creates a new option inside a section
Args:
key (str): key of the option
value (str or None): value of the option
**kwargs: are passed to the constructor of :class:`Option`
Returns:
self for chainin... | Creates a new option inside a section
Args:
key (str): key of the option
value (str or None): value of the option
**kwargs: are passed to the constructor of :class:`Option`
Returns:
self for chaining |
def load_indexes(self):
"""Add the proper indexes to the scout instance.
All indexes are specified in scout/constants/indexes.py
If this method is utilised when new indexes are defined those should be added
"""
for collection_name in INDEXES:
existing_indexes = sel... | Add the proper indexes to the scout instance.
All indexes are specified in scout/constants/indexes.py
If this method is utilised when new indexes are defined those should be added |
def _stripe_object_to_subscription_items(cls, target_cls, data, subscription):
"""
Retrieves SubscriptionItems for a subscription.
If the subscription item doesn't exist already then it is created.
:param target_cls: The target class to instantiate per invoice item.
:type target_cls: ``SubscriptionItem``
... | Retrieves SubscriptionItems for a subscription.
If the subscription item doesn't exist already then it is created.
:param target_cls: The target class to instantiate per invoice item.
:type target_cls: ``SubscriptionItem``
:param data: The data dictionary received from the Stripe API.
:type data: dict
:pa... |
def get_input_list(self):
"""
Description:
Get input list
Returns an ordered list of all available input keys and names
"""
inputs = [' '] * len(self.command['input'])
for key in self.command['input']:
inputs[self.command['input'][key]['order... | Description:
Get input list
Returns an ordered list of all available input keys and names |
def support_in_progress_warcs():
'''
Monkey-patch pywb.warc.pathresolvers.PrefixResolver to include warcs still
being written to (warcs having ".open" suffix). This way if a cdx entry
references foo.warc.gz, pywb will try both foo.warc.gz and
foo.warc.gz.open.
'''
_orig_prefix_resolver_call ... | Monkey-patch pywb.warc.pathresolvers.PrefixResolver to include warcs still
being written to (warcs having ".open" suffix). This way if a cdx entry
references foo.warc.gz, pywb will try both foo.warc.gz and
foo.warc.gz.open. |
def get_package(self):
"""Get the URL or sandbox to release.
"""
directory = self.directory
develop = self.develop
scmtype = self.scmtype
self.scm = self.scms.get_scm(scmtype, directory)
if self.scm.is_valid_url(directory):
directory = self.urlparser... | Get the URL or sandbox to release. |
def nvmlDeviceGetTemperature(handle, sensor):
r"""
/**
* Retrieves the current temperature readings for the device, in degrees C.
*
* For all products.
*
* See \ref nvmlTemperatureSensors_t for details on available temperature sensors.
*
* @param device ... | r"""
/**
* Retrieves the current temperature readings for the device, in degrees C.
*
* For all products.
*
* See \ref nvmlTemperatureSensors_t for details on available temperature sensors.
*
* @param device The identifier of the target device
* ... |
def rangeChange(self, pw, ranges):
"""Adjusts the stimulus signal to keep it at the top of a plot,
after any ajustment to the axes ranges takes place.
This is a slot for the undocumented pyqtgraph signal sigRangeChanged.
From what I can tell the arguments are:
:param pw: refere... | Adjusts the stimulus signal to keep it at the top of a plot,
after any ajustment to the axes ranges takes place.
This is a slot for the undocumented pyqtgraph signal sigRangeChanged.
From what I can tell the arguments are:
:param pw: reference to the emitting object (plot widget in my ... |
def rollback(self):
"""Undoes the uninstall by moving stashed files back."""
for p in self._moves:
logging.info("Moving to %s\n from %s", *p)
for new_path, path in self._moves:
try:
logger.debug('Replacing %s from %s', new_path, path)
if o... | Undoes the uninstall by moving stashed files back. |
def add_user_js(self, js_list):
""" Adds supplementary user javascript files to the presentation. The
``js_list`` arg can be either a ``list`` or a string.
"""
if isinstance(js_list, string_types):
js_list = [js_list]
for js_path in js_list:
if js_path... | Adds supplementary user javascript files to the presentation. The
``js_list`` arg can be either a ``list`` or a string. |
def install(shell=None, prog_name=None, env_name=None, path=None, append=None, extra_env=None):
"""Install the completion
Parameters
----------
shell : Shell
The shell type targeted. It will be guessed with get_auto_shell() if the value is None (Default value = None)
prog_name : str
... | Install the completion
Parameters
----------
shell : Shell
The shell type targeted. It will be guessed with get_auto_shell() if the value is None (Default value = None)
prog_name : str
The program name on the command line. It will be automatically computed if the value is None
(... |
def _get_bnl(self, C_AMP, vs30):
"""
Gets the nonlinear term, given by equation 8 of Atkinson & Boore 2006
"""
# Default case 8d
bnl = np.zeros_like(vs30)
if np.all(vs30 >= self.CONSTS["Vref"]):
return bnl
# Case 8a
bnl[vs30 < self.CONSTS["v1"]... | Gets the nonlinear term, given by equation 8 of Atkinson & Boore 2006 |
def from_ros_pose_msg(pose_msg,
from_frame='unassigned',
to_frame='world'):
"""Creates a RigidTransform from a ROS pose msg.
Parameters
----------
pose_msg : :obj:`geometry_msgs.msg.Pose`
ROS pose message
"... | Creates a RigidTransform from a ROS pose msg.
Parameters
----------
pose_msg : :obj:`geometry_msgs.msg.Pose`
ROS pose message |
def parse_nem_file(nem_file) -> NEMFile:
""" Parse NEM file and return meter readings named tuple """
reader = csv.reader(nem_file, delimiter=',')
return parse_nem_rows(reader, file_name=nem_file) | Parse NEM file and return meter readings named tuple |
def _report_problem(self, problem, level=logging.ERROR):
'''Report a given problem'''
problem = self.basename + ': ' + problem
if self._logger.isEnabledFor(level):
self._problematic = True
if self._check_raises:
raise DapInvalid(problem)
self._logger.log(l... | Report a given problem |
def read_namespaced_pod_preset(self, name, namespace, **kwargs): # noqa: E501
"""read_namespaced_pod_preset # noqa: E501
read the specified PodPreset # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True... | read_namespaced_pod_preset # noqa: E501
read the specified PodPreset # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.read_namespaced_pod_preset(name, namespace, async_req=True)
... |
def wait_for_edge(channel, trigger, timeout=-1):
"""
This function is designed to block execution of your program until an edge
is detected.
:param channel: the channel based on the numbering system you have specified
(:py:attr:`GPIO.BOARD`, :py:attr:`GPIO.BCM` or :py:attr:`GPIO.SUNXI`).
:p... | This function is designed to block execution of your program until an edge
is detected.
:param channel: the channel based on the numbering system you have specified
(:py:attr:`GPIO.BOARD`, :py:attr:`GPIO.BCM` or :py:attr:`GPIO.SUNXI`).
:param trigger: The event to detect, one of: :py:attr:`GPIO.RIS... |
def list_of_list(self):
"""
This will convert the data from a list of dict to a list of list
:return: list of dict
"""
ret = [[row.get(key, '') for key in self._col_names] for row in self]
return ReprListList(ret, col_names=self._col_names,
col... | This will convert the data from a list of dict to a list of list
:return: list of dict |
def transform(row, table):
'Transform row "link" into full URL and add "state" based on "name"'
data = row._asdict()
data["link"] = urljoin("https://pt.wikipedia.org", data["link"])
data["name"], data["state"] = regexp_city_state.findall(data["name"])[0]
return data | Transform row "link" into full URL and add "state" based on "name" |
def _create_file_if_needed(self):
"""Create an empty file if necessary.
This method will not initialize the file. Instead it implements a
simple version of "touch" to ensure the file has been created.
"""
if not os.path.exists(self._filename):
old_umask = os.umask(0o... | Create an empty file if necessary.
This method will not initialize the file. Instead it implements a
simple version of "touch" to ensure the file has been created. |
def push_stream(cache, user_id, stream):
"""
Push a stream onto the stream stack in cache.
:param cache: werkzeug BasicCache-like object
:param user_id: id of user, used as key in cache
:param stream: stream object to push onto stack
:return: True on successful update,
False if fa... | Push a stream onto the stream stack in cache.
:param cache: werkzeug BasicCache-like object
:param user_id: id of user, used as key in cache
:param stream: stream object to push onto stack
:return: True on successful update,
False if failed to update,
None if invalid input wa... |
def cancel_ride(self, ride_id, cancel_confirmation_token=None):
"""Cancel an ongoing ride on behalf of a user.
Params
ride_id (str)
The unique ID of the Ride Request.
cancel_confirmation_token (str)
Optional string containing the cancellation confi... | Cancel an ongoing ride on behalf of a user.
Params
ride_id (str)
The unique ID of the Ride Request.
cancel_confirmation_token (str)
Optional string containing the cancellation confirmation token.
Returns
(Response)
A Res... |
def _initialize_from_dict(self, data):
"""
Loads serializer from a request object
"""
self._json = data
self._validate()
for name, value in self._json.items():
if name in self._properties:
if '$ref' in self._properties[name]:
... | Loads serializer from a request object |
def create_serving_logger() -> Logger:
"""Create a logger for serving.
This creates a logger named quart.serving.
"""
logger = getLogger('quart.serving')
if logger.level == NOTSET:
logger.setLevel(INFO)
logger.addHandler(serving_handler)
return logger | Create a logger for serving.
This creates a logger named quart.serving. |
def listen_for_events():
"""Pubsub event listener
Listen for events in the pubsub bus and calls the process function
when somebody comes to play.
"""
import_event_modules()
conn = redis_connection.get_connection()
pubsub = conn.pubsub()
pubsub.subscribe("eventlib")
for message in pu... | Pubsub event listener
Listen for events in the pubsub bus and calls the process function
when somebody comes to play. |
def get_content_hashes(image_path,
level=None,
regexp=None,
include_files=None,
tag_root=True,
level_filter=None,
skip_files=None,
version=None,
... | get_content_hashes is like get_image_hash, but it returns a complete dictionary
of file names (keys) and their respective hashes (values). This function is intended
for more research purposes and was used to generate the levels in the first place.
If include_sizes is True, we include a second data structur... |
def inverse_mod( a, m ):
"""Inverse of a mod m."""
if a < 0 or m <= a: a = a % m
# From Ferguson and Schneier, roughly:
c, d = a, m
uc, vc, ud, vd = 1, 0, 0, 1
while c != 0:
q, c, d = divmod( d, c ) + ( c, )
uc, vc, ud, vd = ud - q*uc, vd - q*vc, uc, vc
# At this point, d is the GCD, and ud*a+... | Inverse of a mod m. |
def fasta(self):
"""
Create FASTA files of the PointFinder results to be fed into PointFinder
"""
logging.info('Extracting FASTA sequences matching PointFinder database')
for sample in self.runmetadata.samples:
# Ensure that there are sequence data to extract from the... | Create FASTA files of the PointFinder results to be fed into PointFinder |
async def addRelation(self, endpoint1, endpoint2):
"""
:param endpoint1 string:
:param endpoint2 string:
Endpoint1 and Endpoint2 hold relation endpoints in the
"application:interface" form, where the application is always a
placeholder pointing to an applicati... | :param endpoint1 string:
:param endpoint2 string:
Endpoint1 and Endpoint2 hold relation endpoints in the
"application:interface" form, where the application is always a
placeholder pointing to an application change, and the interface is
optional. Examples are "$de... |
def transformer_wikitext103_l4k_memory_v0():
"""HParams for training languagemodel_wikitext103_l4k with memory."""
hparams = transformer_wikitext103_l4k_v0()
hparams.split_targets_chunk_length = 64
hparams.split_targets_max_chunks = 64
hparams.split_targets_strided_training = True
hparams.add_hparam("memor... | HParams for training languagemodel_wikitext103_l4k with memory. |
def parsed_stack(self):
"""The parsed_stack property.
Returns:
(list). the property value. (defaults to: [])
"""
if 'parsedStack' in self._values:
return self._values['parsedStack']
self._values['parsedStack'] = copy.deepcopy(self._defaults['parse... | The parsed_stack property.
Returns:
(list). the property value. (defaults to: []) |
def nucmer(args):
"""
%prog nucmer ref.fasta query.fasta
Run NUCMER using query against reference. Parallel implementation derived
from: <https://github.com/fritzsedlazeck/sge_mummer>
"""
from itertools import product
from jcvi.apps.grid import MakeManager
from jcvi.formats.base import... | %prog nucmer ref.fasta query.fasta
Run NUCMER using query against reference. Parallel implementation derived
from: <https://github.com/fritzsedlazeck/sge_mummer> |
def _metrics_get_endpoints(options):
""" Determine the start and end dates based on user-supplied options. """
if bool(options.start) ^ bool(options.end):
log.error('--start and --end must be specified together')
sys.exit(1)
if options.start and options.end:
start = options.start
... | Determine the start and end dates based on user-supplied options. |
def import_cert(name,
cert_format=_DEFAULT_FORMAT,
context=_DEFAULT_CONTEXT,
store=_DEFAULT_STORE,
exportable=True,
password='',
saltenv='base'):
'''
Import the certificate file into the given certificate store.
... | Import the certificate file into the given certificate store.
:param str name: The path of the certificate file to import.
:param str cert_format: The certificate format. Specify 'cer' for X.509, or
'pfx' for PKCS #12.
:param str context: The name of the certificate store location context.
:par... |
def fit(
self,
df,
duration_col=None,
event_col=None,
show_progress=False,
initial_point=None,
strata=None,
step_size=None,
weights_col=None,
cluster_col=None,
robust=False,
batch_mode=None,
):
"""
Fit th... | Fit the Cox proportional hazard model to a dataset.
Parameters
----------
df: DataFrame
a Pandas DataFrame with necessary columns `duration_col` and
`event_col` (see below), covariates columns, and special columns (weights, strata).
`duration_col` refers to
... |
def _user_yes_no_query(self, question):
""" Helper asking if the user want to download the file
Note:
Dowloading huge file can take a while
"""
sys.stdout.write('%s [y/n]\n' % question)
while True:
try:
return strtobool(raw_input().lower(... | Helper asking if the user want to download the file
Note:
Dowloading huge file can take a while |
def advise(self, name, f, *a, **kw):
"""
Add an advice that will be handled later by the handle method.
Arguments:
name
The name of the advice group
f
A callable method or function.
The rest of the arguments will be passed as arguments and
... | Add an advice that will be handled later by the handle method.
Arguments:
name
The name of the advice group
f
A callable method or function.
The rest of the arguments will be passed as arguments and
keyword arguments to f when it's invoked. |
def rpc_call(payload):
"""Simple Flask implementation for making asynchronous Rpc calls. """
# Send the request and store the requests Unique ID.
corr_id = RPC_CLIENT.send_request(payload)
# Wait until we have received a response.
while RPC_CLIENT.queue[corr_id] is None:
sleep(0.1)
# ... | Simple Flask implementation for making asynchronous Rpc calls. |
def sparse_var(X):
'''
Compute variance from
:param X:
:return:
'''
Xc = X.copy()
Xc.data **= 2
return np.array(Xc.mean(axis=0) - np.power(X.mean(axis=0), 2))[0] | Compute variance from
:param X:
:return: |
def _create_state_data(self, context, resp_args, relay_state):
"""
Returns a dict containing the state needed in the response flow.
:type context: satosa.context.Context
:type resp_args: dict[str, str | saml2.samlp.NameIDPolicy]
:type relay_state: str
:rtype: dict[str, d... | Returns a dict containing the state needed in the response flow.
:type context: satosa.context.Context
:type resp_args: dict[str, str | saml2.samlp.NameIDPolicy]
:type relay_state: str
:rtype: dict[str, dict[str, str] | str]
:param context: The current context
:param re... |
def Parse(self, parser_mediator):
"""Parsers the file entry and extracts event objects.
Args:
parser_mediator (ParserMediator): a parser mediator.
Raises:
UnableToParseFile: when the file cannot be parsed.
"""
file_entry = parser_mediator.GetFileEntry()
if not file_entry:
rai... | Parsers the file entry and extracts event objects.
Args:
parser_mediator (ParserMediator): a parser mediator.
Raises:
UnableToParseFile: when the file cannot be parsed. |
def get_variable_groups(self, project, group_name=None, action_filter=None, top=None, continuation_token=None, query_order=None):
"""GetVariableGroups.
[Preview API] Get variable groups.
:param str project: Project ID or project name
:param str group_name: Name of variable group.
... | GetVariableGroups.
[Preview API] Get variable groups.
:param str project: Project ID or project name
:param str group_name: Name of variable group.
:param str action_filter: Action filter for the variable group. It specifies the action which can be performed on the variable groups.
... |
def to_output(self, value):
    """Convert a list value to the process output format.

    Each element is converted via the inner type's ``to_output`` and the
    converted values are collected under this field's name.
    """
    converted = []
    for item in value:
        converted.append(self.inner.to_output(item)[self.name])
    return {self.name: converted}
def comb(delay, tau=inf):
"""
Feedback comb filter for a given time constant (and delay).
``y[n] = x[n] + alpha * y[n - delay]``
Parameters
----------
delay :
Feedback delay (lag), in number of samples.
tau :
Time decay (up to ``1/e``, or -8.686 dB), in number of samples, which
allows find... | Feedback comb filter for a given time constant (and delay).
``y[n] = x[n] + alpha * y[n - delay]``
Parameters
----------
delay :
Feedback delay (lag), in number of samples.
tau :
Time decay (up to ``1/e``, or -8.686 dB), in number of samples, which
allows finding ``alpha = e ** (-delay / tau)`... |
def update_hslice(self, blob):
"""Update the Hamiltonian slice proposal scale based
on the relative amount of time spent moving vs reflecting."""
nmove, nreflect = blob['nmove'], blob['nreflect']
ncontract = blob.get('ncontract', 0)
fmove = (1. * nmove) / (nmove + nreflect + nco... | Update the Hamiltonian slice proposal scale based
on the relative amount of time spent moving vs reflecting. |
def get_graphics(vm_, **kwargs):
'''
Returns the information on vnc for a given vm
:param vm_: name of the domain
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadde... | Returns the information on vnc for a given vm
:param vm_: name of the domain
:param connection: libvirt connection URI, overriding defaults
.. versionadded:: 2019.2.0
:param username: username to connect with, overriding defaults
.. versionadded:: 2019.2.0
:param password: password to... |
def parameterized_expectations(model, verbose=False, initial_dr=None,
pert_order=1, with_complementarities=True,
grid={}, distribution={},
maxit=100, tol=1e-8, inner_maxit=10,
direct=False):
... | Find global solution for ``model`` via parameterized expectations.
Controls must be expressed as a direct function of equilibrium objects.
Algorithm iterates over the expectations function in the arbitrage equation.
Parameters:
----------
model : NumericModel
``dtcscc`` model to be solved
... |
def in_repo(self, filepath):
"""
This excludes repository directories because they cause some exceptions
occasionally.
"""
filepath = set(filepath.replace('\\', '/').split('/'))
for p in ('.git', '.hg', '.svn', '.cvs', '.bzr'):
if p in filepath:
... | This excludes repository directories because they cause some exceptions
occasionally. |
def def_linear(fun):
    """Flags that a function is linear wrt all args."""
    def linear_jvp(argnum, g, ans, args, kwargs):
        # For a linear map, the JVP in any slot is the map applied to the
        # tangent substituted into that slot.
        return fun(*subval(args, argnum, g), **kwargs)
    defjvp_argnum(fun, linear_jvp)
def start_semester_view(request):
"""
Initiates a semester's worth of workshift, with the option to copy
workshift types from the previous semester.
"""
page_name = "Start Semester"
year, season = utils.get_year_season()
start_date, end_date = utils.get_semester_start_end(year, season)
... | Initiates a semester's worth of workshift, with the option to copy
workshift types from the previous semester. |
def join(*paths):
r"""
Wrapper around os.path.join that works with Windows drive letters.
>>> join('d:\\foo', '\\bar')
'd:\\bar'
"""
paths_with_drives = map(os.path.splitdrive, paths)
drives, paths = zip(*paths_with_drives)
# the drive we care about is the last one in the list
drive = next(filter(None, revers... | r"""
Wrapper around os.path.join that works with Windows drive letters.
>>> join('d:\\foo', '\\bar')
'd:\\bar' |
def get_paths_cfg(
sys_file='pythran.cfg',
platform_file='pythran-{}.cfg'.format(sys.platform),
user_file='.pythranrc'
):
"""
>>> os.environ['HOME'] = '/tmp/test'
>>> get_paths_cfg()['user']
'/tmp/test/.pythranrc'
>>> os.environ['HOME'] = '/tmp/test'
>>> os.environ['XDG_CONFIG_HOME']... | >>> os.environ['HOME'] = '/tmp/test'
>>> get_paths_cfg()['user']
'/tmp/test/.pythranrc'
>>> os.environ['HOME'] = '/tmp/test'
>>> os.environ['XDG_CONFIG_HOME'] = '/tmp/test2'
>>> get_paths_cfg()['user']
'/tmp/test2/.pythranrc'
>>> os.environ['HOME'] = '/tmp/test'
>>> os.environ['XDG_CONFI... |
def _get(self, end_point, params=None, **kwargs):
"""Send a HTTP GET request to a Todoist API end-point.
:param end_point: The Todoist API end-point.
:type end_point: str
:param params: The required request parameters.
:type params: dict
:param kwargs: Any optional param... | Send a HTTP GET request to a Todoist API end-point.
:param end_point: The Todoist API end-point.
:type end_point: str
:param params: The required request parameters.
:type params: dict
:param kwargs: Any optional parameters.
:type kwargs: dict
:return: The HTTP r... |
def transform_annotation(self, ann, duration):
'''Apply the structure agreement transformation.
Parameters
----------
ann : jams.Annotation
The segment annotation
duration : number > 0
The target duration
Returns
-------
data : d... | Apply the structure agreement transformation.
Parameters
----------
ann : jams.Annotation
The segment annotation
duration : number > 0
The target duration
Returns
-------
data : dict
data['agree'] : np.ndarray, shape=(n, n), ... |
def get_training_image_text_data_iters(source_root: str,
source: str, target: str,
validation_source_root: str,
validation_source: str, validation_target: str,
voca... | Returns data iterators for training and validation data.
:param source_root: Path to source images since the file in source contains relative paths.
:param source: Path to source training data.
:param target: Path to target training data.
:param validation_source_root: Path to validation source images ... |
def detect_worksheets(archive):
"""Return a list of worksheets"""
# content types has a list of paths but no titles
# workbook has a list of titles and relIds but no paths
# workbook_rels has a list of relIds and paths but no titles
# rels = {'id':{'title':'', 'path':''} }
content_types = read_c... | Return a list of worksheets |
def encrypt_files(selected_host, only_link, file_name):
"""
Encrypts file with gpg and random generated password
"""
if ENCRYPTION_DISABLED:
print('For encryption please install gpg')
exit()
passphrase = '%030x' % random.randrange(16**30)
source_filename = file_name
cmd = 'gp... | Encrypts file with gpg and random generated password |
def _fetch_seq_ensembl(ac, start_i=None, end_i=None):
"""Fetch the specified sequence slice from Ensembl using the public
REST interface.
An interbase interval may be optionally provided with start_i and
end_i. However, the Ensembl REST interface does not currently
accept intervals, so the entire s... | Fetch the specified sequence slice from Ensembl using the public
REST interface.
An interbase interval may be optionally provided with start_i and
end_i. However, the Ensembl REST interface does not currently
accept intervals, so the entire sequence is returned and sliced
locally.
>> len(_fetc... |
def set_locked_variable(self, key, access_key, value):
"""Set an already locked global variable
:param key: the key of the global variable to be set
:param access_key: the access key to the already locked global variable
:param value: the new value of the global variable
"""
... | Set an already locked global variable
:param key: the key of the global variable to be set
:param access_key: the access key to the already locked global variable
:param value: the new value of the global variable |
def register_task_with_maintenance_window(WindowId=None, Targets=None, TaskArn=None, ServiceRoleArn=None, TaskType=None, TaskParameters=None, Priority=None, MaxConcurrency=None, MaxErrors=None, LoggingInfo=None, ClientToken=None):
"""
Adds a new task to a Maintenance Window.
See also: AWS API Documentation
... | Adds a new task to a Maintenance Window.
See also: AWS API Documentation
:example: response = client.register_task_with_maintenance_window(
WindowId='string',
Targets=[
{
'Key': 'string',
'Values': [
'string',
... |
def init_db(sqlalchemy_url):
    """
    Initialize the database with the gsshapy tables.

    :param sqlalchemy_url: SQLAlchemy connection URL for the target database
    :return: seconds spent creating the tables
    """
    engine = create_engine(sqlalchemy_url)
    started = time.time()
    metadata.create_all(engine)
    elapsed = time.time() - started
    return elapsed
def invoke(self, results):
    """
    Handles invocation of the component. The default implementation invokes
    it with positional arguments based on order of dependency declaration.
    """
    positional = []
    for dep in self.deps:
        positional.append(results.get(dep))
    return self.component(*positional)
it with positional arguments based on order of dependency declaration. |
def bootstrap_noise(data, func, n=10000, std=1, symmetric=True):
"""
Bootstrap by adding noise
"""
boot_dist = []
arr = N.zeros(data.shape)
for i in range(n):
if symmetric:
# Noise on all three axes
arr = N.random.randn(*data.shape)*std
else:
#... | Bootstrap by adding noise |
def _get_total_services_problems_unhandled(self):
"""Get the number of services that are a problem and that are not acknowledged
:return: number of problem services which are not acknowledged
:rtype: int
"""
return sum(1 for s in self.services if s.is_problem and not s.problem_h... | Get the number of services that are a problem and that are not acknowledged
:return: number of problem services which are not acknowledged
:rtype: int |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.