code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def visited(self):
"""Called just after this node has been visited (with or
without a build)."""
try:
binfo = self.binfo
except AttributeError:
# Apparently this node doesn't need build info, so
# don't bother calculating or storing it.
pas... | Called just after this node has been visited (with or
without a build). |
def save_csv(p, sheet):
'Save as single CSV file, handling column names as first line.'
with p.open_text(mode='w') as fp:
cw = csv.writer(fp, **csvoptions())
colnames = [col.name for col in sheet.visibleCols]
if ''.join(colnames):
cw.writerow(colnames)
for r in Progre... | Save as single CSV file, handling column names as first line. |
def main():
"""The main function of the script"""
desc = 'Benchmark the files generated by generate.py'
parser = argparse.ArgumentParser(description=desc)
parser.add_argument(
'--src',
dest='src_dir',
default='generated',
help='The directory containing the sources to benc... | The main function of the script |
def cat(src_filename, dst_file):
"""Copies the contents of the indicated file to an already opened file."""
(dev, dev_filename) = get_dev_and_path(src_filename)
if dev is None:
with open(dev_filename, 'rb') as txtfile:
for line in txtfile:
dst_file.write(line)
else:
... | Copies the contents of the indicated file to an already opened file. |
def _create_messages(self, names, data, isDms=False):
"""
Creates object of arrays of messages from each json file specified by the names or ids
:param [str] names: names of each group of messages
:param [object] data: array of objects detailing where to get the messages from in
... | Creates object of arrays of messages from each json file specified by the names or ids
:param [str] names: names of each group of messages
:param [object] data: array of objects detailing where to get the messages from in
the directory structure
:param bool isDms: boolean value used t... |
def _settle_message(self, message_number, response):
"""Send a settle dispostition for a received message.
:param message_number: The delivery number of the message
to settle.
:type message_number: int
:response: The type of disposition to respond with, e.g. whether
th... | Send a settle dispostition for a received message.
:param message_number: The delivery number of the message
to settle.
:type message_number: int
:response: The type of disposition to respond with, e.g. whether
the message was accepted, rejected or abandoned.
:type res... |
def positions(self, word):
"""
Returns a list of positions where the word can be hyphenated.
See also Hyph_dict.positions. The points that are too far to
the left or right are removed.
"""
right = len(word) - self.right
return [i for i in self.hd.positions(word) i... | Returns a list of positions where the word can be hyphenated.
See also Hyph_dict.positions. The points that are too far to
the left or right are removed. |
def setup(self, config_file=None, aws_config=None, gpg_config=None,
decrypt_gpg=True, decrypt_kms=True):
"""Make setup easier by providing a constructor method.
Move to config_file
File can be located with a filename only, relative path, or absolute path.
If only name or r... | Make setup easier by providing a constructor method.
Move to config_file
File can be located with a filename only, relative path, or absolute path.
If only name or relative path is provided, look in this order:
1. current directory
2. `~/.config/<file_name>`
3. `/etc/<f... |
def delete(gandi, resource, background, force):
"""Delete one or more IPs (after detaching them from VMs if necessary).
resource can be an ip id or ip.
"""
resource = sorted(tuple(set(resource)))
possible_resources = gandi.ip.resource_list()
# check that each IP can be deleted
for item in ... | Delete one or more IPs (after detaching them from VMs if necessary).
resource can be an ip id or ip. |
def dataframe(self, force_refresh=False):
"""A pandas dataframe with lots of interesting results about this object.
Created by calling SageMaker List and Describe APIs and converting them into
a convenient tabular summary.
Args:
force_refresh (bool): Set to True to fetch the... | A pandas dataframe with lots of interesting results about this object.
Created by calling SageMaker List and Describe APIs and converting them into
a convenient tabular summary.
Args:
force_refresh (bool): Set to True to fetch the latest data from SageMaker API. |
def require(method):
"""
Decorator for managing chained dependencies of different class
properties. The @require decorator allows developers to specify
that a function call must be operated on before another property
or function call is accessed, so that data and processing for an
entire class c... | Decorator for managing chained dependencies of different class
properties. The @require decorator allows developers to specify
that a function call must be operated on before another property
or function call is accessed, so that data and processing for an
entire class can be evaluated in a lazy way (i.... |
def get_python_logger():
"""Returns logger to receive Python messages (as opposed to Fortran).
At first call, _python_logger is created. At subsequent calls, _python_logger is returned.
Therefore, if you want to change `a99.flag_log_file` or `a99.flag_log_console`, do so
before calling get_pytho... | Returns logger to receive Python messages (as opposed to Fortran).
At first call, _python_logger is created. At subsequent calls, _python_logger is returned.
Therefore, if you want to change `a99.flag_log_file` or `a99.flag_log_console`, do so
before calling get_python_logger(), otherwise these chang... |
def generate_main_h(directory, xml):
'''generate main header per XML file'''
f = open(os.path.join(directory, xml.basename + ".h"), mode='w')
t.write(f, '''
/** @file
* @brief MAVLink comm protocol generated from ${basename}.xml
* @see http://mavlink.org
*/
#pragma once
#ifndef MAVLINK_${basename_upper}_... | generate main header per XML file |
def module_path(name, path):
# type: (AModuleName, AModulePath) -> ADefine
"""Load an external malcolm module (e.g. ADCore/etc/malcolm)"""
define = Define(name, path)
assert os.path.isdir(path), "%r doesn't exist" % path
name = "malcolm.modules.%s" % name
import_package_from_path(name, path)
... | Load an external malcolm module (e.g. ADCore/etc/malcolm) |
def _add_vertex_attributes(self, genes: List[Gene],
disease_associations: Optional[dict] = None) -> None:
"""Add attributes to vertices.
:param genes: A list of genes containing attribute information.
"""
self._set_default_vertex_attributes()
self.... | Add attributes to vertices.
:param genes: A list of genes containing attribute information. |
def _blocks_to_samples(sig_data, n_samp, fmt):
"""
Convert uint8 blocks into signal samples for unaligned dat formats.
Parameters
----------
sig_data : numpy array
The uint8 data blocks.
n_samp : int
The number of samples contained in the bytes
Returns
-------
signa... | Convert uint8 blocks into signal samples for unaligned dat formats.
Parameters
----------
sig_data : numpy array
The uint8 data blocks.
n_samp : int
The number of samples contained in the bytes
Returns
-------
signal : numpy array
The numpy array of digital samples |
def get_xml_parser(encoding=None):
"""Returns an ``etree.ETCompatXMLParser`` instance."""
parser = etree.ETCompatXMLParser(
huge_tree=True,
remove_comments=True,
strip_cdata=False,
remove_blank_text=True,
resolve_entities=False,
encoding=encoding
)
return... | Returns an ``etree.ETCompatXMLParser`` instance. |
def _set_sample_rate_cpu(self, v, load=False):
"""
Setter method for sample_rate_cpu, mapped from YANG variable /resource_monitor/cpu/sample_rate_cpu (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_sample_rate_cpu is considered as a private
method. Backends ... | Setter method for sample_rate_cpu, mapped from YANG variable /resource_monitor/cpu/sample_rate_cpu (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_sample_rate_cpu is considered as a private
method. Backends looking to populate this variable should
do so via call... |
def delete_feature_base(dbpath, set_object, name):
"""
Generic function which deletes a feature from a database
Parameters
----------
dbpath : string, path to SQLite database file
set_object : object (either TestSet or TrainSet) which is stored in the database
name : string, name of the fea... | Generic function which deletes a feature from a database
Parameters
----------
dbpath : string, path to SQLite database file
set_object : object (either TestSet or TrainSet) which is stored in the database
name : string, name of the feature to be deleted
Returns
-------
None |
def execute_process_synchronously_or_raise(self, execute_process_request, name, labels=None):
"""Execute process synchronously, and throw if the return code is not 0.
See execute_process_synchronously for the api docs.
"""
fallible_result = self.execute_process_synchronously_without_raising(execute_pro... | Execute process synchronously, and throw if the return code is not 0.
See execute_process_synchronously for the api docs. |
def bz2_pack(source):
"""
Returns 'source' as a bzip2-compressed, self-extracting python script.
.. note::
This method uses up more space than the zip_pack method but it has the
advantage in that the resulting .py file can still be imported into a
python program.
"""
import... | Returns 'source' as a bzip2-compressed, self-extracting python script.
.. note::
This method uses up more space than the zip_pack method but it has the
advantage in that the resulting .py file can still be imported into a
python program. |
def _set_overlay_gateway(self, v, load=False):
"""
Setter method for overlay_gateway, mapped from YANG variable /overlay_gateway (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_overlay_gateway is considered as a private
method. Backends looking to populate thi... | Setter method for overlay_gateway, mapped from YANG variable /overlay_gateway (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_overlay_gateway is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_overla... |
def merge_odd_even_csu_configurations(conf_odd, conf_even):
"""Merge CSU configuration using odd- and even-numbered values.
The CSU returned CSU configuration include the odd-numbered values
from 'conf_odd' and the even-numbered values from 'conf_even'.
Parameters
----------
conf_odd : CsuConf... | Merge CSU configuration using odd- and even-numbered values.
The CSU returned CSU configuration include the odd-numbered values
from 'conf_odd' and the even-numbered values from 'conf_even'.
Parameters
----------
conf_odd : CsuConfiguration instance
CSU configuration corresponding to odd-n... |
def get_station_year_text(WMO, WBAN, year):
'''Basic method to download data from the GSOD database, given a
station identifier and year.
Parameters
----------
WMO : int or None
World Meteorological Organization (WMO) identifiers, [-]
WBAN : int or None
Weather Bureau Army Na... | Basic method to download data from the GSOD database, given a
station identifier and year.
Parameters
----------
WMO : int or None
World Meteorological Organization (WMO) identifiers, [-]
WBAN : int or None
Weather Bureau Army Navy (WBAN) weather station identifier, [-]
year ... |
def default(self, o):
"""Default encoder.
:param o: Atom or Bond instance.
:type o: :class:`~ctfile.ctfile.Atom` or :class:`~ctfile.ctfile.Bond`.
:return: Dictionary that contains information required for atom and bond block of ``Ctab``.
:rtype: :py:class:`collections.OrderedDic... | Default encoder.
:param o: Atom or Bond instance.
:type o: :class:`~ctfile.ctfile.Atom` or :class:`~ctfile.ctfile.Bond`.
:return: Dictionary that contains information required for atom and bond block of ``Ctab``.
:rtype: :py:class:`collections.OrderedDict` |
def timeout(delay, handler=None):
"""
Context manager to run code and deliver a SIGALRM signal after `delay` seconds.
Note that `delay` must be a whole number; otherwise it is converted to an
integer by Python's `int()` built-in function. For floating-point numbers,
that means rounding off to the n... | Context manager to run code and deliver a SIGALRM signal after `delay` seconds.
Note that `delay` must be a whole number; otherwise it is converted to an
integer by Python's `int()` built-in function. For floating-point numbers,
that means rounding off to the nearest integer from below.
If the optiona... |
def bounds(self, pixelbuffer=0):
"""
Return Tile boundaries.
- pixelbuffer: tile buffer in pixels
"""
left = self._left
bottom = self._bottom
right = self._right
top = self._top
if pixelbuffer:
offset = self.pixel_x_size * float(pixelb... | Return Tile boundaries.
- pixelbuffer: tile buffer in pixels |
def fit(self, X):
"""
Parameters
----------
X: shape = [n_samples, n_features]
"""
D = self._initialize(X)
for i in range(self.max_iter):
gamma = self._transform(D, X)
e = np.linalg.norm(X - gamma.dot(D))
if e < self.tol:
... | Parameters
----------
X: shape = [n_samples, n_features] |
def completed_work_items(self):
"Iterable of `(work-item, result)`s for all completed items."
completed = self._conn.execute(
"SELECT * FROM work_items, results WHERE work_items.job_id == results.job_id"
)
return ((_row_to_work_item(result), _row_to_work_result(result))
... | Iterable of `(work-item, result)`s for all completed items. |
def get_requirements(*args):
"""Get requirements from pip requirement files."""
requirements = set()
contents = get_contents(*args)
for line in contents.splitlines():
# Strip comments.
line = re.sub(r'^#.*|\s#.*', '', line)
# Ignore empty lines
if line and not line.isspac... | Get requirements from pip requirement files. |
def grad_local_log_likelihood(self, x):
"""
d/d \psi y \psi - log (1 + exp(\psi))
= y - exp(\psi) / (1 + exp(\psi))
= y - sigma(psi)
= y - p
d \psi / dx = C
d / dx = (y - sigma(psi)) * C
"""
C, D, u, y = self.C, self.D, self.inputs, ... | d/d \psi y \psi - log (1 + exp(\psi))
= y - exp(\psi) / (1 + exp(\psi))
= y - sigma(psi)
= y - p
d \psi / dx = C
d / dx = (y - sigma(psi)) * C |
def add_conversion_steps(self, converters: List[Converter], inplace: bool = False):
"""
Utility method to add converters to this chain. If inplace is True, this object is modified and
None is returned. Otherwise, a copy is returned
:param converters: the list of converters to add
... | Utility method to add converters to this chain. If inplace is True, this object is modified and
None is returned. Otherwise, a copy is returned
:param converters: the list of converters to add
:param inplace: boolean indicating whether to modify this object (True) or return a copy (False)
... |
def linear(X, n, *args, **kwargs):
"""Linear mean function of arbitrary dimension, suitable for use with :py:class:`MeanFunction`.
The form is :math:`m_0 * X[:, 0] + m_1 * X[:, 1] + \dots + b`.
Parameters
----------
X : array, (`M`, `D`)
The points to evaluate the model at.
n :... | Linear mean function of arbitrary dimension, suitable for use with :py:class:`MeanFunction`.
The form is :math:`m_0 * X[:, 0] + m_1 * X[:, 1] + \dots + b`.
Parameters
----------
X : array, (`M`, `D`)
The points to evaluate the model at.
n : array of non-negative int, (`D`)
... |
def main():
"""Execute all checks."""
check_python_version()
check_python_modules()
check_executables()
home = os.path.expanduser("~")
print("\033[1mCheck files\033[0m")
rcfile = os.path.join(home, ".hwrtrc")
if os.path.isfile(rcfile):
print("~/.hwrtrc... %sFOUND%s" %
... | Execute all checks. |
def available_actions(self, obs):
"""Return the list of available action ids."""
available_actions = set()
hide_specific_actions = self._agent_interface_format.hide_specific_actions
for i, func in six.iteritems(actions.FUNCTIONS_AVAILABLE):
if func.avail_fn(obs):
available_actions.add(i)
... | Return the list of available action ids. |
def extract_edges(self, feature_angle=30, boundary_edges=True,
non_manifold_edges=True, feature_edges=True,
manifold_edges=True, inplace=False):
"""
Extracts edges from the surface of the grid. From vtk documentation:
These edges are either
... | Extracts edges from the surface of the grid. From vtk documentation:
These edges are either
1) boundary (used by one polygon) or a line cell;
2) non-manifold (used by three or more polygons)
3) feature edges (edges used by two triangles and whose
dihedral ang... |
def mk_token(self, load):
'''
Run time_auth and create a token. Return False or the token
'''
if not self.authenticate_eauth(load):
return {}
if self._allow_custom_expire(load):
token_expire = load.pop('token_expire', self.opts['token_expire'])
el... | Run time_auth and create a token. Return False or the token |
def flatten(self):
"""
Flattens any np.array of column vectors into 1D arrays. Basically,
this makes data readable for humans if you are just inspecting via
the REPL. For example, if you have saved a KalmanFilter object with 89
epochs, self.x will be shape (89, 9, 1) (for example... | Flattens any np.array of column vectors into 1D arrays. Basically,
this makes data readable for humans if you are just inspecting via
the REPL. For example, if you have saved a KalmanFilter object with 89
epochs, self.x will be shape (89, 9, 1) (for example). After flatten
is run, self.x... |
def shifted(self, rows, cols):
"""Returns a new selection that is shifted by rows and cols.
Negative values for rows and cols may result in a selection
that addresses negative cells.
Parameters
----------
rows: Integer
\tNumber of rows that the new selection is ... | Returns a new selection that is shifted by rows and cols.
Negative values for rows and cols may result in a selection
that addresses negative cells.
Parameters
----------
rows: Integer
\tNumber of rows that the new selection is shifted down
cols: Integer
... |
def update_role(self, service_name, deployment_name, role_name,
os_virtual_hard_disk=None, network_config=None,
availability_set_name=None, data_virtual_hard_disks=None,
role_size=None, role_type='PersistentVMRole',
resource_extension_refer... | Updates the specified virtual machine.
service_name:
The name of the service.
deployment_name:
The name of the deployment.
role_name:
The name of the role.
os_virtual_hard_disk:
Contains the parameters Windows Azure uses to create the oper... |
def processData(config, stats):
"""
Collate the stats and report
"""
if 'total_time' not in stats or 'total_clock' not in stats:
# toil job not finished yet
stats.total_time = [0.0]
stats.total_clock = [0.0]
stats.total_time = sum([float(number) for number in stats.total_tim... | Collate the stats and report |
def rhymes(word):
"""Get words rhyming with a given word.
This function may return an empty list if no rhyming words are found in
the dictionary, or if the word you pass to the function is itself not
found in the dictionary.
.. doctest::
>>> import pronouncing
>>> pronouncing.rhym... | Get words rhyming with a given word.
This function may return an empty list if no rhyming words are found in
the dictionary, or if the word you pass to the function is itself not
found in the dictionary.
.. doctest::
>>> import pronouncing
>>> pronouncing.rhymes("conditioner")
... |
def fromtimestamp(cls, ts, tzi=None):
# pylint: disable=invalid-name
"""
Factory method that returns a new :class:`~pywbem.CIMDateTime` object
from a POSIX timestamp value and optional timezone information.
A POSIX timestamp value is the number of seconds since "the epoch",
... | Factory method that returns a new :class:`~pywbem.CIMDateTime` object
from a POSIX timestamp value and optional timezone information.
A POSIX timestamp value is the number of seconds since "the epoch",
i.e. 1970-01-01 00:00:00 UTC. Thus, a POSIX timestamp value is
unambiguous w.r.t. the... |
def update(self, track=values.unset, publisher=values.unset, kind=values.unset,
status=values.unset):
"""
Update the SubscribedTrackInstance
:param unicode track: The track
:param unicode publisher: The publisher
:param SubscribedTrackInstance.Kind kind: The kind
... | Update the SubscribedTrackInstance
:param unicode track: The track
:param unicode publisher: The publisher
:param SubscribedTrackInstance.Kind kind: The kind
:param SubscribedTrackInstance.Status status: The status
:returns: Updated SubscribedTrackInstance
:rtype: twili... |
def to_array(self):
"""
Serializes this StickerMessage to a dictionary.
:return: dictionary representation of this object.
:rtype: dict
"""
array = super(StickerMessage, self).to_array()
if isinstance(self.sticker, InputFile):
array['sticker'] = self.... | Serializes this StickerMessage to a dictionary.
:return: dictionary representation of this object.
:rtype: dict |
def installedRequirements(self, target):
"""
Return an iterable of things installed on the target that this
item requires.
"""
myDepends = dependentsOf(self.__class__)
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target == target):
... | Return an iterable of things installed on the target that this
item requires. |
def minimum_needs_extractor(impact_report, component_metadata):
"""Extracting minimum needs of the impact layer.
:param impact_report: the impact report that acts as a proxy to fetch
all the data that extractor needed
:type impact_report: safe.report.impact_report.ImpactReport
:param component... | Extracting minimum needs of the impact layer.
:param impact_report: the impact report that acts as a proxy to fetch
all the data that extractor needed
:type impact_report: safe.report.impact_report.ImpactReport
:param component_metadata: the component metadata. Used to obtain
information a... |
def _readLoop(self):
""" Read thread main loop
Reads lines from the connected device
"""
try:
readTermSeq = list(self.RX_EOL_SEQ)
readTermLen = len(readTermSeq)
rxBuffer = []
while self.alive:
data = self.serial.rea... | Read thread main loop
Reads lines from the connected device |
def _standardize_data(
model: pd.DataFrame,
data: pd.DataFrame,
batch_key: str,
) -> Tuple[pd.DataFrame, pd.DataFrame, np.ndarray, np.ndarray]:
"""
Standardizes the data per gene.
The aim here is to make mean and variance be comparable across batches.
Parameters
--------
model
... | Standardizes the data per gene.
The aim here is to make mean and variance be comparable across batches.
Parameters
--------
model
Contains the batch annotation
data
Contains the Data
batch_key
Name of the batch column in the model matrix
Returns
--------
s_... |
def plot_fit(self, **kwargs):
""" Plots the fit of the model
Notes
----------
Intervals are bootstrapped as follows: take the filtered values from the
algorithm (thetas). Use these thetas to generate a pseudo data stream from
the measurement density. Use the GAS algorith... | Plots the fit of the model
Notes
----------
Intervals are bootstrapped as follows: take the filtered values from the
algorithm (thetas). Use these thetas to generate a pseudo data stream from
the measurement density. Use the GAS algorithm and estimated latent variables to
... |
def partial_to_complete_sha_hex(self, partial_hexsha):
""":return: Full binary 20 byte sha from the given partial hexsha
:raise AmbiguousObjectName:
:raise BadObject:
:note: currently we only raise BadObject as git does not communicate
AmbiguousObjects separately"""
t... | :return: Full binary 20 byte sha from the given partial hexsha
:raise AmbiguousObjectName:
:raise BadObject:
:note: currently we only raise BadObject as git does not communicate
AmbiguousObjects separately |
def clean(self, point_merging=True, merge_tol=None, lines_to_points=True,
polys_to_lines=True, strips_to_polys=True, inplace=False):
"""
Cleans mesh by merging duplicate points, remove unused
points, and/or remove degenerate cells.
Parameters
----------
poi... | Cleans mesh by merging duplicate points, remove unused
points, and/or remove degenerate cells.
Parameters
----------
point_merging : bool, optional
Enables point merging. On by default.
merge_tol : float, optional
Set merging tolarance. When enabled me... |
def QA_util_sql_async_mongo_setting(uri='mongodb://localhost:27017/quantaxis'):
"""异步mongo示例
Keyword Arguments:
uri {str} -- [description] (default: {'mongodb://localhost:27017/quantaxis'})
Returns:
[type] -- [description]
"""
# loop = asyncio.new_event_loop()
# asyncio.set_eve... | 异步mongo示例
Keyword Arguments:
uri {str} -- [description] (default: {'mongodb://localhost:27017/quantaxis'})
Returns:
[type] -- [description] |
def sg_init(sess):
r""" Initializes session variables.
Args:
sess: Session to initialize.
"""
# initialize variables
sess.run(tf.group(tf.global_variables_initializer(),
tf.local_variables_initializer())) | r""" Initializes session variables.
Args:
sess: Session to initialize. |
def import_ohm(filename, verbose=False, reciprocals=False):
"""Construct pandas data frame from BERT`s unified data format (.ohm).
Parameters
----------
filename : string
File path to .ohm file
verbose : bool, optional
Enables extended debug output
reciprocals : int, optional
... | Construct pandas data frame from BERT`s unified data format (.ohm).
Parameters
----------
filename : string
File path to .ohm file
verbose : bool, optional
Enables extended debug output
reciprocals : int, optional
if provided, then assume that this is a reciprocal measuremen... |
def get_single_review_comments(self, id):
"""
:calls: `GET /repos/:owner/:repo/pulls/:number/review/:id/comments <https://developer.github.com/v3/pulls/reviews/>`_
:param id: integer
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.PullRequestComment.PullRequestComme... | :calls: `GET /repos/:owner/:repo/pulls/:number/review/:id/comments <https://developer.github.com/v3/pulls/reviews/>`_
:param id: integer
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.PullRequestComment.PullRequestComment` |
def kill(self):
"""
If run_step needs to be killed, this method will be called
:return: None
"""
try:
logger.info('Trying to terminating run_step...')
self.process.terminate()
time_waited_seconds = 0
while self.process.poll() is None and time_waited_seconds < CONSTANTS.SECOND... | If run_step needs to be killed, this method will be called
:return: None |
def lookup_by_number(errno):
""" Used for development only """
for key, val in globals().items():
if errno == val:
print(key) | Used for development only |
def sum_from(zero: T1 = None) -> Callable[[ActualIterable[T1]], T1]:
"""
>>> from Redy.Collections import Traversal, Flow
>>> lst: Iterable[int] = [1, 2, 3]
>>> x = Flow(lst)[Traversal.sum_from(0)].unbox
>>> assert x is 6
>>> x = Flow(lst)[Traversal.sum_from()].unbox
>>> assert x is 6
"... | >>> from Redy.Collections import Traversal, Flow
>>> lst: Iterable[int] = [1, 2, 3]
>>> x = Flow(lst)[Traversal.sum_from(0)].unbox
>>> assert x is 6
>>> x = Flow(lst)[Traversal.sum_from()].unbox
>>> assert x is 6 |
def parse_identifier(source, start, throw=True):
"""passes white space from start and returns first identifier,
if identifier invalid and throw raises SyntaxError otherwise returns None"""
start = pass_white(source, start)
end = start
if not end < len(source):
if throw:
raise ... | passes white space from start and returns first identifier,
if identifier invalid and throw raises SyntaxError otherwise returns None |
def copy_file_to_remote(self, local_path, remote_path):
"""scp the local file to remote folder.
:param local_path: local path
:param remote_path: remote path
"""
sftp_client = self.transport.open_sftp_client()
LOG.debug('Copy the local file to remote. '
... | scp the local file to remote folder.
:param local_path: local path
:param remote_path: remote path |
def add_handler(self, name='console-color', level='info', formatter='standard', **kwargs):
"""
Add another handler to the logging system if not present already.
Available handlers are currently: ['console-bw', 'console-color', 'rotating-log']
"""
# make sure the the log file has ... | Add another handler to the logging system if not present already.
Available handlers are currently: ['console-bw', 'console-color', 'rotating-log'] |
def splitext(self):
""" p.splitext() -> Return ``(p.stripext(), p.ext)``.
Split the filename extension from this path and return
the two parts. Either part may be empty.
The extension is everything from ``'.'`` to the end of the
last path segment. This has the property that i... | p.splitext() -> Return ``(p.stripext(), p.ext)``.
Split the filename extension from this path and return
the two parts. Either part may be empty.
The extension is everything from ``'.'`` to the end of the
last path segment. This has the property that if
``(a, b) == p.splitext... |
def _compileRegExp(string, insensitive, minimal):
"""Compile regular expression.
Python function, used by C code
NOTE minimal flag is not supported here, but supported on PCRE
"""
flags = 0
if insensitive:
flags = re.IGNORECASE
string = string.replac... | Compile regular expression.
Python function, used by C code
NOTE minimal flag is not supported here, but supported on PCRE |
def convert2geojson(jsonfile, src_srs, dst_srs, src_file):
"""convert shapefile to geojson file"""
if os.path.exists(jsonfile):
os.remove(jsonfile)
if sysstr == 'Windows':
exepath = '"%s/Lib/site-packages/osgeo/ogr2ogr"' % sys.exec_prefix
else:
exepath... | convert shapefile to geojson file |
def get_token(self):
"""
Retrieves the token from the File System
:return dict or None: The token if exists, None otherwise
"""
token = None
if self.token_path.exists():
with self.token_path.open('r') as token_file:
token = self.token_construct... | Retrieves the token from the File System
:return dict or None: The token if exists, None otherwise |
def search():
"""Show all keywords that match a pattern"""
pattern = flask.request.args.get('pattern', "*").strip().lower()
# if the pattern contains "in:<collection>" (eg: in:builtin),
# filter results to only that (or those) collections
# This was kind-of hacked together, but seems to work well e... | Show all keywords that match a pattern |
def compute_stats2(arrayNR, stats, weights):
"""
:param arrayNR:
an array of (N, R) elements
:param stats:
a sequence of S statistic functions
:param weights:
a list of R weights
:returns:
an array of (N, S) elements
"""
newshape = list(arrayNR.shape)
if n... | :param arrayNR:
an array of (N, R) elements
:param stats:
a sequence of S statistic functions
:param weights:
a list of R weights
:returns:
an array of (N, S) elements |
def generate_login(self, min_length=6, max_length=10, digits=True):
"""
Generate string for email address login with defined length and
alphabet.
:param min_length: (optional) min login length.
Default value is ``6``.
:param max_length: (optional) max login length.
... | Generate string for email address login with defined length and
alphabet.
:param min_length: (optional) min login length.
Default value is ``6``.
:param max_length: (optional) max login length.
Default value is ``10``.
:param digits: (optional) use digits in login genera... |
def delete_local_docker_cache(docker_tag):
"""
Delete the local docker cache for the entire docker image chain
:param docker_tag: Docker tag
:return: None
"""
history_cmd = ['docker', 'history', '-q', docker_tag]
try:
image_ids_b = subprocess.check_output(history_cmd)
image_... | Delete the local docker cache for the entire docker image chain
:param docker_tag: Docker tag
:return: None |
def move(self, target):
""" Moves this DriveItem to another Folder.
Can't move between different Drives.
:param target: a Folder, Drive item or Item Id string.
If it's a drive the item will be moved to the root folder.
:type target: drive.Folder or DriveItem or str
:ret... | Moves this DriveItem to another Folder.
Can't move between different Drives.
:param target: a Folder, Drive item or Item Id string.
If it's a drive the item will be moved to the root folder.
:type target: drive.Folder or DriveItem or str
:return: Success / Failure
:rtyp... |
def _ring_2d(m, n):
"""Ring-order of a mxn mesh.
Args:
m: an integer
n: an integer
Returns:
a list of mxn pairs
"""
if m == 1:
return [(0, i) for i in range(n)]
if n == 1:
return [(i, 0) for i in range(m)]
if m % 2 != 0:
tf.logging.warning("Odd dimension")
return [(i % m, i //... | Ring-order of a mxn mesh.
Args:
m: an integer
n: an integer
Returns:
a list of mxn pairs |
def vol_tetra(vt1, vt2, vt3, vt4):
"""
Calculate the volume of a tetrahedron, given the four vertices of vt1,
vt2, vt3 and vt4.
Args:
vt1 (array-like): coordinates of vertex 1.
vt2 (array-like): coordinates of vertex 2.
vt3 (array-like): coordinates of vertex 3.
vt4 (arra... | Calculate the volume of a tetrahedron, given the four vertices of vt1,
vt2, vt3 and vt4.
Args:
vt1 (array-like): coordinates of vertex 1.
vt2 (array-like): coordinates of vertex 2.
vt3 (array-like): coordinates of vertex 3.
vt4 (array-like): coordinates of vertex 4.
Returns:
... |
def get_bgp_config(self, group="", neighbor=""):
"""
Parse BGP config params into a dict
:param group='':
:param neighbor='':
"""
bgp_config = {}
def build_prefix_limit(af_table, limit, prefix_percent, prefix_timeout):
prefix_limit = {}
... | Parse BGP config params into a dict
:param group='':
:param neighbor='': |
def get_core(self):
    """
    Get an unsatisfiable core if the formula was previously
    unsatisfied.

    :return: the core as produced by the underlying MapleSAT bindings,
        or ``None`` when no solver object is attached or the last solve
        call did not report UNSAT.
    """
    # NOTE: `is False` rather than `== False` — status is a tri-state
    # flag (True / False / None); only a definite UNSAT result has a core.
    if self.maplesat and self.status is False:
        return pysolvers.maplesat_core(self.maplesat)
def _add_element(self, element, parent_node):
"""
add an element (i.e. a unit/connective/discourse or modifier)
to the docgraph.
"""
if element.tag == 'unit':
element_node_id = element.attrib['id']+':'+element.attrib['type']
node_layers = {self.ns, self.ns... | add an element (i.e. a unit/connective/discourse or modifier)
to the docgraph. |
def loadRecords(self, records):
"""
Loads the inputed records as children to this item.
:param records | [<orb.Table>, ..] || {<str> sub: <variant>, .. }
"""
self.setChildIndicatorPolicy(self.DontShowIndicatorWhenChildless)
self._loaded = True
... | Loads the inputed records as children to this item.
:param records | [<orb.Table>, ..] || {<str> sub: <variant>, .. } |
def get_listing(path):
    """
    Return the sorted list of files and directories in *path*.

    Prepends a ".." (parent directory link) entry when *path* is not
    the current directory.

    :param path: directory whose entries should be listed.
    :return: sorted list of entry names, led by ".." unless path == ".".
    """
    entries = os.listdir(path)
    if path != ".":
        # Offer a way back up when browsing anywhere but the current dir.
        entries.append("..")
    return sorted(entries)
def mins(self):
    """Return the minimum values of x, y, z as a numpy array.

    :return: ``np.array([x_min, y_min, z_min])``.
    """
    return np.array([self.x_min, self.y_min, self.z_min])
def find_vulnerabilities(
cfg_list,
blackbox_mapping_file,
sources_and_sinks_file,
interactive=False,
nosec_lines=defaultdict(set)
):
"""Find vulnerabilities in a list of CFGs from a trigger_word_file.
Args:
cfg_list(list[CFG]): the list of CFGs to scan.
blackbox_mapping_fil... | Find vulnerabilities in a list of CFGs from a trigger_word_file.
Args:
cfg_list(list[CFG]): the list of CFGs to scan.
blackbox_mapping_file(str)
sources_and_sinks_file(str)
interactive(bool): determines if we ask the user about blackbox functions not in the mapping file.
Returns... |
def raw(self, from_, to, body):
"""
Send a raw MIME message.
"""
if isinstance(to, string_types):
raise TypeError('"to" parameter must be enumerable')
return self._session.post('{}/raw'.format(self._url), json={
'from': from_,
'to': to,
... | Send a raw MIME message. |
def absent(name, user=None, signal=None):
'''
Ensures that the named command is not running.
name
The pattern to match.
user
The user to which the process belongs
signal
Signal to send to the process(es).
'''
ret = {'name': name,
'changes': {},
... | Ensures that the named command is not running.
name
The pattern to match.
user
The user to which the process belongs
signal
Signal to send to the process(es). |
def multi_plot_time(DataArray, SubSampleN=1, units='s', xlim=None, ylim=None, LabelArray=[], show_fig=True):
"""
plot the time trace for multiple data sets on the same axes.
Parameters
----------
DataArray : array-like
array of DataObject instances for which to plot the PSDs
SubSampleN ... | plot the time trace for multiple data sets on the same axes.
Parameters
----------
DataArray : array-like
array of DataObject instances for which to plot the PSDs
SubSampleN : int, optional
Number of intervals between points to remove (to sub-sample data so
that you effectively ... |
def set_membership(self, membership):
    """Replace this partition's membership assignment.

    The new assignment is pushed down to the C-level partition object,
    after which the Python-side cached membership is refreshed.
    """
    members = list(membership)
    _c_leiden._MutableVertexPartition_set_membership(self._partition, members)
    self._update_internal_membership()
def infer_shape(self, *args, **kwargs):
"""Infers the shapes of all arguments and all outputs given the known shapes of
some arguments.
This function takes the known shapes of some arguments in either positional way
or keyword argument way as input. It returns a tuple of `None` values
... | Infers the shapes of all arguments and all outputs given the known shapes of
some arguments.
This function takes the known shapes of some arguments in either positional way
or keyword argument way as input. It returns a tuple of `None` values
if there is not enough information to deduce... |
def get_below_threshold(umi_quals, quality_encoding, quality_filter_threshold):
    '''test whether the umi_quals are below the threshold'''
    # Offset that maps the encoded quality characters onto numeric scores.
    offset = RANGES[quality_encoding][0]
    return [ord(ch) - offset < quality_filter_threshold for ch in umi_quals]
def apply_patch(self, patch):
"""
Applies given patch.
:param patch: Patch.
:type patch: Patch
:return: Method success.
:rtype: bool
"""
history_file = File(self.__history_file)
patches_history = history_file.cache() and [line.strip() for line in... | Applies given patch.
:param patch: Patch.
:type patch: Patch
:return: Method success.
:rtype: bool |
def log_state(self, state):
""" Gathers the stats from self.trainer.stats and passes them into
self.log, as a list """
results = []
for field_idx, field in enumerate(self.fields):
parent, stat = None, state
for f in field:
parent, stat = stat, ... | Gathers the stats from self.trainer.stats and passes them into
self.log, as a list |
def sort_trigger_set(triggers, exclude_previous=True, say=None):
"""Sort a group of triggers in optimal sorting order.
The optimal sorting order is, briefly:
* Atomic triggers (containing nothing but plain words and alternation
groups) are on top, with triggers containing the most words coming
... | Sort a group of triggers in optimal sorting order.
The optimal sorting order is, briefly:
* Atomic triggers (containing nothing but plain words and alternation
groups) are on top, with triggers containing the most words coming
first. Triggers with equal word counts are sorted by length, and then
... |
def cycle_app(parser, cmd, args): # pragma: no cover
"""
Generate a de Bruijn sequence of a given length.
"""
parser.add_argument('-w', '--width', type=int, default=4, help='the length of the cycled value')
parser.add_argument('length', type=int, help='the cycle length to generate')
args = par... | Generate a de Bruijn sequence of a given length. |
def get_env_dirs(self):
    """Return list of directories in env_root."""
    _, dirnames, _ = next(os.walk(self.env_root))
    # '.git' is not relevant for any repo operations, so drop it.
    return [name for name in dirnames if name != '.git']
def build_pmid_exclusion_filter(pmids: Strings) -> EdgePredicate:
"""Fail for edges with citations whose references are one of the given PubMed identifiers.
:param pmids: A PubMed identifier or list of PubMed identifiers to filter against
"""
if isinstance(pmids, str):
@edge_predicate
d... | Fail for edges with citations whose references are one of the given PubMed identifiers.
:param pmids: A PubMed identifier or list of PubMed identifiers to filter against |
def get_header(headers, name, default=None):
"""Return the value of header *name*.
The *headers* argument must be a list of ``(name, value)`` tuples. If the
header is found its associated value is returned, otherwise *default* is
returned. Header names are matched case insensitively.
"""
name =... | Return the value of header *name*.
The *headers* argument must be a list of ``(name, value)`` tuples. If the
header is found its associated value is returned, otherwise *default* is
returned. Header names are matched case insensitively. |
def get_room_history(
self,
room_id,
oldest=None,
latest=datetime.now(),
inclusive=False,
count=20,
unreads=False,
**kwa... | Get various history of specific channel/room
:param room_id:
:param kwargs:
:return: |
def _pquery(scheduler, data, ndata, ndim, leafsize,
x, nx, d, i, k, eps, p, dub, ierr):
"""
Function that parallelly queries the K-D tree based on chunks of data returned by the scheduler
"""
try:
_data = shmem_as_nparray(data).reshape((ndata, ndim))
_x = shmem_as_nparray(x).... | Function that parallelly queries the K-D tree based on chunks of data returned by the scheduler |
def validate(self, value):
"""Validate the length of a list.
:param value: List of values.
:raises: :class:`halogen.exception.ValidationError` exception when length of the list is less than
minimum or greater than maximum.
"""
try:
length = len(value)
... | Validate the length of a list.
:param value: List of values.
:raises: :class:`halogen.exception.ValidationError` exception when length of the list is less than
minimum or greater than maximum. |
def is_base64(string):
    """Return True if *string* is likely to be base64-encoded binary data.

    Heuristic checks, in order:
      * not purely numeric (plain numbers are unlikely to be base64),
      * length is a multiple of 4,
      * contains only the base64 alphabet with at most two trailing
        '=' padding characters.

    :param string: text to classify.
    :return: bool (the original mixed bool/None/Match returns are
        normalized to a plain boolean; truthiness is unchanged).
    """
    if re.match(r'^[0-9]+$', string):
        return False
    if len(string) % 4 != 0:
        return False
    return bool(re.match(r'^[A-Za-z0-9+/]+={0,2}$', string))
def efficiency(self):
"""Calculate :ref:`pysynphot-formula-qtlam`.
Returns
-------
ans : float
Bandpass dimensionless efficiency.
"""
mywaveunits = self.waveunits.name
self.convert('angstroms')
wave = self.wave
thru = self.throughput... | Calculate :ref:`pysynphot-formula-qtlam`.
Returns
-------
ans : float
Bandpass dimensionless efficiency. |
def _save_trace(self):
"""
Save current stack trace as formatted string.
"""
stack_trace = stack()
try:
self.trace = []
for frm in stack_trace[5:]: # eliminate our own overhead
self.trace.insert(0, frm[1:])
finally:
del ... | Save current stack trace as formatted string. |
async def fire(self, name, payload=None, *,
dc=None, node=None, service=None, tag=None):
"""Fires a new event
Parameters:
name (str): Event name
payload (Payload): Opaque data
node (Filter): Regular expression to filter by node name
ser... | Fires a new event
Parameters:
name (str): Event name
payload (Payload): Opaque data
node (Filter): Regular expression to filter by node name
service (Filter): Regular expression to filter by service
tag (Filter): Regular expression to filter by servic... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.