code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def log(*args, **kwargs):
    """Emit a record through the module-wide logger.

    The optional ``level`` keyword argument selects the severity
    (default ``logging.INFO``); all remaining arguments are forwarded
    unchanged to ``logger.log``.
    """
    severity = kwargs.pop('level', logging.INFO)
    logger.log(severity, *args, **kwargs)
def _relative_score(self, start_eot, end_eot, active, passive):
"""Return the balance of perception between the two nodes.
A positive score indicates the result is relatively better for active.
"""
active_start = self._score_eot_for_actor(start_eot, active)
passive_start = self._... | Return the balance of perception between the two nodes.
A positive score indicates the result is relatively better for active. |
def default_arguments(self):
    """Return this object's default arguments keyed by argument name.

    The insertion order of ``self._default_args`` is preserved.

    :rtype: OrderedDict
    """
    args_by_name = OrderedDict()
    for arg in self._default_args:
        # Direct item assignment instead of update({...}) — no throwaway dict.
        args_by_name[arg.name] = arg
    return args_by_name
def fetch_bug_details(self, bug_ids):
"""Fetches bug metadata from bugzilla and returns an encoded
dict if successful, otherwise returns None."""
params = {'include_fields': 'product, component, priority, whiteboard, id'}
params['id'] = bug_ids
try:
response = sel... | Fetches bug metadata from bugzilla and returns an encoded
dict if successful, otherwise returns None. |
def _init_metadata(self):
        """Initialize metadata from both parent record types.

        Explicitly runs the text-record and files-record initializers
        before deferring to the normal MRO chain via ``super``; the
        explicit calls presumably exist because the two record bases
        are mixed in outside the linear ``super`` chain — confirm
        against the class hierarchy.
        """
        QuestionTextFormRecord._init_metadata(self)
        QuestionFilesFormRecord._init_metadata(self)
        super(QuestionTextAndFilesMixin, self)._init_metadata()
def gc_velocity_update(particle, social, state):
""" Guaranteed convergence velocity update.
Args:
particle: cipy.algorithms.pso.Particle: Particle to update the velocity
for.
social: cipy.algorithms.pso.Particle: The social best for the particle.
state: cipy.algorithms.pso.... | Guaranteed convergence velocity update.
Args:
particle: cipy.algorithms.pso.Particle: Particle to update the velocity
for.
social: cipy.algorithms.pso.Particle: The social best for the particle.
state: cipy.algorithms.pso.State: The state of the PSO algorithm.
Returns:
... |
def parse_cache_control(self, headers):
"""
Parse the cache control headers returning a dictionary with values
for the different directives.
"""
retval = {}
cc_header = 'cache-control'
if 'Cache-Control' in headers:
cc_header = 'Cache-Control'
... | Parse the cache control headers returning a dictionary with values
for the different directives. |
def new(localfile, jottapath, JFS):
    """Upload a new file from local disk (doesn't exist on JottaCloud).

    :param localfile: path of the local file to upload
    :param jottapath: destination path on JottaCloud
    :param JFS: connected JFS session used to perform the upload
    :return: the newly created JottaFile object
    """
    # Open in binary mode so arbitrary (non-text) files upload intact;
    # text mode would decode bytes and corrupt or reject binary content.
    with open(localfile, 'rb') as lf:
        _new = JFS.up(jottapath, lf)
    return _new
def generate_secret(length=30):
"""
Generate an ASCII secret using random.SysRandom
Based on oauthlib's common.generate_token function
"""
rand = random.SystemRandom()
ascii_characters = string.ascii_letters + string.digits
return ''.join(rand.choice(ascii_characters) for _ in range(length... | Generate an ASCII secret using random.SysRandom
Based on oauthlib's common.generate_token function |
def _check_jwt_claims(jwt_claims):
"""Checks whether the JWT claims should be accepted.
Specifically, this method checks the "exp" claim and the "nbf" claim (if
present), and raises UnauthenticatedException if 1) the current time is
before the time identified by the "nbf" claim, or 2) the current time ... | Checks whether the JWT claims should be accepted.
Specifically, this method checks the "exp" claim and the "nbf" claim (if
present), and raises UnauthenticatedException if 1) the current time is
before the time identified by the "nbf" claim, or 2) the current time is
equal to or after the time identifi... |
def clean_ret_type(ret_type):
"""Clean the erraneous parsed return type."""
ret_type = get_printable(ret_type).strip()
if ret_type == 'LRESULT LRESULT':
ret_type = 'LRESULT'
for bad in [
'DECLSPEC_NORETURN', 'NTSYSCALLAPI', '__kernel_entry',
'__analysis_noreturn', '_Post_... | Clean the erraneous parsed return type. |
def digest(instr, checksum='md5'):
'''
Return a checksum digest for a string
instr
A string
checksum : ``md5``
The hashing algorithm to use to generate checksums. Valid options: md5,
sha256, sha512.
CLI Example:
.. code-block:: bash
salt '*' hashutil.digest 'g... | Return a checksum digest for a string
instr
A string
checksum : ``md5``
The hashing algorithm to use to generate checksums. Valid options: md5,
sha256, sha512.
CLI Example:
.. code-block:: bash
salt '*' hashutil.digest 'get salted' |
def render_to_response(self, context, indent=None):
    """Return a JSON HTTP response whose payload is ``context``."""
    payload = self.convert_context_to_json(context, indent=indent)
    return self.get_json_response(payload)
def deleteAllNetworkViews(self, networkId, verbose=None):
"""
Deletes all Network Views available in the Network specified by the `networkId` parameter. Cytoscape can have multiple views per network model, but this feature is not exposed in the Cytoscape GUI. GUI access is limited to the first available... | Deletes all Network Views available in the Network specified by the `networkId` parameter. Cytoscape can have multiple views per network model, but this feature is not exposed in the Cytoscape GUI. GUI access is limited to the first available view only.
:param networkId: SUID of the Network
:param verb... |
def as_indexable(array):
"""
This function always returns a ExplicitlyIndexed subclass,
so that the vectorized indexing is always possible with the returned
object.
"""
if isinstance(array, ExplicitlyIndexed):
return array
if isinstance(array, np.ndarray):
return NumpyIndexin... | This function always returns a ExplicitlyIndexed subclass,
so that the vectorized indexing is always possible with the returned
object. |
def remove_line(self, section, line):
"""Remove all instances of a line.
Returns:
int: the number of lines removed
"""
try:
s = self._get_section(section, create=False)
except KeyError:
# No such section, skip.
return 0
re... | Remove all instances of a line.
Returns:
int: the number of lines removed |
def vqa_attention_base():
"""VQA attention baseline hparams."""
hparams = common_hparams.basic_params1()
hparams.batch_size = 128
hparams.use_fixed_batch_size = True,
hparams.optimizer = "adam"
hparams.optimizer_adam_beta1 = 0.9
hparams.optimizer_adam_beta2 = 0.999
hparams.optimizer_adam_epsilon = 1e-8
... | VQA attention baseline hparams. |
def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_tx_accepts(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe_get_interface = ET.Element("fcoe_get_interface")
config = fcoe_get_interface
output = ET.SubElement(fcoe_get_interface, "output... | Auto Generated Code |
def serialize_with_sampled_logs(self, logs_limit=-1):
"""serialize a result with up to `logs_limit` logs.
If `logs_limit` is -1, this function will return a result with all its
logs.
"""
return {
'id': self.id,
'pathName': self.path_name,
'na... | serialize a result with up to `logs_limit` logs.
If `logs_limit` is -1, this function will return a result with all its
logs. |
def parse():
""" Parse command line options """
parser = argparse.ArgumentParser(
description='Dynamic DynamoDB - Auto provisioning AWS DynamoDB')
parser.add_argument(
'-c', '--config',
help='Read configuration from a configuration file')
parser.add_argument(
'--dry-run',... | Parse command line options |
def to_time(value, ctx):
"""
Tries conversion of any value to a time
"""
if isinstance(value, str):
time = ctx.get_date_parser().time(value)
if time is not None:
return time
elif isinstance(value, datetime.time):
return value
elif isinstance(value, datetime.da... | Tries conversion of any value to a time |
def recursive_glob(base_directory, regex=''):
"""
Uses glob to find all files or folders that match the regex
starting from the base_directory.
Parameters
----------
base_directory: str
regex: str
Returns
-------
files: list
"""
files = glob(op.join(base_directory, re... | Uses glob to find all files or folders that match the regex
starting from the base_directory.
Parameters
----------
base_directory: str
regex: str
Returns
-------
files: list |
def _get_summary_struct(self):
"""
Returns a structured description of the model, including (where relevant)
the schema of the training data, description of the training data,
training statistics, and model hyperparameters.
Returns
-------
sections : list (of lis... | Returns a structured description of the model, including (where relevant)
the schema of the training data, description of the training data,
training statistics, and model hyperparameters.
Returns
-------
sections : list (of list of tuples)
A list of summary sections... |
def get(self, queue_get):
    """Accumulate states pulled from a multiprocessing queue.

    Only tuple or list payloads are appended to ``self.result``;
    payloads of any other type are silently ignored.
    """
    if not isinstance(queue_get, (tuple, list)):
        return
    self.result.extend(queue_get)
def find_and_convert(self, attr_name: str, attr_value: S, desired_attr_type: Type[T], logger: Logger,
options: Dict[str, Dict[str, Any]]) -> T:
"""
Utility method to convert some value into the desired type. It relies on get_all_conversion_chains to find the
converters, ... | Utility method to convert some value into the desired type. It relies on get_all_conversion_chains to find the
converters, and apply them in correct order
:return: |
def inference_q(self, next_action_arr):
'''
Infernce Q-Value.
Args:
next_action_arr: `np.ndarray` of action.
Returns:
`np.ndarray` of Q-Values.
'''
q_arr = next_action_arr.reshape((next_action_arr.shape[0], -1))
self._... | Infernce Q-Value.
Args:
next_action_arr: `np.ndarray` of action.
Returns:
`np.ndarray` of Q-Values. |
def do_load_modules(self, modules):
"""Wrapper for calling load_and_init method of modules_manager attribute
:param modules: list of modules that should be loaded by the daemon
:return: None
"""
_ts = time.time()
logger.info("Loading modules...")
if self.modules... | Wrapper for calling load_and_init method of modules_manager attribute
:param modules: list of modules that should be loaded by the daemon
:return: None |
def skip(type_name, filename):
    """Build reporting statistics describing a skipped file."""
    message = 'Skipping {} file: {}'.format(type_name, filename)
    return ReportStats(filename, report=[message])
def set_selection(self, selection, name="default", executor=None):
"""Sets the selection object
:param selection: Selection object
:param name: selection 'slot'
:param executor:
:return:
"""
def create(current):
return selection
self._selectio... | Sets the selection object
:param selection: Selection object
:param name: selection 'slot'
:param executor:
:return: |
def load(self):
"""
Loads this stream by calling River View for data.
"""
print "Loading data for %s..." % self.getName()
self._dataHandle = self._stream.data(
since=self._since, until=self._until,
limit=self._limit, aggregate=self._aggregate
)
self._data = self._dataHandle.data... | Loads this stream by calling River View for data. |
def get_property(self):
"""Establishes the dynamic behavior of Property values"""
scope = self
def fget(self):
"""Call dynamic function then validate output"""
value = scope.func(self)
if value is None or value is undefined:
return None
... | Establishes the dynamic behavior of Property values |
def _CreateNewSeasonDir(self, seasonNum):
"""
Creates a new season directory name in the form 'Season <NUM>'.
If skipUserInput is True this will be accepted by default otherwise the
user can choose to accept this, use the base show directory or enter
a different name.
Parameters
----------... | Creates a new season directory name in the form 'Season <NUM>'.
If skipUserInput is True this will be accepted by default otherwise the
user can choose to accept this, use the base show directory or enter
a different name.
Parameters
----------
seasonNum : int
Season number.
Ret... |
def _write_particle_information(gsd_file, structure, xyz, ref_distance,
ref_mass, ref_energy, rigid_bodies):
"""Write out the particle information.
"""
gsd_file.particles.N = len(structure.atoms)
gsd_file.particles.position = xyz / ref_distance
types = [atom.name if atom.type == '' else a... | Write out the particle information. |
def EnableEditingOnService(self, url, definition = None):
"""Enables editing capabilities on a feature service.
Args:
url (str): The URL of the feature service.
definition (dict): A dictionary containing valid definition values. Defaults to ``None``.
Returns:
... | Enables editing capabilities on a feature service.
Args:
url (str): The URL of the feature service.
definition (dict): A dictionary containing valid definition values. Defaults to ``None``.
Returns:
dict: The existing feature service definition capabilities.
... |
def QA_util_get_trade_datetime(dt=datetime.datetime.now()):
"""交易的真实日期
Returns:
[type] -- [description]
"""
#dt= datetime.datetime.now()
if QA_util_if_trade(str(dt.date())) and dt.time() < datetime.time(15, 0, 0):
return str(dt.date())
else:
return QA_util_get_real_dat... | 交易的真实日期
Returns:
[type] -- [description] |
def load_mnist():
'''Load the MNIST digits dataset.'''
mnist = skdata.mnist.dataset.MNIST()
mnist.meta # trigger download if needed.
def arr(n, dtype):
arr = mnist.arrays[n]
return arr.reshape((len(arr), -1)).astype(dtype)
train_images = arr('train_images', np.float32) / 128 - 1
... | Load the MNIST digits dataset. |
def output_eol_literal_marker(self, m):
    """Render an end-of-line literal marker (reST pass-through link)."""
    if m.group(1) is None:
        marker = ':'
    else:
        marker = ''
    return self.renderer.eol_literal_marker(marker)
def from_edgelist(self, edges, strict=True):
"""
Load transform data from an edge list into the current
scene graph.
Parameters
-------------
edgelist : (n,) tuples
(node_a, node_b, {key: value})
strict : bool
If true, raise a ValueError w... | Load transform data from an edge list into the current
scene graph.
Parameters
-------------
edgelist : (n,) tuples
(node_a, node_b, {key: value})
strict : bool
If true, raise a ValueError when a
malformed edge is passed in a tuple. |
def get_summary_str(self, sec2d_nt):
"""Get string describing counts of placed/unplaced GO IDs and count of sections."""
data = self.get_summary_data(sec2d_nt)
return "{M} GO IDs placed into {N} sections; {U} unplaced GO IDs".format(
N=len(data['sections']), M=len(data['grouped']), U... | Get string describing counts of placed/unplaced GO IDs and count of sections. |
def add_line_data(self, line_data):
    """Merge executed-line data into the collected totals.

    ``line_data`` maps ``{filename: {lineno: None, ...}, ...}``; each
    file's line numbers are merged into the matching ``self.lines``
    entry, creating it when absent.
    """
    for fname, linenos in iitems(line_data):
        bucket = self.lines.setdefault(fname, {})
        bucket.update(linenos)
def collect_from_bundles(self, bundles: List[Bundle]) -> Dict[str, Any]:
"""
Collect objects where :meth:`type_check` returns ``True`` from bundles.
Names (keys) are expected to be unique across bundles, except for the
app bundle, which can override anything from other bundles.
"... | Collect objects where :meth:`type_check` returns ``True`` from bundles.
Names (keys) are expected to be unique across bundles, except for the
app bundle, which can override anything from other bundles. |
def do_roles(self, service):
"""
Role information
Usage:
> roles <servicename> Display role information for service
> roles all Display all role information for cluster
"""
if not self.has_cluster():
return None
if not se... | Role information
Usage:
> roles <servicename> Display role information for service
> roles all Display all role information for cluster |
def load_pyproject_toml(
use_pep517, # type: Optional[bool]
pyproject_toml, # type: str
setup_py, # type: str
req_name # type: str
):
# type: (...) -> Optional[Tuple[List[str], str, List[str]]]
"""Load the pyproject.toml file.
Parameters:
use_pep517 - Has the user requested PEP ... | Load the pyproject.toml file.
Parameters:
use_pep517 - Has the user requested PEP 517 processing? None
means the user hasn't explicitly specified.
pyproject_toml - Location of the project's pyproject.toml file
setup_py - Location of the project's setup.py file
r... |
def set_source_nodes(self, source_nodes):
r"""
Set multiple source nodes and compute their t-weights.
Parameters
----------
source_nodes : sequence of integers
Declare the source nodes via their ids.
Raises
------
ValueError
... | r"""
Set multiple source nodes and compute their t-weights.
Parameters
----------
source_nodes : sequence of integers
Declare the source nodes via their ids.
Raises
------
ValueError
If a passed node id does not refer to ... |
def exists(self, path):
"""
Does provided path exist on S3?
"""
(bucket, key) = self._path_to_bucket_and_key(path)
# root always exists
if self._is_root(key):
return True
# file
if self._exists(bucket, key):
return True
i... | Does provided path exist on S3? |
def read_preferences_file(self):
"""
If json preferences file exists, read it in.
"""
user_data_dir = find_pmag_dir.find_user_data_dir("thellier_gui")
if not user_data_dir:
return {}
if os.path.exists(user_data_dir):
pref_file = os.path.join(user_d... | If json preferences file exists, read it in. |
def load(self, filename):
"""Load proxies from file"""
with open(filename, 'r') as fin:
proxies = json.load(fin)
for protocol in proxies:
for proxy in proxies[protocol]:
self.proxies[protocol][proxy['addr']] = Proxy(
proxy['addr'], prox... | Load proxies from file |
def in_git_clone():
"""Returns `True` if the current directory is a git repository
Logic is 'borrowed' from :func:`git.repo.fun.is_git_dir`
"""
gitdir = '.git'
return os.path.isdir(gitdir) and (
os.path.isdir(os.path.join(gitdir, 'objects')) and
os.path.isdir(os.path.join(gitdir, 'r... | Returns `True` if the current directory is a git repository
Logic is 'borrowed' from :func:`git.repo.fun.is_git_dir` |
def example_clinical_data(study_name, environment):
"""Test demonstrating building clinical data"""
odm = ODM("test system")(
ClinicalData("Mediflex", "DEV")(
SubjectData("MDSOL", "IJS TEST4", transaction_type="Insert")(
StudyEventData("SUBJECT")(
FormData("EN", tr... | Test demonstrating building clinical data |
def select_code(self, code):
"""
选择股票
@2018/06/03 pandas 的索引问题导致
https://github.com/pandas-dev/pandas/issues/21299
因此先用set_index去重做一次index
影响的有selects,select_time,select_month,get_bar
@2018/06/04
当选择的时间越界/股票不存在,raise ValueError
@2018/06/04 pand... | 选择股票
@2018/06/03 pandas 的索引问题导致
https://github.com/pandas-dev/pandas/issues/21299
因此先用set_index去重做一次index
影响的有selects,select_time,select_month,get_bar
@2018/06/04
当选择的时间越界/股票不存在,raise ValueError
@2018/06/04 pandas索引问题已经解决
全部恢复 |
def normalize(Y, normalization_type='stats'):
"""Normalize the vector Y using statistics or its range.
:param Y: Row or column vector that you want to normalize.
:param normalization_type: String specifying the kind of normalization
to use. Options are 'stats' to use mean and standard deviation,
or... | Normalize the vector Y using statistics or its range.
:param Y: Row or column vector that you want to normalize.
:param normalization_type: String specifying the kind of normalization
to use. Options are 'stats' to use mean and standard deviation,
or 'maxmin' to use the range of function values.
:r... |
def set_network_connection(self, network):
"""
Set the network connection for the remote device.
Example of setting airplane mode::
driver.mobile.set_network_connection(driver.mobile.AIRPLANE_MODE)
"""
mode = network.mask if isinstance(network, self.ConnectionType) ... | Set the network connection for the remote device.
Example of setting airplane mode::
driver.mobile.set_network_connection(driver.mobile.AIRPLANE_MODE) |
def site(self, site):
"""
Sets the site of this OauthTokenReference.
:param site: The site of this OauthTokenReference.
:type: str
"""
if site is None:
raise ValueError("Invalid value for `site`, must not be `None`")
if site is not None and len(site) ... | Sets the site of this OauthTokenReference.
:param site: The site of this OauthTokenReference.
:type: str |
def connect(self):
""" Connects to the device and starts the read thread """
self.serial = serial.Serial(port=self.port, baudrate=self.baudrate, timeout=self.timeout)
# Start read thread
self.alive = True
self.rxThread = threading.Thread(target=self._readLoop)
... | Connects to the device and starts the read thread |
def get_games(ctx):
"""Prints out games owned by a Steam user."""
username = ctx.obj['username']
games = User(username).get_games_owned()
for game in sorted(games.values(), key=itemgetter('title')):
click.echo('%s [appid: %s]' % (game['title'], game['appid']))
click.secho('Total gems owne... | Prints out games owned by a Steam user. |
def format(self, record):
# type: (logging.LogRecord) -> str
"""Format the log record with timestamps and level based colors.
Args:
record: The log record to format.
Returns:
The formatted log record.
"""
if record.levelno >= logging.ERROR:
... | Format the log record with timestamps and level based colors.
Args:
record: The log record to format.
Returns:
The formatted log record. |
def notify_peer_message(self, message, sender_id):
"""A new message was received from a peer"""
payload = message.SerializeToString()
self._notify(
"consensus_notifier_notify_peer_message",
payload,
len(payload),
sender_id,
len(sender_i... | A new message was received from a peer |
def locked_get(self):
"""Retrieve Credential from datastore.
Returns:
oauth2client.Credentials
"""
credentials = None
if self._cache:
json = self._cache.get(self._key_name)
if json:
credentials = client.Credentials.new_from_jso... | Retrieve Credential from datastore.
Returns:
oauth2client.Credentials |
def __configure_client(self, config):
""" write the perforce client """
self.logger.info("Configuring p4 client...")
client_dict = config.to_dict()
client_dict['root_path'] = os.path.expanduser(config.get('root_path'))
os.chdir(client_dict['root_path'])
client_dict['hostn... | write the perforce client |
def iteritems(self):
    """Iterate over all mappings.

    Yields
    ------
    (int, Mapping)
        The next ``(index, mapping)`` pair, where the index is looked
        up from ``self.indexes`` by the mapping's clause and target.
    """
    index_table = self.indexes
    for mapping in self.mappings:
        yield index_table[mapping.clause][mapping.target], mapping
def delay_for(
self,
wait: typing.Union[int, float],
identifier: typing.Any,
) -> bool:
"""Defer the execution of a function for some number of seconds.
Args:
wait (typing.Union[int, float]): A numeric value that represents
the number ... | Defer the execution of a function for some number of seconds.
Args:
wait (typing.Union[int, float]): A numeric value that represents
the number of seconds that must pass before the callback
becomes available for execution. All given values must be
pos... |
def treat(request_body):
"""
Treat a notification and guarantee its authenticity.
:param request_body: The request body in plain text.
:type request_body: string
:return: A safe APIResource
:rtype: APIResource
"""
# Python 3+ support
if isinstance(request_body, six.binary_type):
... | Treat a notification and guarantee its authenticity.
:param request_body: The request body in plain text.
:type request_body: string
:return: A safe APIResource
:rtype: APIResource |
def _select_features(example, feature_list=None):
"""Select a subset of features from the example dict."""
feature_list = feature_list or ["inputs", "targets"]
return {f: example[f] for f in feature_list} | Select a subset of features from the example dict. |
def parse_http_date(date):
"""
Parse a date format as specified by HTTP RFC7231 section 7.1.1.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Return an integer expressed in seconds since the epoch, in UTC.
Implementation copied from... | Parse a date format as specified by HTTP RFC7231 section 7.1.1.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Return an integer expressed in seconds since the epoch, in UTC.
Implementation copied from Django.
https://github.com/django/... |
def dtdQAttrDesc(self, elem, name, prefix):
"""Search the DTD for the description of this qualified
attribute on this element. """
ret = libxml2mod.xmlGetDtdQAttrDesc(self._o, elem, name, prefix)
if ret is None:raise treeError('xmlGetDtdQAttrDesc() failed')
__tmp = xmlAttribut... | Search the DTD for the description of this qualified
attribute on this element. |
def set_hyperparams(self, new_params):
"""Sets the free hyperparameters to the new parameter values in new_params.
Parameters
----------
new_params : :py:class:`Array` or other Array-like, (len(:py:attr:`self.free_params`),)
New parameter values, ordered as dictated ... | Sets the free hyperparameters to the new parameter values in new_params.
Parameters
----------
new_params : :py:class:`Array` or other Array-like, (len(:py:attr:`self.free_params`),)
New parameter values, ordered as dictated by the docstring for the
class. |
def fix_multiple_files(filenames, options, output=None):
"""Fix list of files.
Optionally fix files recursively.
"""
filenames = find_files(filenames, options.recursive, options.exclude)
if options.jobs > 1:
import multiprocessing
pool = multiprocessing.Pool(options.jobs)
p... | Fix list of files.
Optionally fix files recursively. |
def edit_ipv6(self, ip6, descricao, id_ip):
"""
Edit a IP6
:param ip6: An IP6 available to save in format xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx.
:param descricao: IP description.
:param id_ip: Ipv6 identifier. Integer value and greater than zero.
:return: None
... | Edit a IP6
:param ip6: An IP6 available to save in format xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx.
:param descricao: IP description.
:param id_ip: Ipv6 identifier. Integer value and greater than zero.
:return: None |
def crop(self, start_timestamp, end_timestamp):
"""
Return a new TimeSeries object contains all the timstamps and values within
the specified range.
:param int start_timestamp: the start timestamp value
:param int end_timestamp: the end timestamp value
:return: :class:`T... | Return a new TimeSeries object contains all the timstamps and values within
the specified range.
:param int start_timestamp: the start timestamp value
:param int end_timestamp: the end timestamp value
:return: :class:`TimeSeries` object. |
def simplex_grid(m, n):
r"""
Construct an array consisting of the integer points in the
(m-1)-dimensional simplex :math:`\{x \mid x_0 + \cdots + x_{m-1} = n
\}`, or equivalently, the m-part compositions of n, which are listed
in lexicographic order. The total number of the points (hence the
leng... | r"""
Construct an array consisting of the integer points in the
(m-1)-dimensional simplex :math:`\{x \mid x_0 + \cdots + x_{m-1} = n
\}`, or equivalently, the m-part compositions of n, which are listed
in lexicographic order. The total number of the points (hence the
length of the output array) is L... |
def asRemoteException(ErrorType):
'''return the remote exception version of the error above
you can catch errors as usally:
>>> try:
raise asRemoteException(ValueError)
except ValueError:
pass
or you can catch the remote Exception
>>> try:
raise asRemoteException(ReferenceError)(ReferenceError(),'')
except asRemote... | return the remote exception version of the error above
you can catch errors as usally:
>>> try:
raise asRemoteException(ValueError)
except ValueError:
pass
or you can catch the remote Exception
>>> try:
raise asRemoteException(ReferenceError)(ReferenceError(),'')
except asRemoteException(ReferenceError):
pass |
def iter_descendants(self, strategy="levelorder", is_leaf_fn=None):
    """Return an iterator over all descendant nodes (self excluded)."""
    for node in self.traverse(strategy=strategy, is_leaf_fn=is_leaf_fn):
        if node is self:
            continue
        yield node
def execute(self, args):
""" Executes the command invocation
:param args: The command arguments for this invocation
:type args: list
:return: The command result
:rtype: knack.util.CommandResultItem
"""
import colorama
self.cli_ctx.raise_event(EVENT_INVOK... | Executes the command invocation
:param args: The command arguments for this invocation
:type args: list
:return: The command result
:rtype: knack.util.CommandResultItem |
def get_schema(self, filename):
"""
Guess schema using messytables
"""
table_set = self.read_file(filename)
# Have I been able to read the filename
if table_set is None:
return []
# Get the first table as rowset
row_set = table_... | Guess schema using messytables |
def set_events_callback(self, call_back):
"""Sets the user callback that the Server object has to call when an
event is created.
"""
logger.info("setting event callback")
callback_wrap = ctypes.CFUNCTYPE(None, ctypes.c_void_p,
ctypes.POINT... | Sets the user callback that the Server object has to call when an
event is created. |
def _del_thread(self, dwThreadId):
"""
Private method to remove a thread object from the snapshot.
@type dwThreadId: int
@param dwThreadId: Global thread ID.
"""
try:
aThread = self.__threadDict[dwThreadId]
del self.__threadDict[dwThreadId]
... | Private method to remove a thread object from the snapshot.
@type dwThreadId: int
@param dwThreadId: Global thread ID. |
def macs_filtered_reads_plot(self):
""" Plot of filtered reads for control and treatment samples """
data = dict()
req_cats = ['control_fragments_total', 'control_fragments_after_filtering', 'treatment_fragments_total', 'treatment_fragments_after_filtering']
for s_name, d in self.macs_da... | Plot of filtered reads for control and treatment samples |
def validate_arguments(args):
"""Makes sure arguments are valid, specified files exist, etc."""
#check arguments
print
print "Checking input...",
semantic_tests = ["animals", "custom"]
phonemic_tests = ["a", "p", "s", "f"]
if args.similarity_file:
print
print "Custom simila... | Makes sure arguments are valid, specified files exist, etc. |
def read(self, n):
    """Consume exactly `n` characters from the stream.

    Blocks, pulling up-to-4096-byte chunks from the underlying
    socket, until at least ``n`` characters are buffered.

    Raises:
        EndOfStreamError: if the peer closes the connection before
            ``n`` characters are available.
    """
    while len(self.buf) < n:
        received = self.f.recv(4096)
        if not received:
            raise EndOfStreamError()
        self.buf += received
    result = self.buf[:n]
    self.buf = self.buf[n:]
    return result
def _get_data_dtype(self):
"""Get the dtype of the file based on the actual available channels"""
pkhrec = [
('GP_PK_HEADER', GSDTRecords.gp_pk_header),
('GP_PK_SH1', GSDTRecords.gp_pk_sh1)
]
pk_head_dtype = np.dtype(pkhrec)
def get_lrec(cols):
... | Get the dtype of the file based on the actual available channels |
def clone(self) -> 'ImageBBox':
    "Return a copy of this bbox, mirroring the behavior of `torch.clone` for `Image` objects."
    flow_copy = FlowField(self.size, self.flow.flow.clone())
    return self.__class__(flow_copy, scale=False, y_first=False,
                          labels=self.labels, pad_idx=self.pad_idx)
def update_dashboard(self, id, **kwargs): # noqa: E501
"""Update a specific dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dashboard(id, asy... | Update a specific dashboard # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dashboard(id, async_req=True)
>>> result = thread.get()
:param asyn... |
def summary(self, sortOn=None):
"""
Summarize all the alignments for this title.
@param sortOn: A C{str} attribute to sort titles on. One of 'length',
'maxScore', 'medianScore', 'readCount', or 'title'.
@raise ValueError: If an unknown C{sortOn} value is given.
@retu... | Summarize all the alignments for this title.
@param sortOn: A C{str} attribute to sort titles on. One of 'length',
'maxScore', 'medianScore', 'readCount', or 'title'.
@raise ValueError: If an unknown C{sortOn} value is given.
@return: A generator that yields C{dict} instances as pro... |
def store_result(self, message, result: Result, ttl: int) -> None:
"""Store a result in the backend.
Parameters:
message(Message)
result(object): Must be serializable.
ttl(int): The maximum amount of time the result may be
stored in the backend for.
"""... | Store a result in the backend.
Parameters:
message(Message)
result(object): Must be serializable.
ttl(int): The maximum amount of time the result may be
stored in the backend for. |
def radiation_values(self, location, timestep=1):
"""Lists of driect normal, diffuse horiz, and global horiz rad at each timestep.
"""
# create sunpath and get altitude at every timestep of the design day
sp = Sunpath.from_location(location)
altitudes = []
dates = self._g... | Lists of driect normal, diffuse horiz, and global horiz rad at each timestep. |
def list_semod():
'''
Return a structure listing all of the selinux modules on the system and
what state they are in
CLI Example:
.. code-block:: bash
salt '*' selinux.list_semod
.. versionadded:: 2016.3.0
'''
helptext = __salt__['cmd.run']('semodule -h').splitlines()
sem... | Return a structure listing all of the selinux modules on the system and
what state they are in
CLI Example:
.. code-block:: bash
salt '*' selinux.list_semod
.. versionadded:: 2016.3.0 |
def pp_hex(raw, reverse=True):
"""Return a pretty-printed (hex style) version of a binary string.
Args:
raw (bytes): any sequence of bytes
reverse (bool): True if output should be in reverse order.
Returns:
Hex string corresponding to input byte sequence.
"""
if not reverse... | Return a pretty-printed (hex style) version of a binary string.
Args:
raw (bytes): any sequence of bytes
reverse (bool): True if output should be in reverse order.
Returns:
Hex string corresponding to input byte sequence. |
def default_branch(self, file):
""" Decide the name of the default branch given the file and the configuration
:param file: File with informations about it
:return: Branch Name
"""
if isinstance(self.__default_branch__, str):
return self.__default_branch__
el... | Decide the name of the default branch given the file and the configuration
:param file: File with informations about it
:return: Branch Name |
def iterstraight(self, raw):
"""Iterator that undoes the effect of filtering, and yields
each row in serialised format (as a sequence of bytes).
Assumes input is straightlaced. `raw` should be an iterable
that yields the raw bytes in chunks of arbitrary size.
"""
# leng... | Iterator that undoes the effect of filtering, and yields
each row in serialised format (as a sequence of bytes).
Assumes input is straightlaced. `raw` should be an iterable
that yields the raw bytes in chunks of arbitrary size. |
def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_last_counters_cleared(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
fcoe_get_interface = ET.Element("fcoe_get_interface")
config = fcoe_get_interface
output = ET.SubElement(fcoe_get_interfa... | Auto Generated Code |
def chunk_count(self):
    """Return a count of the chunks in this world folder."""
    # Total the per-region chunk counts across every region file.
    return sum(region.chunk_count() for region in self.iter_regions())
def train_evaluate_model_from_config(config: Union[str, Path, dict],
iterator: Union[DataLearningIterator, DataFittingIterator] = None, *,
to_train: bool = True,
evaluation_targets: Optional[Iterable[str]] = N... | Make training and evaluation of the model described in corresponding configuration file. |
def get_key_pair(self, alias_name):
"""
Retrieves the public and private key pair associated with the specified alias name.
Args:
alias_name: Key pair associated with the RabbitMQ
Returns:
dict: RabbitMQ certificate
"""
uri = self.URI + "/keypair... | Retrieves the public and private key pair associated with the specified alias name.
Args:
alias_name: Key pair associated with the RabbitMQ
Returns:
dict: RabbitMQ certificate |
def delete_by_ids(self, ids):
"""Delete objects by ids.
:param ids: list of objects ids to delete.
:return: True if objects were deleted. Otherwise, return False if no
objects were found or the delete was not successful.
"""
try:
self.filter(id__in=i... | Delete objects by ids.
:param ids: list of objects ids to delete.
:return: True if objects were deleted. Otherwise, return False if no
objects were found or the delete was not successful. |
def check_name(name):
"""
Verify the name is well-formed
>>> check_name(123)
False
>>> check_name('')
False
>>> check_name('abc')
False
>>> check_name('abc.def')
True
>>> check_name('abc.def.ghi')
False
>>> check_name('abc.d-ef')
True
>>> check_name('abc.d+ef... | Verify the name is well-formed
>>> check_name(123)
False
>>> check_name('')
False
>>> check_name('abc')
False
>>> check_name('abc.def')
True
>>> check_name('abc.def.ghi')
False
>>> check_name('abc.d-ef')
True
>>> check_name('abc.d+ef')
False
>>> check_name('.... |
def modify_snapshot(snapshot_id=None,
description=None,
userdata=None,
cleanup=None,
config="root"):
'''
Modify attributes of an existing snapshot.
config
Configuration name. (Default: root)
snapshot_id
ID ... | Modify attributes of an existing snapshot.
config
Configuration name. (Default: root)
snapshot_id
ID of the snapshot to be modified.
cleanup
Change the cleanup method of the snapshot. (str)
description
Change the description of the snapshot. (str)
userdata
... |
def router_connections(self):
"""Return a list of MongoClients, one for each mongos."""
clients = []
for server in self._routers:
if Servers().is_alive(server):
client = self.create_connection(Servers().hostname(server))
clients.append(client)
... | Return a list of MongoClients, one for each mongos. |
def get_column_info(connection, table_name):
"""
Return an in order list of (name, type) tuples describing the
columns in the given table.
"""
cursor = connection.cursor()
cursor.execute("SELECT sql FROM sqlite_master WHERE type == 'table' AND name == ?", (table_name,))
statement, = cursor.fetchone()
coldefs = ... | Return an in order list of (name, type) tuples describing the
columns in the given table. |
def get_ips(self, interface=None, family=None, scope=None, timeout=0):
"""
Get a tuple of IPs for the container.
"""
kwargs = {}
if interface:
kwargs['interface'] = interface
if family:
kwargs['family'] = family
if scope:
k... | Get a tuple of IPs for the container. |
def _disjoint_qubits(op1: ops.Operation, op2: ops.Operation) -> bool:
"""Returns true only if the operations have qubits in common."""
return not set(op1.qubits) & set(op2.qubits) | Returns true only if the operations have qubits in common. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.