code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def fit_size(min_length: int = 0, max_length: int = None,
message=None) -> Filter_T:
"""
Validate any sized object to ensure the size/length
is in a given range [min_length, max_length].
"""
def validate(value):
length = len(value) if value is not None else 0
if length ... | Validate any sized object to ensure the size/length
is in a given range [min_length, max_length]. |
def get_owner_asset_ids(self, address):
"""
Get the list of assets owned by an address owner.
:param address: ethereum account address, hex str
:return:
"""
block_filter = self._get_event_filter(owner=address)
log_items = block_filter.get_all_entries(max_tries=5)... | Get the list of assets owned by an address owner.
:param address: ethereum account address, hex str
:return: |
def cancel_spot_requests(self, requests):
"""Cancel one or more EC2 spot instance requests.
:param requests: List of EC2 spot instance request IDs.
:type requests: list
"""
ec2_requests = self.retry_on_ec2_error(self.ec2.get_all_spot_instance_requests, request_ids=requests)
... | Cancel one or more EC2 spot instance requests.
:param requests: List of EC2 spot instance request IDs.
:type requests: list |
def get(self, key, default=None, remote=False):
"""
Overrides dictionary get behavior to retrieve database objects with
support for returning a default. If remote=True then a remote
request is made to retrieve the database from the remote server,
otherwise the client's locally c... | Overrides dictionary get behavior to retrieve database objects with
support for returning a default. If remote=True then a remote
request is made to retrieve the database from the remote server,
otherwise the client's locally cached database object is returned.
:param str key: Database... |
def setup_debug_logging():
"""
set up debug logging
"""
logger = logging.getLogger("xbahn")
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(logging.Formatter("%(name)s: %(message)s"))
logger.addHandler(ch) | set up debug logging |
def check_job_collection_name(self, cloud_service_id, job_collection_id):
'''
The Check Name Availability operation checks if a new job collection with
the given name may be created, or if it is unavailable. The result of the
operation is a Boolean true or false.
cloud_service_i... | The Check Name Availability operation checks if a new job collection with
the given name may be created, or if it is unavailable. The result of the
operation is a Boolean true or false.
cloud_service_id:
The cloud service id
job_collection_id:
The name of the job... |
def openid_form(parser, token):
"""
Render OpenID form. Allows to pre set the provider::
{% openid_form "https://www.google.com/accounts/o8/id" %}
Also creates custom button URLs by concatenating all arguments
after the provider's URL
{% openid_form "https://www.google.com/accounts/o8/id" S... | Render OpenID form. Allows to pre set the provider::
{% openid_form "https://www.google.com/accounts/o8/id" %}
Also creates custom button URLs by concatenating all arguments
after the provider's URL
{% openid_form "https://www.google.com/accounts/o8/id" STATIC_URL "image/for/google.jpg" %} |
def complement(self, other):
"""
Calculate the complement of `self` and `other`.
:param other: Another SimVariableSet instance.
:return: The complement result.
"""
s = SimVariableSet()
s.register_variables = self.register_variables - other.register_vari... | Calculate the complement of `self` and `other`.
:param other: Another SimVariableSet instance.
:return: The complement result. |
def get_meta(self, name, meta_key=None):
'''Get the ``content`` attribute of a meta tag ``name``.
For example::
head.get_meta('decription')
returns the ``content`` attribute of the meta tag with attribute
``name`` equal to ``description`` or ``None``.
If a differen... | Get the ``content`` attribute of a meta tag ``name``.
For example::
head.get_meta('decription')
returns the ``content`` attribute of the meta tag with attribute
``name`` equal to ``description`` or ``None``.
If a different meta key needs to be matched, it can be specified ... |
def date_time_between_dates(
self,
datetime_start=None,
datetime_end=None,
tzinfo=None):
"""
Takes two DateTime objects and returns a random datetime between the two
given datetimes.
Accepts DateTime objects.
:param datetime_start:... | Takes two DateTime objects and returns a random datetime between the two
given datetimes.
Accepts DateTime objects.
:param datetime_start: DateTime
:param datetime_end: DateTime
:param tzinfo: timezone, instance of datetime.tzinfo subclass
:example DateTime('1999-02-02 1... |
def p_expr_exit(p):
'''expr : EXIT
| EXIT LPAREN RPAREN
| EXIT LPAREN expr RPAREN'''
if len(p) == 5:
p[0] = ast.Exit(p[3], lineno=p.lineno(1))
else:
p[0] = ast.Exit(None, lineno=p.lineno(1)) | expr : EXIT
| EXIT LPAREN RPAREN
| EXIT LPAREN expr RPAREN |
def _list_dict(l: Iterator[str], case_insensitive: bool = False):
"""
return a dictionary with all items of l being the keys of the dictionary
If argument case_insensitive is non-zero ldap.cidict.cidict will be
used for case-insensitive string keys
"""
if case_insensitive:
raise NotImpl... | return a dictionary with all items of l being the keys of the dictionary
If argument case_insensitive is non-zero ldap.cidict.cidict will be
used for case-insensitive string keys |
def get_attribute(self, obj, attribute):
""" Returns single object attribute.
:param obj: requested object.
:param attribute: requested attribute to query.
:returns: returned value.
:rtype: str
"""
raw_return = self.send_command_return(obj, attribute, '?')
... | Returns single object attribute.
:param obj: requested object.
:param attribute: requested attribute to query.
:returns: returned value.
:rtype: str |
def trailing_stop_loss(self, accountID, **kwargs):
"""
Shortcut to create a Trailing Stop Loss Order in an Account
Args:
accountID : The ID of the Account
kwargs : The arguments to create a TrailingStopLossOrderRequest
Returns:
v20.response.Response ... | Shortcut to create a Trailing Stop Loss Order in an Account
Args:
accountID : The ID of the Account
kwargs : The arguments to create a TrailingStopLossOrderRequest
Returns:
v20.response.Response containing the results from submitting
the request |
def population_counts(
self,
population_size,
weighted=True,
include_missing=False,
include_transforms_for_dims=None,
prune=False,
):
"""Return counts scaled in proportion to overall population.
The return value is a numpy.ndarray object. Count values... | Return counts scaled in proportion to overall population.
The return value is a numpy.ndarray object. Count values are scaled
proportionally to approximate their value if the entire population
had been sampled. This calculation is based on the estimated size of
the population provided a... |
def __flush_buffer(self):
"""Flush the buffer contents out to a chunk.
"""
self.__flush_data(self._buffer.getvalue())
self._buffer.close()
self._buffer = StringIO() | Flush the buffer contents out to a chunk. |
def vertex_to_entity_path(vertex_path,
graph,
entities,
vertices=None):
"""
Convert a path of vertex indices to a path of entity indices.
Parameters
----------
vertex_path : (n,) int
Ordered list of vertex indices... | Convert a path of vertex indices to a path of entity indices.
Parameters
----------
vertex_path : (n,) int
Ordered list of vertex indices representing a path
graph : nx.Graph
Vertex connectivity
entities : (m,) list
Entity objects
vertices : (p, dimension) float
... |
def identity(self):
"""Return this partition information as a PartitionId."""
if self.dataset is None:
# The relationship will be null until the object is committed
s = object_session(self)
ds = s.query(Dataset).filter(Dataset.id_ == self.d_id).one()
else:
... | Return this partition information as a PartitionId. |
def GET(self, courseid): # pylint: disable=arguments-differ
""" GET request """
course = self.get_course(courseid)
return self.show_page(course) | GET request |
def clean_old_jobs():
'''
Called in the master's event loop every loop_interval. Archives and/or
deletes the events and job details from the database.
:return:
'''
if __opts__.get('keep_jobs', False) and int(__opts__.get('keep_jobs', 0)) > 0:
try:
with _get_serv() as cur:
... | Called in the master's event loop every loop_interval. Archives and/or
deletes the events and job details from the database.
:return: |
def expand(self, url):
"""Base expand method. Only visits the link, and return the response
url"""
url = self.clean_url(url)
response = self._get(url)
if response.ok:
return response.url
raise ExpandingErrorException | Base expand method. Only visits the link, and return the response
url |
def start(config, bugnumber=""):
"""Create a new topic branch."""
repo = config.repo
if bugnumber:
summary, bugnumber, url = get_summary(config, bugnumber)
else:
url = None
summary = None
if summary:
summary = input('Summary ["{}"]: '.format(summary)).strip() or sum... | Create a new topic branch. |
def _set_Buffer(self, v, load=False):
"""
Setter method for Buffer, mapped from YANG variable /rbridge_id/threshold_monitor/Buffer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_Buffer is considered as a private
method. Backends looking to populate this ... | Setter method for Buffer, mapped from YANG variable /rbridge_id/threshold_monitor/Buffer (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_Buffer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set... |
def selectrowindex(self, window_name, object_name, row_index):
"""
Select row index
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full nam... | Select row index
@param window_name: Window name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type window_name: string
@param object_name: Object name to type in, either full name,
LDTP's name convention, or a Unix glob.
@type object_name: stri... |
def set_units(self, unit):
"""Set the unit for this data point
Unit, as with data_type, are actually associated with the stream and not
the individual data point. As such, changing this within a stream is
not encouraged. Setting the unit on the data point is useful when the
st... | Set the unit for this data point
Unit, as with data_type, are actually associated with the stream and not
the individual data point. As such, changing this within a stream is
not encouraged. Setting the unit on the data point is useful when the
stream might be created with the write o... |
def multi_index_insert_row(df, index_row, values_row):
""" Return a new dataframe with a row inserted for a multi-index dataframe.
This will sort the rows according to the ordered multi-index levels.
"""
row_index = pd.MultiIndex(levels=[[i] for i in index_row],
labels=... | Return a new dataframe with a row inserted for a multi-index dataframe.
This will sort the rows according to the ordered multi-index levels. |
def _resample(self, arrays, ji_windows):
"""Resample all arrays with potentially different resolutions to a common resolution."""
# get a destination array template
win_dst = ji_windows[self.dst_res]
aff_dst = self._layer_meta[self._res_indices[self.dst_res][0]]["transform"]
arra... | Resample all arrays with potentially different resolutions to a common resolution. |
def setup_versioneer():
"""
Generate (temporarily) versioneer.py file in project root directory
:return:
"""
try:
# assume versioneer.py was generated using "versioneer install" command
import versioneer
versioneer.get_version()
except ImportError:
# it looks vers... | Generate (temporarily) versioneer.py file in project root directory
:return: |
def matches_querytime(instance, querytime):
"""
Checks whether the given instance satisfies the given QueryTime object.
:param instance: an instance of Versionable
:param querytime: QueryTime value to check against
"""
if not querytime.active:
return True
... | Checks whether the given instance satisfies the given QueryTime object.
:param instance: an instance of Versionable
:param querytime: QueryTime value to check against |
def _set_icmp(self, v, load=False):
"""
Setter method for icmp, mapped from YANG variable /rbridge_id/interface/ve/ip/icmp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_icmp is considered as a private
method. Backends looking to populate this variable s... | Setter method for icmp, mapped from YANG variable /rbridge_id/interface/ve/ip/icmp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_icmp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_icmp() ... |
def get_grouped_psf_model(template_psf_model, star_group, pars_to_set):
"""
Construct a joint PSF model which consists of a sum of PSF's templated on
a specific model, but whose parameters are given by a table of objects.
Parameters
----------
template_psf_model : `astropy.modeling.Fittable2DMo... | Construct a joint PSF model which consists of a sum of PSF's templated on
a specific model, but whose parameters are given by a table of objects.
Parameters
----------
template_psf_model : `astropy.modeling.Fittable2DModel` instance
The model to use for *individual* objects. Must have paramete... |
def subtree(events):
"""selects sub-tree events"""
stack = 0
for obj in events:
if obj['type'] == ENTER:
stack += 1
elif obj['type'] == EXIT:
if stack == 0:
break
stack -= 1
yield obj | selects sub-tree events |
def _generate_replacement(interface_number, segment_number):
"""
This will generate replacement string for
{port0} => {port9}
{segment0} => {segment9}
"""
replacements = {}
for i in range(0, 9):
replacements["port" + str(i)] = interface_number + i
... | This will generate replacement string for
{port0} => {port9}
{segment0} => {segment9} |
def count(a, axis=None):
"""
Count the non-masked elements of the array along the given axis.
.. note:: Currently limited to operating on a single axis.
:param axis: Axis or axes along which the operation is performed.
The default (axis=None) is to perform the operation
... | Count the non-masked elements of the array along the given axis.
.. note:: Currently limited to operating on a single axis.
:param axis: Axis or axes along which the operation is performed.
The default (axis=None) is to perform the operation
over all the dimensions of the inp... |
def special_type(self):
"""
[str] 特别处理状态。’Normal’ - 正常上市, ‘ST’ - ST处理, ‘StarST’ - *ST代表该股票正在接受退市警告,
‘PT’ - 代表该股票连续3年收入为负,将被暂停交易, ‘Other’ - 其他(股票专用)
"""
try:
return self.__dict__["special_type"]
except (KeyError, ValueError):
raise AttributeError(
... | [str] 特别处理状态。’Normal’ - 正常上市, ‘ST’ - ST处理, ‘StarST’ - *ST代表该股票正在接受退市警告,
‘PT’ - 代表该股票连续3年收入为负,将被暂停交易, ‘Other’ - 其他(股票专用) |
def create_from_fits(cls, fitsfile, norm_type='flux'):
"""Build a TSCube object from a fits file created by gttscube
Parameters
----------
fitsfile : str
Path to the tscube FITS file.
norm_type : str
String specifying the quantity used for the normalization... | Build a TSCube object from a fits file created by gttscube
Parameters
----------
fitsfile : str
Path to the tscube FITS file.
norm_type : str
String specifying the quantity used for the normalization |
def triplet_loss(anchor, positive, negative, margin, extra=False, scope="triplet_loss"):
r"""Loss for Triplet networks as described in the paper:
`FaceNet: A Unified Embedding for Face Recognition and Clustering
<https://arxiv.org/abs/1503.03832>`_
by Schroff et al.
Learn embeddings from an anchor ... | r"""Loss for Triplet networks as described in the paper:
`FaceNet: A Unified Embedding for Face Recognition and Clustering
<https://arxiv.org/abs/1503.03832>`_
by Schroff et al.
Learn embeddings from an anchor point and a similar input (positive) as
well as a not-similar input (negative).
Intui... |
def healthy(self, url):
'''determine if a resource is healthy based on an accepted response (200)
or redirect (301)
Parameters
==========
url: the URL to check status for, based on the status_code of HEAD
'''
response = requests.get(url)
status_code = response.status_code
... | determine if a resource is healthy based on an accepted response (200)
or redirect (301)
Parameters
==========
url: the URL to check status for, based on the status_code of HEAD |
def date(self, date):
"""Set File Occurrence date."""
self._occurrence_data['date'] = self._utils.format_datetime(
date, date_format='%Y-%m-%dT%H:%M:%SZ'
) | Set File Occurrence date. |
def pump_reader(self):
"""
Synchronously reads one message from the watch, blocking until a message is available.
All events caused by the message read will be processed before this method returns.
.. note::
You usually don't need to invoke this method manually; instead, see ... | Synchronously reads one message from the watch, blocking until a message is available.
All events caused by the message read will be processed before this method returns.
.. note::
You usually don't need to invoke this method manually; instead, see :meth:`run_sync` and :meth:`run_async`. |
def fast_comp(seq1, seq2, transpositions=False):
"""Compute the distance between the two sequences `seq1` and `seq2` up to a
maximum of 2 included, and return it. If the edit distance between the two
sequences is higher than that, -1 is returned.
If `transpositions` is `True`, transpositions will be taken into ac... | Compute the distance between the two sequences `seq1` and `seq2` up to a
maximum of 2 included, and return it. If the edit distance between the two
sequences is higher than that, -1 is returned.
If `transpositions` is `True`, transpositions will be taken into account for
the computation of the distance. This can ... |
def clean_cell(self, cell, cell_type):
"""
Uses the type of field (from the mapping) to
determine how to clean and format the cell.
"""
try:
# Get rid of non-ASCII characters
cell = cell.encode('ascii', 'ignore').decode()
if cell_type == 'D':
... | Uses the type of field (from the mapping) to
determine how to clean and format the cell. |
def send_calibrate_barometer(self):
"""Request barometer calibration."""
calibration_command = self.message_factory.command_long_encode(
self._handler.target_system, 0, # target_system, target_component
mavutil.mavlink.MAV_CMD_PREFLIGHT_CALIBRATION, # command
0, #... | Request barometer calibration. |
def as_dict(self):
"""
Json-serializable dict representation of Dos.
"""
return {"@module": self.__class__.__module__,
"@class": self.__class__.__name__, "efermi": self.efermi,
"energies": list(self.energies),
"densities": {str(spin): list(... | Json-serializable dict representation of Dos. |
def parse_workflow_declaration(self, wf_declaration_subAST):
'''
Parses a WDL declaration AST subtree into a string and a python
dictionary containing its 'type' and 'value'.
For example:
var_name = refIndex
var_map = {'type': File,
'value': bamIndex}
... | Parses a WDL declaration AST subtree into a string and a python
dictionary containing its 'type' and 'value'.
For example:
var_name = refIndex
var_map = {'type': File,
'value': bamIndex}
:param wf_declaration_subAST: An AST subtree of a workflow declaration.
... |
def temp_db(db, name=None):
"""
A context manager that creates a temporary database.
Useful for automated tests.
Parameters
----------
db: object
a preconfigured DB object
name: str, optional
name of the database to be created. (default: globally unique name)
"""
if... | A context manager that creates a temporary database.
Useful for automated tests.
Parameters
----------
db: object
a preconfigured DB object
name: str, optional
name of the database to be created. (default: globally unique name) |
def app_update_state(app_id,state):
"""
update app state
"""
try:
create_at = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
conn = get_conn()
c = conn.cursor()
c.execute("UPDATE app SET state='{0}',change_at='{1}' WHERE id='{2}'".format(state, create_at, app_id))
... | update app state |
def pkg_resources_env(self, platform_str):
"""Returns a dict that can be used in place of packaging.default_environment."""
os_name = ''
platform_machine = ''
platform_release = ''
platform_system = ''
platform_version = ''
sys_platform = ''
if 'win' in platform_str:
os_name = 'nt... | Returns a dict that can be used in place of packaging.default_environment. |
def get_data():
"""Retrieve static data from the game."""
run_config = run_configs.get()
with run_config.start(want_rgb=False) as controller:
m = maps.get("Sequencer") # Arbitrary ladder map.
create = sc_pb.RequestCreateGame(local_map=sc_pb.LocalMap(
map_path=m.path, map_data=m.data(run_config))... | Retrieve static data from the game. |
def _cfactory(attr, func, argtypes, restype, errcheck=None):
# type: (Any, str, List[Any], Any, Optional[Callable]) -> None
""" Factory to create a ctypes function and automatically manage errors. """
meth = getattr(attr, func)
meth.argtypes = argtypes
meth.restype = restype
... | Factory to create a ctypes function and automatically manage errors. |
def traverse(self, root="ROOT", indent="", transform=None, stream=sys.stdout):
'''
Traverses the C{View} tree and prints its nodes.
The nodes are printed converting them to string but other transformations can be specified
by providing a method name as the C{transform} parameter.
... | Traverses the C{View} tree and prints its nodes.
The nodes are printed converting them to string but other transformations can be specified
by providing a method name as the C{transform} parameter.
@type root: L{View}
@param root: the root node from where the traverse starts
@t... |
def coderelpath(coderoot, relpath):
"""Returns the absolute path of the 'relpath' relative to the specified code directory."""
from os import chdir, getcwd, path
cd = getcwd()
chdir(coderoot)
result = path.abspath(relpath)
chdir(cd)
return result | Returns the absolute path of the 'relpath' relative to the specified code directory. |
def run_kernel(self, func, gpu_args, instance):
""" Run a compiled kernel instance on a device """
logging.debug('run_kernel %s', instance.name)
logging.debug('thread block dims (%d, %d, %d)', *instance.threads)
logging.debug('grid dims (%d, %d, %d)', *instance.grid)
try:
... | Run a compiled kernel instance on a device |
def sync_close(self):
"""
同步关闭
"""
if self._closed:
return
while self._free:
conn = self._free.popleft()
if not conn.closed:
# pragma: no cover
conn.sync_close()
for conn in self._used:
if not... | 同步关闭 |
def bin_dense(M, subsampling_factor=3):
"""Sum over each block of given subsampling factor, returns a matrix whose
dimensions are this much as small (e.g. a 27x27 matrix binned with a
subsampling factor equal to 3 will return a 9x9 matrix whose each component
is the sum of the corresponding 3x3 block in... | Sum over each block of given subsampling factor, returns a matrix whose
dimensions are this much as small (e.g. a 27x27 matrix binned with a
subsampling factor equal to 3 will return a 9x9 matrix whose each component
is the sum of the corresponding 3x3 block in the original matrix).
Remaining columns an... |
def _compute_counts(event, time, order=None):
"""Count right censored and uncensored samples at each unique time point.
Parameters
----------
event : array
Boolean event indicator.
time : array
Survival time or time of censoring.
order : array or None
Indices to order ... | Count right censored and uncensored samples at each unique time point.
Parameters
----------
event : array
Boolean event indicator.
time : array
Survival time or time of censoring.
order : array or None
Indices to order time in ascending order.
If None, order will ... |
def calc_effective_diffusivity(self, inlets=None, outlets=None,
domain_area=None, domain_length=None):
r"""
This calculates the effective diffusivity in this linear transport
algorithm.
Parameters
----------
inlets : array_like
... | r"""
This calculates the effective diffusivity in this linear transport
algorithm.
Parameters
----------
inlets : array_like
The pores where the inlet composition boundary conditions were
applied. If not given an attempt is made to infer them from the
... |
def __set_quantity(self, value):
'''
Sets the quantity
@param value:str
'''
try:
if value < 0:
raise ValueError()
self.__quantity = Decimal(str(value))
except ValueError:
raise ValueError("Quantity must be a positive nu... | Sets the quantity
@param value:str |
def remove_lvm_physical_volume(block_device):
'''
Remove LVM PV signatures from a given block device.
:param block_device: str: Full path of block device to scrub.
'''
p = Popen(['pvremove', '-ff', block_device],
stdin=PIPE)
p.communicate(input='y\n') | Remove LVM PV signatures from a given block device.
:param block_device: str: Full path of block device to scrub. |
def _switch_tz_offset_sql(self, field_name, tzname):
"""
Returns the SQL that will convert field_name to UTC from tzname.
"""
field_name = self.quote_name(field_name)
if settings.USE_TZ:
if pytz is None:
from django.core.exceptions import ImproperlyCon... | Returns the SQL that will convert field_name to UTC from tzname. |
def __regions_russian(self, word):
"""
Return the regions RV and R2 which are used by the Russian stemmer.
In any word, RV is the region after the first vowel,
or the end of the word if it contains no vowel.
R2 is the region after the first non-vowel following
a vowel i... | Return the regions RV and R2 which are used by the Russian stemmer.
In any word, RV is the region after the first vowel,
or the end of the word if it contains no vowel.
R2 is the region after the first non-vowel following
a vowel in R1, or the end of the word if there is no such non-vo... |
def copyto_file_object(self, query, file_object):
"""
Gets data from a table into a writable file object
:param query: The "COPY { table_name [(column_name[, ...])] | (query) }
TO STDOUT [WITH(option[,...])]" query to execute
:type query: str
:param f... | Gets data from a table into a writable file object
:param query: The "COPY { table_name [(column_name[, ...])] | (query) }
TO STDOUT [WITH(option[,...])]" query to execute
:type query: str
:param file_object: A file-like object.
Normally t... |
def solve_sweep_wavelength(
self,
structure,
wavelengths,
filename="wavelength_n_effs.dat",
plot=True,
):
"""
Solve for the effective indices of a fixed structure at
different wavelengths.
Args:
structure (Slabs): The target struct... | Solve for the effective indices of a fixed structure at
different wavelengths.
Args:
structure (Slabs): The target structure to solve
for modes.
wavelengths (list): A list of wavelengths to sweep
over.
filename (str): The nominal filen... |
def floor_func(self, addr):
"""
Return the function who has the greatest address that is less than or equal to `addr`.
:param int addr: The address to query.
:return: A Function instance, or None if there is no other function before `addr`.
:rtype: Function or N... | Return the function who has the greatest address that is less than or equal to `addr`.
:param int addr: The address to query.
:return: A Function instance, or None if there is no other function before `addr`.
:rtype: Function or None |
def get(self, instance, aslist=False, **kwargs):
"""Get (multi-)references
"""
refs = self.get_versioned_references_for(instance)
if not self.multiValued:
if len(refs) > 1:
logger.warning("Found {} references for non-multivalued "
... | Get (multi-)references |
def bulk_export(self, ids, exclude_captures=False):
"""Bulk export a set of results.
:param ids: Int list of result IDs.
:rtype: tuple `(io.BytesIO, 'filename')`
"""
return self.service.bulk_export(self.base, ids, params={'exclude_captures': exclude_captures}) | Bulk export a set of results.
:param ids: Int list of result IDs.
:rtype: tuple `(io.BytesIO, 'filename')` |
def clean_regex(regex):
"""
Escape any regex special characters other than alternation.
:param regex: regex from datatables interface
:type regex: str
:rtype: str with regex to use with database
"""
# copy for return
ret_regex = regex
# these characters are escaped (all except alte... | Escape any regex special characters other than alternation.
:param regex: regex from datatables interface
:type regex: str
:rtype: str with regex to use with database |
def raw(self, sql):
"""
Execute raw sql
:Parameters:
- sql: string, sql to be executed
:Return: the result of this execution
If it's a select, return a list with each element be a DataRow instance
Otherwise return raw result from the cursor (Should be insert o... | Execute raw sql
:Parameters:
- sql: string, sql to be executed
:Return: the result of this execution
If it's a select, return a list with each element be a DataRow instance
Otherwise return raw result from the cursor (Should be insert or update or delete) |
def splitbins(t, trace=0):
"""t, trace=0 -> (t1, t2, shift). Split a table to save space.
t is a sequence of ints. This function can be useful to save space if
many of the ints are the same. t1 and t2 are lists of ints, and shift
is an int, chosen to minimize the combined size of t1 and t2 (in C
... | t, trace=0 -> (t1, t2, shift). Split a table to save space.
t is a sequence of ints. This function can be useful to save space if
many of the ints are the same. t1 and t2 are lists of ints, and shift
is an int, chosen to minimize the combined size of t1 and t2 (in C
code), and where for each i in ra... |
def connected_client(self):
"""Returns a ContextManagerFuture to be yielded in a with statement.
Returns:
A ContextManagerFuture object.
Examples:
>>> with (yield pool.connected_client()) as client:
# client is a connected tornadis.Client instance
... | Returns a ContextManagerFuture to be yielded in a with statement.
Returns:
A ContextManagerFuture object.
Examples:
>>> with (yield pool.connected_client()) as client:
# client is a connected tornadis.Client instance
# it will be automati... |
def _comp_method_SERIES(cls, op, special):
"""
Wrapper function for Series arithmetic operations, to avoid
code duplication.
"""
op_name = _get_op_name(op, special)
masker = _gen_eval_kwargs(op_name).get('masker', False)
def na_op(x, y):
# TODO:
# should have guarantess on w... | Wrapper function for Series arithmetic operations, to avoid
code duplication. |
async def _request(
self,
method: str,
endpoint: str,
*,
headers: dict = None,
params: dict = None,
json: dict = None,
ssl: bool = True) -> dict:
"""Wrap the generic request method to add access token, etc."""
... | Wrap the generic request method to add access token, etc. |
def _sentence(self, words):
"""Generate a sentence"""
db = self.database
# Generate 2 words to start a sentence with
seed = random.randint(0, db['word_count'] - 3)
seed_word, next_word = db['words'][seed], db['words'][seed + 1]
w1, w2 = seed_word, next_word
# Ge... | Generate a sentence |
def handleSubRectangles(self, images, subRectangles):
""" handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically.
"""
if isinstance(subRectang... | handleSubRectangles(images)
Handle the sub-rectangle stuff. If the rectangles are given by the
user, the values are checked. Otherwise the subrectangles are
calculated automatically. |
def set_object(self, obj, properties):
"""Add an object to the definition and set its ``properties``."""
self._objects.add(obj)
properties = set(properties)
self._properties |= properties
pairs = self._pairs
for p in self._properties:
if p in properties:
... | Add an object to the definition and set its ``properties``. |
def edge_val_set(self, graph, orig, dest, idx, key, branch, turn, tick, value):
"""Set this key of this edge to this value."""
if (branch, turn, tick) in self._btts:
raise TimeError
self._btts.add((branch, turn, tick))
graph, orig, dest, key, value = map(self.pack, (graph, or... | Set this key of this edge to this value. |
def barf(msg, exit=None, f=sys.stderr):
    '''Exit with a log message (usually a fatal error).

    Logs ``msg`` via :func:`shout` to stream ``f`` and terminates the
    process with ``exit`` (defaults to the FSQ temporary-failure code).
    '''
    # Fall back to the standard temporary-failure exit code when the
    # caller did not supply one explicitly.
    if exit is None:
        exit = const('FSQ_FAIL_TMP')
    shout(msg, f)
    sys.exit(exit)
def add_batch(self, batch_id, batch_properties=None):
"""Adds batch with give ID and list of properties."""
if batch_properties is None:
batch_properties = {}
if not isinstance(batch_properties, dict):
raise ValueError('batch_properties has to be dict, however it was: '
+ ... | Adds batch with give ID and list of properties. |
def schnorr_generate_nonce_pair(self, msg, raw=False,
digest=hashlib.sha256):
"""
Generate a nonce pair deterministically for use with
schnorr_partial_sign.
"""
if not HAS_SCHNORR:
raise Exception("secp256k1_schnorr not enabled")
... | Generate a nonce pair deterministically for use with
schnorr_partial_sign. |
def energy(self, sample_like, dtype=np.float):
"""The energy of the given sample.
Args:
sample_like (samples_like):
A raw sample. `sample_like` is an extension of
NumPy's array_like structure. See :func:`.as_samples`.
dtype (:class:`numpy.dtype`,... | The energy of the given sample.
Args:
sample_like (samples_like):
A raw sample. `sample_like` is an extension of
NumPy's array_like structure. See :func:`.as_samples`.
dtype (:class:`numpy.dtype`, optional):
The data type of the returned ... |
def get_single_item(d):
    """Get the sole ``(key, value)`` pair from a dict with exactly one item.

    :param d: a mapping expected to contain exactly one entry
    :returns: the single ``(key, value)`` tuple
    :raises AssertionError: if ``d`` does not hold exactly one item
        (note: assertions are stripped under ``python -O``)
    """
    assert len(d) == 1, 'Single-item dict must have just one item, not %d.' % len(d)
    # iter(d.items()) is the Python 3 equivalent of six.iteritems(d);
    # using it removes the third-party ``six`` dependency with identical behavior.
    return next(iter(d.items()))
def update_sig(queue):
"""update signature"""
while True:
options, sign, vers = queue.get()
info("[+] \033[92mChecking signature version:\033[0m %s" % sign)
localver = get_local_version(options.mirrordir, sign)
remotever = vers[sign]
if localver is None or (localver and i... | update signature |
def find(self, func: Callable[[T], bool]) -> TOption[T]:
"""
Usage:
>>> TList([1, 2, 3, 4, 5]).find(lambda x: x > 3)
Option --> 4
>>> TList([1, 2, 3, 4, 5]).find(lambda x: x > 6)
Option --> None
"""
for x in self:
if func(x):
... | Usage:
>>> TList([1, 2, 3, 4, 5]).find(lambda x: x > 3)
Option --> 4
>>> TList([1, 2, 3, 4, 5]).find(lambda x: x > 6)
Option --> None |
def plot_pairwise_distance(dist, labels=None, colorbar=True, ax=None,
imshow_kwargs=None):
"""Plot a pairwise distance matrix.
Parameters
----------
dist : array_like
The distance matrix in condensed form.
labels : sequence of strings, optional
Sample labe... | Plot a pairwise distance matrix.
Parameters
----------
dist : array_like
The distance matrix in condensed form.
labels : sequence of strings, optional
Sample labels for the axes.
colorbar : bool, optional
If True, add a colorbar to the current figure.
ax : axes, optional... |
def description(filename):
"""Provide a short description."""
# This ends up in the Summary header for PKG-INFO and it should be a
# one-liner. It will get rendered on the package page just below the
# package version header but above the long_description, which ironically
# gets stuff into the Des... | Provide a short description. |
def _unary_(self, func, inplace=False):
'''
:func: unary function to apply to each coordinate
:inplace: optional boolean
:return: Point
Implementation private method.
All of the unary operations funnel thru this method
to reduce cut-and-paste code and enforce co... | :func: unary function to apply to each coordinate
:inplace: optional boolean
:return: Point
Implementation private method.
All of the unary operations funnel thru this method
to reduce cut-and-paste code and enforce consistent
behavior of unary ops.
Applies 'fu... |
def sql_dequote_string(s: str) -> str:
    """
    Reverses :func:`sql_quote_string`: strips the enclosing quote
    characters and collapses doubled embedded quotes back to single ones.
    """
    looks_quoted = len(s) >= 2 and s[0] == SQUOTE and s[-1] == SQUOTE
    if not looks_quoted:
        raise ValueError("Not an SQL string literal")
    inner = s[1:-1]  # remove the surrounding quote characters
    return inner.replace(DOUBLE_SQUOTE, SQUOTE)
def _create_flow(self, request_handler):
"""Create the Flow object.
The Flow is calculated lazily since we don't know where this app is
running until it receives a request, at which point redirect_uri can be
calculated and then the Flow object can be constructed.
Args:
... | Create the Flow object.
The Flow is calculated lazily since we don't know where this app is
running until it receives a request, at which point redirect_uri can be
calculated and then the Flow object can be constructed.
Args:
request_handler: webapp.RequestHandler, the requ... |
def _prune_some_if_small(self, small_size, a_or_u):
"Merge some nodes in the directory, whilst keeping others."
# Assert that we're not messing things up.
prev_app_size = self.app_size()
prev_use_size = self.use_size()
keep_nodes = []
prune_app_size = 0
prune_use... | Merge some nodes in the directory, whilst keeping others. |
def model_reaction_limits(model):
"""Yield model reaction limits as YAML dicts."""
for reaction in sorted(model.reactions, key=lambda r: r.id):
equation = reaction.properties.get('equation')
if equation is None:
continue
# Determine the default flux limits. If the value is a... | Yield model reaction limits as YAML dicts. |
def _update_triplestore(self, es_result, action_list, **kwargs):
"""
updates the triplestore with success of saves and failues of indexing
Args:
-----
es_result: the elasticsearch result list
action_list: list of elasticsearch action items that were indexed
... | updates the triplestore with success of saves and failues of indexing
Args:
-----
es_result: the elasticsearch result list
action_list: list of elasticsearch action items that were indexed |
def remove_folder(self, tree, prefix):
"""
Used to remove any empty folders
If this folder is empty then it is removed. If the parent is empty as a
result, then the parent is also removed, and so on.
"""
while True:
child = tree
tree = tree.parent... | Used to remove any empty folders
If this folder is empty then it is removed. If the parent is empty as a
result, then the parent is also removed, and so on. |
def _compute_raw_image_norm(self, data):
"""
Helper function that computes the uncorrected inverse normalization
factor of input image data. This quantity is computed as the
*sum of all pixel values*.
.. note::
This function is intended to be overriden in a subclass ... | Helper function that computes the uncorrected inverse normalization
factor of input image data. This quantity is computed as the
*sum of all pixel values*.
.. note::
This function is intended to be overriden in a subclass if one
desires to change the way the normalizatio... |
def igetattr(self, name, context=None, class_context=True):
"""Infer the possible values of the given variable.
:param name: The name of the variable to infer.
:type name: str
:returns: The inferred possible values.
:rtype: iterable(NodeNG or Uninferable)
"""
# ... | Infer the possible values of the given variable.
:param name: The name of the variable to infer.
:type name: str
:returns: The inferred possible values.
:rtype: iterable(NodeNG or Uninferable) |
def isBridgeFiltered (self):
"""
Checks if address is an IEEE 802.1D MAC Bridge Filtered MAC Group Address
This range is 01-80-C2-00-00-00 to 01-80-C2-00-00-0F. MAC frames that
have a destination MAC address within this range are not relayed by
bridges conforming to IEEE 802.1D
... | Checks if address is an IEEE 802.1D MAC Bridge Filtered MAC Group Address
This range is 01-80-C2-00-00-00 to 01-80-C2-00-00-0F. MAC frames that
have a destination MAC address within this range are not relayed by
bridges conforming to IEEE 802.1D |
def copy(self):
"""
Copy the grammar.
"""
new = self.__class__()
for dict_attr in ("symbol2number", "number2symbol", "dfas", "keywords",
"tokens", "symbol2label"):
setattr(new, dict_attr, getattr(self, dict_attr).copy())
new.labels = ... | Copy the grammar. |
def picard_sort(picard, align_bam, sort_order="coordinate",
out_file=None, compression_level=None, pipe=False):
"""Sort a BAM file by coordinates.
"""
base, ext = os.path.splitext(align_bam)
if out_file is None:
out_file = "%s-sort%s" % (base, ext)
if not file_exists(out_file... | Sort a BAM file by coordinates. |
def distance2_to_line(pt, l0, l1):
'''The perpendicular distance squared from a point to a line
pt - point in question
l0 - one point on the line
l1 - another point on the line
'''
pt = np.atleast_1d(pt)
l0 = np.atleast_1d(l0)
l1 = np.atleast_1d(l1)
reshape = pt.ndim == 1
if... | The perpendicular distance squared from a point to a line
pt - point in question
l0 - one point on the line
l1 - another point on the line |
def connect_vpc(aws_access_key_id=None, aws_secret_access_key=None, **kwargs):
"""
:type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.vpc.VPCConnectio... | :type aws_access_key_id: string
:param aws_access_key_id: Your AWS Access Key ID
:type aws_secret_access_key: string
:param aws_secret_access_key: Your AWS Secret Access Key
:rtype: :class:`boto.vpc.VPCConnection`
:return: A connection to VPC |
def write_xpm(matrix, version, out, scale=1, border=None, color='#000',
background='#fff', name='img'):
"""\
Serializes the matrix as `XPM <https://en.wikipedia.org/wiki/X_PixMap>`_ image.
:param matrix: The matrix to serialize.
:param int version: The (Micro) QR code version
:param o... | \
Serializes the matrix as `XPM <https://en.wikipedia.org/wiki/X_PixMap>`_ image.
:param matrix: The matrix to serialize.
:param int version: The (Micro) QR code version
:param out: Filename or a file-like object supporting to write binary data.
:param scale: Indicates the size of a single module (... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.