code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def cancel_inquiry (self):
"""
Call this method to cancel an inquiry in process. inquiry_complete
will still be called.
"""
self.names_to_find = {}
if self.is_inquiring:
try:
_bt.hci_send_cmd (self.sock, _bt.OGF_LINK_CTL, \
... | Call this method to cancel an inquiry in process. inquiry_complete
will still be called. |
def all_my_hosts_and_services(self):
"""Create an iterator for all my known hosts and services
:return: None
"""
for what in (self.hosts, self.services):
for item in what:
yield item | Create an iterator for all my known hosts and services
:return: None |
def _get_objects_with_same_attribute(self,
objects: Set[Object],
attribute_function: Callable[[Object], str]) -> Set[Object]:
"""
Returns the set of objects for which the attribute function returns an attribute value that
... | Returns the set of objects for which the attribute function returns an attribute value that
is most frequent in the initial set, if the frequency is greater than 1. If not, all
objects have different attribute values, and this method returns an empty set. |
async def parse_update(self, bot):
"""
Read update from stream and deserialize it.
    :param bot: bot instance. You can get it from Dispatcher
:return: :class:`aiogram.types.Update`
"""
data = await self.request.json()
update = types.Update(**data)
return upd... | Read update from stream and deserialize it.
:param bot: bot instance. You can get it from Dispatcher
:return: :class:`aiogram.types.Update` |
def _apply_to_array(self, yd, y, weights, off_slices, ref_slice, dim):
"""Applies the finite differences only to slices along a given axis"""
ndims = len(y.shape)
all = slice(None, None, 1)
ref_multi_slice = [all] * ndims
ref_multi_slice[dim] = ref_slice
for w, s in z... | Applies the finite differences only to slices along a given axis |
def _get_dvs_capability(dvs_name, dvs_capability):
'''
Returns the dict representation of the DVS product_info
dvs_name
The name of the DVS
dvs_capability
The DVS capability
'''
log.trace('Building the dict of the DVS \'%s\' capability', dvs_name)
return {'operation_support... | Returns the dict representation of the DVS product_info
dvs_name
The name of the DVS
dvs_capability
The DVS capability |
def _create_identifier(rdtype, name, content):
"""
Creates hashed identifier based on full qualified record type, name & content
and returns hash.
"""
sha256 = hashlib.sha256()
sha256.update((rdtype + '/').encode('UTF-8'))
sha256.update((name + '/').encode('UTF-8'... | Creates hashed identifier based on full qualified record type, name & content
and returns hash. |
def dependencies(self, task, params={}, **options):
"""Returns the compact representations of all of the dependencies of a task.
Parameters
----------
task : {Id} The task to get dependencies on.
[params] : {Object} Parameters for the request
"""
path = "/tasks/... | Returns the compact representations of all of the dependencies of a task.
Parameters
----------
task : {Id} The task to get dependencies on.
[params] : {Object} Parameters for the request |
def get_rupdict(self):
"""
:returns: a dictionary with the parameters of the rupture
"""
assert len(self.rup_array) == 1, 'Please specify a slice of length 1'
dic = {'trt': self.trt, 'samples': self.samples}
with datastore.read(self.filename) as dstore:
rupgeo... | :returns: a dictionary with the parameters of the rupture |
def _set_vcs(self, v, load=False):
"""
Setter method for vcs, mapped from YANG variable /event_handler/event_handler_list/trigger/vcs (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_vcs is considered as a private
method. Backends looking to populate thi... | Setter method for vcs, mapped from YANG variable /event_handler/event_handler_list/trigger/vcs (enumeration)
If this variable is read-only (config: false) in the
source YANG file, then _set_vcs is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj... |
def _learnFeatureLocationPair(self, newLocation, featureLocationInput,
featureLocationGrowthCandidates):
"""
Grow / reinforce synapses between the location layer's dendrites and the
input layer's active cells.
"""
potentialOverlaps = self.featureLocationConnections.c... | Grow / reinforce synapses between the location layer's dendrites and the
input layer's active cells. |
def _check_for_pi_nodes(self, list, inheader):
'''Raise an exception if any of the list descendants are PI nodes.
'''
list = list[:]
while list:
elt = list.pop()
t = elt.nodeType
if t == _Node.PROCESSING_INSTRUCTION_NODE:
raise ParseExc... | Raise an exception if any of the list descendants are PI nodes. |
def _validate_query(query):
"""Validate and clean up a query to be sent to Search.
Cleans the query string, removes unneeded parameters, and validates for correctness.
Does not modify the original argument.
Raises an Exception on invalid input.
Arguments:
query (dict): The query to validate... | Validate and clean up a query to be sent to Search.
Cleans the query string, removes unneeded parameters, and validates for correctness.
Does not modify the original argument.
Raises an Exception on invalid input.
Arguments:
query (dict): The query to validate.
Returns:
dict: The v... |
def convert_entrez_to_uniprot(self, entrez):
"""Convert Entrez Id to Uniprot Id"""
server = "http://www.uniprot.org/uniprot/?query=%22GENEID+{0}%22&format=xml".format(entrez)
r = requests.get(server, headers={"Content-Type": "text/xml"})
if not r.ok:
r.raise_for_status()
... | Convert Entrez Id to Uniprot Id |
def getPDF(self):
'''Function that gets vectors of the pdf and target at the last design
evaluated.
:return: tuple of q values, pdf values, target values
'''
if hasattr(self, '_qplot'):
return self._qplot, self._hplot, self._tplot
else:
raise V... | Function that gets vectors of the pdf and target at the last design
evaluated.
:return: tuple of q values, pdf values, target values |
def read_config(cls, configparser):
"""Read configuration file options."""
config = dict()
section = cls.__name__
option = "warningregex"
if configparser.has_option(section, option):
value = configparser.get(section, option)
else:
value = None
... | Read configuration file options. |
def job_step_error(self, job_request_payload, message):
"""
Send message that the job step failed using payload data.
:param job_request_payload: StageJobPayload|RunJobPayload|StoreJobOutputPayload payload from job with error
:param message: description of the error
"""
p... | Send message that the job step failed using payload data.
:param job_request_payload: StageJobPayload|RunJobPayload|StoreJobOutputPayload payload from job with error
:param message: description of the error |
def new_socket():
"""
Create a new socket with OS-specific parameters
Try to set SO_REUSEPORT for BSD-flavored systems if it's an option.
Catches errors if not.
"""
new_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
new_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
... | Create a new socket with OS-specific parameters
Try to set SO_REUSEPORT for BSD-flavored systems if it's an option.
Catches errors if not. |
def validate_replicas(self, data):
"""Validate distributed experiment"""
environment = data.get('environment')
if environment and environment.replicas:
validate_replicas(data.get('framework'), environment.replicas) | Validate distributed experiment |
def _call(self, endpoint, data=None):
"""
Make an authorized API call to specified endpoint.
:param str endpoint: API endpoint's relative URL, eg. `/account`.
:param dict data: POST request data.
:return: A dictionary or a string with response data.
"""
data = {} ... | Make an authorized API call to specified endpoint.
:param str endpoint: API endpoint's relative URL, eg. `/account`.
:param dict data: POST request data.
:return: A dictionary or a string with response data. |
def run(self):
"""Build the Fortran library, all python extensions and the docs."""
print('---- BUILDING ----')
_build.run(self)
# build documentation
print('---- BUILDING DOCS ----')
docdir = os.path.join(self.build_lib, 'pyshtools', 'doc')
self.mkpath(docdir)
... | Build the Fortran library, all python extensions and the docs. |
def _read_data(path):
"""Read Rdump output and transform to Python dictionary.
Parameters
----------
path : str
Returns
-------
Dict
key, values pairs from Rdump formatted data.
"""
data = {}
with open(path, "r") as f_obj:
var = ""
for line in f_obj:
... | Read Rdump output and transform to Python dictionary.
Parameters
----------
path : str
Returns
-------
Dict
key, values pairs from Rdump formatted data. |
def is_handler_subclass(cls, classnames=("ViewHandler", "APIHandler")):
"""Determines if ``cls`` is indeed a subclass of ``classnames``"""
if isinstance(cls, list):
return any(is_handler_subclass(c) for c in cls)
elif isinstance(cls, type):
return any(c.__name__ in classnames for c in inspec... | Determines if ``cls`` is indeed a subclass of ``classnames`` |
def analyze_cluster_size_per_scan_parameter(input_file_hits, output_file_cluster_size, parameter='GDAC', max_chunk_size=10000000, overwrite_output_files=False, output_pdf=None):
''' This method takes multiple hit files and determines the cluster size for different scan parameter values of
Parameters
-----... | This method takes multiple hit files and determines the cluster size for different scan parameter values of
Parameters
----------
input_files_hits: string
output_file_cluster_size: string
The data file with the results
parameter: string
The name of the parameter to separate the dat... |
def trace_line_numbers(filename, reload_on_change=False):
"""Return an Array of breakpoints in filename.
The list will contain an entry for each distinct line event call
    so it is possible (and possibly useful) for a line number to appear more
than once."""
fullname = cache_file(filename, reload_on_chan... | Return an Array of breakpoints in filename.
The list will contain an entry for each distinct line event call
so it is possible (and possibly useful) for a line number to appear more
than once. |
def combine_hex(data):
''' Combine list of integer values to one big integer '''
output = 0x00
for i, value in enumerate(reversed(data)):
output |= (value << i * 8)
return output | Combine list of integer values to one big integer |
def dist_to_deg(self, distance, latitude):
"""
distance = distance in meters
latitude = latitude in degrees
at the equator, the distance of one degree is equal in latitude and longitude.
at higher latitudes, a degree longitude is shorter in length, proportional to cos(latitude)
... | distance = distance in meters
latitude = latitude in degrees
at the equator, the distance of one degree is equal in latitude and longitude.
at higher latitudes, a degree longitude is shorter in length, proportional to cos(latitude)
http://en.wikipedia.org/wiki/Decimal_degrees
T... |
def avg_receive_rate(self):
"""Average receiving rate in MB/s over the entire run. This data may
not exist if iperf was interrupted.
If the result is not from a success run, this property is None.
"""
if not self._has_data or 'sum_received' not in self.result['end']:
... | Average receiving rate in MB/s over the entire run. This data may
not exist if iperf was interrupted.
If the result is not from a success run, this property is None. |
def _prepare_for_submission(self,tempfolder, inputdict):
"""
This is the routine to be called when you want to create
the input files and related stuff with a plugin.
        :param tempfolder: an aiida.common.folders.Folder subclass where
the plugin sh... | This is the routine to be called when you want to create
the input files and related stuff with a plugin.
:param tempfolder: an aiida.common.folders.Folder subclass where
the plugin should put all its files.
:param inputdict: a dictionary with the input nodes, ... |
def load_average(self):
"""
Returns the current load average.
"""
with io.open(self.load_average_file, 'r') as f:
file_columns = f.readline().strip().split()
return float(file_columns[self._load_average_file_column]) | Returns the current load average. |
def _create_package_hierarchy(prefix=settings.TEMP_DIR, book_id=None):
"""
    Create hierarchy of directories, as it is required in the specification.
`root_dir` is root of the package generated using :attr:`settings.TEMP_DIR`
and :func:`_get_package_name`.
`orig_dir` is path to the directory, where the ... | Create hierarchy of directories, at it is required in specification.
`root_dir` is root of the package generated using :attr:`settings.TEMP_DIR`
and :func:`_get_package_name`.
`orig_dir` is path to the directory, where the data files are stored.
`metadata_dir` is path to the directory with MODS metad... |
def all(self, query=None, **kwargs):
"""
Gets all organizations.
"""
return super(OrganizationsProxy, self).all(query=query) | Gets all organizations. |
def SetPercentageView(self, percentageView):
"""Set whether to display percentage or absolute values"""
self.percentageView = percentageView
self.percentageMenuItem.Check(self.percentageView)
self.percentageViewTool.SetValue(self.percentageView)
total = self.adapter.value( self.l... | Set whether to display percentage or absolute values |
def get(self,
variable_path: str,
default: t.Optional[t.Any] = None,
coerce_type: t.Optional[t.Type] = None,
coercer: t.Optional[t.Callable] = None,
required: bool = False,
**kwargs):
"""
Tries to read a ``variable_path`` from each ... | Tries to read a ``variable_path`` from each of the passed parsers.
It stops if read was successful and returns a retrieved value.
If none of the parsers contain a value for the specified path it returns ``default``.
:param variable_path: a path to variable in config
:param default: a de... |
def pre_build(local_root, versions):
"""Build docs for all versions to determine root directory and master_doc names.
Need to build docs to (a) avoid filename collision with files from root_ref and branch/tag names and (b) determine
master_doc config values for all versions (in case master_doc changes from... | Build docs for all versions to determine root directory and master_doc names.
Need to build docs to (a) avoid filename collision with files from root_ref and branch/tag names and (b) determine
master_doc config values for all versions (in case master_doc changes from e.g. contents.rst to index.rst between
... |
def validate_json_schema(data, schema, name="task"):
"""Given data and a jsonschema, let's validate it.
This happens for tasks and chain of trust artifacts.
Args:
data (dict): the json to validate.
schema (dict): the jsonschema to validate against.
name (str, optional): the name of... | Given data and a jsonschema, let's validate it.
This happens for tasks and chain of trust artifacts.
Args:
data (dict): the json to validate.
schema (dict): the jsonschema to validate against.
name (str, optional): the name of the json, for exception messages.
Defaults to "... |
def compress_flood_fill_regions(targets):
"""Generate a reduced set of flood fill parameters.
Parameters
----------
targets : {(x, y) : set([c, ...]), ...}
For each used chip a set of core numbers onto which an application
should be loaded. E.g., the output of
:py:func:`~rig.pl... | Generate a reduced set of flood fill parameters.
Parameters
----------
targets : {(x, y) : set([c, ...]), ...}
For each used chip a set of core numbers onto which an application
should be loaded. E.g., the output of
:py:func:`~rig.place_and_route.util.build_application_map` when in... |
def getTypeStr(_type):
r"""Gets the string representation of the given type.
"""
if isinstance(_type, CustomType):
return str(_type)
if hasattr(_type, '__name__'):
return _type.__name__
return '' | r"""Gets the string representation of the given type. |
def set_state_view(self, request):
"""
Changes the experiment state
"""
if not request.user.has_perm('experiments.change_experiment'):
return HttpResponseForbidden()
try:
state = int(request.POST.get("state", ""))
except ValueError:
re... | Changes the experiment state |
def make_dataset(self, dataset, raise_if_exists=False, body=None):
"""Creates a new dataset with the default permissions.
:param dataset:
:type dataset: BQDataset
:param raise_if_exists: whether to raise an exception if the dataset already exists.
:raises luigi.targe... | Creates a new dataset with the default permissions.
:param dataset:
:type dataset: BQDataset
:param raise_if_exists: whether to raise an exception if the dataset already exists.
:raises luigi.target.FileAlreadyExists: if raise_if_exists=True and the dataset exists |
def setup_statemachine(self):
"""Setup and start state machine"""
machine = QtCore.QStateMachine()
# _______________
# | |
# | |
# | |
# |_______________|
#
group = util.QState("group", QtCore.QState.Par... | Setup and start state machine |
def reduce_claims(query_claims):
"""
returns claims as reduced dict {P: [Q's or values]}
P = property
Q = item
"""
claims = collections.defaultdict(list)
for claim, entities in query_claims.items():
for ent in entities:
try:
snak = ent.get('main... | returns claims as reduced dict {P: [Q's or values]}
P = property
Q = item |
def parse_bool(value):
"""
Parse string to bool.
:param str value: String value to parse as bool
:return bool:
"""
boolean = parse_str(value).capitalize()
if boolean in ("True", "Yes", "On", "1"):
return True
elif boolean in ("False", "No", "Off", "0"):
return False
... | Parse string to bool.
:param str value: String value to parse as bool
:return bool: |
def render(self, message=None, css_class='alert', form_contents=None,
status=200, title="Python OpenID Consumer Example",
sreg_data=None, pape_data=None):
"""Render a page."""
self.send_response(status)
self.pageHeader(title)
if message:
self.wfi... | Render a page. |
def covertype():
"""Builds the Covertype data set."""
import sklearn.datasets # pylint: disable=g-import-not-at-top
data = sklearn.datasets.covtype.fetch_covtype()
features = data.data
labels = data.target
# Normalize features and append a column of ones for the intercept.
features -= features.mean(0)
... | Builds the Covertype data set. |
def t_fold_end(self, t):
r'\n+\ *'
column = find_column(t)
indent = self.indent_stack[-1]
if column < indent:
rollback_lexpos(t)
if column <= indent:
t.lexer.pop_state()
t.type = 'B_FOLD_END'
if column > indent:
t.type = 'SC... | r'\n+\ * |
def targets(tgt, tgt_type='glob', **kwargs):
'''
Return the targets from the directory of flat yaml files,
checks opts for location.
'''
roster_dir = __opts__.get('roster_dir', '/etc/salt/roster.d')
# Match the targets before rendering to avoid opening files unnecessarily.
raw = dict.fromkey... | Return the targets from the directory of flat yaml files,
checks opts for location. |
def decode_body(headers: MutableMapping, body: bytes) -> dict:
"""
Decode the response body
For 'application/json' content-type load the body as a dictionary
Args:
headers: Response headers
body: Response body
Returns:
decoded body
"""
type_, encoding = parse_cont... | Decode the response body
For 'application/json' content-type load the body as a dictionary
Args:
headers: Response headers
body: Response body
Returns:
decoded body |
def getNextRecord(self, useCache=True):
""" Returns next available data record from the file.
:returns: a data row (a list or tuple) if available; None, if no more
records in the table (End of Stream - EOS); empty sequence (list
or tuple) when timing out while waiting for the next r... | Returns next available data record from the file.
:returns: a data row (a list or tuple) if available; None, if no more
records in the table (End of Stream - EOS); empty sequence (list
or tuple) when timing out while waiting for the next record. |
def download_file(pk):
"""Download the file reference in `models.ReleaseFile` with the given pk.
"""
release_file = models.ReleaseFile.objects.get(pk=pk)
logging.info("Downloading %s", release_file.url)
proxies = None
if settings.LOCALSHOP_HTTP_PROXY:
proxies = settings.LOCALSHOP_HTTP_... | Download the file reference in `models.ReleaseFile` with the given pk. |
def get_file_search(self, query):
"""Performs advanced search on samples, matching certain binary/
metadata/detection criteria.
Possible queries: file size, file type, first or last submission to
        VT, number of positives, binary content, etc.
Args:
query: di... | Performs advanced search on samples, matching certain binary/
metadata/detection criteria.
Possible queries: file size, file type, first or last submission to
VT, number of positives, binary content, etc.
Args:
query: dictionary with search arguments
Ex... |
def geo(self):
"""
General image geo information.
Returns
-------
dict
a dictionary with keys `xmin`, `xmax`, `xres`, `rotation_x`, `ymin`, `ymax`, `yres`, `rotation_y`
"""
out = dict(zip(['xmin', 'xres', 'rotation_x', 'ymax', 'rotation_y', 'yres'],
... | General image geo information.
Returns
-------
dict
a dictionary with keys `xmin`, `xmax`, `xres`, `rotation_x`, `ymin`, `ymax`, `yres`, `rotation_y` |
def set_mapper_index(self, index, mapper):
"""Set the mapper to the given index
:param index: the index to set
:type index: QtCore.QModelIndex
:param mapper: the mapper to set
:type mapper: QtGui.QDataWidgetMapper
:returns: None
:rtype: None
:raises: None... | Set the mapper to the given index
:param index: the index to set
:type index: QtCore.QModelIndex
:param mapper: the mapper to set
:type mapper: QtGui.QDataWidgetMapper
:returns: None
:rtype: None
:raises: None |
def read_config(config):
"""Read config file and return uncomment line
"""
for line in config.splitlines():
line = line.lstrip()
if line and not line.startswith("#"):
return line
return "" | Read config file and return uncomment line |
def rename(self, path, raise_if_exists=False):
"""
Does not change self.path.
Unlike ``move_dir()``, ``rename()`` might cause nested directories.
See spotify/luigi#522
"""
if isinstance(path, HdfsTarget):
path = path.path
if raise_if_exists and self.f... | Does not change self.path.
Unlike ``move_dir()``, ``rename()`` might cause nested directories.
See spotify/luigi#522 |
def html_abstract(self):
"""HTML5-formatted document abstract (`str`)."""
return self.format_abstract(format='html5', deparagraph=False,
mathjax=False, smart=True) | HTML5-formatted document abstract (`str`). |
def get_version():
"""
Return tmux version.
If tmux is built from git master, the version returned will be the latest
version appended with -master, e.g. ``2.4-master``.
If using OpenBSD's base system tmux, the version will have ``-openbsd``
appended to the latest version, e.g. ``2.4-openbsd``... | Return tmux version.
If tmux is built from git master, the version returned will be the latest
version appended with -master, e.g. ``2.4-master``.
If using OpenBSD's base system tmux, the version will have ``-openbsd``
appended to the latest version, e.g. ``2.4-openbsd``.
Returns
-------
... |
def emitError(self, level):
'''determine if a level should print to
stderr, includes all levels but INFO and QUIET'''
if level in [ABORT,
ERROR,
WARNING,
VERBOSE,
VERBOSE1,
VERBOSE2,
... | determine if a level should print to
stderr, includes all levels but INFO and QUIET |
def intersect(self, range_):
self.solver.intersection_broad_tests_count += 1
"""Remove variants whose version fall outside of the given range."""
if range_.is_any():
return self
if self.solver.optimised:
if range_ in self.been_intersected_with:
r... | Remove variants whose version fall outside of the given range. |
def handle_annotations_url(self, line: str, position: int, tokens: ParseResults) -> ParseResults:
"""Handle statements like ``DEFINE ANNOTATION X AS URL "Y"``.
:raises: RedefinedAnnotationError
"""
keyword = tokens['name']
self.raise_for_redefined_annotation(line, position, keyw... | Handle statements like ``DEFINE ANNOTATION X AS URL "Y"``.
:raises: RedefinedAnnotationError |
def Suratman(L, rho, mu, sigma):
r'''Calculates Suratman number, `Su`, for a fluid with the given
characteristic length, density, viscosity, and surface tension.
.. math::
\text{Su} = \frac{\rho\sigma L}{\mu^2}
Parameters
----------
L : float
Characteristic length [m]
rho :... | r'''Calculates Suratman number, `Su`, for a fluid with the given
characteristic length, density, viscosity, and surface tension.
.. math::
\text{Su} = \frac{\rho\sigma L}{\mu^2}
Parameters
----------
L : float
Characteristic length [m]
rho : float
Density of fluid, [kg/... |
def get_all_metadata(
self,
bucket: str,
key: str
) -> dict:
"""
Retrieves all the metadata for a given object in a given bucket.
:param bucket: the bucket the object resides in.
:param key: the key of the object for which metadata is being retriev... | Retrieves all the metadata for a given object in a given bucket.
:param bucket: the bucket the object resides in.
:param key: the key of the object for which metadata is being retrieved.
:return: the metadata |
def _get_future_tasks(self):
"""Assemble a list of future alerts"""
self.alerts = {}
now = std_now()
for task in objectmodels['task'].find({'alert_time': {'$gt': now}}):
self.alerts[task.alert_time] = task
self.log('Found', len(self.alerts), 'future tasks') | Assemble a list of future alerts |
def has_next_assessment_part(self, assessment_part_id):
"""This supports the basic simple sequence case. Can be overriden in a record for other cases"""
if not self.supports_child_ordering or not self.supports_simple_child_sequencing:
raise AttributeError() # Only available through a record... | This supports the basic simple sequence case. Can be overriden in a record for other cases |
def output_filename(output_dir, key_handle, public_id):
"""
Return an output filename for a generated AEAD. Creates a hashed directory structure
using the last three bytes of the public id to get equal usage.
"""
parts = [output_dir, key_handle] + pyhsm.util.group(public_id, 2)
path = os.path.jo... | Return an output filename for a generated AEAD. Creates a hashed directory structure
using the last three bytes of the public id to get equal usage. |
def deprecate(message):
"""Loudly prints warning."""
warnings.simplefilter('default')
warnings.warn(message, category=DeprecationWarning)
warnings.resetwarnings() | Loudly prints warning. |
def _extract_apis_from_function(logical_id, function_resource, collector):
"""
Fetches a list of APIs configured for this SAM Function resource.
Parameters
----------
logical_id : str
Logical ID of the resource
function_resource : dict
Contents o... | Fetches a list of APIs configured for this SAM Function resource.
Parameters
----------
logical_id : str
Logical ID of the resource
function_resource : dict
Contents of the function resource including its properties
collector : ApiCollector
... |
def getExtn(fimg, extn=None):
"""
Returns the PyFITS extension corresponding to extension specified in
filename.
Defaults to returning the first extension with data or the primary
extension, if none have data. If a non-existent extension has been
specified, it raises a `KeyError` exception.
... | Returns the PyFITS extension corresponding to extension specified in
filename.
Defaults to returning the first extension with data or the primary
extension, if none have data. If a non-existent extension has been
specified, it raises a `KeyError` exception. |
def crosscov(x, y, axis=-1, all_lags=False, debias=True, normalize=True):
"""Returns the crosscovariance sequence between two ndarrays.
This is performed by calling fftconvolve on x, y[::-1]
Parameters
----------
x : ndarray
y : ndarray
axis : time axis
all_lags : {True/False}
w... | Returns the crosscovariance sequence between two ndarrays.
This is performed by calling fftconvolve on x, y[::-1]
Parameters
----------
x : ndarray
y : ndarray
axis : time axis
all_lags : {True/False}
whether to return all nonzero lags, or to clip the length of s_xy
to be the... |
def parse(cls, gvid, exception=True):
"""
Parse a string value into the geoid of this class.
:param gvid: String value to parse.
        :param exception: If true (default) raise an exception on parse errors. If False, return a
'null' geoid.
:return:
"""
if gvid... | Parse a string value into the geoid of this class.
:param gvid: String value to parse.
:param exception: If true (default) raise an exception on parse errors. If False, return a
'null' geoid.
:return: |
def pull_requests(self):
'''
Looks for any of the following pull request formats in the description field:
pr12345, pr 2345, PR2345, PR 2345
'''
pr_numbers = re.findall(r"[pP][rR]\s?[0-9]+", self.description)
pr_numbers += re.findall(re.compile("pull\s?request\s?[0-9... | Looks for any of the following pull request formats in the description field:
pr12345, pr 2345, PR2345, PR 2345 |
def get_nexusvm_bindings(vlan_id, instance_id):
"""Lists nexusvm bindings."""
LOG.debug("get_nexusvm_bindings() called")
return _lookup_all_nexus_bindings(instance_id=instance_id,
vlan_id=vlan_id) | Lists nexusvm bindings. |
def path(self, value):
"""Set path
:param value: The value for path
:type value: str
:raises: None
"""
prepval = value.replace('\\', '/')
self._path = posixpath.normpath(prepval) | Set path
:param value: The value for path
:type value: str
:raises: None |
def subcorpus(self, selector):
"""
Generates a new :class:`.Corpus` using the criteria in ``selector``.
Accepts selector arguments just like :meth:`.Corpus.select`\.
.. code-block:: python
>>> corpus = Corpus(papers)
>>> subcorpus = corpus.subcorpus(('date', 199... | Generates a new :class:`.Corpus` using the criteria in ``selector``.
Accepts selector arguments just like :meth:`.Corpus.select`\.
.. code-block:: python
>>> corpus = Corpus(papers)
>>> subcorpus = corpus.subcorpus(('date', 1995))
>>> subcorpus
<tethne.cla... |
def visibility_changed(self, enable):
"""DockWidget visibility has changed"""
super(SpyderPluginWidget, self).visibility_changed(enable)
if enable and not self.pydocbrowser.is_server_running():
self.pydocbrowser.initialize() | DockWidget visibility has changed |
def _create_spec_config(self, table_name, spec_documents):
'''
Dynamo implementation of spec config creation
Called by `create_archive_table()` in
:py:class:`manager.BaseDataManager` Simply adds two rows to the spec
table
Parameters
----------
table_nam... | Dynamo implementation of spec config creation
Called by `create_archive_table()` in
:py:class:`manager.BaseDataManager` Simply adds two rows to the spec
table
Parameters
----------
table_name :
base table name (not including .spec suffix)
spec_doc... |
def _message_hostgroup_parse(self, message):
""" Parse given message and return list of group names and socket information. Socket information
is parsed in :meth:`.WBeaconGouverneurMessenger._message_address_parse` method
:param message: bytes
:return: tuple of list of group names and WIPV4SocketInfo
"""
s... | Parse given message and return list of group names and socket information. Socket information
is parsed in :meth:`.WBeaconGouverneurMessenger._message_address_parse` method
:param message: bytes
:return: tuple of list of group names and WIPV4SocketInfo |
def _cmd_line_parser():
'''
return a command line parser. It is used when generating the documentation
'''
parser = argparse.ArgumentParser()
parser.add_argument('--path',
help=('path to test files, '
'if not provided the script folder is used')... | return a command line parser. It is used when generating the documentation |
def init_environment():
"""Set environment variables that are important for the pipeline.
:returns: None
:rtype: None
:raises: None
"""
os.environ['DJANGO_SETTINGS_MODULE'] = 'jukeboxcore.djsettings'
pluginpath = os.pathsep.join((os.environ.get('JUKEBOX_PLUGIN_PATH', ''), constants.BUILTIN_... | Set environment variables that are important for the pipeline.
:returns: None
:rtype: None
:raises: None |
def send_message(self, app_mxit_id, target_user_ids, message='', contains_markup=True,
spool=None, spool_timeout=None, links=None, scope='message/send'):
"""
Send a message (from a Mxit app) to a list of Mxit users
"""
data = {
'From': app_mxit_id,
... | Send a message (from a Mxit app) to a list of Mxit users |
def parentLayer(self):
""" returns information about the parent """
if self._parentLayer is None:
from ..agol.services import FeatureService
self.__init()
url = os.path.dirname(self._url)
self._parentLayer = FeatureService(url=url,
... | returns information about the parent |
def run(main=None, argv=None):
"""Runs the program with an optional 'main' function and 'argv' list."""
flags_obj = flags.FLAGS
absl_flags_obj = absl_flags.FLAGS
# Extract the args from the optional `argv` list.
args = argv[1:] if argv else None
# Parse the known flags from that list, or from ... | Runs the program with an optional 'main' function and 'argv' list. |
def get_events(self):
"""
Returns a list of all joystick events that have occurred since the last
call to `get_events`. The list contains events in the order that they
occurred. If no events have occurred in the intervening time, the
result is an empty list.
"""
r... | Returns a list of all joystick events that have occurred since the last
call to `get_events`. The list contains events in the order that they
occurred. If no events have occurred in the intervening time, the
result is an empty list. |
def _lei16(ins):
''' Compares & pops top 2 operands out of the stack, and checks
if the 1st operand <= 2nd operand (top of the stack).
Pushes 0 if False, 1 if True.
16 bit signed version
'''
output = _16bit_oper(ins.quad[2], ins.quad[3])
output.append('call __LEI16')
output.... | Compares & pops top 2 operands out of the stack, and checks
if the 1st operand <= 2nd operand (top of the stack).
Pushes 0 if False, 1 if True.
16 bit signed version |
def fetch(self, key: object, default=None):
    """Look up *key* in the stored user data.

    :param key: the key to look up
    :param default: value returned when *key* is absent (defaults to None)
    :return: the stored value for *key*, or *default* if not present
    """
    user_data = self._user_data
    return user_data.get(key, default)
def generate(str, alg):
    """Generate a PIL image avatar from the given input string.

    Acts as the main accessor to pagan.

    :param str: input string to hash into an avatar
        (NOTE: the parameter name shadows the builtin ``str``;
        kept unchanged for interface compatibility)
    :param alg: hashing algorithm passed through to ``hash_input``
    :return: the rendered ``PIL.Image`` avatar
    """
    avatar = Image.new(IMAGE_MODE, IMAGE_SIZE, BACKGROUND_COLOR)
    # Hash the input, lay out the pixel map, then paint it onto the canvas.
    draw_image(setup_pixelmap(hash_input(str, alg)), avatar)
    return avatar
def _get_user_data(self):
"""
Base method for retrieving user data from a viz.
"""
url = self.session.host + '/sessions/' + str(self.session.id) + '/visualizations/' + str(self.id) + '/settings/'
r = requests.get(url)
if r.status_code == 200:
content = r.json... | Base method for retrieving user data from a viz. |
def open(self, filename, mode='r', **kwargs):
'''
Open the file and return a file-like object.
:param str filename: The storage root-relative filename
:param str mode: The open mode (``(r|w)b?``)
:raises FileNotFound: If trying to read a file that does not exists
'''
... | Open the file and return a file-like object.
:param str filename: The storage root-relative filename
:param str mode: The open mode (``(r|w)b?``)
:raises FileNotFound: If trying to read a file that does not exists |
def position(self):
    """Return the current stream position as a 1-based ``(line, col)`` pair.

    ``self._position`` yields a 0-based line index for ``self.chunkOffset``;
    the line component is converted to 1-based before returning.
    """
    row, col = self._position(self.chunkOffset)
    return (row + 1, col)
def parseReaderConfig(self, confdict):
"""Parse a reader configuration dictionary.
Examples:
{
Type: 23,
Data: b'\x00'
}
{
Type: 1023,
Vendor: 25882,
Subtype: 21,
Data: b'\x00'
}
"""
... | Parse a reader configuration dictionary.
Examples:
{
Type: 23,
Data: b'\x00'
}
{
Type: 1023,
Vendor: 25882,
Subtype: 21,
Data: b'\x00'
} |
def count_sources(edge_iter: EdgeIterator) -> Counter:
    """Tally how often each node appears as the source of an edge.

    :param edge_iter: iterable of ``(source, target, data)`` edge triples
    :return: A counter of source nodes in the iterable
    """
    tally = Counter()
    for source, _, _ in edge_iter:
        tally[source] += 1
    return tally
:return: A counter of source nodes in the iterable |
def ordered_expected_layers(self):
"""Get an ordered list of layers according to users input.
From top to bottom in the legend:
[
('FromCanvas', layer name, full layer URI, QML),
('FromAnalysis', layer purpose, layer group, None),
...
]
The f... | Get an ordered list of layers according to users input.
From top to bottom in the legend:
[
('FromCanvas', layer name, full layer URI, QML),
('FromAnalysis', layer purpose, layer group, None),
...
]
The full layer URI is coming from our helper.
... |
def batch_predict_async(training_dir, prediction_input_file, output_dir,
mode, batch_size=16, shard_files=True, output_format='csv', cloud=False):
"""Local and cloud batch prediction.
Args:
training_dir: The output folder of training.
prediction_input_file: csv file pattern to a fil... | Local and cloud batch prediction.
Args:
training_dir: The output folder of training.
prediction_input_file: csv file pattern to a file. File must be on GCS if
running cloud prediction
output_dir: output location to save the results. Must be a GSC path if
running cloud prediction.
mode... |
def toc(self, depth=6, lowest_level=6):
"""
Get table of content of currently fed HTML string.
:param depth: the depth of TOC
:param lowest_level: the allowed lowest level of header tag
:return: a list representing the TOC
"""
depth = min(max(depth, 0), 6)
... | Get table of content of currently fed HTML string.
:param depth: the depth of TOC
:param lowest_level: the allowed lowest level of header tag
:return: a list representing the TOC |
def initialize(name='', pool_size=10, host='localhost', password='', port=5432, user=''):
"""Initialize a new database connection and return the pool object.
Saves a reference to that instance in a module-level variable, so applications with only one database
can just call this function and not worry about... | Initialize a new database connection and return the pool object.
Saves a reference to that instance in a module-level variable, so applications with only one database
can just call this function and not worry about pool objects. |
def _invoke_callbacks(self, *args, **kwargs):
    """Run every registered done-callback with the given arguments.

    Each callback is dispatched through ``_helpers.safe_invoke_callback``
    so one failing callback does not prevent the rest from running.
    """
    for done_cb in self._done_callbacks:
        _helpers.safe_invoke_callback(done_cb, *args, **kwargs)
def create_table(
data,
meta=None,
fields=None,
skip_header=True,
import_fields=None,
samples=None,
force_types=None,
max_rows=None,
*args,
**kwargs
):
"""Create a rows.Table object based on data rows and some configurations
- `skip_header` is only used if `fields` is se... | Create a rows.Table object based on data rows and some configurations
- `skip_header` is only used if `fields` is set
- `samples` is only used if `fields` is `None`. If samples=None, all data
is filled in memory - use with caution.
- `force_types` is only used if `fields` is `None`
- `import_fiel... |
def file_renamed_in_data_in_editorstack(self, editorstack_id_str,
original_filename, filename):
"""A file was renamed in data in editorstack, this notifies others"""
for editorstack in self.editorstacks:
if str(id(editorstack)) != editorstack_i... | A file was renamed in data in editorstack, this notifies others |
def pkcs7_pad(buf):
# type: (bytes) -> bytes
"""Appends PKCS7 padding to an input buffer
:param bytes buf: buffer to add padding
:rtype: bytes
:return: buffer with PKCS7_PADDING
"""
padder = cryptography.hazmat.primitives.padding.PKCS7(
cryptography.hazmat.primitives.ciphers.
... | Appends PKCS7 padding to an input buffer
:param bytes buf: buffer to add padding
:rtype: bytes
:return: buffer with PKCS7_PADDING |
def _make_names_unique(animations):
"""
Given a list of animations, some of which might have duplicate names, rename
the first one to be <duplicate>_0, the second <duplicate>_1,
<duplicate>_2, etc."""
counts = {}
for a in animations:
c = counts.get(a['name'], 0) + 1
counts[a['nam... | Given a list of animations, some of which might have duplicate names, rename
the first one to be <duplicate>_0, the second <duplicate>_1,
<duplicate>_2, etc. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.