code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def validate_key(key: str):
"""
Validates the given key.
:param key: the key to validate
:raises InvalidKeyError: raised if the given key is invalid
"""
if "//" in key:
raise DoubleSlashKeyError(key)
elif normpath(key) != key:
raise NonNorm... | Validates the given key.
:param key: the key to validate
:raises InvalidKeyError: raised if the given key is invalid |
def _init_loaders(self) -> None:
"""
This creates the loaders instances and subscribes to their updates.
"""
for loader in settings.I18N_TRANSLATION_LOADERS:
loader_class = import_class(loader['loader'])
instance = loader_class()
instance.on_update(se... | This creates the loaders instances and subscribes to their updates. |
def _parse_remote_model(self, context):
"""
parse the remote resource model and adds its full name
:type context: models.QualiDriverModels.ResourceRemoteCommandContext
"""
if not context.remote_endpoints:
raise Exception('no remote resources found in context: {0}', j... | parse the remote resource model and adds its full name
:type context: models.QualiDriverModels.ResourceRemoteCommandContext |
def add_permission_view_menu(self, permission_name, view_menu_name):
"""
Adds a permission on a view or menu to the backend
:param permission_name:
name of the permission to add: 'can_add','can_edit' etc...
:param view_menu_name:
name of the v... | Adds a permission on a view or menu to the backend
:param permission_name:
name of the permission to add: 'can_add','can_edit' etc...
:param view_menu_name:
name of the view menu to add |
def firmware_manifest_destroy(self, manifest_id, **kwargs): # noqa: E501
"""Delete a manifest # noqa: E501
Delete a firmware manifest. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>>... | Delete a manifest # noqa: E501
Delete a firmware manifest. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.firmware_manifest_destroy(manifest_id, asynchronous=True)
>>> r... |
def get_copy(dict_, key, default=None):
    """
    Look up ``key`` in ``dict_`` and return a deep copy of the value.

    Deep-copying protects the dictionary's (possibly mutable) stored value
    from mutation by the caller.  The previous implementation skipped the
    copy for falsy values, which leaked the shared object whenever the
    stored value was an empty container (``[]``, ``{}``, ``""``);
    deep-copying unconditionally fixes that and is a no-op for immutable
    values such as ``None`` or ``0``.

    :param dict_: dictionary to search
    :param key: key to look up
    :param default: value returned (also deep-copied) when ``key`` is absent
    :return: deep copy of the found value, or of ``default``
    """
    return deepcopy(dict_.get(key, default))
def _get_tables(self, base_dir):
"""Load the contents of meta_file and the corresponding data.
If fields containing Personally Identifiable Information are detected in the metadata
they are anonymized before asign them into `table_dict`.
Args:
base_dir(str): Root folder of ... | Load the contents of meta_file and the corresponding data.
If fields containing Personally Identifiable Information are detected in the metadata
they are anonymized before asign them into `table_dict`.
Args:
base_dir(str): Root folder of the dataset files.
Returns:
... |
def non_increasing(values):
    """Return ``True`` when no element is larger than its predecessor."""
    # Compare each adjacent pair; an empty or single-element sequence
    # trivially satisfies the condition (all() over nothing is True).
    return all(second <= first for first, second in zip(values, values[1:]))
def _edit(self, filename, line=None):
""" Opens a Python script for editing.
Parameters:
-----------
filename : str
A path to a local system file.
line : int, optional
A line of interest in the file.
"""
if self.custom_edit:
s... | Opens a Python script for editing.
Parameters:
-----------
filename : str
A path to a local system file.
line : int, optional
A line of interest in the file. |
async def ssh_exec(server, cmd, timeout=10, **ssh_kwargs):
"""Execute a command on a given server using asynchronous SSH-connection.
The connection to the server is wrapped in :func:`asyncio.wait_for` and
given :attr:`timeout` is applied to it. If the server is not reachable
before timeout expires, :ex... | Execute a command on a given server using asynchronous SSH-connection.
The connection to the server is wrapped in :func:`asyncio.wait_for` and
given :attr:`timeout` is applied to it. If the server is not reachable
before timeout expires, :exc:`asyncio.TimeoutError` is raised.
:param str server: Addres... |
def RemoveScanNode(self, path_spec):
"""Removes a scan node of a certain path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
SourceScanNode: parent scan node or None if not available.
Raises:
RuntimeError: if the scan node has sub nodes.
"""
scan_nod... | Removes a scan node of a certain path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
SourceScanNode: parent scan node or None if not available.
Raises:
RuntimeError: if the scan node has sub nodes. |
def _config_net_topology(self, conf):
"""
Initialize and populate all the network related elements, like
reserving ips and populating network specs of the given confiiguration
spec
Args:
conf (dict): Configuration spec to initalize
Returns:
None
... | Initialize and populate all the network related elements, like
reserving ips and populating network specs of the given confiiguration
spec
Args:
conf (dict): Configuration spec to initalize
Returns:
None |
def get_sum_w2(self, ix, iy=0, iz=0):
"""
Obtain the true number of entries in the bin weighted by w^2
"""
if self.GetSumw2N() == 0:
raise RuntimeError(
"Attempting to access Sumw2 in histogram "
"where weights were not stored")
xl = se... | Obtain the true number of entries in the bin weighted by w^2 |
def paginate_announcements_list(request, context, items):
"""
***TODO*** Migrate to django Paginator (see lostitems)
"""
# pagination
if "start" in request.GET:
try:
start_num = int(request.GET.get("start"))
except ValueError:
start_num = 0
else:
... | ***TODO*** Migrate to django Paginator (see lostitems) |
def _make_renderer(self, at_paths, at_encoding, **kwargs):
"""
:param at_paths: Template search paths
:param at_encoding: Template encoding
:param kwargs: Keyword arguments passed to the template engine to
render templates with specific features enabled.
"""
f... | :param at_paths: Template search paths
:param at_encoding: Template encoding
:param kwargs: Keyword arguments passed to the template engine to
render templates with specific features enabled. |
def get_object_info(self):
"""
Returns object info in following form <module.class object at address>
"""
objectinfo = str(self.__class__).replace(">", "")
objectinfo = objectinfo.replace("class ", "")
objectinfo = objectinfo.replace("'", "")
objectinfo += " objec... | Returns object info in following form <module.class object at address> |
def get_name_deadlines( self, name_rec, namespace_rec, block_number ):
"""
Get the expiry and renewal deadlines for a (registered) name.
NOTE: expire block here is NOT the block at which the owner loses the name, but the block at which lookups fail.
The name owner has until renewal_dead... | Get the expiry and renewal deadlines for a (registered) name.
NOTE: expire block here is NOT the block at which the owner loses the name, but the block at which lookups fail.
The name owner has until renewal_deadline to renew the name.
Return {'expire_block': ..., 'renewal_deadline': ...} on s... |
def minion_publish(self, load):
'''
Publish a command initiated from a minion, this method executes minion
restrictions so that the minion publication will only work if it is
enabled in the config.
The configuration on the master allows minions to be matched to
salt funct... | Publish a command initiated from a minion, this method executes minion
restrictions so that the minion publication will only work if it is
enabled in the config.
The configuration on the master allows minions to be matched to
salt functions, so the minions can only publish allowed salt f... |
def compute_tensor(self, x):
"""
:param x: (batch, time, vec)
"""
# Target class
class_matrix = self.target_tensor // self.output_size
class_vector = class_matrix.reshape((-1,))
# Target index
target_matrix = self.target_tensor % self.output_size
t... | :param x: (batch, time, vec) |
def purge_db(self):
    """
    Remove every stored checkpoint record belonging to the current user.
    """
    # engine.begin() wraps the purge in a transaction so the deletion
    # is committed atomically (or rolled back on error).
    with self.engine.begin() as connection:
        purge_remote_checkpoints(connection, self.user_id)
def delete(self):
"""Delete template config for specified template name.
.. __: https://api.go.cd/current/#delete-a-template
Returns:
Response: :class:`gocd.api.response.Response` object
"""
headers = self._default_headers()
return self._request(self.name,
... | Delete template config for specified template name.
.. __: https://api.go.cd/current/#delete-a-template
Returns:
Response: :class:`gocd.api.response.Response` object |
def _recompute_transform(self):
"""NOTE: This cannot be called until after this has been added
to an Axes, otherwise unit conversion will fail. This
maxes it very important to call the accessor method and
not directly access the transformation member variable.
"""
center = (self.convert_xunits(self... | NOTE: This cannot be called until after this has been added
to an Axes, otherwise unit conversion will fail. This
maxes it very important to call the accessor method and
not directly access the transformation member variable. |
def hla_choices(orig_hla, min_parts=2):
"""Provide a range of options for HLA type, with decreasing resolution.
"""
yield orig_hla
try:
int(orig_hla[-1])
except ValueError:
yield orig_hla[:-1]
hla_parts = orig_hla.split(":")
for sub_i in range(len(hla_parts) - min_parts + 1):... | Provide a range of options for HLA type, with decreasing resolution. |
def process_belrdf(rdf_str, print_output=True):
"""Return a BelRdfProcessor for a BEL/RDF string.
Parameters
----------
rdf_str : str
A BEL/RDF string to be processed. This will usually come from reading
a .rdf file.
Returns
-------
bp : BelRdfProcessor
A BelRdfProc... | Return a BelRdfProcessor for a BEL/RDF string.
Parameters
----------
rdf_str : str
A BEL/RDF string to be processed. This will usually come from reading
a .rdf file.
Returns
-------
bp : BelRdfProcessor
A BelRdfProcessor object which contains INDRA Statements in
... |
def morlet(freq, s_freq, ratio=5, sigma_f=None, dur_in_sd=4, dur_in_s=None,
normalization='peak', zero_mean=False):
"""Create a Morlet wavelet.
Parameters
----------
freq : float
central frequency of the wavelet
s_freq : int
sampling frequency
ratio : float
ra... | Create a Morlet wavelet.
Parameters
----------
freq : float
central frequency of the wavelet
s_freq : int
sampling frequency
ratio : float
ratio for a wavelet family ( = freq / sigma_f)
sigma_f : float
standard deviation of the wavelet in frequency domain
dur... |
def post(cls, payload):
"""
A wrapper over Model.post() that handles the case where a Library has a PairedBarcode
and the user may have supplied the PairedBarcode in the form of index1-index2, i.e.
GATTTCCA-GGCGTCGA. This isn't the PairedBarcode's record name or a record ID, thus
... | A wrapper over Model.post() that handles the case where a Library has a PairedBarcode
and the user may have supplied the PairedBarcode in the form of index1-index2, i.e.
GATTTCCA-GGCGTCGA. This isn't the PairedBarcode's record name or a record ID, thus
Model.post() won't be able to figure out ... |
def maketrans(fromstr, tostr):
"""maketrans(frm, to) -> string
Return a translation table (a string of 256 bytes long)
suitable for use in string.translate. The strings frm and to
must be of the same length.
"""
if len(fromstr) != len(tostr):
raise ValueError, "maketrans arguments mus... | maketrans(frm, to) -> string
Return a translation table (a string of 256 bytes long)
suitable for use in string.translate. The strings frm and to
must be of the same length. |
def build_image(self, conf, pushing=False):
"""Build this image"""
with conf.make_context() as context:
try:
stream = BuildProgressStream(conf.harpoon.silent_build)
with self.remove_replaced_images(conf) as info:
cached = NormalBuilder().bu... | Build this image |
def get_dependencies(ireq, sources=None, parent=None):
# type: (Union[InstallRequirement, InstallationCandidate], Optional[List[Dict[S, Union[S, bool]]]], Optional[AbstractDependency]) -> Set[S, ...]
"""Get all dependencies for a given install requirement.
:param ireq: A single InstallRequirement
:type... | Get all dependencies for a given install requirement.
:param ireq: A single InstallRequirement
:type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
:param sources: Pipfile-formatted sources, defaults to None
:type sources: list[dict], optional
:param parent: The parent of this lis... |
def words_for_language(language_code):
"""
Return the math words for a language code.
The language_code should be an ISO 639-2 language code.
https://www.loc.gov/standards/iso639-2/php/code_list.php
"""
word_groups = word_groups_for_language(language_code)
words = []
for group in word_g... | Return the math words for a language code.
The language_code should be an ISO 639-2 language code.
https://www.loc.gov/standards/iso639-2/php/code_list.php |
def _load_single_patient_cufflinks(self, patient, filter_ok):
"""
Load Cufflinks gene quantification given a patient
Parameters
----------
patient : Patient
filter_ok : bool, optional
If true, filter Cufflinks data to row with FPKM_status == "OK"
Ret... | Load Cufflinks gene quantification given a patient
Parameters
----------
patient : Patient
filter_ok : bool, optional
If true, filter Cufflinks data to row with FPKM_status == "OK"
Returns
-------
data: Pandas dataframe
Pandas dataframe o... |
def get_unread_message_count_between(parser, token):
"""
Returns the unread message count between two users.
Syntax::
{% get_unread_message_count_between [user] and [user] as [var_name] %}
Example usage::
{% get_unread_message_count_between funky and wunki as message_count %}
""... | Returns the unread message count between two users.
Syntax::
{% get_unread_message_count_between [user] and [user] as [var_name] %}
Example usage::
{% get_unread_message_count_between funky and wunki as message_count %} |
def setup(self):
"""
copies default stylesheets and javascript files if necessary, and
appends them to the options
"""
from javatools import cheetah
options = self.options
datadir = getattr(options, "html_copy_data", None)
if getattr(options, "html_data... | copies default stylesheets and javascript files if necessary, and
appends them to the options |
def Copy(self, field_number=None):
"""Returns descriptor copy, optionally changing field number."""
new_args = self._kwargs.copy()
if field_number is not None:
new_args["field_number"] = field_number
return ProtoRDFValue(
rdf_type=self.original_proto_type_name,
default=getattr(sel... | Returns descriptor copy, optionally changing field number. |
def _path_to_baton_json(self, path: str) -> Dict:
"""
Converts a path to the type of iRODS entity the mapper deals with, to its JSON representation.
:param path: the path to convert
:return: the JSON representation of the path
"""
entity = self._create_entity_with_path(pa... | Converts a path to the type of iRODS entity the mapper deals with, to its JSON representation.
:param path: the path to convert
:return: the JSON representation of the path |
def header(self):
'''
Format this element's metadata as it would appear in a PLY
header.
'''
lines = ['element %s %d' % (self.name, self.count)]
# Some information is lost here, since all comments are placed
# between the 'element' line and the first property de... | Format this element's metadata as it would appear in a PLY
header. |
def _copytoscratch(self, maps):
"""Copies the data in maps to the scratch space.
If the maps contain arrays that are not the same shape as the scratch
space, a new scratch space will be created.
"""
try:
for p in self.inputs:
self._scratch[p][:] = map... | Copies the data in maps to the scratch space.
If the maps contain arrays that are not the same shape as the scratch
space, a new scratch space will be created. |
def _get_grain(name, proxy=None):
    '''
    Return the value of grain ``name`` from the cached grains dictionary,
    or ``None`` when the cache retrieval failed or produced no output.
    '''
    cached = _retrieve_grains_cache(proxy=proxy)
    # Only trust the cache when the call reported success AND returned
    # a non-empty 'out' mapping.
    if not (cached.get('result', False) and cached.get('out', {})):
        return None
    return cached['out'].get(name)
def add_alignment_errors(self, ae):
    """Record a precomputed alignment-error object for profile construction.

    :param ae: alignment errors to append to this object's collection
    """
    # Register the new errors both locally and with the aggregate tracker.
    self._alignment_errors.append(ae)
    self._general_errors.add_alignment_errors(ae)
    # Invalidate the cached per-context error summaries; they are stale
    # now that a new alignment-error record has been added.
    self._target_context_errors = None
    self._query_context_errors = None
def height(self):
    """Return the vertical extent (max y minus min y) of the line's
    bounding box; a line with at most one coordinate has height 0."""
    if len(self.coords) > 1:
        return np.max(self.yy) - np.min(self.yy)
    return 0
def _determine_tool(files):
"""Yields tuples in the form of (linker file, tool the file links for"""
for file in files:
linker_ext = file.split('.')[-1]
if "sct" in linker_ext or "lin" in linker_ext:
yield (str(file),"uvision")
elif "ld" in linker_ext:
yield (str(... | Yields tuples in the form of (linker file, tool the file links for |
def follow_path(file_path, buffering=-1, encoding=None, errors='strict'):
"""
Similar to follow, but also looks up if inode of file is changed
e.g. if it was re-created.
Returned generator yields strings encoded by using encoding.
If encoding is not specified, it defaults to locale.getpreferredenco... | Similar to follow, but also looks up if inode of file is changed
e.g. if it was re-created.
Returned generator yields strings encoded by using encoding.
If encoding is not specified, it defaults to locale.getpreferredencoding()
>>> import io
>>> import os
>>> f = io.open('test_follow_path.txt'... |
def get_class_from_settings_from_apps(settings_key):
"""Try and get a class from a settings path by lookin in installed apps.
"""
cls_path = getattr(settings, settings_key, None)
if not cls_path:
raise NotImplementedError()
try:
app_label = cls_path.split('.')[-2]
model_nam... | Try and get a class from a settings path by lookin in installed apps. |
def init(ctx):
"""Initialize the project for use with EasyCI. This installs the necessary
git hooks (pre-commit + pre-push) and add a config file if one does not
already exists.
"""
# install hooks
git = ctx.obj['vcs']
click.echo("Installing hooks...", nl=False)
for old in ['commit-msg']... | Initialize the project for use with EasyCI. This installs the necessary
git hooks (pre-commit + pre-push) and add a config file if one does not
already exists. |
def present_params(paramlist, spacing = 0, maxchars=90, linecont=", &"):
"""Creates the (paramlist) for a method call formatted nicely for calls
with lots of parameters."""
#The +2 is spacing is for the tab indent at the start of the line.
#The +3 is for indent and the extra parenthesis at the start of ... | Creates the (paramlist) for a method call formatted nicely for calls
with lots of parameters. |
def _create_filter(self, condition):
""" Create a filter object from a textual condition.
"""
# "Normal" comparison operators?
comparison = re.match(r"^(%s)(<[>=]?|>=?|!=|~)(.*)$" % self.ident_re, condition)
if comparison:
name, comparison, values = comparison.groups(... | Create a filter object from a textual condition. |
def serialize(self, private=True):
"""Serialize this key.
:param private: Whether or not the serialized key should contain
private information. Set to False for a public-only representation
that cannot spend funds but can create children. You want
private=False if yo... | Serialize this key.
:param private: Whether or not the serialized key should contain
private information. Set to False for a public-only representation
that cannot spend funds but can create children. You want
private=False if you are, for example, running an e-commerce
... |
def check_grid_mapping(self, ds):
"""
5.6 When the coordinate variables for a horizontal grid are not
longitude and latitude, it is required that the true latitude and
longitude coordinates be supplied via the coordinates attribute. If in
addition it is desired to describe the ma... | 5.6 When the coordinate variables for a horizontal grid are not
longitude and latitude, it is required that the true latitude and
longitude coordinates be supplied via the coordinates attribute. If in
addition it is desired to describe the mapping between the given
coordinate variables a... |
def parse(self, xmp):
"""Run parser and return a dictionary of all the parsed metadata."""
tree = etree.fromstring(xmp)
rdf_tree = tree.find(RDF_NS + 'RDF')
meta = defaultdict(dict)
for desc in rdf_tree.findall(RDF_NS + 'Description'):
for el in desc.getchildren():
... | Run parser and return a dictionary of all the parsed metadata. |
def check_permission(permission, obj):
"""
Returns if the current user has rights for the permission passed in against
the obj passed in
:param permission: name of the permission
:param obj: the object to check the permission against for the current user
:return: 1 if the user has rights for thi... | Returns if the current user has rights for the permission passed in against
the obj passed in
:param permission: name of the permission
:param obj: the object to check the permission against for the current user
:return: 1 if the user has rights for this permission for the passed in obj |
def visitLexerTerminal(self, ctx: jsgParser.LexerTerminalContext):
""" terminal: LEXER_ID | STRING """
if ctx.LEXER_ID():
# Substitute LEXER_ID with its string equivalent - "{LEXER_ID}".format(LEXER_ID=LEXER_ID.pattern)
idtoken = as_token(ctx)
self._rulePattern += '(... | terminal: LEXER_ID | STRING |
def lag_plot(data, lag=1, kind="scatter", **kwds):
"""Lag plot for time series.
Parameters
----------
data: pandas.Series
the time series to plot
lag: integer
The lag of the scatter plot, default=1
kind: string
The kind of plot to use (e.g. 'scatter', 'line')
**kwds:... | Lag plot for time series.
Parameters
----------
data: pandas.Series
the time series to plot
lag: integer
The lag of the scatter plot, default=1
kind: string
The kind of plot to use (e.g. 'scatter', 'line')
**kwds:
Additional keywords passed to data.vgplot.scatter... |
def _fetch_pageviews(self, storage, year, week, ip_users=False):
"""
Fetch PageViews from Elasticsearch.
:param time_from: Staring at timestamp.
:param time_to: To timestamp
"""
prefix = 'Pageviews'
if ip_users:
query_add = "AND !(bot:True) AND (id_us... | Fetch PageViews from Elasticsearch.
:param time_from: Staring at timestamp.
:param time_to: To timestamp |
def _get_num_locations(d):
"""
Find out how many locations are being parsed. Compare lengths of each
coordinate list and return the max
:param dict d: Geo metadata
:return int: Max number of locations
"""
lengths = []
for key in EXCEL_GEO:
try:
if key != "siteName":
... | Find out how many locations are being parsed. Compare lengths of each
coordinate list and return the max
:param dict d: Geo metadata
:return int: Max number of locations |
def _get_derived_feature_types(self, limit):
"""
Make a pass through the feature table in order to properly type
the FBal (allele) features, which are derived either from other
sequence features (which can be things like RNAi products)
or transgenic-transposons. We'll save the a... | Make a pass through the feature table in order to properly type
the FBal (allele) features, which are derived either from other
sequence features (which can be things like RNAi products)
or transgenic-transposons. We'll save the allele type into a hasmap.
:param limit:
:return: |
async def save_changes(self, turn_context: TurnContext, force: bool = False) -> None:
"""
If it has changed, writes to storage the state object that is cached in the current context object for this turn.
:param turn_context: The context object for this turn.
:param force: Optional. True ... | If it has changed, writes to storage the state object that is cached in the current context object for this turn.
:param turn_context: The context object for this turn.
:param force: Optional. True to save state to storage whether or not there are changes. |
def rotate(self, angle, direction='z', axis=None):
"""
Returns a new Place which is the same but rotated about a
given axis.
If the axis given is ``None``, the rotation will be computed
about the Place's centroid.
:param angle: Rotation angle (i... | Returns a new Place which is the same but rotated about a
given axis.
If the axis given is ``None``, the rotation will be computed
about the Place's centroid.
:param angle: Rotation angle (in radians)
:type angle: float
:param direction: Axis di... |
def to_native_types(self, slicer=None, na_rep='nan', quoting=None,
**kwargs):
""" convert to our native types format, slicing if desired """
values = self.get_values()
if slicer is not None:
values = values[:, slicer]
mask = isna(values)
if ... | convert to our native types format, slicing if desired |
def get_parser():
"""Return the parser object for this script."""
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
parser = ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument("-n",
d... | Return the parser object for this script. |
def _vec_alpha(self, donor_catchments):
"""
Return vector alpha which is the weights for donor model errors
Methodology source: Kjeldsen, Jones & Morris 2014, eq 10
:param donor_catchments: Catchments to use as donors
:type donor_catchments: list of :class:`Catchment`
:... | Return vector alpha which is the weights for donor model errors
Methodology source: Kjeldsen, Jones & Morris 2014, eq 10
:param donor_catchments: Catchments to use as donors
:type donor_catchments: list of :class:`Catchment`
:return: Vector of donor weights
:rtype: :class:`nump... |
def loop(self, intro=None):
""" TODO as heck.
See Python's cmd.Cmd.cmdloop for some (somewhat horrifying)
example loops - that's what we're working similarly to.
"""
self.fire("preloop")
if intro is not None:
self.intro = intro
if self.intro is... | TODO as heck.
See Python's cmd.Cmd.cmdloop for some (somewhat horrifying)
example loops - that's what we're working similarly to. |
def writeAMF3(self, data):
    """
    Writes an element in L{AMF3<pyamf.amf3>} format.
    """
    # Emit the AMF3 type marker first so readers know how to decode
    # the element that follows, then delegate the payload encoding.
    self.writeType(TYPE_AMF3)
    encoder = self.context.getAMF3Encoder(self)
    encoder.writeElement(data)
def get_cachedir_csig(self):
"""
Fetch a Node's content signature for purposes of computing
another Node's cachesig.
This is a wrapper around the normal get_csig() method that handles
the somewhat obscure case of using CacheDir with the -n option.
Any files that don't ex... | Fetch a Node's content signature for purposes of computing
another Node's cachesig.
This is a wrapper around the normal get_csig() method that handles
the somewhat obscure case of using CacheDir with the -n option.
Any files that don't exist would normally be "built" by fetching
... |
def _datatype_size(datatype, numElms): # @NoSelf
'''
Gets datatype size
Parameters:
datatype : int
CDF variable data type
numElms : int
number of elements
Returns:
numBytes : int
The number of bytes ... | Gets datatype size
Parameters:
datatype : int
CDF variable data type
numElms : int
number of elements
Returns:
numBytes : int
The number of bytes for the data |
def filter_resp(self, action_resp, filter_params):
"""Filter response of action. Used to make printed results more
specific
:param action_resp: named tuple (CommandsResponse)
containing response from action.
:param filter_params: params used after '|' specific for given filt... | Filter response of action. Used to make printed results more
specific
:param action_resp: named tuple (CommandsResponse)
containing response from action.
:param filter_params: params used after '|' specific for given filter
:return: filtered response. |
def get_info(self, params=None):
    """
    Gets mailbox info via a GetInfoRequest against the Zimbra account
    service.

    @param params: optional dict of params to retrieve; defaults to an
        empty request.  (The previous signature used a mutable default
        ``params={}``, a shared object across calls; ``None`` + a fresh
        dict per call is the safe, backward-compatible form.)
    @return: AccountInfo
    """
    if params is None:
        params = {}
    res = self.invoke(zconstant.NS_ZIMBRA_ACC_URL,
                      sconstant.GetInfoRequest,
                      params)
    return res
def restore_scoped_package_version_from_recycle_bin(self, package_version_details, feed_id, package_scope, unscoped_package_name, package_version):
"""RestoreScopedPackageVersionFromRecycleBin.
[Preview API] Restore a package version with an npm scope from the recycle bin to its feed.
:param :cl... | RestoreScopedPackageVersionFromRecycleBin.
[Preview API] Restore a package version with an npm scope from the recycle bin to its feed.
:param :class:`<NpmRecycleBinPackageVersionDetails> <azure.devops.v5_0.npm.models.NpmRecycleBinPackageVersionDetails>` package_version_details:
:param str feed_i... |
def export(self, name, columns, points):
"""Write the points to the ZeroMQ server."""
logger.debug("Export {} stats to ZeroMQ".format(name))
# Create DB input
data = dict(zip(columns, points))
# Do not publish empty stats
if data == {}:
return False
... | Write the points to the ZeroMQ server. |
def get_logger(cls, *name, **kwargs):
"""Construct a new :class:`KvLoggerAdapter` which encapsulates
the :class:`logging.Logger` specified by ``name``.
:param name:
Any amount of symbols. Will be concatenated and normalized
to form the logger name. Can also be empty.
... | Construct a new :class:`KvLoggerAdapter` which encapsulates
the :class:`logging.Logger` specified by ``name``.
:param name:
Any amount of symbols. Will be concatenated and normalized
to form the logger name. Can also be empty.
:param extra:
Additional conte... |
def _indent(indent=0, quote='', indent_char=' '):
"""Indent util function, compute new indent_string"""
if indent > 0:
indent_string = ''.join((
str(quote),
(indent_char * (indent - len(quote)))
))
else:
indent_string = ''.join((
('\x08' * (-1 * (i... | Indent util function, compute new indent_string |
def blend_alpha(image_fg, image_bg, alpha, eps=1e-2):
"""
Blend two images using an alpha blending.
In an alpha blending, the two images are naively mixed. Let ``A`` be the foreground image
and ``B`` the background image and ``a`` is the alpha value. Each pixel intensity is then
computed as ``a * A... | Blend two images using an alpha blending.
In an alpha blending, the two images are naively mixed. Let ``A`` be the foreground image
and ``B`` the background image and ``a`` is the alpha value. Each pixel intensity is then
computed as ``a * A_ij + (1-a) * B_ij``.
dtype support::
* ``uint8``: y... |
def product_id_change(self):
'''
- @param self: object pointer
- '''
context = dict(self._context)
if not context:
context = {}
if context.get('folio', False):
if self.product_id and self.folio_id.partner_id:
self.name = self.produc... | - @param self: object pointer
- |
def apply_to_image(self, image, reference=None, interpolation='linear'):
"""
Apply transform to an image
Arguments
---------
image : ANTsImage
image to which the transform will be applied
reference : ANTsImage
target space for transforming image
... | Apply transform to an image
Arguments
---------
image : ANTsImage
image to which the transform will be applied
reference : ANTsImage
target space for transforming image
interpolation : string
type of interpolation to use
Returns
... |
def fetch(self, url, open_graph=None, twitter_card=None, touch_icon=None,
favicon=None, all_images=None, parser=None, handle_file_content=None,
canonical=None):
"""Retrieves content from the specified url, parses it, and returns
a beautifully crafted dictionary of important i... | Retrieves content from the specified url, parses it, and returns
a beautifully crafted dictionary of important information about that
web page.
Priority tree is as follows:
1. OEmbed
2. Open Graph
3. Twitter Card
4. Other meta content (i.e. descri... |
def _register_function(name: str, func, universe: bool, in_place: bool):
"""Register a transformation function under the given name.
:param name: Name to register the function under
:param func: A function
:param universe:
:param in_place:
:return: The same function, with additional properties ... | Register a transformation function under the given name.
:param name: Name to register the function under
:param func: A function
:param universe:
:param in_place:
:return: The same function, with additional properties added |
def get_skeletons(self, component_info=None, data=None, component_position=None):
"""Get skeletons
"""
components = []
append_components = components.append
for _ in range(component_info.skeleton_count):
component_position, info = QRTPacket._get_exact(
... | Get skeletons |
def train(self, data, epochs, autostop=False):
"""!
@brief Trains self-organized feature map (SOM).
@param[in] data (list): Input data - list of points where each point is represented by list of features, for example coordinates.
@param[in] epochs (uint): Number of epochs for train... | !
@brief Trains self-organized feature map (SOM).
@param[in] data (list): Input data - list of points where each point is represented by list of features, for example coordinates.
@param[in] epochs (uint): Number of epochs for training.
@param[in] autostop (bool): Automatic... |
def main():
    """Demonstrate the simplest TauDEM-based watershed delineation run."""
    # Source DEM raster and working directory for intermediate/output files.
    dem = '../tests/data/Jamaica_dem.tif'
    wp = '../tests/data/tmp_results/wtsd_delineation'
    # Number of parallel processes handed to TauDEM.
    num_proc = 2
    TauDEMWorkflow.watershed_delineation(num_proc, dem, workingdir=wp)
def to_detach(b:Tensors, cpu:bool=True):
    "Detach every tensor found in `b ` (recursing into list-like containers); move to CPU when `cpu=True`."
    if is_listy(b):
        # Recurse element-wise through list-like containers.
        return [to_detach(item, cpu) for item in b]
    if isinstance(b, Tensor):
        detached = b.detach()
        return detached.cpu() if cpu else detached
    # Non-tensor leaves pass through unchanged.
    return b
def get_fragment(self, offset):
    """
    Return a repr'd excerpt of the source starting at `offset`, with a
    trailing ellipsis when the source continues past the excerpt.
    """
    window = 10
    snippet = repr(self.source[offset:offset + window])
    # Signal that more text follows the excerpt.
    if offset + window < len(self.source):
        snippet += '...'
    return snippet
def avg_bp_from_range(self, bp):
""" Helper function - FastQC often gives base pair ranges (eg. 10-15)
which are not helpful when plotting. This returns the average from such
ranges as an int, which is helpful. If not a range, just returns the int """
try:
if '-' in bp:
... | Helper function - FastQC often gives base pair ranges (eg. 10-15)
which are not helpful when plotting. This returns the average from such
ranges as an int, which is helpful. If not a range, just returns the int |
def get_column(name, model=None):
    """
    Resolve a table column by name; `name` may be qualified as
    `model.column`, in which case the model is looked up first.
    """
    if '.' in name:
        model_name, name = name.split('.')
        model = get_model(model_name)
    # Returns None when no model is available or the column is missing.
    if model:
        return model.c.get(name)
def first_arg_to_level_name(arg):
"""Decide what level the argument specifies and return it. The argument
must contain (case-insensitive) one of the values in LEVELS or be an integer
constant. Otherwise None will be returned."""
try:
return int(arg)
except ValueError:
arg = arg.upp... | Decide what level the argument specifies and return it. The argument
must contain (case-insensitive) one of the values in LEVELS or be an integer
constant. Otherwise None will be returned. |
def get_jira_key_from_scenario(scenario):
"""Extract Jira Test Case key from scenario tags.
Two tag formats are allowed:
@jira('PROJECT-32')
@jira=PROJECT-32
:param scenario: behave scenario
:returns: Jira test case key
"""
jira_regex = re.compile('jira[=\(\']*([A-Z]+\-[0-9]+)[\'\)]*$')... | Extract Jira Test Case key from scenario tags.
Two tag formats are allowed:
@jira('PROJECT-32')
@jira=PROJECT-32
:param scenario: behave scenario
:returns: Jira test case key |
def Search(self, artifact, os_name=None, cpe=None, label=None):
"""Whether the condition contains the specified values.
Args:
artifact: A string identifier for the artifact.
os_name: An OS string.
cpe: A CPE string.
label: A label string.
Returns:
True if the values match the... | Whether the condition contains the specified values.
Args:
artifact: A string identifier for the artifact.
os_name: An OS string.
cpe: A CPE string.
label: A label string.
Returns:
True if the values match the non-empty query attributes.
Empty query attributes are ignored i... |
def filter_entries(entries, filters, exclude):
"""
Filters a list of host entries according to the given filters.
:param entries: A list of host entries.
:type entries: [:py:class:`HostEntry`]
:param filters: Regexes that must match a `HostEntry`.
:type filters: [``str``]
:param exclude: Re... | Filters a list of host entries according to the given filters.
:param entries: A list of host entries.
:type entries: [:py:class:`HostEntry`]
:param filters: Regexes that must match a `HostEntry`.
:type filters: [``str``]
:param exclude: Regexes that must NOT match a `HostEntry`.
:type exclude:... |
def to_string(self, endpoints):
# type: (List[EndpointDescription]) -> str
"""
Converts the given endpoint description beans into a string
:param endpoints: A list of EndpointDescription beans
:return: A string containing an XML document
"""
# Make the ElementTre... | Converts the given endpoint description beans into a string
:param endpoints: A list of EndpointDescription beans
:return: A string containing an XML document |
def manage_initial_host_status_brok(self, b):
"""Prepare the known hosts cache"""
host_name = b.data['host_name']
logger.debug("got initial host status: %s", host_name)
self.hosts_cache[host_name] = {
'realm_name':
sanitize_name(b.data.get('realm_name', b.dat... | Prepare the known hosts cache |
def enc(x, codec='ascii'):
    """Escape SGML/XML/HTML special characters in `x` and encode it to bytes."""
    # '&' must be replaced first so already-inserted entities are not mangled.
    for raw, entity in (('&', '&amp;'), ('>', '&gt;'), ('<', '&lt;'), ('"', '&quot;')):
        x = x.replace(raw, entity)
    # Characters outside the codec become numeric character references.
    return x.encode(codec, 'xmlcharrefreplace')
def DeleteSnapshot(self,names=None):
"""Removes an existing Hypervisor level snapshot.
Supply one or more snapshot names to delete them concurrently.
If no snapshot names are supplied will delete all existing snapshots.
>>> clc.v2.Server(alias='BTDI',id='WA1BTDIKRT02').DeleteSnapshot().WaitUntilComplete()
0... | Removes an existing Hypervisor level snapshot.
Supply one or more snapshot names to delete them concurrently.
If no snapshot names are supplied will delete all existing snapshots.
>>> clc.v2.Server(alias='BTDI',id='WA1BTDIKRT02').DeleteSnapshot().WaitUntilComplete()
0 |
def deploy(provider=None):
    """
    Deploy the project according to the deployment descriptor.

    Does nothing when the descriptor file is absent.
    """
    if not os.path.exists(DEPLOY_YAML):
        return
    site = yaml.safe_load(_read_file(DEPLOY_YAML))
    # NOTE(review): the `provider` argument is currently ignored; the
    # provider named in the YAML descriptor always wins — confirm intent.
    provider_class = PROVIDERS[site['provider']]
    provider_class.deploy()
def mark_offer_as_win(self, offer_id):
    """
    Flag an offer as won.

    :param offer_id: the offer id
    :return Response
    """
    # Thin wrapper: PUT the WIN command against the offers resource.
    return self._create_put_request(resource=OFFERS, billomat_id=offer_id, command=WIN)
:param offer_id: the offer id
:return Response |
def _read_uaa_cache(self):
"""
Read cache of UAA client/user details.
"""
self._cache_path = os.path.expanduser('~/.predix/uaa.json')
if not os.path.exists(self._cache_path):
return self._initialize_uaa_cache()
with open(self._cache_path, 'r') as data:
... | Read cache of UAA client/user details. |
def validate_metadata(self, xml):
"""
Validates an XML SP Metadata.
:param xml: Metadata's XML that will be validate
:type xml: string
:returns: The list of found errors
:rtype: list
"""
assert isinstance(xml, compat.text_types)
if len(xml) == ... | Validates an XML SP Metadata.
:param xml: Metadata's XML that will be validate
:type xml: string
:returns: The list of found errors
:rtype: list |
def _new_conn(self):
"""
Return a fresh :class:`httplib.HTTPSConnection`.
"""
self.num_connections += 1
log.info("Starting new HTTPS connection (%d): %s"
% (self.num_connections, self.host))
if not self.ConnectionCls or self.ConnectionCls is DummyConnect... | Return a fresh :class:`httplib.HTTPSConnection`. |
def search(d, recursive=True, store_meta=True):
'''
Search for DICOM files within a given directory and receive back a
dictionary of {StudyInstanceUID: {SeriesNumber: [files]}}
Example usage::
>>> import yaxil.dicom
>>> yaxil.dicom.search("~/dicoms").keys()
['1.2.340.500067... | Search for DICOM files within a given directory and receive back a
dictionary of {StudyInstanceUID: {SeriesNumber: [files]}}
Example usage::
>>> import yaxil.dicom
>>> yaxil.dicom.search("~/dicoms").keys()
['1.2.340.500067.8.9.10.11012.13000001401516017181900000200']
:... |
def graph_repo(repo_url, output_loc, format='graphml'):
""" generates a graph for a git repository """
log = logging.getLogger("graphgit")
# repo type
local_repo = os.path.isabs(repo_url)
# repo name
repo_name = repo_url[repo_url.rfind('/')+1:repo_url.rfind('.git')] \
if not local_repo else repo_url[r... | generates a graph for a git repository |
def hostgroup_exists(name=None, groupid=None, node=None, nodeids=None, **kwargs):
'''
Checks if at least one host group that matches the given filter criteria exists.
.. versionadded:: 2016.3.0
:param name: names of the host groups
:param groupid: host group IDs
:param node: name of the node t... | Checks if at least one host group that matches the given filter criteria exists.
.. versionadded:: 2016.3.0
:param name: names of the host groups
:param groupid: host group IDs
:param node: name of the node the host groups must belong to (zabbix API < 2.4)
:param nodeids: IDs of the nodes the host... |
def _range_check(self, value, min_value, max_value):
'''
Utility method to check that the given value is between min_value and max_value.
'''
if value < min_value or value > max_value:
raise ValueError('%s out of range - %s is not between %s and %s' % (self.__class__.__name__... | Utility method to check that the given value is between min_value and max_value. |
def _has_nested(self, relations, operator='>=', count=1, boolean='and', extra=None):
"""
Add nested relationship count conditions to the query.
:param relations: nested relations
:type relations: str
:param operator: The operator
:type operator: str
:param coun... | Add nested relationship count conditions to the query.
:param relations: nested relations
:type relations: str
:param operator: The operator
:type operator: str
:param count: The count
:type count: int
:param boolean: The boolean value
:type boolean: s... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.