code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def memory_usage(self):
"""
Get the combined memory usage of the field data and field values.
"""
data = super(Field, self).memory_usage()
values = 0
for value in self.field_values:
values += value.memory_usage()
data['field_values'] = values
r... | Get the combined memory usage of the field data and field values. |
def _query_nsot(url, headers, device=None):
'''
if a device is given, query nsot for that specific device, otherwise return
all devices
:param url: str
:param headers: dict
:param device: None or str
:return:
'''
url = urlparse.urljoin(url, 'devices')
ret = {}
if not device:... | if a device is given, query nsot for that specific device, otherwise return
all devices
:param url: str
:param headers: dict
:param device: None or str
:return: |
def error_perturbation(C, S):
r"""Error perturbation for given sensitivity matrix.
Parameters
----------
C : (M, M) ndarray
Count matrix
S : (M, M) ndarray or (K, M, M) ndarray
Sensitivity matrix (for scalar observable) or sensitivity
tensor for vector observable
Return... | r"""Error perturbation for given sensitivity matrix.
Parameters
----------
C : (M, M) ndarray
Count matrix
S : (M, M) ndarray or (K, M, M) ndarray
Sensitivity matrix (for scalar observable) or sensitivity
tensor for vector observable
Returns
-------
X : float or (K,... |
async def disable(self, reason=None):
"""Enters maintenance mode
Parameters:
reason (str): Reason of disabling
Returns:
bool: ``True`` on success
"""
params = {"enable": True, "reason": reason}
response = await self._api.put("/v1/agent/maintenance... | Enters maintenance mode
Parameters:
reason (str): Reason of disabling
Returns:
bool: ``True`` on success |
def get_cookies_for_class(session, class_name,
cookies_file=None,
username=None,
password=None):
"""
Get the cookies for the given class.
We do not validate the cookies if they are loaded from a cookies file
because this is i... | Get the cookies for the given class.
We do not validate the cookies if they are loaded from a cookies file
because this is intended for debugging purposes or if the coursera
authentication process has changed. |
def register_chooser(self, chooser, **kwargs):
    """Register a model chooser definition in the registry.

    Anything that is not a ``Chooser`` subclass is delegated to
    ``register_simple_chooser``; otherwise the chooser is instantiated
    and stored under its model, and the class itself is returned.
    """
    if issubclass(chooser, Chooser):
        self.choosers[chooser.model] = chooser(**kwargs)
        return chooser
    # Not a full Chooser subclass: fall back to the simple registration path.
    return self.register_simple_chooser(chooser, **kwargs)
def tz_convert(dt, to_tz, from_tz=None) -> str:
"""
Convert to tz
Args:
dt: date time
to_tz: to tz
from_tz: from tz - will be ignored if tz from dt is given
Returns:
str: date & time
Examples:
>>> dt_1 = pd.Timestamp('2018-09-10 16:00', tz='Asia/Hong_Kong')... | Convert to tz
Args:
dt: date time
to_tz: to tz
from_tz: from tz - will be ignored if tz from dt is given
Returns:
str: date & time
Examples:
>>> dt_1 = pd.Timestamp('2018-09-10 16:00', tz='Asia/Hong_Kong')
>>> tz_convert(dt_1, to_tz='NY')
'2018-09-1... |
def _set_size_code(self):
"""Set the code for a size operation.
"""
if not self._op.startswith(self.SIZE):
self._size_code = None
return
if len(self._op) == len(self.SIZE):
self._size_code = self.SZ_EQ
else:
suffix = self._op[len(s... | Set the code for a size operation. |
def get(self, path):
""" Get a transform from the cache that maps along *path*, which must
be a list of Transforms to apply in reverse order (last transform is
applied first).
Accessed items have their age reset to 0.
"""
key = tuple(map(id, path))
item = self._c... | Get a transform from the cache that maps along *path*, which must
be a list of Transforms to apply in reverse order (last transform is
applied first).
Accessed items have their age reset to 0. |
def doMove(self, orgresource, dstresource, dummy = 56184, stresource = 'F', bShareFireCopy = 'false'):
"""DoMove
Args:
dummy: ???
orgresource: Path for a file which you want to move
dstresource: Destination path
bShareFireCopy: ???
Returns:
... | DoMove
Args:
dummy: ???
orgresource: Path for a file which you want to move
dstresource: Destination path
bShareFireCopy: ???
Returns:
True: Move success
False: Move failed |
def home_wins(self):
    """
    Returns an ``int`` of the number of games the home team won after the
    conclusion of the game.

    Returns 0 when the home record string does not contain exactly a
    wins/losses pair of numbers.
    """
    try:
        wins, losses = re.findall(r'\d+', self._home_record)
        # re.findall yields strings; cast so callers actually get the
        # documented int instead of e.g. '10'.
        return int(wins)
    except ValueError:
        # Raised by the tuple unpacking when the record does not hold
        # exactly two numbers.
        return 0
conclusion of the game. |
def from_string(cls, s, name=None, modules=None, active=None):
"""
Instantiate a REPP from a string.
Args:
name (str, optional): the name of the REPP module
modules (dict, optional): a mapping from identifiers to
REPP modules
active (iterable,... | Instantiate a REPP from a string.
Args:
name (str, optional): the name of the REPP module
modules (dict, optional): a mapping from identifiers to
REPP modules
active (iterable, optional): an iterable of default module
activations |
def magic_write(ofile, Recs, file_type):
"""
Parameters
_________
ofile : path to output file
Recs : list of dictionaries in MagIC format
file_type : MagIC table type (e.g., specimens)
Return :
[True,False] : True if successful
ofile : same as input
Effects :
writes a M... | Parameters
_________
ofile : path to output file
Recs : list of dictionaries in MagIC format
file_type : MagIC table type (e.g., specimens)
Return :
[True,False] : True if successful
ofile : same as input
Effects :
writes a MagIC formatted file from Recs |
def occupied_by_sort(self, address):
"""
Check if an address belongs to any segment, and if yes, returns the sort of the segment
:param int address: The address to check
:return: Sort of the segment that occupies this address
:rtype: str
"""
idx = self._search(a... | Check if an address belongs to any segment, and if yes, returns the sort of the segment
:param int address: The address to check
:return: Sort of the segment that occupies this address
:rtype: str |
def simBirth(self,which_agents):
'''
Makes new consumers for the given indices. Initialized variables include aNrm and pLvl, as
well as time variables t_age and t_cycle. Normalized assets and persistent income levels
are drawn from lognormal distributions given by aNrmInitMean and aNrm... | Makes new consumers for the given indices. Initialized variables include aNrm and pLvl, as
well as time variables t_age and t_cycle. Normalized assets and persistent income levels
are drawn from lognormal distributions given by aNrmInitMean and aNrmInitStd (etc).
Parameters
----------... |
def loads(cls, s: str) -> 'Money':
"""Parse from a string representation (repr)"""
try:
currency, amount = s.strip().split()
return cls(amount, currency)
except ValueError as err:
raise ValueError("failed to parse string "
" '{}': ... | Parse from a string representation (repr) |
def main(arguments=None):
"""
*The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
"""
# setup the command-line util settings
su = tools(
arguments=arguments,
docString=__doc__,
logLevel="ERROR",
option... | *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command* |
def create_response_object(self, service_id, version_number, name, status="200", response="OK", content="", request_condition=None, cache_condition=None):
"""Creates a new Response Object."""
body = self._formdata({
"name": name,
"status": status,
"response": response,
"content": content,
"request_co... | Creates a new Response Object. |
def guessFormat(self):
'''return quality score format -
might return several if ambiguous.'''
c = [ord(x) for x in self.quals]
mi, ma = min(c), max(c)
r = []
for entry_format, v in iteritems(RANGES):
m1, m2 = v
if mi >= m1 and ma < m2:
... | return quality score format -
might return several if ambiguous. |
def export_sleep_stats(self, filename, lights_off, lights_on):
"""Create CSV with sleep statistics.
Parameters
----------
filename: str
Filename for csv export
lights_off: float
Initial time when sleeper turns off the light (or their phone) to
... | Create CSV with sleep statistics.
Parameters
----------
filename: str
Filename for csv export
lights_off: float
Initial time when sleeper turns off the light (or their phone) to
go to sleep, in seconds from recording start
lights_on: float
... |
def __clear_break(self, pid, address):
"""
Used by L{dont_break_at} and L{dont_stalk_at}.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integ... | Used by L{dont_break_at} and L{dont_stalk_at}.
@type pid: int
@param pid: Process global ID.
@type address: int or str
@param address:
Memory address of code instruction to break at. It can be an
integer value for the actual address or a string with a label
... |
async def play(self, author, text_channel, query, index=None, stop_current=False, shuffle=False):
"""
The play command
Args:
author (discord.Member): The member that called the command
text_channel (discord.Channel): The channel where the command was called
q... | The play command
Args:
author (discord.Member): The member that called the command
text_channel (discord.Channel): The channel where the command was called
query (str): The argument that was passed with the command
index (str): Whether to play next or at the end ... |
def vote_cast(vote: Vote, choice_index: int, inputs: dict,
change_address: str) -> bytes:
'''vote cast transaction'''
network_params = net_query(vote.deck.network)
vote_cast_addr = vote.vote_choice_address[choice_index]
tx_fee = network_params.min_tx_fee # settle for min tx fee for now
... | vote cast transaction |
def _generate_style(self):
"""
Create new Style instance.
(We don't want to do this on every key press, because each time the
renderer receives a new style class, he will redraw everything.)
"""
return generate_style(self.code_styles[self._current_code_style_name],
... | Create new Style instance.
(We don't want to do this on every key press, because each time the
renderer receives a new style class, he will redraw everything.) |
def StreamingCommand(cls, usb, service, command='', timeout_ms=None):
"""One complete set of USB packets for a single command.
Sends service:command in a new connection, reading the data for the
response. All the data is held in memory, large responses will be slow and
can fill up memor... | One complete set of USB packets for a single command.
Sends service:command in a new connection, reading the data for the
response. All the data is held in memory, large responses will be slow and
can fill up memory.
Args:
usb: USB device handle with BulkRead and BulkWrite me... |
def get_published_events(self, process=True) -> List[Event]:
"""Get a list of published (pending) events.
Return a list of Event objects which have been published
and are therefore pending to be processed. If the process argument
is set to true, any events returned from this method will... | Get a list of published (pending) events.
Return a list of Event objects which have been published
and are therefore pending to be processed. If the process argument
is set to true, any events returned from this method will also be
marked as processed by moving them to the processed eve... |
def move_dir(
src_fs, # type: Union[Text, FS]
src_path, # type: Text
dst_fs, # type: Union[Text, FS]
dst_path, # type: Text
workers=0, # type: int
):
# type: (...) -> None
"""Move a directory from one filesystem to another.
Arguments:
src_fs (FS or str): Source filesystem (... | Move a directory from one filesystem to another.
Arguments:
src_fs (FS or str): Source filesystem (instance or URL).
src_path (str): Path to a directory on ``src_fs``
dst_fs (FS or str): Destination filesystem (instance or URL).
dst_path (str): Path to a directory on ``dst_fs``.
... |
def get(self,url,
headers=None,
token=None,
data=None,
return_json=True,
default_headers=True,
quiet=False):
'''get will use requests to get a particular url
'''
bot.debug("GET %s" %url)
return self._call(url,
... | get will use requests to get a particular url |
def m_c(mcmc, scale, f, alphasMZ=0.1185, loop=3):
r"""Get running c quark mass in the MSbar scheme at the scale `scale`
in the theory with `f` dynamical quark flavours starting from $m_c(m_c)$"""
if scale == mcmc:
return mcmc # nothing to do
_sane(scale, f)
crd = rundec.CRunDec()
alphas... | r"""Get running c quark mass in the MSbar scheme at the scale `scale`
in the theory with `f` dynamical quark flavours starting from $m_c(m_c)$ |
def Save(self, token=None):
"""Generate a histogram object and store in the specified attribute."""
graph_series_by_label = {}
for active_time in self.active_days:
for label in self.categories[active_time]:
graphs_for_label = graph_series_by_label.setdefault(
label, rdf_stats.Clien... | Generate a histogram object and store in the specified attribute. |
def add_cli_to_bel(main: click.Group) -> click.Group: # noqa: D202
"""Add several command to main :mod:`click` function related to export to BEL."""
@main.command()
@click.option('-o', '--output', type=click.File('w'), default=sys.stdout)
@click.option('-f', '--fmt', default='bel', show_default=True, ... | Add several command to main :mod:`click` function related to export to BEL. |
async def update(self):
'''
reload all cached information
|coro|
Notes
-----
This is a slow process, and will remove the cache before updating.
Thus it is recomended to use the `*_force` properties, which will
only update the cache after data is retrived.
'''
keys = self.extras... | reload all cached information
|coro|
Notes
-----
This is a slow process, and will remove the cache before updating.
Thus it is recomended to use the `*_force` properties, which will
only update the cache after data is retrived. |
def find_central_module(self): # type: () -> Optional[str]
"""
Get the module that is the sole module, or the module
that matches the package name/version
:return:
"""
# find modules.
mf = ModuleFinder(self.file_opener)
candidates = mf.find_by_any_method... | Get the module that is the sole module, or the module
that matches the package name/version
:return: |
def _clones(self):
"""Yield all machines under this pool"""
vbox = VirtualBox()
machines = []
for machine in vbox.machines:
if machine.name == self.machine_name:
continue
if machine.name.startswith(self.machine_name):
machines.appen... | Yield all machines under this pool |
def _get_prepare_env(self, script, job_descriptor, inputs, outputs, mounts):
"""Return a dict with variables for the 'prepare' action."""
# Add the _SCRIPT_REPR with the repr(script) contents
# Add the _META_YAML_REPR with the repr(meta) contents
# Add variables for directories that need to be created... | Return a dict with variables for the 'prepare' action. |
def update(self, friendly_name=values.unset, target_workers=values.unset,
reservation_activity_sid=values.unset,
assignment_activity_sid=values.unset,
max_reserved_workers=values.unset, task_order=values.unset):
"""
Update the TaskQueueInstance
:para... | Update the TaskQueueInstance
:param unicode friendly_name: Human readable description of this TaskQueue
:param unicode target_workers: A string describing the Worker selection criteria for any Tasks that enter this TaskQueue.
:param unicode reservation_activity_sid: ActivitySID that will be ass... |
def get_player(self, name=None, platform=None, uid=None):
"""|coro|
Calls get_players and returns the first element,
exactly one of uid and name must be given, platform must be given
Parameters
----------
name : str
the name of the player you're searching fo... | |coro|
Calls get_players and returns the first element,
exactly one of uid and name must be given, platform must be given
Parameters
----------
name : str
the name of the player you're searching for
platform : str
the name of the platform you're ... |
def get(self, **options):
"""Run this query and get the first result.
Parameters:
\**options(QueryOptions, optional)
Returns:
Model: An entity or None if there were no results.
"""
sub_query = self.with_limit(1)
options = QueryOptions(sub_query).repl... | Run this query and get the first result.
Parameters:
\**options(QueryOptions, optional)
Returns:
Model: An entity or None if there were no results. |
def get_notifications(self, all=github.GithubObject.NotSet, participating=github.GithubObject.NotSet, since=github.GithubObject.NotSet, before=github.GithubObject.NotSet):
"""
:calls: `GET /notifications <http://developer.github.com/v3/activity/notifications>`_
:param all: bool
:param pa... | :calls: `GET /notifications <http://developer.github.com/v3/activity/notifications>`_
:param all: bool
:param participating: bool
:param since: datetime.datetime
:param before: datetime.datetime
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Notification.No... |
def graft(func=None, *, namespace=None):
"""Decorator for marking a function as a graft.
Parameters:
namespace (str): namespace of data, same format as targeting.
Returns:
Graft
For example, these grafts::
@graft
def foo_data:
return {'foo', True}
... | Decorator for marking a function as a graft.
Parameters:
namespace (str): namespace of data, same format as targeting.
Returns:
Graft
For example, these grafts::
@graft
def foo_data:
return {'foo', True}
@graft(namespace='bar')
def bar_data:
... |
def create_time_from_text(text):
"""
Parse a time in the form ``hh:mm`` or ``hhmm`` (or even ``hmm``) and return a :class:`datetime.time` object. If no
valid time can be extracted from the given string, :exc:`ValueError` will be raised.
"""
text = text.replace(':', '')
if not re.match('^\d{3,}$... | Parse a time in the form ``hh:mm`` or ``hhmm`` (or even ``hmm``) and return a :class:`datetime.time` object. If no
valid time can be extracted from the given string, :exc:`ValueError` will be raised. |
def process(self, candidates):
"""
:arg list candidates: list of Candidates
:returns: list of Candidates where score is at least min_score,
if and only if one or more Candidates have at least min_score.
Otherwise, returns original list of Candidates.
"... | :arg list candidates: list of Candidates
:returns: list of Candidates where score is at least min_score,
if and only if one or more Candidates have at least min_score.
Otherwise, returns original list of Candidates. |
def _line_iter(self, in_handle):
"""Read tab delimited file, handling ISA-Tab special case headers.
"""
reader = csv.reader(in_handle, dialect="excel-tab")
for line in reader:
if len(line) > 0 and line[0]:
# check for section headers; all uppercase and a singl... | Read tab delimited file, handling ISA-Tab special case headers. |
def to_graphviz(booster, fmap='', num_trees=0, rankdir='UT',
yes_color='#0000FF', no_color='#FF0000',
condition_node_params=None, leaf_node_params=None, **kwargs):
"""Convert specified tree to graphviz instance. IPython can automatically plot the
returned graphiz instance. Otherw... | Convert specified tree to graphviz instance. IPython can automatically plot the
returned graphiz instance. Otherwise, you should call .render() method
of the returned graphiz instance.
Parameters
----------
booster : Booster, XGBModel
Booster or XGBModel instance
fmap: str (optional)
... |
def _list_records_in_zone(self, zone, rdtype=None, name=None, content=None):
"""
Iterates over all records of the zone and returns a list of records filtered
by record type, name and content. The list is empty if no records found.
"""
records = []
rrsets = zone.iterate_rd... | Iterates over all records of the zone and returns a list of records filtered
by record type, name and content. The list is empty if no records found. |
def main(argv=None):
'''
Main entry-point for calling layouts directly as a program.
'''
# Prep argparse
ap = argparse.ArgumentParser(
description='Basic query options for Python HID-IO Layouts repository',
)
ap.add_argument('--list', action='store_true', help='List available layout ... | Main entry-point for calling layouts directly as a program. |
def get(self):
"""
Return the referer aka the WHOIS server of the current domain extension.
"""
if not PyFunceble.CONFIGURATION["local"]:
# We are not running a test in a local network.
if self.domain_extension not in self.ignored_extension:
# Th... | Return the referer aka the WHOIS server of the current domain extension. |
def warn( callingClass, astr_key, astr_extraMsg="" ):
    '''
    Convenience dispatcher to the error_exit() method.
    Will raise "warning" error, i.e. script processing continues.
    '''
    # A warning never terminates the script, so the exit-to-OS flag is
    # always False when dispatching to report().
    report( callingClass, astr_key, False, astr_extraMsg )
Will raise "warning" error, i.e. script processing continues. |
def _reproject(self, eopatch, src_raster):
"""
Reprojects the raster data from Geopedia's CRS (POP_WEB) to EOPatch's CRS.
"""
height, width = src_raster.shape
dst_raster = np.ones((height, width), dtype=self.raster_dtype)
src_bbox = transform_bbox(eopatch.bbox, CRS.POP_... | Reprojects the raster data from Geopedia's CRS (POP_WEB) to EOPatch's CRS. |
def validate_permission(self, key, permission):
""" validates if group can get assigned with permission"""
if permission.perm_name not in self.__possible_permissions__:
raise AssertionError(
"perm_name is not one of {}".format(self.__possible_permissions__)
)
... | validates if group can get assigned with permission |
def list_templates(self, extensions=None, filter_func=None):
"""Returns a list of templates for this environment. This requires
that the loader supports the loader's
:meth:`~BaseLoader.list_templates` method.
If there are other files in the template folder besides the
actual te... | Returns a list of templates for this environment. This requires
that the loader supports the loader's
:meth:`~BaseLoader.list_templates` method.
If there are other files in the template folder besides the
actual templates, the returned list can be filtered. There are two
ways:... |
def search_customer(self, limit=100, offset=0, email_pattern=None, last_name_pattern=None,
company_name_pattern=None, with_additional_data=False):
"""Search the list of customers."""
response = self.request(E.searchCustomerRequest(
E.limit(limit),
E.offset(offset),
... | Search the list of customers. |
def red_workshift(request, message=None):
    '''
    Redirects to the base workshift page for users who are logged in
    '''
    # Surface the optional error message before redirecting so it is
    # displayed on the target page.
    if message:
        messages.add_message(request, messages.ERROR, message)
    target_url = reverse('workshift:view_semester')
    return HttpResponseRedirect(target_url)
def provideObjectsToLearn(self, objectNames=None):
"""
Returns the objects in a canonical format to be sent to an experiment.
The returned format is a a dictionary where the keys are object names, and
values are lists of sensations, each sensation being a mapping from
cortical column index to a pai... | Returns the objects in a canonical format to be sent to an experiment.
The returned format is a a dictionary where the keys are object names, and
values are lists of sensations, each sensation being a mapping from
cortical column index to a pair of SDR's (one location and one feature).
returnDict = {
... |
def is_prelinked_bytecode(bytecode: bytes, link_refs: List[Dict[str, Any]]) -> bool:
"""
Returns False if all expected link_refs are unlinked, otherwise returns True.
todo support partially pre-linked bytecode (currently all or nothing)
"""
for link_ref in link_refs:
for offset in link_ref["... | Returns False if all expected link_refs are unlinked, otherwise returns True.
todo support partially pre-linked bytecode (currently all or nothing) |
def _parse(self, timestr, dayfirst=None, yearfirst=None, fuzzy=False,
fuzzy_with_tokens=False):
"""
Private method which performs the heavy lifting of parsing, called from
``parse()``, which passes on its ``kwargs`` to this function.
:param timestr:
The string... | Private method which performs the heavy lifting of parsing, called from
``parse()``, which passes on its ``kwargs`` to this function.
:param timestr:
The string to parse.
:param dayfirst:
Whether to interpret the first value in an ambiguous 3-integer date
(e... |
def runner(parallel, config):
"""Run functions, provided by string name, on multiple cores on the current machine.
"""
def run_parallel(fn_name, items):
items = [x for x in items if x is not None]
if len(items) == 0:
return []
items = diagnostics.track_parallel(items, fn_... | Run functions, provided by string name, on multiple cores on the current machine. |
def updated_topology_description(topology_description, server_description):
"""Return an updated copy of a TopologyDescription.
:Parameters:
- `topology_description`: the current TopologyDescription
- `server_description`: a new ServerDescription that resulted from
an ismaster call
Cal... | Return an updated copy of a TopologyDescription.
:Parameters:
- `topology_description`: the current TopologyDescription
- `server_description`: a new ServerDescription that resulted from
an ismaster call
Called after attempting (successfully or not) to call ismaster on the
server at se... |
def process(self, ast): # type: (Dict[str, Any]) -> None
""" Build a scope tree and links between scopes and identifiers by the
specified ast. You can access the built scope tree and the built links
by .scope_tree and .link_registry.
"""
id_classifier = IdentifierClassifier()
... | Build a scope tree and links between scopes and identifiers by the
specified ast. You can access the built scope tree and the built links
by .scope_tree and .link_registry. |
def dataset_create_new_cli(self,
folder=None,
public=False,
quiet=False,
convert_to_csv=True,
dir_mode='skip'):
""" client wrapper for creating a new dataset... | client wrapper for creating a new dataset
Parameters
==========
folder: the folder to initialize the metadata file in
public: should the dataset be public?
quiet: suppress verbose output (default is False)
convert_to_csv: if True, convert data to ... |
def value(self):
    """Returns the positive value to subtract from the total."""
    # A zero flat rate means the discount is a percentage of the line
    # item's total price; otherwise the flat rate itself is the discount.
    originalPrice = self.lineItem.totalPrice
    return originalPrice * self.percent if self.flatRate == 0 else self.flatRate
def _make_compile_argv(self, compile_request):
"""Return a list of arguments to use to compile sources. Subclasses can override and append."""
sources_minus_headers = list(self._iter_sources_minus_headers(compile_request))
if len(sources_minus_headers) == 0:
raise self._HeaderOnlyLibrary()
compi... | Return a list of arguments to use to compile sources. Subclasses can override and append. |
def parse_input_samples(job, inputs):
"""
Parses config file to pull sample information.
Stores samples as tuples of (uuid, URL)
:param JobFunctionWrappingJob job: passed by Toil automatically
:param Namespace inputs: Stores input arguments (see main)
"""
job.fileStore.logToMaster('Parsing ... | Parses config file to pull sample information.
Stores samples as tuples of (uuid, URL)
:param JobFunctionWrappingJob job: passed by Toil automatically
:param Namespace inputs: Stores input arguments (see main) |
def overlay_gateway_access_lists_ipv6_in_cg_ipv6_acl_in_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
overlay_gateway = ET.SubElement(config, "overlay-gateway", xmlns="urn:brocade.com:mgmt:brocade-tunnels")
name_key = ET.SubElement(overlay_gateway... | Auto Generated Code |
def _is_text_data(self, data_type):
"""Private method for testing text data types."""
dt = DATA_TYPES[data_type]
if type(self.data) is dt['type'] and len(self.data) < dt['max'] and all(type(char) == str for char in self.data):
self.type = data_type.upper()
self.len = len(... | Private method for testing text data types. |
def _parse_xml(self, xml):
"""Extracts the attributes from the XMLElement instance."""
from re import split
vms("Parsing <cron> XML child tag.", 2)
self.frequency = get_attrib(xml, "frequency", default=5, cast=int)
self.emails = split(",\s*", get_attrib(xml, "emails", default="")... | Extracts the attributes from the XMLElement instance. |
def swipe(self):
'''
Perform swipe action. if device platform greater than API 18, percent can be used and value between 0 and 1
Usages:
d().swipe.right()
d().swipe.left(steps=10)
d().swipe.up(steps=10)
d().swipe.down()
d().swipe("right", steps=20)
... | Perform swipe action. if device platform greater than API 18, percent can be used and value between 0 and 1
Usages:
d().swipe.right()
d().swipe.left(steps=10)
d().swipe.up(steps=10)
d().swipe.down()
d().swipe("right", steps=20)
d().swipe("right", steps=20, percent... |
def tag_ner(lang, input_text, output_type=list):
"""Run NER for chosen language.
Choosing output_type=list, returns a list of tuples:
>>> tag_ner('latin', input_text='ut Venus, ut Sirius, ut Spica', output_type=list)
[('ut',), ('Venus',), (',',), ('ut',), ('Sirius', 'Entity'), (',',), ('ut',), ('Spica',... | Run NER for chosen language.
Choosing output_type=list, returns a list of tuples:
>>> tag_ner('latin', input_text='ut Venus, ut Sirius, ut Spica', output_type=list)
[('ut',), ('Venus',), (',',), ('ut',), ('Sirius', 'Entity'), (',',), ('ut',), ('Spica', 'Entity')] |
def _get_phi_al_regional(self, C, mag, vs30measured, rrup):
"""
Returns intra-event (Tau) standard deviation (equation 26, page 1046)
"""
phi_al = np.ones((len(vs30measured)))
idx = rrup < 30
phi_al[idx] *= C['s5']
idx = ((rrup <= 80) & (rrup >= 30.))
ph... | Returns intra-event (Tau) standard deviation (equation 26, page 1046) |
def generate_tokens(readline):
"""
The generate_tokens() generator requires one argment, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as a string. Alternately,... | The generate_tokens() generator requires one argment, readline, which
must be a callable object which provides the same interface as the
readline() method of built-in file objects. Each call to the function
should return one line of input as a string. Alternately, readline
can be a callable function te... |
def create(cls, path_name=None, name=None, project_id=None,
log_modified_at=None, crawlable=True):
"""Initialize an instance and save it to db."""
result = cls(path_name, name, project_id, log_modified_at, crawlable)
db.session.add(result)
db.session.commit()
cra... | Initialize an instance and save it to db. |
def list_gemeenten_by_provincie(self, provincie):
'''
List all `gemeenten` in a `provincie`.
:param provincie: The :class:`Provincie` for which the \
`gemeenten` are wanted.
:rtype: A :class:`list` of :class:`Gemeente`.
'''
try:
gewest = provincie... | List all `gemeenten` in a `provincie`.
:param provincie: The :class:`Provincie` for which the \
`gemeenten` are wanted.
:rtype: A :class:`list` of :class:`Gemeente`. |
def create_paired_dir(output_dir, meta_id, static=False, needwebdir=True):
"""Creates the meta or static dirs.
Adds an "even" or "odd" subdirectory to the static path
based on the meta-id.
"""
# get the absolute root path
root_path = os.path.abspath(output_dir)
# if it's a static directory,... | Creates the meta or static dirs.
Adds an "even" or "odd" subdirectory to the static path
based on the meta-id. |
def writeFITSTable(filename, table):
"""
Convert a table into a FITSTable and then write to disk.
Parameters
----------
filename : str
Filename to write.
table : Table
Table to write.
Returns
-------
None
Notes
-----
Due to a bug in numpy, `int32` and ... | Convert a table into a FITSTable and then write to disk.
Parameters
----------
filename : str
Filename to write.
table : Table
Table to write.
Returns
-------
None
Notes
-----
Due to a bug in numpy, `int32` and `float32` are converted to `int64` and `float64` ... |
def send_error(self, code, message=None):
"""
Send and log plain text error reply.
:param code:
:param message:
"""
message = message.strip()
self.log_error("code %d, message %s", code, message)
self.send_response(code)
self.send_header("Content-Ty... | Send and log plain text error reply.
:param code:
:param message: |
def _create_fw_fab_dev(self, tenant_id, drvr_name, fw_dict):
    """Dispatch fabric-device creation based on the firewall type.

    Only Tenant Edge (TE) firewalls are handled here: when the
    ``fw_type`` entry of *fw_dict* equals ``FW_TENANT_EDGE`` the
    request is forwarded to the TE-specific routine; every other
    type is a no-op.
    """
    fw_type = fw_dict.get('fw_type')
    if fw_type != fw_constants.FW_TENANT_EDGE:
        return
    self._create_fw_fab_dev_te(tenant_id, drvr_name, fw_dict)
def add_callbacks(self, future, callback, errback):
"""
callback or errback may be None, but at least one must be
non-None.
"""
def done(f):
try:
res = f.result()
if callback:
callback(res)
except Excepti... | callback or errback may be None, but at least one must be
non-None. |
def _parse_feature(self, info):
"""Parse a feature command."""
parts = info.split(b'=', 1)
name = parts[0]
if len(parts) > 1:
value = self._path(parts[1])
else:
value = None
self.features[name] = value
return commands.FeatureCommand(name, v... | Parse a feature command. |
def connect_functions(self):
"""
Connects all events to the functions which should be called
:return:
"""
# Lambda is sometimes used to prevent passing the event parameter.
self.cfg_load_pushbutton.clicked.connect(lambda: self.load_overall_config())
self.cfg_save_... | Connects all events to the functions which should be called
:return: |
def complete_previous(self, count=1, disable_wrap_around=False):
"""
Browse to the previous completions.
(Does nothing if there are no completion.)
"""
if self.complete_state:
if self.complete_state.complete_index == 0:
index = None
if... | Browse to the previous completions.
(Does nothing if there are no completion.) |
def ucast_ip_mask(ip_addr_and_mask, return_tuple=True):
"""
Function to check if a address is unicast and that the CIDR mask is good
Args:
ip_addr_and_mask: Unicast IP address and mask in the following format 192.168.1.1/24
return_tuple: Set to True it returns a IP and mask in a tuple, set t... | Function to check if a address is unicast and that the CIDR mask is good
Args:
ip_addr_and_mask: Unicast IP address and mask in the following format 192.168.1.1/24
return_tuple: Set to True it returns a IP and mask in a tuple, set to False returns True or False
Returns: see return_tuple for ret... |
def append_data(self, len_tag, val_tag, data, header=False):
"""Append raw data, possibly including a embedded SOH.
:param len_tag: Tag number for length field.
:param val_tag: Tag number for value field.
:param data: Raw data byte string.
:param header: Append to header if True... | Append raw data, possibly including a embedded SOH.
:param len_tag: Tag number for length field.
:param val_tag: Tag number for value field.
:param data: Raw data byte string.
:param header: Append to header if True; default to body.
Appends two pairs: a length pair, followed b... |
def build_query_uri(self, uri=None, start=0, count=-1, filter='', query='', sort='', view='', fields='', scope_uris=''):
"""Builds the URI from given parameters.
More than one request can be send to get the items, regardless the query parameter 'count', because the actual
number of items in the... | Builds the URI from given parameters.
More than one request can be send to get the items, regardless the query parameter 'count', because the actual
number of items in the response might differ from the requested count. Some types of resource have a limited
number of items returned on each call... |
def option(name, help=""):
"""Decorator that add an option to the wrapped command or function."""
def decorator(func):
options = getattr(func, "options", [])
_option = Param(name, help)
# Insert at the beginning so the apparent order is preserved
options.insert(0, _option)
... | Decorator that add an option to the wrapped command or function. |
def political_views(self) -> str:
    """Return a randomly selected political view.

    :return: Political views.
    :Example:
        Liberal.
    """
    return self.random.choice(self._data['political_views'])
:return: Political views.
:Example:
Liberal. |
def do_proxy_failover(self, proxy_url, for_url):
"""
:param str proxy_url: Proxy to ban.
:param str for_url: The URL being requested.
:returns: The next proxy config to try, or 'DIRECT'.
:raises ProxyConfigExhaustedError: If the PAC file provided no usable proxy configuratio... | :param str proxy_url: Proxy to ban.
:param str for_url: The URL being requested.
:returns: The next proxy config to try, or 'DIRECT'.
:raises ProxyConfigExhaustedError: If the PAC file provided no usable proxy configuration. |
def showOperandLines(rh):
"""
Produce help output related to operands.
Input:
Request Handle
"""
if rh.function == 'HELP':
rh.printLn("N", " For the GetHost function:")
else:
rh.printLn("N", "Sub-Functions(s):")
rh.printLn("N", " diskpoolnames - " +
"Re... | Produce help output related to operands.
Input:
Request Handle |
def get_valid_examples(self):
    """Return a list of valid examples for the given schema.

    Collects the JSON content of every file found under the
    ``examples/valid`` subfolder of the schema folder.
    """
    examples_dir = os.path.join(
        self._get_schema_folder(), "examples", "valid")
    return list(_get_json_content_from_folder(examples_dir))
def tradepileDelete(self, trade_id): # item_id instead of trade_id?
"""Remove card from tradepile.
:params trade_id: Trade id.
"""
method = 'DELETE'
url = 'trade/%s' % trade_id
self.__request__(method, url) # returns nothing
# TODO: validate status code
... | Remove card from tradepile.
:params trade_id: Trade id. |
def read(self, num_bytes=None):
    """Read and return the specified bytes from the buffer.

    The returned bytes are consumed: the buffer position is
    advanced by the length of the result.
    """
    data = self.get_next(num_bytes)
    self.skip(len(data))
    return data
def set_file_path(self, filePath):
"""
Set the file path that needs to be locked.
:Parameters:
#. filePath (None, path): The file that needs to be locked. When given and a lock
is acquired, the file will be automatically opened for writing or reading
de... | Set the file path that needs to be locked.
:Parameters:
#. filePath (None, path): The file that needs to be locked. When given and a lock
is acquired, the file will be automatically opened for writing or reading
depending on the given mode. If None is given, the locker... |
def merge_selected_cells(self, selection):
"""Merges or unmerges cells that are in the selection bounding box
Parameters
----------
selection: Selection object
\tSelection for which attr toggle shall be returned
"""
tab = self.grid.current_table
# Get ... | Merges or unmerges cells that are in the selection bounding box
Parameters
----------
selection: Selection object
\tSelection for which attr toggle shall be returned |
def load_steps_impl(self, registry, path, module_names=None):
"""
Load the step implementations at the given path, with the given module names. If
module_names is None then the module 'steps' is searched by default.
"""
if not module_names:
module_names = ['steps']
... | Load the step implementations at the given path, with the given module names. If
module_names is None then the module 'steps' is searched by default. |
def execute(func: types.FunctionType):
"""
>>> from Redy.Magic.Classic import execute
>>> x = 1
>>> @execute
>>> def f(x = x) -> int:
>>> return x + 1
>>> assert f is 2
"""
spec = getfullargspec(func)
default = spec.defaults
arg_cursor = 0
def get_item(name):
... | >>> from Redy.Magic.Classic import execute
>>> x = 1
>>> @execute
>>> def f(x = x) -> int:
>>> return x + 1
>>> assert f is 2 |
def aggregate_count_over_time(self, metric_store, groupby_name, aggregate_timestamp):
"""
Organize and store the count of data from the log line into the metric store by columnm, group name, timestamp
:param dict metric_store: The metric store used to store all the parsed the log data
:param string gro... | Organize and store the count of data from the log line into the metric store by columnm, group name, timestamp
:param dict metric_store: The metric store used to store all the parsed the log data
:param string groupby_name: the group name that the log line belongs to
:param string aggregate_timestamp: time... |
def get_request_feature(self, name):
"""Parses the request for a particular feature.
Arguments:
name: A feature name.
Returns:
A feature parsed from the URL if the feature is supported, or None.
"""
if '[]' in name:
# array-type
retur... | Parses the request for a particular feature.
Arguments:
name: A feature name.
Returns:
A feature parsed from the URL if the feature is supported, or None. |
def get_themes(templates_path):
    """Return the list of available theme names.

    Every entry of *templates_path* is a theme, except the shared
    ``__common__`` directory, which is filtered out.
    """
    return [entry for entry in os.listdir(templates_path)
            if entry != '__common__']
def log_request(handler):
"""
Logging request is opposite to response, sometime its necessary,
feel free to enable it.
"""
block = 'Request Infomations:\n' + _format_headers_log(handler.request.headers)
if handler.request.arguments:
block += '+----Arguments----+\n'
for k, v in h... | Logging request is opposite to response, sometime its necessary,
feel free to enable it. |
def fault_sets(self):
"""
You can only create and configure Fault Sets before adding SDSs to the system, and configuring them incorrectly
may prevent the creation of volumes. An SDS can only be added to a Fault Set during the creation of the SDS.
:rtype: list of Faultset objects
... | You can only create and configure Fault Sets before adding SDSs to the system, and configuring them incorrectly
may prevent the creation of volumes. An SDS can only be added to a Fault Set during the creation of the SDS.
:rtype: list of Faultset objects |
def _parse_depot_section(f):
"""Parse TSPLIB DEPOT_SECTION data part from file descriptor f
Args
----
f : str
File descriptor
Returns
-------
int
an array of depots
"""
depots = []
for line in f:
line = strip(line)
if line == '-1' or line == ... | Parse TSPLIB DEPOT_SECTION data part from file descriptor f
Args
----
f : str
File descriptor
Returns
-------
int
an array of depots |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.