code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def safe_evaluate(command, glob, local):
"""
Continue to attempt to execute the given command, importing objects which
cause a NameError in the command
:param command: command for eval
:param glob: globals dict for eval
:param local: locals dict for eval
:return: command result
"""
... | Continue to attempt to execute the given command, importing objects which
cause a NameError in the command
:param command: command for eval
:param glob: globals dict for eval
:param local: locals dict for eval
:return: command result |
def _remove_summary(self):
"""Removed packge size summary
"""
if self.size > 0:
print("\nRemoved summary")
print("=" * 79)
print("{0}Size of removed packages {1} {2}.{3}".format(
self.meta.color["GREY"], round(self.size, 2), self.unit,
... | Removed packge size summary |
def show_instance(name, call=None):
'''
List the a single node, return dict of grains.
'''
local = salt.client.LocalClient()
ret = local.cmd(name, 'grains.items')
ret.update(_build_required_items(ret))
return ret | List the a single node, return dict of grains. |
def set_alarm_mode(self, mode):
"""
:param mode: one of [None, "activity", "tamper", "forced_entry"]
:return: nothing
"""
values = {"desired_state": {"alarm_mode": mode}}
response = self.api_interface.set_device_state(self, values)
self._update_state_from_r... | :param mode: one of [None, "activity", "tamper", "forced_entry"]
:return: nothing |
def add_service(self, service_type, service_endpoint=None, values=None):
"""
Add a service to the list of services on the DDO.
:param service_type: Service
:param service_endpoint: Service endpoint, str
:param values: Python dict with serviceDefinitionId, templateId, serviceAgre... | Add a service to the list of services on the DDO.
:param service_type: Service
:param service_endpoint: Service endpoint, str
:param values: Python dict with serviceDefinitionId, templateId, serviceAgreementContract,
list of conditions and consume endpoint. |
def arity_evaluation_checker(function):
"""Build an evaluation checker that will return True when it is
guaranteed that all positional arguments have been accounted for.
"""
is_class = inspect.isclass(function)
if is_class:
function = function.__init__
functio... | Build an evaluation checker that will return True when it is
guaranteed that all positional arguments have been accounted for. |
def cached_property(func):
"""Special property decorator that caches the computed
property value in the object's instance dict the first
time it is accessed.
"""
name = func.__name__
doc = func.__doc__
def getter(self, name=name):
try:
return self.__dict__[name]
... | Special property decorator that caches the computed
property value in the object's instance dict the first
time it is accessed. |
def _parse_name(self, team_data):
"""
Parses the team's name.
On the pages being parsed, the team's name doesn't follow the standard
parsing algorithm that we use for the fields, and requires a special
one-off algorithm. The name is attached in the 'title' attribute from
... | Parses the team's name.
On the pages being parsed, the team's name doesn't follow the standard
parsing algorithm that we use for the fields, and requires a special
one-off algorithm. The name is attached in the 'title' attribute from
within 'team_ID'. A few simple regex subs captures th... |
def deploy_s3app(self):
"""Deploys artifacts contents to S3 bucket"""
utils.banner("Deploying S3 App")
primary_region = self.configs['pipeline']['primary_region']
s3obj = s3.S3Deployment(
app=self.app,
env=self.env,
region=self.region,
prop... | Deploys artifacts contents to S3 bucket |
def render_js_template(self, template_path, element_id, context=None):
"""
Render a js template.
"""
context = context or {}
return u"<script type='text/template' id='{}'>\n{}\n</script>".format(
element_id,
self.render_template(template_path, context)
... | Render a js template. |
def color_pack2rgb(packed):
"""Returns r, g, b tuple from packed wx.ColourGetRGB value"""
r = packed & 255
g = (packed & (255 << 8)) >> 8
b = (packed & (255 << 16)) >> 16
return r, g, b | Returns r, g, b tuple from packed wx.ColourGetRGB value |
def calculate_ecef_velocity(inst):
"""
Calculates spacecraft velocity in ECEF frame.
Presumes that the spacecraft velocity in ECEF is in
the input instrument object as position_ecef_*. Uses a symmetric
difference to calculate the velocity thus endpoints will be
set to NaN. Routine should b... | Calculates spacecraft velocity in ECEF frame.
Presumes that the spacecraft velocity in ECEF is in
the input instrument object as position_ecef_*. Uses a symmetric
difference to calculate the velocity thus endpoints will be
set to NaN. Routine should be run using pysat data padding feature
to c... |
def nfa_json_importer(input_file: str) -> dict:
""" Imports a NFA from a JSON file.
:param str input_file: path+filename to JSON file;
:return: *(dict)* representing a NFA.
"""
file = open(input_file)
json_file = json.load(file)
transitions = {} # key [state in states, action in alphabet]... | Imports a NFA from a JSON file.
:param str input_file: path+filename to JSON file;
:return: *(dict)* representing a NFA. |
def get_processor_status(self, p, x, y):
"""Get the status of a given core and the application executing on it.
Returns
-------
:py:class:`.ProcessorStatus`
Representation of the current state of the processor.
"""
# Get the VCPU base
address = (self.... | Get the status of a given core and the application executing on it.
Returns
-------
:py:class:`.ProcessorStatus`
Representation of the current state of the processor. |
def get_out_streamids(self):
"""Returns a set of output stream ids registered for this component"""
if self.outputs is None:
return set()
if not isinstance(self.outputs, (list, tuple)):
raise TypeError("Argument to outputs must be either list or tuple, given: %s"
% str(typ... | Returns a set of output stream ids registered for this component |
def parse_time_trigger_string(trigger_frequency):
"""
:param trigger_frequency: human-readable and editable string in one of two formats:
- 'at Day_of_Week-HH:MM, ..., Day_of_Week-HH:MM'
- 'every NNN'
:return: return tuple (parsed_trigger_frequency, timer_klass)
"""
# replace multiple spac... | :param trigger_frequency: human-readable and editable string in one of two formats:
- 'at Day_of_Week-HH:MM, ..., Day_of_Week-HH:MM'
- 'every NNN'
:return: return tuple (parsed_trigger_frequency, timer_klass) |
def altitude(SCALED_PRESSURE, ground_pressure=None, ground_temp=None):
'''calculate barometric altitude'''
from . import mavutil
self = mavutil.mavfile_global
if ground_pressure is None:
if self.param('GND_ABS_PRESS', None) is None:
return 0
ground_pressure = self.param('GND_... | calculate barometric altitude |
def get_nvr(self, epoch=None):
"""get NVR string from .spec Name, Version, Release and Epoch"""
name = self.get_tag('Name', expand_macros=True)
vr = self.get_vr(epoch=epoch)
return '%s-%s' % (name, vr) | get NVR string from .spec Name, Version, Release and Epoch |
def rowlengths(table):
"""
Report on row lengths found in the table. E.g.::
>>> import petl as etl
>>> table = [['foo', 'bar', 'baz'],
... ['A', 1, 2],
... ['B', '2', '3.4'],
... [u'B', u'3', u'7.8', True],
... ['D', 'xyz', 9.0... | Report on row lengths found in the table. E.g.::
>>> import petl as etl
>>> table = [['foo', 'bar', 'baz'],
... ['A', 1, 2],
... ['B', '2', '3.4'],
... [u'B', u'3', u'7.8', True],
... ['D', 'xyz', 9.0],
... ['E', None]... |
def cmd_antenna(self, args):
'''set gcs location'''
if len(args) != 2:
if self.gcs_location is None:
print("GCS location not set")
else:
print("GCS location %s" % str(self.gcs_location))
return
self.gcs_location = (float(args[0]... | set gcs location |
def bandpass_filter(data, k, w1, w2):
"""
This function will apply a bandpass filter to data. It will be kth
order and will select the band between w1 and w2.
Parameters
----------
data: array, dtype=float
The data you wish to filter
k: number, int
The order ... | This function will apply a bandpass filter to data. It will be kth
order and will select the band between w1 and w2.
Parameters
----------
data: array, dtype=float
The data you wish to filter
k: number, int
The order of approximation for the filter. A max value for
... |
def list_modules(desc=False):
'''
List currently installed PSGet Modules on the system.
:param desc: If ``True``, the verbose description will be returned.
:type desc: ``bool``
CLI Example:
.. code-block:: bash
salt 'win01' psget.list_modules
salt 'win01' psget.list_modules ... | List currently installed PSGet Modules on the system.
:param desc: If ``True``, the verbose description will be returned.
:type desc: ``bool``
CLI Example:
.. code-block:: bash
salt 'win01' psget.list_modules
salt 'win01' psget.list_modules desc=True |
def unsetenv(key):
"""Like `os.unsetenv` but takes unicode under Windows + Python 2
Args:
key (pathlike): The env var to unset
"""
key = path2fsn(key)
if is_win:
# python 3 has no unsetenv under Windows -> use our ctypes one as well
try:
del_windows_env_var(key)... | Like `os.unsetenv` but takes unicode under Windows + Python 2
Args:
key (pathlike): The env var to unset |
def n_at_a_time(
items: List[int], n: int, fillvalue: str
) -> Iterator[Tuple[Union[int, str]]]:
"""Returns an iterator which groups n items at a time.
Any final partial tuple will be padded with the fillvalue
>>> list(n_at_a_time([1, 2, 3, 4, 5], 2, 'X'))
[(1, 2), (3, 4), (5, 'X')]
"""
it ... | Returns an iterator which groups n items at a time.
Any final partial tuple will be padded with the fillvalue
>>> list(n_at_a_time([1, 2, 3, 4, 5], 2, 'X'))
[(1, 2), (3, 4), (5, 'X')] |
def simplex_select_entering_arc(self, t, pivot):
'''
API:
simplex_select_entering_arc(self, t, pivot)
Description:
Decides and returns entering arc using pivot rule.
Input:
t: current spanning tree solution
pivot: May be one of the followin... | API:
simplex_select_entering_arc(self, t, pivot)
Description:
Decides and returns entering arc using pivot rule.
Input:
t: current spanning tree solution
pivot: May be one of the following; 'first_eligible' or 'dantzig'.
'dantzig' is the defaul... |
def add_index(self, mode, blob_id, path):
"""
Add new entry to the current index
:param tree:
:return:
"""
self.command_exec(['update-index', '--add', '--cacheinfo', mode, blob_id, path]) | Add new entry to the current index
:param tree:
:return: |
def split_qs(string, delimiter='&'):
"""Split a string by the specified unquoted, not enclosed delimiter"""
open_list = '[<{('
close_list = ']>})'
quote_chars = '"\''
level = index = last_index = 0
quoted = False
result = []
for index, letter in enumerate(string):
if letter in... | Split a string by the specified unquoted, not enclosed delimiter |
def members(self):
""" -> #set of all members in the set """
if self.serialized:
return set(map(
self._loads, self._client.smembers(self.key_prefix)))
else:
return set(map(
self._decode, self._client.smembers(self.key_prefix))) | -> #set of all members in the set |
def _comm_tensor_data(device_name,
node_name,
maybe_base_expanded_node_name,
output_slot,
debug_op,
tensor_value,
wall_time):
"""Create a dict() as the outgoing data in the tensor data c... | Create a dict() as the outgoing data in the tensor data comm route.
Note: The tensor data in the comm route does not include the value of the
tensor in its entirety in general. Only if a tensor satisfies the following
conditions will its entire value be included in the return value of this
method:
1. Has a n... |
def property_data_zpool():
'''
Return a dict of zpool properties
.. note::
Each property will have an entry with the following info:
- edit : boolean - is this property editable after pool creation
- type : str - either bool, bool_alt, size, numeric, or string
-... | Return a dict of zpool properties
.. note::
Each property will have an entry with the following info:
- edit : boolean - is this property editable after pool creation
- type : str - either bool, bool_alt, size, numeric, or string
- values : str - list of possible values... |
def get_language_description(grammar_file):
"""
Gets the language description from given language grammar file.
:param grammar_file: Language grammar.
:type grammar_file: unicode
:return: Language description.
:rtype: Language
"""
LOGGER.debug("> Processing '{0}' grammar file.".format(... | Gets the language description from given language grammar file.
:param grammar_file: Language grammar.
:type grammar_file: unicode
:return: Language description.
:rtype: Language |
async def get_updates(self, offset: typing.Union[base.Integer, None] = None,
limit: typing.Union[base.Integer, None] = None,
timeout: typing.Union[base.Integer, None] = None,
allowed_updates:
typing.Union[typing.List... | Use this method to receive incoming updates using long polling (wiki).
Notes
1. This method will not work if an outgoing webhook is set up.
2. In order to avoid getting duplicate updates, recalculate offset after each server response.
Source: https://core.telegram.org/bots/api#getupdat... |
def get_position_i(self):
""" Get the I value of the current PID for position
"""
data = []
data.append(0x09)
data.append(self.servoid)
data.append(RAM_READ_REQ)
data.append(POSITION_KI_RAM)
data.append(BYTE2)
send_data(data)
rxdata = []
... | Get the I value of the current PID for position |
def create(self, email, tos=1, options=None):
""" Creates an account with Zencoder, no API Key necessary.
https://app.zencoder.com/docs/api/accounts/create
"""
data = {'email': email,
'terms_of_service': str(tos)}
if options:
data.update(options)
... | Creates an account with Zencoder, no API Key necessary.
https://app.zencoder.com/docs/api/accounts/create |
def save(self, name, content):
"""
Saves new content to the file specified by name. The content should be a
proper File object, ready to be read from the beginning.
"""
# Get the proper name for the file, as it will actually be saved.
if name is None:
name = c... | Saves new content to the file specified by name. The content should be a
proper File object, ready to be read from the beginning. |
def updatej9DB(dbname = abrevDBname, saveRawHTML = False):
"""Updates the database of Journal Title Abbreviations. Requires an internet connection. The data base is saved relative to the source file not the working directory.
# Parameters
_dbname_ : `optional [str]`
> The name of the database file, d... | Updates the database of Journal Title Abbreviations. Requires an internet connection. The data base is saved relative to the source file not the working directory.
# Parameters
_dbname_ : `optional [str]`
> The name of the database file, default is "j9Abbreviations.db"
_saveRawHTML_ : `optional [boo... |
def execute(self):
"""
params = {
"ApexCode" : "None",
"ApexProfiling" : "01pd0000001yXtYAAU",
"Callout" : True,
"Database" : 1,
"ExpirationDate" : 3,
"ScopeId" ... | params = {
"ApexCode" : "None",
"ApexProfiling" : "01pd0000001yXtYAAU",
"Callout" : True,
"Database" : 1,
"ExpirationDate" : 3,
"ScopeId" : "",
"System" ... |
def tie_properties(self, class_list):
""" Runs through the classess and ties the properties to the class
args:
class_list: a list of class names to run
"""
log.setLevel(self.log_level)
start = datetime.datetime.now()
log.info(" Tieing properties to the class"... | Runs through the classess and ties the properties to the class
args:
class_list: a list of class names to run |
def change_email(self, email):
"""
Changes the email address for a user.
A user needs to verify this new email address before it becomes
active. By storing the new email address in a temporary field
-- ``temporary_email`` -- we are able to set this email address
after ... | Changes the email address for a user.
A user needs to verify this new email address before it becomes
active. By storing the new email address in a temporary field
-- ``temporary_email`` -- we are able to set this email address
after the user has verified it by clicking on the verific... |
def readBIM(fileName):
"""Reads a BIM file.
:param fileName: the name of the BIM file to read.
:type fileName: str
:returns: the set of markers in the BIM file.
Reads a Plink BIM file and extract the name of the markers. There is one
marker per line, and the name of the marker is in the seco... | Reads a BIM file.
:param fileName: the name of the BIM file to read.
:type fileName: str
:returns: the set of markers in the BIM file.
Reads a Plink BIM file and extract the name of the markers. There is one
marker per line, and the name of the marker is in the second column. There
is no hea... |
def get_dataset(self, key, info):
"""Get calibrated channel data."""
if self.mdrs is None:
self._read_all(self.filename)
if key.name in ['longitude', 'latitude']:
lons, lats = self.get_full_lonlats()
if key.name == 'longitude':
dataset = creat... | Get calibrated channel data. |
def put(self, namespacePrefix):
"""Update a specific configuration namespace"""
self.reqparse.add_argument('name', type=str, required=True)
self.reqparse.add_argument('sortOrder', type=int, required=True)
args = self.reqparse.parse_args()
ns = db.ConfigNamespace.find_one(ConfigN... | Update a specific configuration namespace |
def AddExtraShapes(extra_shapes_txt, graph):
"""
Add extra shapes into our input set by parsing them out of a GTFS-formatted
shapes.txt file. Useful for manually adding lines to a shape file, since it's
a pain to edit .shp files.
"""
print("Adding extra shapes from %s" % extra_shapes_txt)
try:
tmpdi... | Add extra shapes into our input set by parsing them out of a GTFS-formatted
shapes.txt file. Useful for manually adding lines to a shape file, since it's
a pain to edit .shp files. |
def multicat(data, samples, ipyclient):
"""
Runs singlecat and cleanup jobs for each sample.
For each sample this fills its own hdf5 array with catg data & indels.
This is messy, could use simplifiying.
"""
## progress ticker
start = time.time()
printstr = " indexing clusters | {} |... | Runs singlecat and cleanup jobs for each sample.
For each sample this fills its own hdf5 array with catg data & indels.
This is messy, could use simplifiying. |
def error_messages(self, driver_id=None):
"""Get the error messages for all drivers or a specific driver.
Args:
driver_id: The specific driver to get the errors for. If this is
None, then this method retrieves the errors for all drivers.
Returns:
A dicti... | Get the error messages for all drivers or a specific driver.
Args:
driver_id: The specific driver to get the errors for. If this is
None, then this method retrieves the errors for all drivers.
Returns:
A dictionary mapping driver ID to a list of the error messag... |
def check_pdb_status(pdbid):
"""Returns the status and up-to-date entry in the PDB for a given PDB ID"""
url = 'http://www.rcsb.org/pdb/rest/idStatus?structureId=%s' % pdbid
xmlf = urlopen(url)
xml = et.parse(xmlf)
xmlf.close()
status = None
current_pdbid = pdbid
for df in xml.xpath('//r... | Returns the status and up-to-date entry in the PDB for a given PDB ID |
def run(self, node, client):
"""
Upload the file, retaining permissions
See also L{Deployment.run}
"""
perms = os.stat(self.source).st_mode
client.put(path=self.target, chmod=perms,
contents=open(self.source, 'rb').read())
return node | Upload the file, retaining permissions
See also L{Deployment.run} |
def strptime(cls, value, format):
""" Parse a datetime string using the provided format.
This also emulates `%z` support on Python 2.
:param value: Datetime string
:type value: str
:param format: Format to use for parsing
:type format: str
:rtype: datetime
... | Parse a datetime string using the provided format.
This also emulates `%z` support on Python 2.
:param value: Datetime string
:type value: str
:param format: Format to use for parsing
:type format: str
:rtype: datetime
:raises ValueError: Invalid format
... |
def _opbend_transform_mean(rs, fn_low, deriv=0):
"""Compute the mean of the 3 opbends
"""
v = 0.0
d = np.zeros((4,3), float)
dd = np.zeros((4,3,4,3), float)
#loop over the 3 cyclic permutations
for p in np.array([[0,1,2], [2,0,1], [1,2,0]]):
opbend = _opbend_transform([rs[p[0]], rs[p... | Compute the mean of the 3 opbends |
def build_next_url(self, url):
"""Builds next url in a format compatible with cousteau. Path + query"""
if not url:
if self.split_urls: # If we had a long request give the next part
self.total_count_flag = False # Reset flag for count
return self.split_urls.... | Builds next url in a format compatible with cousteau. Path + query |
def variant(case_id, variant_id):
"""Show a single variant."""
case_obj = app.db.case(case_id)
variant = app.db.variant(case_id, variant_id)
if variant is None:
return abort(404, "variant not found")
comments = app.db.comments(variant_id=variant.md5)
template = 'sv_variant.html' if app.... | Show a single variant. |
def is_valid_path(path):
"""
:return: True if the path is valid, else raise a ValueError with the
specific error
"""
if not path.startswith('/'):
msg = 'Invalid path "%s". Paths need to start with "/".'
raise ValueError(msg % path[:40])
for c in ' \t':
if c in p... | :return: True if the path is valid, else raise a ValueError with the
specific error |
def docker(ctx, docker_run_args, docker_image, nvidia, digest, jupyter, dir, no_dir, shell, port, cmd, no_tty):
"""W&B docker lets you run your code in a docker image ensuring wandb is configured. It adds the WANDB_DOCKER and WANDB_API_KEY
environment variables to your container and mounts the current directory... | W&B docker lets you run your code in a docker image ensuring wandb is configured. It adds the WANDB_DOCKER and WANDB_API_KEY
environment variables to your container and mounts the current directory in /app by default. You can pass additional
args which will be added to `docker run` before the image name is dec... |
def _LogProgressUpdateIfReasonable(self):
"""Prints a progress update if enough time has passed."""
next_log_time = (
self._time_of_last_status_log +
self.SECONDS_BETWEEN_STATUS_LOG_MESSAGES)
current_time = time.time()
if current_time < next_log_time:
return
completion_time = t... | Prints a progress update if enough time has passed. |
def dependencies(request, ident, stateless=False, **kwargs):
'Return the dependencies'
_, app = DashApp.locate_item(ident, stateless)
with app.app_context():
view_func = app.locate_endpoint_function('dash-dependencies')
resp = view_func()
return HttpResponse(resp.data,
... | Return the dependencies |
def save_imglist(self, fname=None, root=None, shuffle=False):
"""
save imglist to disk
Parameters:
----------
fname : str
saved filename
"""
def progress_bar(count, total, suffix=''):
import sys
bar_len = 24
filled_... | save imglist to disk
Parameters:
----------
fname : str
saved filename |
def name(self):
"""
Get the module name
:return: Module name
:rtype: str | unicode
"""
res = type(self).__name__
if self._id:
res += ".{}".format(self._id)
return res | Get the module name
:return: Module name
:rtype: str | unicode |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'status') and self.status is not None:
_dict['status'] = self.status
if hasattr(self, 'last_updated') and self.last_updated is not None:
_dict['last_updated'] =... | Return a json dictionary representing this model. |
def del_option(self, section, option):
""" Deletes an option if the section and option exist """
if self.config.has_section(section):
if self.config.has_option(section, option):
self.config.remove_option(section, option)
return (True, self.config.options(secti... | Deletes an option if the section and option exist |
def load_method(path,method,class_name = None,instance_creator = None):
'''
Returns an instance of the method specified.
Args :
path : The path to the module contianing the method or function.
method : The name of the function.
class_name ... | Returns an instance of the method specified.
Args :
path : The path to the module contianing the method or function.
method : The name of the function.
class_name : The name of the class if the funtion is a method.
instance_creator: The na... |
def rollback(self):
"""Rollback the changes previously made by remove()."""
if self.save_dir is None:
logger.error(
"Can't roll back %s; was not uninstalled",
self.dist.project_name,
)
return False
logger.info('Rolling back unin... | Rollback the changes previously made by remove(). |
def filter_by_moys(self, moys):
"""Filter the Data Collection based on a list of minutes of the year.
Args:
moys: A List of minutes of the year [0..8759 * 60]
Return:
A new Data Collection with filtered data
"""
_filt_values, _filt_datetimes = self._filte... | Filter the Data Collection based on a list of minutes of the year.
Args:
moys: A List of minutes of the year [0..8759 * 60]
Return:
A new Data Collection with filtered data |
def _build_vocab(filename, vocab_dir, vocab_name):
"""Reads a file to build a vocabulary.
Args:
filename: file to read list of words from.
vocab_dir: directory where to save the vocabulary.
vocab_name: vocab file name.
Returns:
text encoder.
"""
vocab_path = os.path.join(vocab_dir, vocab_nam... | Reads a file to build a vocabulary.
Args:
filename: file to read list of words from.
vocab_dir: directory where to save the vocabulary.
vocab_name: vocab file name.
Returns:
text encoder. |
def load(self, *objs, consistent=False):
"""Populate objects from DynamoDB.
:param objs: objects to delete.
:param bool consistent: Use `strongly consistent reads`__ if True. Default is False.
:raises bloop.exceptions.MissingKey: if any object doesn't provide a value for a key column.
... | Populate objects from DynamoDB.
:param objs: objects to delete.
:param bool consistent: Use `strongly consistent reads`__ if True. Default is False.
:raises bloop.exceptions.MissingKey: if any object doesn't provide a value for a key column.
:raises bloop.exceptions.MissingObjects: if ... |
def on_copy_local(self, pair):
"""Called when the local resource should be copied to remote."""
status = pair.remote_classification
self._log_action("copy", status, ">", pair.local) | Called when the local resource should be copied to remote. |
def _TypecheckDecorator(subject=None, **kwargs):
"""Dispatches type checks based on what the subject is.
Functions or methods are annotated directly. If this method is called
with keyword arguments only, return a decorator.
"""
if subject is None:
return _TypecheckDecoratorFactory(kwargs)
elif inspect.... | Dispatches type checks based on what the subject is.
Functions or methods are annotated directly. If this method is called
with keyword arguments only, return a decorator. |
def polygonVertices(x, y, radius, sides, rotationDegrees=0, stretchHorizontal=1.0, stretchVertical=1.0):
"""
Returns a generator that produces the (x, y) points of the vertices of a regular polygon.
`x` and `y` mark the center of the polygon, `radius` indicates the size,
`sides` specifies what kind of p... | Returns a generator that produces the (x, y) points of the vertices of a regular polygon.
`x` and `y` mark the center of the polygon, `radius` indicates the size,
`sides` specifies what kind of polygon it is.
Odd-sided polygons have a pointed corner at the top and flat horizontal
side at the bottom. Th... |
def transitivity_wu(W):
'''
Transitivity is the ratio of 'triangles to triplets' in the network.
(A classical version of the clustering coefficient).
Parameters
----------
W : NxN np.ndarray
weighted undirected connection matrix
Returns
-------
T : int
transitivity ... | Transitivity is the ratio of 'triangles to triplets' in the network.
(A classical version of the clustering coefficient).
Parameters
----------
W : NxN np.ndarray
weighted undirected connection matrix
Returns
-------
T : int
transitivity scalar |
def require_remote_ref_path(func):
"""A decorator raising a TypeError if we are not a valid remote, based on the path"""
def wrapper(self, *args):
if not self.is_remote():
raise ValueError("ref path does not point to a remote reference: %s" % self.path)
return func(self, *args)
... | A decorator raising a TypeError if we are not a valid remote, based on the path |
def build_parser():
"""
_build_parser_
Set up CLI parser options, parse the
CLI options an return the parsed results
"""
parser = argparse.ArgumentParser(
description='dockerstache templating util'
)
parser.add_argument(
'--output', '-o',
help='Working directory... | _build_parser_
Set up CLI parser options, parse the
CLI options an return the parsed results |
def local_replace(self, dt, use_dst=True, _recurse=False, **kwds):
"""Return pywws timestamp (utc, no tzinfo) for the most recent
local time before the pywws timestamp dt, with datetime replace
applied.
"""
local_time = dt + self.standard_offset
if use_dst:
d... | Return pywws timestamp (utc, no tzinfo) for the most recent
local time before the pywws timestamp dt, with datetime replace
applied. |
def read(file, system):
"""Parse an ANDES card file into internal variables"""
try:
fid = open(file, 'r')
raw_file = fid.readlines()
except IOError:
print('* IOError while reading input card file.')
return
ret_dict = dict()
ret_dict['outfile'] = file.split('.')[0].lo... | Parse an ANDES card file into internal variables |
def call(command, silent=False):
""" Runs a bash command safely, with shell=false, catches any non-zero
return codes. Raises slightly modified CalledProcessError exceptions
on failures.
Note: command is a string and cannot include pipes."""
try:
if silent:
with open(... | Runs a bash command safely, with shell=false, catches any non-zero
return codes. Raises slightly modified CalledProcessError exceptions
on failures.
Note: command is a string and cannot include pipes. |
def find_mecab_dictionary(names):
"""
Find a MeCab dictionary with a given name. The dictionary has to be
installed separately -- see wordfreq's README for instructions.
"""
suggested_pkg = names[0]
paths = [
os.path.expanduser('~/.local/lib/mecab/dic'),
'/var/lib/mecab/dic',
... | Find a MeCab dictionary with a given name. The dictionary has to be
installed separately -- see wordfreq's README for instructions. |
def plot_shade_mask(ax, ind, mask, facecolor='gray', alpha=0.5):
'''Shade across x values where boolean mask is `True`
Args
----
ax: pyplot.ax
Axes object to plot with a shaded region
ind: ndarray
The indices to use for the x-axis values of the data
mask: ndarray
Boolean... | Shade across x values where boolean mask is `True`
Args
----
ax: pyplot.ax
Axes object to plot with a shaded region
ind: ndarray
The indices to use for the x-axis values of the data
mask: ndarray
Boolean mask array to determine which regions should be shaded
facecolor: m... |
def _is_accepted(self, element_tag):
'''Return if the link is accepted by the filters.'''
element_tag = element_tag.lower()
if self._ignored_tags is not None \
and element_tag in self._ignored_tags:
return False
if self._followed_tags is not None:
ret... | Return if the link is accepted by the filters. |
def in_builddir(sub='.'):
"""
Decorate a project phase with a local working directory change.
Args:
sub: An optional subdirectory to change into.
"""
from functools import wraps
def wrap_in_builddir(func):
"""Wrap the function for the new build directory."""
@wraps(fun... | Decorate a project phase with a local working directory change.
Args:
sub: An optional subdirectory to change into. |
def system(self) -> 'EFBChat':
"""
Set the chat as a system chat.
Only set for channel-level and group-level system chats.
Returns:
EFBChat: This object.
"""
self.chat_name = "System"
self.chat_alias = None
self.chat_uid = EFBChat.SYSTEM_ID
... | Set the chat as a system chat.
Only set for channel-level and group-level system chats.
Returns:
EFBChat: This object. |
def _fake_openreferenceinstances(self, namespace, **params):
"""
Implements WBEM server responder for
:meth:`~pywbem.WBEMConnection.OpenReferenceInstances`
with data from the instance repository.
"""
self._validate_namespace(namespace)
self._validate_open_params(*... | Implements WBEM server responder for
:meth:`~pywbem.WBEMConnection.OpenReferenceInstances`
with data from the instance repository. |
def pre_check(self, data):
"""Count chars, words and sentences in the text."""
sentences = len(re.findall('[\.!?]+\W+', data)) or 1
chars = len(data) - len(re.findall('[^a-zA-Z0-9]', data))
num_words = len(re.findall('\s+', data))
data = re.split('[^a-zA-Z]+', data)
retur... | Count chars, words and sentences in the text. |
def get(self, cycle_list, dataitem=None, isotope=None, sparse=1):
'''
Get Data from HDF5 files.
There are three ways to call this function
1. get(dataitem)
Fetches the datatiem for all cycles. If dataitem is a header
attribute or list of attributes then the dat... | Get Data from HDF5 files.
There are three ways to call this function
1. get(dataitem)
Fetches the datatiem for all cycles. If dataitem is a header
attribute or list of attributes then the data is retured.
If detaitem an individulal or list of column attributes,
... |
def __recognize_scalar(self, node: yaml.Node,
expected_type: Type) -> RecResult:
"""Recognize a node that we expect to be a scalar.
Args:
node: The node to recognize.
expected_type: The type it is expected to be.
Returns:
A list of... | Recognize a node that we expect to be a scalar.
Args:
node: The node to recognize.
expected_type: The type it is expected to be.
Returns:
A list of recognized types and an error message |
def add_arguments(cls, parser, sys_arg_list=None):
"""
Arguments for the TCP health monitor plugin.
"""
parser.add_argument('--tcp_check_interval',
dest='tcp_check_interval',
required=False, default=2, type=float,
... | Arguments for the TCP health monitor plugin. |
def generate(self, information, timeout=-1):
"""
Generates a self signed certificate or an internal CA signed certificate for RabbitMQ clients.
Args:
information (dict): Information to generate the certificate for RabbitMQ clients.
timeout:
Timeout in sec... | Generates a self signed certificate or an internal CA signed certificate for RabbitMQ clients.
Args:
information (dict): Information to generate the certificate for RabbitMQ clients.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not a... |
def pull_guest_properties(self):
"""Get the list of the guest properties matching a set of patterns along
with their values, timestamps and flags and give responsibility for
managing properties to the console.
out names of type str
The names of the properties returned.
... | Get the list of the guest properties matching a set of patterns along
with their values, timestamps and flags and give responsibility for
managing properties to the console.
out names of type str
The names of the properties returned.
out values of type str
The v... |
def compile_mof_string(self, mof_str, namespace=None, search_paths=None,
verbose=None):
"""
Compile the MOF definitions in the specified string and add the
resulting CIM objects to the specified CIM namespace of the mock
repository.
If the namespace do... | Compile the MOF definitions in the specified string and add the
resulting CIM objects to the specified CIM namespace of the mock
repository.
If the namespace does not exist, :exc:`~pywbem.CIMError` with status
CIM_ERR_INVALID_NAMESPACE is raised.
This method supports all MOF pr... |
def setup_database(config_data):
"""
Run the migrate command to create the database schema
:param config_data: configuration data
"""
with chdir(config_data.project_directory):
env = deepcopy(dict(os.environ))
env[str('DJANGO_SETTINGS_MODULE')] = str('{0}.settings'.format(config_dat... | Run the migrate command to create the database schema
:param config_data: configuration data |
def filter_update(self, id, phrase = None, context = None, irreversible = None, whole_word = None, expires_in = None):
"""
Updates the filter with the given `id`. Parameters are the same
as in `filter_create()`.
Returns the `filter dict`_ of the updated filter.
"""
... | Updates the filter with the given `id`. Parameters are the same
as in `filter_create()`.
Returns the `filter dict`_ of the updated filter. |
def requires_user(fn):
"""
Requires that the calling Subject be *either* authenticated *or* remembered
via RememberMe services before allowing access.
This method essentially ensures that subject.identifiers IS NOT None
:raises UnauthenticatedException: indicating that the deco... | Requires that the calling Subject be *either* authenticated *or* remembered
via RememberMe services before allowing access.
This method essentially ensures that subject.identifiers IS NOT None
:raises UnauthenticatedException: indicating that the decorated method is
... |
def find_dups(file_dict):
'''takes output from :meth:`scan_dir` and returns list of duplicate files'''
found_hashes = {}
for f in file_dict:
if file_dict[f]['md5'] not in found_hashes:
found_hashes[file_dict[f]['md5']] = []
found_hashes[file_dict[f]['md5']].append(f)
final_ha... | takes output from :meth:`scan_dir` and returns list of duplicate files |
def refresh(self):
    """Rebuild the cache from scratch.

    Any existing cache is deleted first, then a fresh one is populated
    and reopened for use.
    """
    # Drop the stale cache, if one is currently present.
    if self.exists:
        self.delete()
    # Build the replacement cache and make it available again.
    self.populate()
    self.open()
def prepare_dispatch(self):
# pylint:disable=too-many-branches, too-many-statements, too-many-locals
"""
Prepare dispatch, so prepare for each daemon (schedulers, brokers, receivers, reactionners,
pollers)
This function will only prepare something if self.new_to_dispatch is Fals... | Prepare dispatch, so prepare for each daemon (schedulers, brokers, receivers, reactionners,
pollers)
This function will only prepare something if self.new_to_dispatch is False
It will reset the first_dispatch_done flag
A DispatcherError exception is raised if a configuration is already... |
def _commit_handler(self, cmd):
"""
Special handler for hostname change on commit operation. Also handles username removal
which prompts for confirmation (username removal prompts for each user...)
"""
current_prompt = self.device.find_prompt().strip()
terminating_char = ... | Special handler for hostname change on commit operation. Also handles username removal
which prompts for confirmation (username removal prompts for each user...) |
def _fix_permissions(self):
"""
Because docker run as root we need to fix permission and ownership to allow user to interact
with it from their filesystem and do operation like file delete
"""
state = yield from self._get_container_state()
if state == "stopped" or state ... | Because docker run as root we need to fix permission and ownership to allow user to interact
with it from their filesystem and do operation like file delete |
def pad_to(unpadded, target_len):
    """Pad a string with spaces to the target length in characters.

    The original string is returned unchanged if it is already at least
    ``target_len`` characters long.
    """
    shortfall = target_len - len(unpadded)
    return unpadded if shortfall <= 0 else unpadded + ' ' * shortfall
def single_discriminator(x, filters=128, kernel_size=8,
strides=4, pure_mean=False):
"""A simple single-layer convolutional discriminator."""
with tf.variable_scope("discriminator"):
net = layers().Conv2D(
filters, kernel_size, strides=strides, padding="SAME", name="conv1")(x)
... | A simple single-layer convolutional discriminator. |
def _histogram_data(iterator):
""" Yields only the row contents that contain the histogram entries """
histogram_started = False
header_passed = False
for l in iterator:
if '## HISTOGRAM' in l:
histogram_started = True
elif histogram_started:
if header_passed:
... | Yields only the row contents that contain the histogram entries |
def _compute_nfps_uniform(cum_counts, sizes):
"""Computes the matrix of expected false positives for all possible
sub-intervals of the complete domain of set sizes, assuming uniform
distribution of set_sizes within each sub-intervals.
Args:
cum_counts: the complete cummulative distribution of s... | Computes the matrix of expected false positives for all possible
sub-intervals of the complete domain of set sizes, assuming uniform
distribution of set_sizes within each sub-intervals.
Args:
cum_counts: the complete cummulative distribution of set sizes.
sizes: the complete domain of set s... |
def discovery_print(pkt):
"""Scandevice callback. Register src mac to avoid src repetition.
Print device on screen.
:param scapy.packet.Packet pkt: Scapy Packet
:return: None
"""
if pkt.src in mac_id_list:
return
mac_id_list.append(pkt.src)
text = pkt_text(pkt)
click.secho(t... | Scandevice callback. Register src mac to avoid src repetition.
Print device on screen.
:param scapy.packet.Packet pkt: Scapy Packet
:return: None |
def deallocate_ip(self, hostipaddress):
"""
Object method takes in input of hostip address,removes them from the parent ip scope.
:param hostid: str of the hostid of the target host ip record
:return:
"""
delete_host_from_segment(hostipaddress, self.netaddr, self.auth, ... | Object method takes in input of hostip address,removes them from the parent ip scope.
:param hostid: str of the hostid of the target host ip record
:return: |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.