code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def initializePhase(self, features, targets):
"""
Step 1: Initialization phase
:param features feature matrix with dimension (numSamples, numInputs)
:param targets target matrix with dimension (numSamples, numOutputs)
"""
assert features.shape[0] == targets.shape[0]
assert features.shape[1] ... | Step 1: Initialization phase
:param features feature matrix with dimension (numSamples, numInputs)
:param targets target matrix with dimension (numSamples, numOutputs) |
def append(self, other):
"""Appends another array to this array.
The returned array will have all of the class methods and virutal
fields of this array, including any that were added using `add_method`
or `add_virtualfield`. If this array and other array have one or more
string ... | Appends another array to this array.
The returned array will have all of the class methods and virutal
fields of this array, including any that were added using `add_method`
or `add_virtualfield`. If this array and other array have one or more
string fields, the dtype for those fields a... |
def __update(self, row):
"""Update rows in table
"""
expr = self.__table.update().values(row)
for key in self.__update_keys:
expr = expr.where(getattr(self.__table.c, key) == row[key])
if self.__autoincrement:
expr = expr.returning(getattr(self.__table.c, ... | Update rows in table |
def chdir(self, path=None):
"""
Change the "current directory" of this SFTP session. Since SFTP
doesn't really have the concept of a current working directory, this is
emulated by Paramiko. Once you use this method to set a working
directory, all operations on this `.SFTPClient... | Change the "current directory" of this SFTP session. Since SFTP
doesn't really have the concept of a current working directory, this is
emulated by Paramiko. Once you use this method to set a working
directory, all operations on this `.SFTPClient` object will be relative
to that path. ... |
def _delete_iapp(self, iapp_name, deploying_device):
'''Delete an iapp service and template on the root device.
:param iapp_name: str -- name of iapp
:param deploying_device: ManagementRoot object -- device where the
iapp will be deleted
'''
iap... | Delete an iapp service and template on the root device.
:param iapp_name: str -- name of iapp
:param deploying_device: ManagementRoot object -- device where the
iapp will be deleted |
def get_first_pos_of_char(char, string):
'''
:param char: The character to find
:type char: string
:param string: The string in which to search for *char*
:type string: string
:returns: Index in *string* where *char* last appears (unescaped by a preceding "\\"), -1 if not found
:rtype: int
... | :param char: The character to find
:type char: string
:param string: The string in which to search for *char*
:type string: string
:returns: Index in *string* where *char* last appears (unescaped by a preceding "\\"), -1 if not found
:rtype: int
Finds the first occurrence of *char* in *string* ... |
def _wrpy_ncbi_gene_nts(fout_py, geneid2nt, log):
"""Write namedtuples to a dict in a Python module."""
num_genes = len(geneid2nt)
with open(fout_py, 'w') as ofstrm:
docstr = "Data downloaded from NCBI Gene converted into Python namedtuples."
ofstrm.write('"""{PYDOC}"""\n... | Write namedtuples to a dict in a Python module. |
def printc(cls, txt, color=colors.red):
    """Print *txt* to stdout wrapped in the given terminal color (red by default)."""
    colored = cls.color_txt(txt, color)
    print(colored)
def next(self):
""" #TODO: docstring
:returns: #TODO: docstring
"""
try:
self.event, self.element = next(self.iterator)
self.elementTag = clearTag(self.element.tag)
except StopIteration:
clearParsedElements(self.element)
raise Stop... | #TODO: docstring
:returns: #TODO: docstring |
def _siftdown_max(heap, startpos, pos):
'Maxheap variant of _siftdown'
newitem = heap[pos]
# Follow the path to the root, moving parents down until finding a place
# newitem fits.
while pos > startpos:
parentpos = (pos - 1) >> 1
parent = heap[parentpos]
if parent < newitem:
... | Maxheap variant of _siftdown |
def list_subgroups_global(self, id):
"""
List subgroups.
List the immediate OutcomeGroup children of the outcome group. Paginated.
"""
path = {}
data = {}
params = {}
# REQUIRED - PATH - id
"""ID"""
path["id"] = id
... | List subgroups.
List the immediate OutcomeGroup children of the outcome group. Paginated. |
def getUsers(context, roles, allow_empty=True):
""" Present a DisplayList containing users in the specified
list of roles
"""
mtool = getToolByName(context, 'portal_membership')
pairs = allow_empty and [['', '']] or []
users = mtool.searchForMembers(roles=roles)
for user in users:
... | Present a DisplayList containing users in the specified
list of roles |
def get_data_info(self):
"""
imports er tables and places data into Data_info data structure
outlined bellow:
Data_info - {er_samples: {er_samples.txt info}
er_sites: {er_sites.txt info}
er_locations: {er_locations.txt info}
... | imports er tables and places data into Data_info data structure
outlined bellow:
Data_info - {er_samples: {er_samples.txt info}
er_sites: {er_sites.txt info}
er_locations: {er_locations.txt info}
er_ages: {er_ages.txt info}} |
def get_sideplot_ranges(plot, element, main, ranges):
"""
Utility to find the range for an adjoined
plot given the plot, the element, the
Element the plot is adjoined to and the
dictionary of ranges.
"""
key = plot.current_key
dims = element.dimensions()
dim = dims[0] if 'frequency' ... | Utility to find the range for an adjoined
plot given the plot, the element, the
Element the plot is adjoined to and the
dictionary of ranges. |
def _checkremove_que(self, word):
"""If word ends in -que and if word is not in pass list, strip -que"""
in_que_pass_list = False
que_pass_list = ['atque',
'quoque',
'neque',
'itaque',
'absque',
... | If word ends in -que and if word is not in pass list, strip -que |
def streamify(self, state, frame):
"""Prepare frame for output as a byte-stuffed stream."""
# Split the frame apart for stuffing...
pieces = frame.split(self.prefix)
return '%s%s%s%s%s' % (self.prefix, self.begin,
(self.prefix + self.nop).join(pieces),
... | Prepare frame for output as a byte-stuffed stream. |
def stop_gradient(self, stop_layers, bigdl_type="float"):
"""
stop the input gradient of layers that match the given ```names```
their input gradient are not computed.
And they will not contributed to the input gradient computation of
layers that depend on them.
:param st... | stop the input gradient of layers that match the given ```names```
their input gradient are not computed.
And they will not contributed to the input gradient computation of
layers that depend on them.
:param stop_layers: an array of layer names
:param bigdl_type:
:return... |
def format_tb(tb=None, limit=None, allLocals=None, allGlobals=None, withTitle=False, with_color=None, with_vars=None):
"""
:param types.TracebackType|types.FrameType|StackSummary tb: traceback. if None, will use sys._getframe
:param int|None limit: limit the traceback to this number of frames. by default, w... | :param types.TracebackType|types.FrameType|StackSummary tb: traceback. if None, will use sys._getframe
:param int|None limit: limit the traceback to this number of frames. by default, will look at sys.tracebacklimit
:param dict[str]|None allLocals: if set, will update it with all locals from all frames
:par... |
def set(self, name, default=0, editable=True, description=""):
'''Define a variable in DB and in memory'''
var, created = ConfigurationVariable.objects.get_or_create(name=name)
if created:
var.value = default
if not editable:
var.value = default
var.ed... | Define a variable in DB and in memory |
def fnFromDate(self, date):
    """Return the absolute path of the comics HTML file for *date*.

    The file is named ``comics-YYYYMMDD.html`` and lives under
    ``<self.basepath>/html``.  *date* must be acceptable to
    ``time.strftime`` (i.e. a ``struct_time`` or compatible tuple).
    """
    basename = time.strftime('comics-%Y%m%d', date)
    relative = os.path.join(self.basepath, 'html', basename + ".html")
    return os.path.abspath(relative)
def from_node(index, value):
"""
>>> h = TimelineHistory.from_node(1, 2)
>>> h.lines
[]
"""
try:
lines = json.loads(value)
except (TypeError, ValueError):
lines = None
if not isinstance(lines, list):
lines = []
r... | >>> h = TimelineHistory.from_node(1, 2)
>>> h.lines
[] |
def run_top_task(self, task_name=None, sort=None, **kwargs):
"""Finds and runs a pending task that in the first of the sorting list.
Parameters
-----------
task_name : str
The task name.
sort : List of tuple
PyMongo sort comment, search "PyMongo find one ... | Finds and runs a pending task that in the first of the sorting list.
Parameters
-----------
task_name : str
The task name.
sort : List of tuple
PyMongo sort comment, search "PyMongo find one sorting" and `collection level operations <http://api.mongodb.com/python... |
def logReload(options):
"""
encompasses all the logic for reloading observer.
"""
event_handler = Reload(options)
observer = Observer()
observer.schedule(event_handler, path='.', recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterr... | encompasses all the logic for reloading observer. |
def set_log_file_maximum_size(self, logFileMaxSize):
"""
Set the log file maximum size in megabytes
:Parameters:
#. logFileMaxSize (number): The maximum size in Megabytes of a logging file.
Once exceeded, another logging file as logFileBasename_N.logFileExtension
... | Set the log file maximum size in megabytes
:Parameters:
#. logFileMaxSize (number): The maximum size in Megabytes of a logging file.
Once exceeded, another logging file as logFileBasename_N.logFileExtension
will be created. Where N is an automatically incremented number. |
def create_refresh_token(self, access_token_value):
# type: (str) -> str
"""
Creates an refresh token bound to the specified access token.
"""
if access_token_value not in self.access_tokens:
raise InvalidAccessToken('{} unknown'.format(access_token_value))
i... | Creates an refresh token bound to the specified access token. |
def parse_expmethodresponse(self, tup_tree):
# pylint: disable=unused-argument
"""
This function not implemented.
"""
raise CIMXMLParseError(
_format("Internal Error: Parsing support for element {0!A} is not "
"implemented", name(tup_tree)),
... | This function not implemented. |
def normalize_uri(u: URI) -> URIRef:
    """Coerce *u* to a ``URIRef``, leaving existing ``URIRef`` instances untouched."""
    if isinstance(u, URIRef):
        return u
    return URIRef(str(u))
def serialize_raw_master_key_prefix(raw_master_key):
"""Produces the prefix that a RawMasterKey will always use for the
key_info value of keys which require additional information.
:param raw_master_key: RawMasterKey for which to produce a prefix
:type raw_master_key: aws_encryption_sdk.key_providers.r... | Produces the prefix that a RawMasterKey will always use for the
key_info value of keys which require additional information.
:param raw_master_key: RawMasterKey for which to produce a prefix
:type raw_master_key: aws_encryption_sdk.key_providers.raw.RawMasterKey
:returns: Serialized key_info prefix
... |
def docs(context: Context):
"""
Generates static documentation
"""
try:
from sphinx.application import Sphinx
except ImportError:
context.pip_command('install', 'Sphinx')
from sphinx.application import Sphinx
context.shell('cp', 'README.rst', 'docs/README.rst')
app =... | Generates static documentation |
def Add(self, service, method, request, global_params=None):
"""Add a request to the batch.
Args:
service: A class inheriting base_api.BaseApiService.
method: A string indicated desired method from the service. See
the example in the class docstring.
request:... | Add a request to the batch.
Args:
service: A class inheriting base_api.BaseApiService.
method: A string indicated desired method from the service. See
the example in the class docstring.
request: An input message appropriate for the specified
service.me... |
def _read_fd(file_descr):
"""
Read incoming data from file handle.
Then find the matching StreamDescriptor by file_descr value.
:param file_descr: file object
:return: Return number of bytes read
"""
try:
line = os.read(file_descr, 1024 * 1024)
... | Read incoming data from file handle.
Then find the matching StreamDescriptor by file_descr value.
:param file_descr: file object
:return: Return number of bytes read |
def extra_context(self, request, context):
    """Merge the result of the ``PAGE_EXTRA_CONTEXT`` callable (if configured) into *context*."""
    extra = settings.PAGE_EXTRA_CONTEXT
    if extra:
        context.update(extra())
def posthoc_tamhane(a, val_col=None, group_col=None, welch=True, sort=False):
'''Tamhane's T2 all-pairs comparison test for normally distributed data with
unequal variances. Tamhane's T2 test can be performed for all-pairs
comparisons in an one-factorial layout with normally distributed residuals
but u... | Tamhane's T2 all-pairs comparison test for normally distributed data with
unequal variances. Tamhane's T2 test can be performed for all-pairs
comparisons in an one-factorial layout with normally distributed residuals
but unequal groups variances. A total of m = k(k-1)/2 hypotheses can be
tested. The nul... |
def invokeCompletionIfAvailable(self, requestedByUser=False):
"""Invoke completion, if available. Called after text has been typed in qpart
Returns True, if invoked
"""
if self._qpart.completionEnabled and self._wordSet is not None:
wordBeforeCursor = self._wordBeforeCursor()... | Invoke completion, if available. Called after text has been typed in qpart
Returns True, if invoked |
def get_current_desktop(self):
    """Return the index of the current desktop.

    Queries ``_NET_CURRENT_DESKTOP`` of the EWMH spec through libxdo,
    writing the result into a C long passed by reference.
    """
    current = ctypes.c_long(0)
    _libxdo.xdo_get_current_desktop(self._xdo, ctypes.byref(current))
    return current.value
Uses ``_NET_CURRENT_DESKTOP`` of the EWMH spec. |
def crashlog_status(**kwargs):
    """Show crashlogs status via the ``crashlog:status`` action."""
    ctx = Context(**kwargs)
    storage_service = ctx.repo.create_secure_service('storage')
    ctx.execute_action('crashlog:status', storage=storage_service)
def upload(self, resource_id, data):
"""Update the request URI to upload the a document to this resource.
Args:
resource_id (integer): The group id.
data (any): The raw data to upload.
"""
self.body = data
self.content_type = 'application/octet-stream'
... | Update the request URI to upload the a document to this resource.
Args:
resource_id (integer): The group id.
data (any): The raw data to upload. |
def on_plugin_install(plugin_directory, ostream=sys.stdout):
'''
Run ``on_plugin_install`` script for specified plugin directory (if
available).
**TODO** Add support for Linux, OSX.
Parameters
----------
plugin_directory : str
File system to plugin directory.
ostream :file-like... | Run ``on_plugin_install`` script for specified plugin directory (if
available).
**TODO** Add support for Linux, OSX.
Parameters
----------
plugin_directory : str
File system to plugin directory.
ostream :file-like
Output stream for status messages (default: ``sys.stdout``). |
def ResolvePrefix(self, subject, attribute_prefix, timestamp=None,
limit=None):
"""Retrieve a set of value matching for this subject's attribute.
Args:
subject: The subject that we will search.
attribute_prefix: The attribute prefix.
timestamp: A range of times for conside... | Retrieve a set of value matching for this subject's attribute.
Args:
subject: The subject that we will search.
attribute_prefix: The attribute prefix.
timestamp: A range of times for consideration (In microseconds). Can be a
constant such as ALL_TIMESTAMPS or NEWEST_TIMESTAMP or a tuple o... |
def decode(self, data: bytes) -> bytes:
"""Decodes data according the specified Content-Encoding
or Content-Transfer-Encoding headers value.
"""
if CONTENT_TRANSFER_ENCODING in self.headers:
data = self._decode_content_transfer(data)
if CONTENT_ENCODING in self.header... | Decodes data according the specified Content-Encoding
or Content-Transfer-Encoding headers value. |
def apply_calibration(df, calibration_df, calibration):
'''
Apply calibration values from `fit_fb_calibration` result to `calibration`
object.
'''
from dmf_control_board_firmware import FeedbackResults
for i, (fb_resistor, R_fb, C_fb) in calibration_df[['fb_resistor', 'R_fb', 'C_fb']].iterrows(... | Apply calibration values from `fit_fb_calibration` result to `calibration`
object. |
def from_config(cls, cp, data=None, delta_f=None, delta_t=None,
gates=None, recalibration=None, **kwargs):
"""Initializes an instance of this class from the given config file.
Parameters
----------
cp : WorkflowConfigParser
Config file parser to read.
... | Initializes an instance of this class from the given config file.
Parameters
----------
cp : WorkflowConfigParser
Config file parser to read.
data : dict
A dictionary of data, in which the keys are the detector names and
the values are the data. This ... |
def omit_deep(omit_props, dct):
"""
Implementation of omit that recurses. This tests the same keys at every level of dict and in lists
:param omit_props:
:param dct:
:return:
"""
omit_partial = omit_deep(omit_props)
if isinstance(dict, dct):
# Filter out keys and then recurse o... | Implementation of omit that recurses. This tests the same keys at every level of dict and in lists
:param omit_props:
:param dct:
:return: |
def update_event_types(self):
"""Update event types in event type box."""
self.idx_evt_type.clear()
self.idx_evt_type.setSelectionMode(QAbstractItemView.ExtendedSelection)
event_types = sorted(self.parent.notes.annot.event_types,
key=str.lower)
for t... | Update event types in event type box. |
def predictions(self, stpid="", rt="", vid="", maxpredictions=""):
"""
Retrieve predictions for 1+ stops or 1+ vehicles.
Arguments:
`stpid`: unique ID number for bus stop (single or comma-seperated list or iterable)
or
`vid`: vehicle ID number (single... | Retrieve predictions for 1+ stops or 1+ vehicles.
Arguments:
`stpid`: unique ID number for bus stop (single or comma-seperated list or iterable)
or
`vid`: vehicle ID number (single or comma-seperated list or iterable)
or
`stpid` and `rt`
... |
def hav_dist(locs1, locs2):
"""
Return a distance matrix between two set of coordinates.
Use geometric distance (default) or haversine distance (if longlat=True).
Parameters
----------
locs1 : numpy.array
The first set of coordinates as [(long, lat), (long, lat)].
locs2 : numpy.arra... | Return a distance matrix between two set of coordinates.
Use geometric distance (default) or haversine distance (if longlat=True).
Parameters
----------
locs1 : numpy.array
The first set of coordinates as [(long, lat), (long, lat)].
locs2 : numpy.array
The second set of coordinates ... |
def getSequenceCombinaisons(polymorphipolymorphicDnaSeqSeq, pos = 0) :
"""Takes a dna sequence with polymorphismes and returns all the possible sequences that it can yield"""
if type(polymorphipolymorphicDnaSeqSeq) is not types.ListType :
seq = list(polymorphipolymorphicDnaSeqSeq)
else :
seq = polymorphipolymor... | Takes a dna sequence with polymorphismes and returns all the possible sequences that it can yield |
def _get_model_table(self, part):
"""
Returns a list that represents the table.
:param part: The table header, table footer or table body.
:type part: hatemile.util.html.htmldomelement.HTMLDOMElement
:return: The list that represents the table.
:rtype: list(list(hatemile... | Returns a list that represents the table.
:param part: The table header, table footer or table body.
:type part: hatemile.util.html.htmldomelement.HTMLDOMElement
:return: The list that represents the table.
:rtype: list(list(hatemile.util.html.htmldomelement.HTMLDOMElement)) |
def calcNewEdges(wcs, shape):
"""
This method will compute sky coordinates for all the pixels around
the edge of an image AFTER applying the geometry model.
Parameters
----------
wcs : obj
HSTWCS object for image
shape : tuple
numpy shape tuple for size of image
Return... | This method will compute sky coordinates for all the pixels around
the edge of an image AFTER applying the geometry model.
Parameters
----------
wcs : obj
HSTWCS object for image
shape : tuple
numpy shape tuple for size of image
Returns
-------
border : arr
arr... |
def get_asset_lookup_session_for_repository(self, repository_id, proxy, *args, **kwargs):
"""Gets the OsidSession associated with the asset lookup service
for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
... | Gets the OsidSession associated with the asset lookup service
for the given repository.
arg: repository_id (osid.id.Id): the Id of the repository
arg proxy (osid.proxy.Proxy): a proxy
return: (osid.repository.AssetLookupSession) - the new
AssetLookupSession
... |
def schemaValidCtxtGetParserCtxt(self):
"""allow access to the parser context of the schema validation
context """
ret = libxml2mod.xmlSchemaValidCtxtGetParserCtxt(self._o)
if ret is None:raise parserError('xmlSchemaValidCtxtGetParserCtxt() failed')
__tmp = parserCtxt(_obj=ret... | allow access to the parser context of the schema validation
context |
def function_call_prepare_action(self, text, loc, fun):
"""Code executed after recognising a function call (type and function name)"""
exshared.setpos(loc, text)
if DEBUG > 0:
print("FUN_PREP:",fun)
if DEBUG == 2: self.symtab.display()
if DEBUG > 2: retu... | Code executed after recognising a function call (type and function name) |
def profile_remove(name, **kwargs):
    """Remove the named profile from the storage via the ``profile:remove`` action."""
    ctx = Context(**kwargs)
    storage_service = ctx.repo.create_secure_service('storage')
    ctx.execute_action('profile:remove', storage=storage_service, name=name)
def domain_delete(auth=None, **kwargs):
'''
Delete a domain
CLI Example:
.. code-block:: bash
salt '*' keystoneng.domain_delete name=domain1
salt '*' keystoneng.domain_delete name=b62e76fbeeff4e8fb77073f591cf211e
'''
cloud = get_operator_cloud(auth)
kwargs = _clean_kwargs(... | Delete a domain
CLI Example:
.. code-block:: bash
salt '*' keystoneng.domain_delete name=domain1
salt '*' keystoneng.domain_delete name=b62e76fbeeff4e8fb77073f591cf211e |
def get_related_galleries(gallery, count=5):
"""
Gets latest related galleries from same section as originating gallery.
Count defaults to five but can be overridden.
Usage: {% get_related_galleries gallery <10> %}
"""
# just get the first cat. If they assigned to more than one, tough
try:... | Gets latest related galleries from same section as originating gallery.
Count defaults to five but can be overridden.
Usage: {% get_related_galleries gallery <10> %} |
def create_examples_train(candidate_dialog_paths, rng, positive_probability=0.5, max_context_length=20):
"""
Creates single training example.
:param candidate_dialog_paths:
:param rng:
:param positive_probability: probability of selecting positive training example
:return:
"""
i = 0
... | Creates single training example.
:param candidate_dialog_paths:
:param rng:
:param positive_probability: probability of selecting positive training example
:return: |
def __build_lxml(target, source, env):
"""
General XSLT builder (HTML/FO), using the lxml module.
"""
from lxml import etree
xslt_ac = etree.XSLTAccessControl(read_file=True,
write_file=True,
create_dir=True,
... | General XSLT builder (HTML/FO), using the lxml module. |
def get(self, client_method, get_params, is_json=True):
"""Make a GET request"""
url = self._wa.apollo_url + self.CLIENT_BASE + client_method
headers = {}
response = requests.get(url, headers=headers,
verify=self.__verify, params=get_params,
... | Make a GET request |
def checkRequirements(sender,**kwargs):
'''
Check that the customer meets all prerequisites for the items in the registration.
'''
if not getConstant('requirements__enableRequirements'):
return
logger.debug('Signal to check RegistrationContactForm handled by prerequisites app.')
... | Check that the customer meets all prerequisites for the items in the registration. |
def refresh_lock(lock_file):
"""'Refresh' an existing lock.
'Refresh' an existing lock by re-writing the file containing the
owner's unique id, using a new (randomly generated) id, which is also
returned.
"""
unique_id = '%s_%s_%s' % (
os.getpid(),
''.join([str(random.randint(0... | Refresh' an existing lock.
'Refresh' an existing lock by re-writing the file containing the
owner's unique id, using a new (randomly generated) id, which is also
returned. |
def as_tuple(obj):
    """Coerce *obj* toward a tuple.

    Falsy values (None, 0, empty containers) yield ``()``; a tuple, set
    or list is converted with ``tuple()``; any other non-dict iterable
    — NOTE(review): including strings and generators — is returned
    unchanged, so despite the name the result is not always a tuple.
    Everything else (ints, dicts, ...) is wrapped as a 1-tuple.
    """
    if not obj:
        return ()
    if isinstance(obj, (tuple, set, list)):
        return tuple(obj)
    is_plain_iterable = hasattr(obj, '__iter__') and not isinstance(obj, dict)
    return obj if is_plain_iterable else (obj,)
def main(args=sys.argv[1:]):
'''Processes command line arguments and file i/o'''
if not args:
sys.stderr.write(_usage() + '\n')
sys.exit(4)
else:
parsed = _parse_args(args)
# Set delim based on whether or not regex is desired by user
delim = parsed.delimiter if parsed.regex ... | Processes command line arguments and file i/o |
def get(self, request, slug):
"""Basic functionality for GET request to view.
"""
matching_datasets = self.generate_matching_datasets(slug)
if matching_datasets is None:
raise Http404("Datasets meeting these criteria do not exist.")
base_context = {
'da... | Basic functionality for GET request to view. |
def clean_proc_dir(opts):
'''
Loop through jid files in the minion proc directory (default /var/cache/salt/minion/proc)
and remove any that refer to processes that no longer exist
'''
for basefilename in os.listdir(salt.minion.get_proc_dir(opts['cachedir'])):
fn_ = os.path.join(salt.minion... | Loop through jid files in the minion proc directory (default /var/cache/salt/minion/proc)
and remove any that refer to processes that no longer exist |
def _clean_value(key, val):
'''
Clean out well-known bogus values.
If it isn't clean (for example has value 'None'), return None.
Otherwise, return the original value.
NOTE: This logic also exists in the smbios module. This function is
for use when not using smbios to retrieve the value.
... | Clean out well-known bogus values.
If it isn't clean (for example has value 'None'), return None.
Otherwise, return the original value.
NOTE: This logic also exists in the smbios module. This function is
for use when not using smbios to retrieve the value. |
def eventFilter(self, object, event):
"""
Processes when the window is moving to update the position for the
popup if in popup mode.
:param object | <QObject>
event | <QEvent>
"""
if not self.isVisible():
return Fals... | Processes when the window is moving to update the position for the
popup if in popup mode.
:param object | <QObject>
event | <QEvent> |
def compile(pattern, namespaces=None, flags=0, **kwargs): # noqa: A001
"""Compile CSS pattern."""
if namespaces is not None:
namespaces = ct.Namespaces(**namespaces)
custom = kwargs.get('custom')
if custom is not None:
custom = ct.CustomSelectors(**custom)
if isinstance(pattern, ... | Compile CSS pattern. |
def stats(self):
"""
Return statistics calculated overall samples of all utterances in the corpus.
Returns:
DataStats: A DataStats object containing statistics overall samples in the corpus.
"""
per_utt_stats = self.stats_per_utterance()
return stats.DataSta... | Return statistics calculated overall samples of all utterances in the corpus.
Returns:
DataStats: A DataStats object containing statistics overall samples in the corpus. |
def _list_files(root):
"""
Lists all of the files in a directory, taking into account any .gitignore
file that is present
:param root:
A unicode filesystem path
:return:
A list of unicode strings, containing paths of all files not ignored
by .gitignore with root, using rela... | Lists all of the files in a directory, taking into account any .gitignore
file that is present
:param root:
A unicode filesystem path
:return:
A list of unicode strings, containing paths of all files not ignored
by .gitignore with root, using relative paths |
def main(arguments=None):
"""
*The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command*
"""
# setup the command-line util settings
su = tools(
arguments=arguments,
docString=__doc__,
logLevel="DEBUG",
option... | *The main function used when ``cl_utils.py`` is run as a single script from the cl, or when installed as a cl command* |
def _put_bucket_website(self):
"""Configure static website on S3 bucket."""
if self.s3props['website']['enabled']:
website_config = {
'ErrorDocument': {
'Key': self.s3props['website']['error_document']
},
'IndexDocument': {
... | Configure static website on S3 bucket. |
async def seek(self, pos, *, device: Optional[SomeDevice] = None):
"""Seeks to the given position in the user’s currently playing track.
Parameters
----------
pos : int
The position in milliseconds to seek to.
Must be a positive number.
Passing in a p... | Seeks to the given position in the user’s currently playing track.
Parameters
----------
pos : int
The position in milliseconds to seek to.
Must be a positive number.
Passing in a position that is greater than the length of the track will cause the player to ... |
def get_nas_credentials(self, identifier, **kwargs):
"""Returns a list of IDs of VLANs which match the given VLAN name.
:param integer instance_id: the instance ID
:returns: A dictionary containing a large amount of information about
the specified instance.
"""
... | Returns a list of IDs of VLANs which match the given VLAN name.
:param integer instance_id: the instance ID
:returns: A dictionary containing a large amount of information about
the specified instance. |
def get_lemma_by_id(self, mongo_id):
'''
Builds a Lemma object from the database entry with the given
ObjectId.
Arguments:
- `mongo_id`: a bson.objectid.ObjectId object
'''
cache_hit = None
if self._lemma_cache is not None:
cache_hit = self._l... | Builds a Lemma object from the database entry with the given
ObjectId.
Arguments:
- `mongo_id`: a bson.objectid.ObjectId object |
def _example_short_number_for_cost(region_code, cost):
"""Gets a valid short number for the specified cost category.
Arguments:
region_code -- the region for which an example short number is needed.
cost -- the cost category of number that is needed.
Returns a valid short number for the specified ... | Gets a valid short number for the specified cost category.
Arguments:
region_code -- the region for which an example short number is needed.
cost -- the cost category of number that is needed.
Returns a valid short number for the specified region and cost
category. Returns an empty string when the... |
def _check_compound_minions(self,
expr,
delimiter,
greedy,
pillar_exact=False): # pylint: disable=unused-argument
'''
Return the minions found by looking via compound matcher
... | Return the minions found by looking via compound matcher |
def reverse(self):
"""
Reverses the items of this collection "in place" (only two values are
retrieved from Redis at a time).
"""
def reverse_trans(pipe):
if self.writeback:
self._sync_helper(pipe)
n = self.__len__(pipe)
for i ... | Reverses the items of this collection "in place" (only two values are
retrieved from Redis at a time). |
def mark_read(user, message):
"""
Mark message instance as read for user.
Returns True if the message was `unread` and thus actually marked as `read` or False in case
it is already `read` or it does not exist at all.
:param user: user instance for the recipient
:param message: a Message instanc... | Mark message instance as read for user.
Returns True if the message was `unread` and thus actually marked as `read` or False in case
it is already `read` or it does not exist at all.
:param user: user instance for the recipient
:param message: a Message instance to mark as read |
def translate(self, text, to_template='{name} ({url})', from_template=None, name_matcher=None, url_matcher=None):
""" Translate hyperinks into printable book style for Manning Publishing
>>> translator = HyperlinkStyleCorrector()
>>> adoc = 'See http://totalgood.com[Total Good] about that.'
... | Translate hyperinks into printable book style for Manning Publishing
>>> translator = HyperlinkStyleCorrector()
>>> adoc = 'See http://totalgood.com[Total Good] about that.'
>>> translator.translate(adoc)
'See Total Good (http://totalgood.com) about that.' |
def set_regs(self, regs_dump):
"""
Initialize register values within the state
:param regs_dump: The output of ``info registers`` in gdb.
"""
if self.real_stack_top == 0 and self.adjust_stack is True:
raise SimStateError("You need to set the stack first, or set"
... | Initialize register values within the state
:param regs_dump: The output of ``info registers`` in gdb. |
def partition_key(self, value):
"""
Set the partition key of the event data object.
:param value: The partition key to set.
:type value: str or bytes
"""
annotations = dict(self._annotations)
annotations[self._partition_key] = value
header = MessageHeader... | Set the partition key of the event data object.
:param value: The partition key to set.
:type value: str or bytes |
def _is_output(part):
""" Returns whether the given part represents an output variable. """
if part[0].lower() == 'o':
return True
elif part[0][:2].lower() == 'o:':
return True
elif part[0][:2].lower() == 'o.':
return True
else:
return False | Returns whether the given part represents an output variable. |
def compose_dynamic_tree(src, target_tree_alias=None, parent_tree_item_alias=None, include_trees=None):
"""Returns a structure describing a dynamic sitetree.utils
The structure can be built from various sources,
:param str|iterable src: If a string is passed to `src`, it'll be treated as the name of an app... | Returns a structure describing a dynamic sitetree.utils
The structure can be built from various sources,
:param str|iterable src: If a string is passed to `src`, it'll be treated as the name of an app,
from where one want to import sitetrees definitions. `src` can be an iterable
of tree definit... |
def merge_with(self, other):
"""Merge this ``ValuesAggregation`` with another one"""
result = ValuesAggregation()
result.total = self.total + other.total
result.count = self.count + other.count
result.min = min(self.min, other.min)
result.max = max(self.max, other.max)
... | Merge this ``ValuesAggregation`` with another one |
def derived(self, name, relative_coords, formula):
"""Helper function for derived quantities"""
relZ, relN = relative_coords
daughter_idx = [(x[0] + relZ, x[1] + relN) for x in self.df.index]
values = formula(self.df.values, self.df.loc[daughter_idx].values)
return Table(df=pd.Se... | Helper function for derived quantities |
def xml_report(self, morfs=None, outfile=None, ignore_errors=None,
omit=None, include=None):
"""Generate an XML report of coverage results.
The report is compatible with Cobertura reports.
Each module in `morfs` is included in the report. `outfile` is the
path to w... | Generate an XML report of coverage results.
The report is compatible with Cobertura reports.
Each module in `morfs` is included in the report. `outfile` is the
path to write the file to, "-" will write to stdout.
See `coverage.report()` for other arguments.
Returns a float, ... |
def as_dict(self, join='.'):
    """Flatten every error in this collection into one dictionary.

    Each contained error contributes its own path-to-message entries
    (obtained from its ``as_dict``); path segments are joined with the
    ``join`` string. When two errors produce the same path, the entry
    from the later error wins (plain dict-merge semantics).
    """
    merged = {}
    for error in self.errors:
        for path, message in error.as_dict(join).items():
            merged[path] = message
    return merged
dictionary. Paths are joined with the ``join`` string. |
def _render_bundle(bundle_name):
"""
Renders the HTML for a bundle in place - one HTML tag or many depending on settings.USE_BUNDLES
"""
try:
bundle = get_bundles()[bundle_name]
except KeyError:
raise ImproperlyConfigured("Bundle '%s' is not defined" % bundle_name)
if bundle.use... | Renders the HTML for a bundle in place - one HTML tag or many depending on settings.USE_BUNDLES |
async def check_authorized(self, identity):
"""
Works like :func:`Security.identity`, but when check is failed
:func:`UnauthorizedError` exception is raised.
:param identity: Claim
:return: Checked claim or return ``None``
:raise: :func:`UnauthorizedError`
"""
... | Works like :func:`Security.identity`, but when check is failed
:func:`UnauthorizedError` exception is raised.
:param identity: Claim
:return: Checked claim or return ``None``
:raise: :func:`UnauthorizedError` |
def process(self, element):
"""Run the transformation graph on batched input data
Args:
element: list of csv strings, representing one batch input to the TF graph.
Returns:
dict containing the transformed data. Results are un-batched. Sparse
tensors are converted to lists.
"""
im... | Run the transformation graph on batched input data
Args:
element: list of csv strings, representing one batch input to the TF graph.
Returns:
dict containing the transformed data. Results are un-batched. Sparse
tensors are converted to lists. |
def do_use(self, args):
"""Use another instance, provided as argument."""
self.instance = args
self.prompt = self.instance + '> '
archive = self._client.get_archive(self.instance)
self.streams = [s.name for s in archive.list_streams()]
self.tables = [t.name for t in arch... | Use another instance, provided as argument. |
def base_url(self):
"""A base_url that will be used to construct the final
URL we're going to query against.
:returns: A URL of the form: ``proto://host:port``.
:rtype: :obj:`string`
"""
return '{proto}://{host}:{port}{url_path}'.format(
proto=self.protocol,
... | A base_url that will be used to construct the final
URL we're going to query against.
:returns: A URL of the form: ``proto://host:port``.
:rtype: :obj:`string` |
def unit(n, d=None, j=None, tt_instance=True):
''' Generates e_j _vector in tt.vector format
---------
Parameters:
n - modes (either integer or array)
d - dimensionality (integer)
j - position of 1 in full-format e_j (integer)
tt_instance - if True, returns tt.vector;
... | Generates e_j _vector in tt.vector format
---------
Parameters:
n - modes (either integer or array)
d - dimensionality (integer)
j - position of 1 in full-format e_j (integer)
tt_instance - if True, returns tt.vector;
if False, returns tt cores as a list |
def fixminimized(self, alphabet):
"""
After pyfst minimization,
all unused arcs are removed,
and all sink states are removed.
However this may break compatibility.
Args:
alphabet (list): The input alphabet
Returns:
None
"""
... | After pyfst minimization,
all unused arcs are removed,
and all sink states are removed.
However this may break compatibility.
Args:
alphabet (list): The input alphabet
Returns:
None |
def citations(val):
"""
# The CR Tag
extracts a list of all the citations in the record, the citations are the [metaknowledge.Citation](../classes/Citation.html#metaknowledge.citation.Citation) class.
# Parameters
_val_: `list[str]`
> The raw data from a WOS file
# Returns
` list[m... | # The CR Tag
extracts a list of all the citations in the record, the citations are the [metaknowledge.Citation](../classes/Citation.html#metaknowledge.citation.Citation) class.
# Parameters
_val_: `list[str]`
> The raw data from a WOS file
# Returns
` list[metaknowledge.Citation]`
> A... |
def _closure_deletelink(self, oldparentpk):
"""Remove incorrect links from the closure tree."""
self._closure_model.objects.filter(
**{
"parent__%s__child" % self._closure_parentref(): oldparentpk,
"child__%s__parent" % self._closure_childref(): self.pk
... | Remove incorrect links from the closure tree. |
def template_filter(self, name=None):
"""A decorator that is used to register custom template filter.
You can specify a name for the filter, otherwise the function
name will be used. Example::
@app.template_filter()
def reverse(s):
return s[::-1]
:para... | A decorator that is used to register custom template filter.
You can specify a name for the filter, otherwise the function
name will be used. Example::
@app.template_filter()
def reverse(s):
return s[::-1]
:param name: the optional name of the filter, otherwis... |
def validate_proxy_granting_ticket(pgt, target_service):
"""
Validate a proxy granting ticket string. Return an ordered pair
containing a ``ProxyTicket``, or a ``ValidationError`` if ticket
validation failed.
"""
logger.debug("Proxy ticket request received for %s using %s" % (target_service, pgt... | Validate a proxy granting ticket string. Return an ordered pair
containing a ``ProxyTicket``, or a ``ValidationError`` if ticket
validation failed. |
def list_keyvaults_sub(access_token, subscription_id):
'''Lists key vaults belonging to this subscription.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
Returns:
HTTP response. 200 OK.
'''
endpoint = ''.join([get... | Lists key vaults belonging to this subscription.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
Returns:
HTTP response. 200 OK. |
def remove_env(environment):
""" Remove an environment from the configuration. """
if not environment:
print("You need to supply an environment name")
return
parser = read_config()
if not parser.remove_section(environment):
print("Unknown environment type '%s'" % environment)
... | Remove an environment from the configuration. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.