code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def set_sys(layout):
'''
Set current system keyboard setting
CLI Example:
.. code-block:: bash
salt '*' keyboard.set_sys dvorak
'''
if salt.utils.path.which('localectl'):
__salt__['cmd.run']('localectl set-keymap {0}'.format(layout))
elif 'RedHat' in __grains__['os_family'... | Set current system keyboard setting
CLI Example:
.. code-block:: bash
salt '*' keyboard.set_sys dvorak |
def _new_err(self, errclass: str, *args) -> 'Err':
"""
Error constructor
"""
# get the message or exception
ex, msg = self._get_args(*args)
# construct the error
# handle exception
ftb = None # type: str
function = None # type: str
errtyp... | Error constructor |
def load_bot_parameters(config_bundle) -> ConfigObject:
"""
Initializes the agent in the bundle's python file and asks it to provide its
custom configuration object where its parameters can be set.
:return: the parameters as a ConfigObject
"""
# Python file relative to the config location.
p... | Initializes the agent in the bundle's python file and asks it to provide its
custom configuration object where its parameters can be set.
:return: the parameters as a ConfigObject |
def dropna(self, how='any', thresh=None, subset=None):
"""Returns a new :class:`DataFrame` omitting rows with null values.
:func:`DataFrame.dropna` and :func:`DataFrameNaFunctions.drop` are aliases of each other.
:param how: 'any' or 'all'.
If 'any', drop a row if it contains any nu... | Returns a new :class:`DataFrame` omitting rows with null values.
:func:`DataFrame.dropna` and :func:`DataFrameNaFunctions.drop` are aliases of each other.
:param how: 'any' or 'all'.
If 'any', drop a row if it contains any nulls.
If 'all', drop a row only if all its values are n... |
def path2edges(path):
"""Given: [2000343, 32722, 1819] Return: set([(2000343, 32722), (32722, 1819)])."""
node_a, node_b = tee(path)
next(node_b, None)
return zip(node_a, node_b) | Given: [2000343, 32722, 1819] Return: set([(2000343, 32722), (32722, 1819)]). |
async def filterindex(source, func):
"""Filter an asynchronous sequence using the index of the elements.
The given function is synchronous, takes the index as an argument,
and returns ``True`` if the corresponding should be forwarded,
``False`` otherwise.
"""
source = transform.enumerate.raw(so... | Filter an asynchronous sequence using the index of the elements.
The given function is synchronous, takes the index as an argument,
and returns ``True`` if the corresponding should be forwarded,
``False`` otherwise. |
def list_data_links(self, instance):
"""
Lists the data links visible to this client.
Data links are returned in random order.
:param str instance: A Yamcs instance name.
:rtype: ~collections.Iterable[.Link]
"""
# Server does not do pagination on listings of thi... | Lists the data links visible to this client.
Data links are returned in random order.
:param str instance: A Yamcs instance name.
:rtype: ~collections.Iterable[.Link] |
def conv2bin(data):
"""Convert a matrix of probabilities into binary values.
If the matrix has values <= 0 or >= 1, the values are
normalized to be in [0, 1].
:type data: numpy array
:param data: input matrix
:return: converted binary matrix
"""
if data.min() < 0 or data.max() > 1:
... | Convert a matrix of probabilities into binary values.
If the matrix has values <= 0 or >= 1, the values are
normalized to be in [0, 1].
:type data: numpy array
:param data: input matrix
:return: converted binary matrix |
def _build(self):
"""Connects the module to the graph.
Returns:
The learnable state, which has the same type, structure and shape as
the `initial_state` passed to the constructor.
"""
flat_initial_state = nest.flatten(self._initial_state)
if self._mask is not None:
flat_mask = n... | Connects the module to the graph.
Returns:
The learnable state, which has the same type, structure and shape as
the `initial_state` passed to the constructor. |
def plot_bit_for_bit(case, var_name, model_data, bench_data, diff_data):
""" Create a bit for bit plot """
plot_title = ""
plot_name = case + "_" + var_name + ".png"
plot_path = os.path.join(os.path.join(livvkit.output_dir, "verification", "imgs"))
functions.mkdir_p(plot_path)
m_ndim = np.ndim(m... | Create a bit for bit plot |
def remove_profile(name, s3=False):
"""
Removes a profile from your config
"""
user = os.path.expanduser("~")
if s3:
f = os.path.join(user, S3_PROFILE_ID + name)
else:
f = os.path.join(user, DBPY_PROFILE_ID + name)
try:
try:
open(f)
except:
... | Removes a profile from your config |
def quick_api(api_key, secret_key, port=8000):
"""
This method helps you get access to linkedin api quickly when using it
from the interpreter.
Notice that this method creates http server and wait for a request, so it
shouldn't be used in real production code - it's just an helper for debugging... | This method helps you get access to linkedin api quickly when using it
from the interpreter.
Notice that this method creates http server and wait for a request, so it
shouldn't be used in real production code - it's just an helper for debugging
The usage is basically:
api = quick_api(KEY, SEC... |
def acquire(self, *, raise_on_failure=True):
"""Attempt to acquire a slot under this rate limiter.
Parameters:
raise_on_failure(bool): Whether or not failures should raise an
exception. If this is false, the context manager will instead
return a boolean value represen... | Attempt to acquire a slot under this rate limiter.
Parameters:
raise_on_failure(bool): Whether or not failures should raise an
exception. If this is false, the context manager will instead
return a boolean value representing whether or not the rate
limit slot was ... |
def get_operators(self, name=None):
"""Get the list of :py:class:`Operator` elements associated with this job.
Args:
name(str): Only return operators matching `name`, where `name` can be a regular expression. If
`name` is not supplied, then all operators for this job are re... | Get the list of :py:class:`Operator` elements associated with this job.
Args:
name(str): Only return operators matching `name`, where `name` can be a regular expression. If
`name` is not supplied, then all operators for this job are returned.
Returns:
list(Oper... |
def _get_link_indices(self, current_modified_line):
"""
Get a list of tuples containing start and end indices of inline
anchor links
:param current_modified_line: The line being examined for links
:return: A list containing tuples of the form (start, end),
the starting a... | Get a list of tuples containing start and end indices of inline
anchor links
:param current_modified_line: The line being examined for links
:return: A list containing tuples of the form (start, end),
the starting and ending indices of inline anchors links. |
def resolve(self, resolve_from):
"""
:API: public
"""
session = requests.Session()
session.mount(resolve_from, requests.adapters.HTTPAdapter(max_retries=self._tries))
content = self._safe_get_content(session, resolve_from)
try:
parsed_urls = self._response_parser.parse(content)
i... | :API: public |
def build_all(cls, list_of_kwargs):
"""Similar to `create_all`. But transaction is not committed.
"""
return cls.add_all([
cls.new(**kwargs) for kwargs in list_of_kwargs], commit=False) | Similar to `create_all`. But transaction is not committed. |
async def create_scene_member(self, shade_position, scene_id, shade_id):
"""Adds a shade to an existing scene"""
data = {
ATTR_SCENE_MEMBER: {
ATTR_POSITION_DATA: shade_position,
ATTR_SCENE_ID: scene_id,
ATTR_SHADE_ID: shade_id,
}
... | Adds a shade to an existing scene |
def expr_to_json(expr):
"""
Converts a Sympy expression to a json-compatible tree-structure.
"""
if isinstance(expr, symbolics.Mul):
return {"type": "Mul", "args": [expr_to_json(arg) for arg in expr.args]}
elif isinstance(expr, symbolics.Add):
return {"type": "Add", "args": [expr_to_... | Converts a Sympy expression to a json-compatible tree-structure. |
async def dataSources(loop=None, executor=None):
"""Returns a dictionary mapping available DSNs to their descriptions.
:param loop: asyncio compatible event loop
:param executor: instance of custom ThreadPoolExecutor, if not supplied
default executor will be used
:return dict: mapping of dsn to... | Returns a dictionary mapping available DSNs to their descriptions.
:param loop: asyncio compatible event loop
:param executor: instance of custom ThreadPoolExecutor, if not supplied
default executor will be used
:return dict: mapping of dsn to driver description |
def dismiss_prompt(self, text=None, wait=None):
"""
Execute the wrapped code, dismissing a prompt.
Args:
text (str | RegexObject, optional): Text to match against the text in the modal.
wait (int | float, optional): Maximum time to wait for the modal to appear after
... | Execute the wrapped code, dismissing a prompt.
Args:
text (str | RegexObject, optional): Text to match against the text in the modal.
wait (int | float, optional): Maximum time to wait for the modal to appear after
executing the wrapped code.
Raises:
... |
def get_field(expr, field):
""" Fetch a field from a struct expr
"""
weld_obj = WeldObject(encoder_, decoder_)
struct_var = weld_obj.update(expr)
if isinstance(expr, WeldObject):
struct_var = expr.obj_id
weld_obj.dependencies[struct_var] = expr
weld_template = """
%(stru... | Fetch a field from a struct expr |
def main():
"""Controls the flow of the ddg application"""
'Build the parser and parse the arguments'
parser = argparse.ArgumentParser(
description='www.duckduckgo.com zero-click api for your command-line'
)
parser.add_argument('query', nargs='*', help='the search query')
parser.add_arg... | Controls the flow of the ddg application |
def _infer_xy_labels(darray, x, y, imshow=False, rgb=None):
"""
Determine x and y labels. For use in _plot2d
darray must be a 2 dimensional data array, or 3d for imshow only.
"""
assert x is None or x != y
if imshow and darray.ndim == 3:
return _infer_xy_labels_3d(darray, x, y, rgb)
... | Determine x and y labels. For use in _plot2d
darray must be a 2 dimensional data array, or 3d for imshow only. |
def edge(self, c):
"""rising edge"""
return ca.logic_and(c, ca.logic_not(self.pre_cond(c))) | rising edge |
def make_url(domain, location):
""" This function helps to make full url path."""
url = urlparse(location)
if url.scheme == '' and url.netloc == '':
return domain + url.path
elif url.scheme == '':
return 'http://' + url.netloc + url.path
else:
return url.geturl() | This function helps to make full url path. |
def read_dist_egginfo_json(dist, filename=DEFAULT_JSON):
"""
Safely get a json within an egginfo from a distribution.
"""
# use the given package's distribution to acquire the json file.
if not dist.has_metadata(filename):
logger.debug("no '%s' for '%s'", filename, dist)
return
... | Safely get a json within an egginfo from a distribution. |
def get_project() -> Optional[str]:
"""
Returns the current project name.
"""
project = SETTINGS.project
if not project:
require_test_mode_enabled()
raise RunError('Missing project name; for test mode, please set PULUMI_NODEJS_PROJECT')
return project | Returns the current project name. |
def set_content_type (self):
"""Return URL content type, or an empty string if content
type could not be found."""
if self.url:
self.content_type = mimeutil.guess_mimetype(self.url, read=self.get_content)
else:
self.content_type = u"" | Return URL content type, or an empty string if content
type could not be found. |
def make_file_exist(self):
"""Make sure the parent directory exists, then touch the file"""
self.parent.make_directory_exist()
self.parent.touch_file(self.name)
return self | Make sure the parent directory exists, then touch the file |
def list_modules(root_package = 'vlcp'):
'''
Walk through all the sub modules, find subclasses of vlcp.server.module.Module,
list their apis through apidefs
'''
pkg = __import__(root_package, fromlist=['_'])
module_dict = OrderedDict()
_server = Server()
for imp, module, _ in walk_packag... | Walk through all the sub modules, find subclasses of vlcp.server.module.Module,
list their apis through apidefs |
def authenticate(self, request, username=None, password=None, realm=None):
"""
Check credentials against the RADIUS server identified by `realm` and
return a User object or None. If no argument is supplied, Django will
skip this backend and try the next one (as a TypeError will be raised... | Check credentials against the RADIUS server identified by `realm` and
return a User object or None. If no argument is supplied, Django will
skip this backend and try the next one (as a TypeError will be raised
and caught). |
def _set_types(self):
"""Make sure that x, y have consistent types and set dtype."""
# If we given something that is not an int or a float we raise
# a RuntimeError as we do not want to have to guess if the given
# input should be interpreted as an int or a float, for example the
... | Make sure that x, y have consistent types and set dtype. |
def register(cache):
''' Registers a cache. '''
global caches
name = cache().name
if not caches.has_key(name):
caches[name] = cache | Registers a cache. |
def serialize(input, tree="etree", encoding=None, **serializer_opts):
"""Serializes the input token stream using the specified treewalker
:arg input: the token stream to serialize
:arg tree: the treewalker to use
:arg encoding: the encoding to use
:arg serializer_opts: any options to pass to the... | Serializes the input token stream using the specified treewalker
:arg input: the token stream to serialize
:arg tree: the treewalker to use
:arg encoding: the encoding to use
:arg serializer_opts: any options to pass to the
:py:class:`html5lib.serializer.HTMLSerializer` that gets created
... |
def t_MINUS(self, t):
r'-'
t.endlexpos = t.lexpos + len(t.value)
return t | r'- |
def cmd_output_remove(self, args):
'''remove an output'''
device = args[0]
for i in range(len(self.mpstate.mav_outputs)):
conn = self.mpstate.mav_outputs[i]
if str(i) == device or conn.address == device:
print("Removing output %s" % conn.address)
... | remove an output |
def get_core(self):
"""
Get an unsatisfiable core if the formula was previously
unsatisfied.
"""
if self.minisat and self.status == False:
return pysolvers.minisatgh_core(self.minisat) | Get an unsatisfiable core if the formula was previously
unsatisfied. |
def get_token_and_data(self, data):
'''
When we receive this, we have 'token):data'
'''
token = ''
for c in data:
if c != ')':
token = token + c
else:
break;
return token, data.lstrip(token + '):') | When we receive this, we have 'token):data' |
def _is_valid_amendment_json(self, json_repr):
"""Call the primary validator for a quick test"""
amendment = self._coerce_json_to_amendment(json_repr)
if amendment is None:
# invalid JSON, definitely broken
return False
aa = validate_amendment(amendment)
e... | Call the primary validator for a quick test |
def save(self, *args, **kwargs):
"""
Before saving, if slide is for a publication, use publication info
for slide's title, subtitle, description.
"""
if self.publication:
publication = self.publication
if not self.title:
self.title = publi... | Before saving, if slide is for a publication, use publication info
for slide's title, subtitle, description. |
def load_config(data, *models, **kwargs):
'''
Generate and load the config on the device using the OpenConfig or IETF
models and device profiles.
data
Dictionary structured with respect to the models referenced.
models
A list of models to be used when generating the config.
pr... | Generate and load the config on the device using the OpenConfig or IETF
models and device profiles.
data
Dictionary structured with respect to the models referenced.
models
A list of models to be used when generating the config.
profiles: ``None``
Use certain profiles to gener... |
def filter_leader_files(cluster_config, broker_files):
"""Given a list of broker files, filters out all the files that
are in the replicas.
:param cluster_config: the cluster
:type cluster_config: kafka_utils.utils.config.ClusterConfig
:param broker_files: the broker files
:type broker_files: l... | Given a list of broker files, filters out all the files that
are in the replicas.
:param cluster_config: the cluster
:type cluster_config: kafka_utils.utils.config.ClusterConfig
:param broker_files: the broker files
:type broker_files: list of (b_id, host, [file_path, file_path ...]) tuples
:re... |
def identify_and_tag_authors(line, authors_kb):
"""Given a reference, look for a group of author names,
place tags around the author group, return the newly tagged line.
"""
re_auth, re_auth_near_miss = get_author_regexps()
# Replace authors which do not convert well from utf-8
for pattern, ... | Given a reference, look for a group of author names,
place tags around the author group, return the newly tagged line. |
async def SetFilesystemAttachmentInfo(self, filesystem_attachments):
'''
filesystem_attachments : typing.Sequence[~FilesystemAttachment]
Returns -> typing.Sequence[~ErrorResult]
'''
# map input types to rpc msg
_params = dict()
msg = dict(type='StorageProvisioner'... | filesystem_attachments : typing.Sequence[~FilesystemAttachment]
Returns -> typing.Sequence[~ErrorResult] |
def setup(argv):
"""Sets up the ArgumentParser.
Args:
argv: an array of arguments
"""
parser = argparse.ArgumentParser(
description='Compute Jekyl- and prose-aware wordcounts',
epilog='Accepted filetypes: plaintext, markdown, markdown (Jekyll)')
parser.add_argument('-S', '--... | Sets up the ArgumentParser.
Args:
argv: an array of arguments |
def get_ordering(self, request, queryset, view):
"""Return an ordering for a given request.
DRF expects a comma separated list, while DREST expects an array.
This method overwrites the DRF default so it can parse the array.
"""
params = view.get_request_feature(view.SORT)
... | Return an ordering for a given request.
DRF expects a comma separated list, while DREST expects an array.
This method overwrites the DRF default so it can parse the array. |
def type(self, name: str):
"""return the first complete definition of type 'name'"""
for f in self.body:
if (hasattr(f, '_ctype')
and f._ctype._storage == Storages.TYPEDEF
and f._name == name):
return f | return the first complete definition of type 'name |
def _try_cast(self, result, obj, numeric_only=False):
"""
Try to cast the result to our obj original type,
we may have roundtripped through object in the mean-time.
If numeric_only is True, then only try to cast numerics
and not datetimelikes.
"""
if obj.ndim > ... | Try to cast the result to our obj original type,
we may have roundtripped through object in the mean-time.
If numeric_only is True, then only try to cast numerics
and not datetimelikes. |
def get_page_url_title(self):
'''
Get the title and current url from the remote session.
Return is a 2-tuple: (page_title, page_url).
'''
cr_tab_id = self.transport._get_cr_tab_meta_for_key(self.tab_id)['id']
targets = self.Target_getTargets()
assert 'result' in targets
assert 'targetInfos' in targe... | Get the title and current url from the remote session.
Return is a 2-tuple: (page_title, page_url). |
def get_staged_signatures(vcs):
"""Get the list of staged signatures
Args:
vcs (easyci.vcs.base.Vcs)
Returns:
list(basestring) - list of signatures
"""
staged_path = _get_staged_history_path(vcs)
known_signatures = []
if os.path.exists(staged_path):
with open(staged... | Get the list of staged signatures
Args:
vcs (easyci.vcs.base.Vcs)
Returns:
list(basestring) - list of signatures |
def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
r"""
When replacing one block of lines with another, search the blocks
for *similar* lines; the best-matching pair (if any) is used as a
synch point, and intraline difference marking is done on the
similar pair. Lots of work, bu... | r"""
When replacing one block of lines with another, search the blocks
for *similar* lines; the best-matching pair (if any) is used as a
synch point, and intraline difference marking is done on the
similar pair. Lots of work, but often worth it.
Example:
>>> d = Differ(... |
def lxqstr(string, qchar, first):
"""
Lex (scan) a quoted string.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lxqstr_c.html
:param string: String to be scanned.
:type string: str
:param qchar: Quote delimiter character.
:type qchar: char (string of one char)
:param first: C... | Lex (scan) a quoted string.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/lxqstr_c.html
:param string: String to be scanned.
:type string: str
:param qchar: Quote delimiter character.
:type qchar: char (string of one char)
:param first: Character position at which to start scanning.
... |
def validate(func):
"""
Check if annotated function arguments validate according to spec
"""
call = PythonCall(func)
@wraps(func)
def decorator(*args, **kwargs):
parameters = call.bind(args, kwargs)
for arg_name, validator in func.__annotations__.items():
if not vali... | Check if annotated function arguments validate according to spec |
def bgp_summary_parser(bgp_summary):
"""Parse 'show bgp all summary vrf' output information from NX-OS devices."""
bgp_summary_dict = {}
# Check for BGP summary information lines that have no data
if len(bgp_summary.strip().splitlines()) <= 1:
return {}
allowed_afi = ["ipv4", "ipv6", "l2vp... | Parse 'show bgp all summary vrf' output information from NX-OS devices. |
def pivot_query_as_matrix(facet=None, facet_pivot_fields=None, **kwargs):
"""
Pivot query
"""
if facet_pivot_fields is None:
facet_pivot_fields = []
logging.info("Additional args: {}".format(kwargs))
fp = search_associations(rows=0,
facet_fields=[facet],
... | Pivot query |
def gpu_a_trous():
"""
Simple convenience function so that the a trous kernels can be easily accessed by any function.
"""
ker1 = SourceModule("""
__global__ void gpu_a_trous_row_kernel(float *in1, float *in2, float *wfil, int *scale)
{
... | Simple convenience function so that the a trous kernels can be easily accessed by any function. |
def get_artist(self, object_id, relation=None, **kwargs):
"""
Get the artist with the provided id
:returns: an :class:`~deezer.resources.Artist` object
"""
return self.get_object("artist", object_id, relation=relation, **kwargs) | Get the artist with the provided id
:returns: an :class:`~deezer.resources.Artist` object |
def connect(uri):
"""
Connects to an nREPL endpoint identified by the given URL/URI. Valid
examples include:
nrepl://192.168.0.12:7889
telnet://localhost:5000
http://your-app-name.heroku.com/repl
This fn delegates to another looked up in that dispatches on the scheme of
the URI... | Connects to an nREPL endpoint identified by the given URL/URI. Valid
examples include:
nrepl://192.168.0.12:7889
telnet://localhost:5000
http://your-app-name.heroku.com/repl
This fn delegates to another looked up in that dispatches on the scheme of
the URI provided (which can be a stri... |
def validate_submit_args_or_fail(job_descriptor, provider_name, input_providers,
output_providers, logging_providers):
"""Validate that arguments passed to submit_job have valid file providers.
This utility function takes resources and task data args from `submit_job`
in the base... | Validate that arguments passed to submit_job have valid file providers.
This utility function takes resources and task data args from `submit_job`
in the base provider. This function will fail with a value error if any of the
parameters are not valid. See the following example;
>>> job_resources = type('', (o... |
def deserialize(cls, data, content_type=None):
"""Parse a str using the RestAPI syntax and return a model.
:param str data: A str using RestAPI structure. JSON by default.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:... | Parse a str using the RestAPI syntax and return a model.
:param str data: A str using RestAPI structure. JSON by default.
:param str content_type: JSON by default, set application/xml if XML.
:returns: An instance of this model
:raises: DeserializationError if something went wrong |
def lovasz_hinge(logits, labels, per_image=True, ignore=None):
"""
Binary Lovasz hinge loss
logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty)
labels: [B, H, W] Tensor, binary ground truth masks (0 or 1)
per_image: compute the loss per image instead of per batch
... | Binary Lovasz hinge loss
logits: [B, H, W] Variable, logits at each pixel (between -\infty and +\infty)
labels: [B, H, W] Tensor, binary ground truth masks (0 or 1)
per_image: compute the loss per image instead of per batch
ignore: void class id |
def get_recent_matches(self, card_type="micro_card"):
"""
Calling the Recent Matches API.
Arg:
card_type: optional, default to micro_card. Accepted values are
micro_card & summary_card.
Return:
json data
"""
recent_matches_url = self.api... | Calling the Recent Matches API.
Arg:
card_type: optional, default to micro_card. Accepted values are
micro_card & summary_card.
Return:
json data |
def _kl_divergence(self, other_locs, other_weights, kernel=None, delta=1e-2):
"""
Finds the KL divergence between this and another particle
distribution by using a kernel density estimator to smooth over the
other distribution's particles.
"""
if kernel is None:
... | Finds the KL divergence between this and another particle
distribution by using a kernel density estimator to smooth over the
other distribution's particles. |
def extract_tag_metadata(self, el):
"""Extract meta data."""
if self.type == 'odp':
if el.namespace and el.namespace == self.namespaces['draw'] and el.name == 'page-thumbnail':
name = el.attrs.get('draw:page-number', '')
self.additional_context = 'slide{}:'.f... | Extract meta data. |
def forwards(self, orm):
"Write your forwards methods here."
for doc in orm['document_library.Document'].objects.all():
for title in doc.documenttitle_set.all():
title.is_published = doc.is_published
title.save() | Write your forwards methods here. |
def filter_sequences(self, seq_type):
"""Return a DictList of only specified types in the sequences attribute.
Args:
seq_type (SeqProp): Object type
Returns:
DictList: A filtered DictList of specified object type only
"""
return DictList(x for x in self... | Return a DictList of only specified types in the sequences attribute.
Args:
seq_type (SeqProp): Object type
Returns:
DictList: A filtered DictList of specified object type only |
def is_literal_or_name(value):
"""Return True if value is a literal or a name."""
try:
ast.literal_eval(value)
return True
except (SyntaxError, ValueError):
pass
if value.strip() in ['dict()', 'list()', 'set()']:
return True
# Support removal of variables on the rig... | Return True if value is a literal or a name. |
def file_data_to_str(data):
"""
Convert file data to a string for display.
This function takes the file data produced by gather_file_data().
"""
if not data:
return _('<i>File name not recorded</i>')
res = data['name']
try:
mtime_as_str = time.strftime('%Y-%m-%d %H:%M:%S',
... | Convert file data to a string for display.
This function takes the file data produced by gather_file_data(). |
def makeEndOfPrdvFuncCond(self):
'''
Construct the end-of-period value function conditional on next period's
state. NOTE: It might be possible to eliminate this method and replace
it with ConsIndShockSolver.makeEndOfPrdvFunc, but the self.X_cond
variables must be renamed.
... | Construct the end-of-period value function conditional on next period's
state. NOTE: It might be possible to eliminate this method and replace
it with ConsIndShockSolver.makeEndOfPrdvFunc, but the self.X_cond
variables must be renamed.
Parameters
----------
none
... |
def API520_W(Pset, Pback):
r'''Calculates capacity correction due to backpressure on balanced
spring-loaded PRVs in liquid service. For pilot operated valves,
this is always 1. Applicable up to 50% of the percent gauge backpressure,
For use in API 520 relief valve sizing. 1D interpolation among a table ... | r'''Calculates capacity correction due to backpressure on balanced
spring-loaded PRVs in liquid service. For pilot operated valves,
this is always 1. Applicable up to 50% of the percent gauge backpressure,
For use in API 520 relief valve sizing. 1D interpolation among a table with
53 backpressures is pe... |
def dn(self,x,M_change = 12):
"""
Downsample and filter the signal
"""
y = signal.sosfilt(self.sos,x)
y = ssd.downsample(y,M_change)
return y | Downsample and filter the signal |
def copy(self, extra=None):
"""
Creates a copy of this instance with a randomly generated uid
and some extra params. This copies the underlying bestModel,
creates a deep copy of the embedded paramMap, and
copies the embedded and extra parameters over.
It does not copy the... | Creates a copy of this instance with a randomly generated uid
and some extra params. This copies the underlying bestModel,
creates a deep copy of the embedded paramMap, and
copies the embedded and extra parameters over.
It does not copy the extra Params into the subModels.
:para... |
def get_node_at_path(query_path, context):
"""Return the SqlNode associated with the query path."""
if query_path not in context.query_path_to_node:
raise AssertionError(
u'Unable to find SqlNode for query path {} with context {}.'.format(
query_path, context))
node = con... | Return the SqlNode associated with the query path. |
def draw(canvas, mol):
"""Draw molecule structure image.
Args:
canvas: draw.drawable.Drawable
mol: model.graphmol.Compound
"""
mol.require("ScaleAndCenter")
mlb = mol.size2d[2]
if not mol.atom_count():
return
bond_type_fn = {
1: {
0: single_bond,
... | Draw molecule structure image.
Args:
canvas: draw.drawable.Drawable
mol: model.graphmol.Compound |
def append(self, filename_in_zip, file_contents):
'''
Appends a file with name filename_in_zip and contents of
file_contents to the in-memory zip.
'''
# Set the file pointer to the end of the file
self.in_memory_zip.seek(-1, io.SEEK_END)
# Get a handle to the in-... | Appends a file with name filename_in_zip and contents of
file_contents to the in-memory zip. |
def _replace_global_vars(xs, global_vars):
"""Replace globally shared names from input header with value.
The value of the `algorithm` item may be a pointer to a real
file specified in the `global` section. If found, replace with
the full value.
"""
if isinstance(xs, (list, tuple)):
ret... | Replace globally shared names from input header with value.
The value of the `algorithm` item may be a pointer to a real
file specified in the `global` section. If found, replace with
the full value. |
def sample_name(in_bam):
"""Get sample name from BAM file.
"""
with pysam.AlignmentFile(in_bam, "rb", check_sq=False) as in_pysam:
try:
if "RG" in in_pysam.header:
return in_pysam.header["RG"][0]["SM"]
except ValueError:
return None | Get sample name from BAM file. |
def get_assessment_taken_bank_assignment_session(self, proxy):
"""Gets the session for assigning taken assessments to bank mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.AssessmentTakenBankAssignmentSession) -
an ``AssessmentTakenBankAssignmentSessi... | Gets the session for assigning taken assessments to bank mappings.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.assessment.AssessmentTakenBankAssignmentSession) -
an ``AssessmentTakenBankAssignmentSession``
raise: NullArgument - ``proxy`` is ``null``
raise: ... |
def set_archive_layout(self, archive_id, layout_type, stylesheet=None):
"""
Use this method to change the layout of videos in an OpenTok archive
:param String archive_id: The ID of the archive that will be updated
:param String layout_type: The layout type for the archive. Valid values... | Use this method to change the layout of videos in an OpenTok archive
:param String archive_id: The ID of the archive that will be updated
:param String layout_type: The layout type for the archive. Valid values are:
'bestFit', 'custom', 'horizontalPresentation', 'pip' and 'verticalPresentation... |
def _handle_next_export_subtask(self, export_state=None):
"""
Process the next export sub-task, if there is one.
:param ExportState export_state:
If provided, this is used instead of the database queue, in effect directing the exporter to process the
previous export agai... | Process the next export sub-task, if there is one.
:param ExportState export_state:
If provided, this is used instead of the database queue, in effect directing the exporter to process the
previous export again. This is used to avoid having to query the database when we know already wha... |
def exec_rabbitmqctl(self, command, args=[], rabbitmqctl_opts=['-q']):
"""
Execute a ``rabbitmqctl`` command inside a running container.
:param command: the command to run
:param args: a list of args for the command
:param rabbitmqctl_opts:
a list of extra options to... | Execute a ``rabbitmqctl`` command inside a running container.
:param command: the command to run
:param args: a list of args for the command
:param rabbitmqctl_opts:
a list of extra options to pass to ``rabbitmqctl``
:returns: a tuple of the command exit code and output |
def GetChildClassId(self, classId):
    """
    Return the list of child objects whose ``classId`` matches the given
    ``classId`` (comparison is case-insensitive).
    """
    wanted = classId.lower()
    return [child for child in self.child if child.classId.lower() == wanted]
def is_possible_number(numobj):
"""Convenience wrapper around is_possible_number_with_reason.
Instead of returning the reason for failure, this method returns true if
the number is either a possible fully-qualified number (containing the area
code and country code), or if the number could be a possible... | Convenience wrapper around is_possible_number_with_reason.
Instead of returning the reason for failure, this method returns true if
the number is either a possible fully-qualified number (containing the area
code and country code), or if the number could be a possible local number
(with a country code,... |
def set_phases(self, literals=None):
    """
    Sets polarities (preferred truth values) of a given list of variables.

    :param literals: signed literals whose polarity to fix; defaults to
        an empty list when omitted.
    """
    # Fix the mutable-default-argument pitfall: use None as the sentinel
    # and create a fresh list per call instead of sharing one list
    # across all invocations.
    if literals is None:
        literals = []
    # No-op unless the underlying lingeling solver has been initialized.
    if self.lingeling:
        pysolvers.lingeling_setphases(self.lingeling, literals)
def edit_securitygroup(self, group_id, name=None, description=None):
"""Edit security group details.
:param int group_id: The ID of the security group
:param string name: The name of the security group
:param string description: The description of the security group
"""
... | Edit security group details.
:param int group_id: The ID of the security group
:param string name: The name of the security group
:param string description: The description of the security group |
def get_block_hash(self, height, id=None, endpoint=None):
"""
Get hash of a block by its height
Args:
height: (int) height of the block to lookup
id: (int, optional) id to use for response tracking
endpoint: (RPCEndpoint, optional) endpoint to specify to use
... | Get hash of a block by its height
Args:
height: (int) height of the block to lookup
id: (int, optional) id to use for response tracking
endpoint: (RPCEndpoint, optional) endpoint to specify to use
Returns:
json object of the result or the error encountere... |
def create(self, Name, Subject, HtmlBody=None, TextBody=None, Alias=None):
"""
Creates a template.
:param Name: Name of template
:param Subject: The content to use for the Subject when this template is used to send email.
:param HtmlBody: The content to use for the HtmlBody when... | Creates a template.
:param Name: Name of template
:param Subject: The content to use for the Subject when this template is used to send email.
:param HtmlBody: The content to use for the HtmlBody when this template is used to send email.
:param TextBody: The content to use for the HtmlB... |
def start_continuous(self, aichans, update_hz=10):
"""Begins a continuous analog generation, calling a provided function
at a rate of 10Hz
:param aichans: name of channel(s) to record (analog input) from
:type aichans: list<str>
:param update_hz: Rate (Hz) at which to read data... | Begins a continuous analog generation, calling a provided function
at a rate of 10Hz
:param aichans: name of channel(s) to record (analog input) from
:type aichans: list<str>
:param update_hz: Rate (Hz) at which to read data from the device input buffer
:type update_hz: int |
def update():
    '''
    When we are asked to update (regular interval) lets reap the cache
    '''
    hash_cache_dir = os.path.join(__opts__['cachedir'], 'roots', 'hash')
    try:
        salt.fileserver.reap_fileserver_cache_dir(hash_cache_dir, find_file)
    except (IOError, OSError):
        # Hash file won't exist if no files have yet been served up
        pass
def extract_name_from_job_arn(arn):
    """Returns the name used in the API given a full ARN for a training job
    or hyperparameter tuning job.
    """
    # The API name is everything after the first '/' in the ARN.
    _, sep, name = arn.partition('/')
    if not sep:
        raise ValueError("Cannot parse invalid ARN: %s" % arn)
    return name
or hyperparameter tuning job. |
def extract_spans(html_string):
"""
Creates a list of the spanned cell groups of [row, column] pairs.
Parameters
----------
html_string : str
Returns
-------
list of lists of lists of int
"""
try:
from bs4 import BeautifulSoup
except ImportError:
print("ERRO... | Creates a list of the spanned cell groups of [row, column] pairs.
Parameters
----------
html_string : str
Returns
-------
list of lists of lists of int |
def ensure_property_set(host=None, admin_username=None, admin_password=None, property=None, value=None):
'''
.. versionadded:: Fluorine
Ensure that property is set to specific value
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
... | .. versionadded:: Fluorine
Ensure that property is set to specific value
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
The password used to access the chassis.
property:
The property which should be set.
value:... |
def to_bigquery_fields(self, name_case=DdlParseBase.NAME_CASE.original):
"""
Generate BigQuery JSON fields define
:param name_case: name case type
* DdlParse.NAME_CASE.original : Return to no convert
* DdlParse.NAME_CASE.lower : Return to lower
* DdlParse.NAM... | Generate BigQuery JSON fields define
:param name_case: name case type
* DdlParse.NAME_CASE.original : Return to no convert
* DdlParse.NAME_CASE.lower : Return to lower
* DdlParse.NAME_CASE.upper : Return to upper
:return: BigQuery JSON fields define |
def p_try_statement_3(self, p):
    """try_statement : TRY block catch finally"""
    # NOTE: the docstring above is the PLY grammar production for this
    # parser rule -- the parser generator reads it at build time, so its
    # text must not be edited.
    # p[2] = try-block statements, p[3] = catch clause, p[4] = finally
    # clause; the result is the AST node for the whole try statement.
    p[0] = ast.Try(statements=p[2], catch=p[3], fin=p[4])
def unicode_iter(val):
"""Provides an iterator over the *code points* of the given Unicode sequence.
Notes:
Before PEP-393, Python has the potential to support Unicode as UTF-16 or UTF-32.
This is reified in the property as ``sys.maxunicode``. As a result, naive iteration
of Unicode se... | Provides an iterator over the *code points* of the given Unicode sequence.
Notes:
Before PEP-393, Python has the potential to support Unicode as UTF-16 or UTF-32.
This is reified in the property as ``sys.maxunicode``. As a result, naive iteration
of Unicode sequences will render non-charac... |
def as_mpl_artists(shape_list,
properties_func=None,
text_offset=5.0, origin=1):
"""
Converts a region list to a list of patches and a list of artists.
Optional Keywords:
[ text_offset ] - If there is text associated with the regions, add
some vertical offset ... | Converts a region list to a list of patches and a list of artists.
Optional Keywords:
[ text_offset ] - If there is text associated with the regions, add
some vertical offset (in pixels) to the text so that it doesn't overlap
with the regions.
Often, the regions files implicitly assume the lower-... |
def query_all():
    '''
    Query all the records from TabPost2Tag.
    '''
    # Alias the tag's kind column so it is exposed as `tag_kind` on the
    # joined rows.
    tag_kind_col = TabTag.kind.alias('tag_kind')
    query = TabPost2Tag.select(TabPost2Tag, tag_kind_col)
    return query.join(TabTag, on=(TabPost2Tag.tag_id == TabTag.uid))
def add_header(self, name, value):
    """Add an HTTP header to response object.

    Arguments:
        name (str): HTTP header field name
        value (str): HTTP header field value; when ``None`` the header
            is silently skipped
    """
    if value is None:
        # Do not emit headers without a value.
        return
    self._headers.append((name, value))
Arguments:
name (str): HTTP header field name
value (str): HTTP header field value |
def load_file(self, fname, table=None, sep="\t", bins=False, indexes=None):
"""
use some of the machinery in pandas to load a file into a table
Parameters
----------
fname : str
filename or filehandle to load
table : str
table to load the file t... | use some of the machinery in pandas to load a file into a table
Parameters
----------
fname : str
filename or filehandle to load
table : str
table to load the file to
sep : str
CSV separator
bins : bool
add a "bin" colu... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.