code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def parse_grain(grain):
""" Parse a string to a granularity, e.g. "Day" to InstantTime.day.
:param grain: a string representing a granularity.
"""
if not grain:
return InstantTime.day
if grain.lower() == 'week':
return InstantTime.week
return Inst... | Parse a string to a granularity, e.g. "Day" to InstantTime.day.
:param grain: a string representing a granularity. |
def get_prev_status_from_history(instance, status=None):
"""Returns the previous status of the object. If status is set, returns the
previous status before the object reached the status passed in.
If instance has reached the status passed in more than once, only the last
one is considered.
"""
t... | Returns the previous status of the object. If status is set, returns the
previous status before the object reached the status passed in.
If instance has reached the status passed in more than once, only the last
one is considered. |
def _X_selected(X, selected):
"""Split X into selected features and other features"""
n_features = X.shape[1]
ind = np.arange(n_features)
sel = np.zeros(n_features, dtype=bool)
sel[np.asarray(selected)] = True
non_sel = np.logical_not(sel)
n_selected = np.sum(sel)
X_sel = X[:, ind[sel]]
... | Split X into selected features and other features |
def debug(ftn, txt):
"""Used for debugging."""
if debug_p:
sys.stdout.write("{0}.{1}:{2}\n".format(modname, ftn, txt))
sys.stdout.flush() | Used for debugging. |
def pull_full_properties(self):
"""
Retrieve the full set of resource properties and cache them in this
object.
Authorization requirements:
* Object-access permission to this resource.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseErr... | Retrieve the full set of resource properties and cache them in this
object.
Authorization requirements:
* Object-access permission to this resource.
Raises:
:exc:`~zhmcclient.HTTPError`
:exc:`~zhmcclient.ParseError`
:exc:`~zhmcclient.AuthError`
... |
def run_via_binary(self, command=None, foreground=False, volumes=None,
additional_opts=None, default_options=None, name=None, *args, **kwargs):
"""
Create new instance NspawnContianer in case of not running at foreground, in case foreground run, return process
object
:param ... | Create new instance NspawnContianer in case of not running at foreground, in case foreground run, return process
object
:param command: list - command to run
:param foreground: bool - run process at foreground
:param volumes: list - put additional bind mounts
:param additional_o... |
def transformer_image_decoder(targets,
encoder_output,
ed_attention_bias,
hparams,
name=None):
"""Transformer image decoder over targets with local attention.
Args:
targets: Tensor of shape [... | Transformer image decoder over targets with local attention.
Args:
targets: Tensor of shape [batch, ...], and whose size is batch * height *
width * hparams.num_channels * hparams.hidden_size.
encoder_output: Tensor of shape [batch, length_kv, hparams.hidden_size].
ed_attention_bias: Tensor which b... |
def gep(self, ptr, indices, inbounds=False, name=''):
"""
Compute effective address (getelementptr):
name = getelementptr ptr, <indices...>
"""
instr = instructions.GEPInstr(self.block, ptr, indices,
inbounds=inbounds, name=name)
... | Compute effective address (getelementptr):
name = getelementptr ptr, <indices...> |
def get(self, *args, **kwargs):
"""
An interface for get requests that handles errors more gracefully to
prevent data loss
"""
try:
req_func = self.session.get if self.session else requests.get
req = req_func(*args, **kwargs)
req.ra... | An interface for get requests that handles errors more gracefully to
prevent data loss |
def pipe_util(func):
"""
Decorator that handles X objects and partial application for pipe-utils.
"""
@wraps(func)
def pipe_util_wrapper(function, *args, **kwargs):
if isinstance(function, XObject):
function = ~function
original_function = function
if args or kw... | Decorator that handles X objects and partial application for pipe-utils. |
def advanced_search(pattern):
"""
Parse the grammar of a pattern and build a queryset with it.
"""
query_parsed = QUERY.parseString(pattern)
return Entry.published.filter(query_parsed[0]).distinct() | Parse the grammar of a pattern and build a queryset with it. |
def fisher_by_pol(data):
"""
input: as in dolnp (list of dictionaries with 'dec' and 'inc')
description: do fisher mean after splitting data into two polarity domains.
output: three dictionaries:
'A'= polarity 'A'
'B = polarity 'B'
'ALL'= switching polarity of 'B' directions, ... | input: as in dolnp (list of dictionaries with 'dec' and 'inc')
description: do fisher mean after splitting data into two polarity domains.
output: three dictionaries:
'A'= polarity 'A'
'B = polarity 'B'
'ALL'= switching polarity of 'B' directions, and calculate fisher mean of all data... |
def indices_for_body(self, name, step=3):
'''Get a list of the indices for a specific body.
Parameters
----------
name : str
The name of the body to look up.
step : int, optional
The number of numbers for each body. Defaults to 3, should be set
... | Get a list of the indices for a specific body.
Parameters
----------
name : str
The name of the body to look up.
step : int, optional
The number of numbers for each body. Defaults to 3, should be set
to 4 for body rotation (since quaternions have 4 va... |
def mount_disks(self):
"""Mounts all disks in the parser, i.e. calling :func:`Disk.mount` on all underlying disks. You probably want to
use :func:`init` instead.
:return: whether all mounts have succeeded
:rtype: bool"""
result = True
for disk in self.disks:
... | Mounts all disks in the parser, i.e. calling :func:`Disk.mount` on all underlying disks. You probably want to
use :func:`init` instead.
:return: whether all mounts have succeeded
:rtype: bool |
def ipv6_acl_ipv6_access_list_standard_name(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ipv6_acl = ET.SubElement(config, "ipv6-acl", xmlns="urn:brocade.com:mgmt:brocade-ipv6-access-list")
ipv6 = ET.SubElement(ipv6_acl, "ipv6")
access_list = E... | Auto Generated Code |
def get_app(system_version_file: str = None,
config_file_override: str = None,
name_override: str = None,
loop: asyncio.AbstractEventLoop = None) -> web.Application:
""" Build and return the aiohttp.web.Application that runs the server
The params can be overloaded for testin... | Build and return the aiohttp.web.Application that runs the server
The params can be overloaded for testing. |
def _encrypt_password(self, password):
"""encrypt the password for given mode """
if self.encryption_mode.lower() == 'crypt':
return self._crypt_password(password)
elif self.encryption_mode.lower() == 'md5':
return self._md5_password(password)
elif self.encryption... | encrypt the password for given mode |
def patch(func=None, obj=None, name=None, avoid_doublewrap=True):
"""
Decorator for monkeypatching functions on modules and classes.
Example::
# This replaces FooClass.bar with our method
@monkeybiz.patch(FooClass)
def bar(original_bar, *args, **kwargs):
print "Patched!... | Decorator for monkeypatching functions on modules and classes.
Example::
# This replaces FooClass.bar with our method
@monkeybiz.patch(FooClass)
def bar(original_bar, *args, **kwargs):
print "Patched!"
return original_bar(*args, **kwargs)
# This replaces Fo... |
def create_region_from_border_clip(self, onerror = None):
"""Create a region of the border clip of the window, i.e. the area
that is not clipped by the parent and any sibling windows.
"""
rid = self.display.allocate_resource_id()
CreateRegionFromBorderClip(
display = self.display,
o... | Create a region of the border clip of the window, i.e. the area
that is not clipped by the parent and any sibling windows. |
def prepare_io_example_1() -> Tuple[devicetools.Nodes, devicetools.Elements]:
# noinspection PyUnresolvedReferences
"""Prepare an IO example configuration.
>>> from hydpy.core.examples import prepare_io_example_1
>>> nodes, elements = prepare_io_example_1()
(1) Prepares a short initialisation peri... | Prepare an IO example configuration.
>>> from hydpy.core.examples import prepare_io_example_1
>>> nodes, elements = prepare_io_example_1()
(1) Prepares a short initialisation period of five days:
>>> from hydpy import pub
>>> pub.timegrids
Timegrids(Timegrid('2000-01-01 00:00:00',
... |
def nb_to_html(nb_path):
"""convert notebook to html"""
exporter = html.HTMLExporter(template_file='full')
output, resources = exporter.from_filename(nb_path)
header = output.split('<head>', 1)[1].split('</head>',1)[0]
body = output.split('<body>', 1)[1].split('</body>',1)[0]
# http://imgur.com... | convert notebook to html |
def manage_job_with_blocking_dependencies(self, job_record):
""" method will trigger job processing only if:
- all dependencies are in [STATE_PROCESSED, STATE_NOOP]
NOTICE: method will transfer current job into STATE_SKIPPED if any dependency is in STATE_SKIPPED """
composite_sta... | method will trigger job processing only if:
- all dependencies are in [STATE_PROCESSED, STATE_NOOP]
NOTICE: method will transfer current job into STATE_SKIPPED if any dependency is in STATE_SKIPPED |
def count(self, query, _or=False):
"""Count word from FM-index
Params:
<str> | <Sequential> query
<bool> _or
<list <str> > ignores
Return:
<int> counts
"""
if isinstance(query, str):
return self.fm.count(query, MapIntInt... | Count word from FM-index
Params:
<str> | <Sequential> query
<bool> _or
<list <str> > ignores
Return:
<int> counts |
def accept_line(self, logevent):
"""
Return True on match.
Only match log lines containing 'is now in state' (reflects other
node's state changes) or of type "[rsMgr] replSet PRIMARY" (reflects
own state changes).
"""
if ("is now in state" in logevent.line_str an... | Return True on match.
Only match log lines containing 'is now in state' (reflects other
node's state changes) or of type "[rsMgr] replSet PRIMARY" (reflects
own state changes). |
def report_fit(self):
"""
Print a report of the fit results.
"""
if not self.fitted:
print('Model not yet fit.')
return
print('Null Log-liklihood: {0:.3f}'.format(
self.log_likelihoods['null']))
print('Log-liklihood at convergence: {0... | Print a report of the fit results. |
def calc_q0_perc_uz_v1(self):
"""Perform the upper zone layer routine which determines percolation
to the lower zone layer and the fast response of the hland model.
Note that the system behaviour of this method depends strongly on the
specifications of the options |RespArea| and |RecStep|.
Required... | Perform the upper zone layer routine which determines percolation
to the lower zone layer and the fast response of the hland model.
Note that the system behaviour of this method depends strongly on the
specifications of the options |RespArea| and |RecStep|.
Required control parameters:
|RecStep|
... |
def _extract_input_processes(self):
"""
Given user input of interested processes, it will extract the info and output a list of tuples.
- input can be multiple values, separated by space;
- either pid or process_name is optional
- e.g., "10001/python 10002/java cpp"
:return: None
"""
for... | Given user input of interested processes, it will extract the info and output a list of tuples.
- input can be multiple values, separated by space;
- either pid or process_name is optional
- e.g., "10001/python 10002/java cpp"
:return: None |
def _get_text(node, tag, default=None):
"""Get the text for the provided tag from the provided node"""
try:
return node.find(tag).text
except AttributeError:
return default | Get the text for the provided tag from the provided node |
def _set_valid_props(artist, kwargs):
"""Set valid properties for the artist, dropping the others."""
artist.set(**{k: kwargs[k] for k in kwargs if hasattr(artist, "set_" + k)})
return artist | Set valid properties for the artist, dropping the others. |
def getSpecs(self):
"""Get specs
Returns:
dict: Representation of the object
"""
content = {}
if len(self.roles) != 0:
content["roles"] = self.roles
if self.password:
content["password"] = self.passwo... | Get specs
Returns:
dict: Representation of the object |
def merge_blocks(a_blocks, b_blocks):
"""Given two lists of blocks, combine them, in the proper order.
Ensure that there are no overlaps, and that they are for sequences of the
same length.
"""
# Check sentinels for sequence length.
assert a_blocks[-1][2] == b_blocks[-1][2] == 0 # sentinel size... | Given two lists of blocks, combine them, in the proper order.
Ensure that there are no overlaps, and that they are for sequences of the
same length. |
def _filtered_data_zeroed(self):
"""
A 2D `~numpy.nddarray` cutout from the input ``filtered_data``
(or ``data`` if ``filtered_data`` is `None`) where any masked
pixels (_segment_mask, _input_mask, or _data_mask) are set to
zero. Invalid values (e.g. NaNs or infs) are set to zer... | A 2D `~numpy.nddarray` cutout from the input ``filtered_data``
(or ``data`` if ``filtered_data`` is `None`) where any masked
pixels (_segment_mask, _input_mask, or _data_mask) are set to
zero. Invalid values (e.g. NaNs or infs) are set to zero.
Units are dropped on the input ``filtered_... |
def add_integer_proxy_for(self, label: str, shape: Collection[int] = None) -> Vertex:
"""
Creates a proxy vertex for the given label and adds to the sequence item
"""
if shape is None:
return Vertex._from_java_vertex(self.unwrap().addIntegerProxyFor(_VertexLabel(label).unwrap... | Creates a proxy vertex for the given label and adds to the sequence item |
def get_comparable_values(self):
"""Return a tupple of values representing the unicity of the object
"""
return (int(self.major), int(self.minor), str(self.label), str(self.name)) | Return a tupple of values representing the unicity of the object |
def encode_events(self, duration, events, values, dtype=np.bool):
'''Encode labeled events as a time-series matrix.
Parameters
----------
duration : number
The duration of the track
events : ndarray, shape=(n,)
Time index of the events
values : ... | Encode labeled events as a time-series matrix.
Parameters
----------
duration : number
The duration of the track
events : ndarray, shape=(n,)
Time index of the events
values : ndarray, shape=(n, m)
Values array. Must have the same first ind... |
def replace(self, text=None):
"""
Replaces the selected occurrence.
:param text: The replacement text. If it is None, the lineEditReplace's
text is used instead.
:return True if the text could be replace properly, False if there is
no more occurrenc... | Replaces the selected occurrence.
:param text: The replacement text. If it is None, the lineEditReplace's
text is used instead.
:return True if the text could be replace properly, False if there is
no more occurrences to replace. |
def view_on_site(self, request, content_type_id, object_id):
"""
Redirect to an object's page based on a content-type ID and an object ID.
"""
# Look up the object, making sure it's got a get_absolute_url() function.
try:
content_type = ContentType.objects.get(pk=cont... | Redirect to an object's page based on a content-type ID and an object ID. |
def fiemap(fd):
"""
Gets a map of file extents.
"""
count = 72
fiemap_cbuf = ffi.new(
'char[]',
ffi.sizeof('struct fiemap')
+ count * ffi.sizeof('struct fiemap_extent'))
fiemap_pybuf = ffi.buffer(fiemap_cbuf)
fiemap_ptr = ffi.cast('struct fiemap*', fiemap_cbuf)
a... | Gets a map of file extents. |
def tree(path, depth=2, topdown=True, followlinks=False, showhidden=False):
"""A generator return a tuple with three elements (root, dirs, files)."""
rt = []
for root, dirs, files in os.walk(path, topdown=topdown, followlinks=followlinks):
if not showhidden and File.is_hidden(root):
cont... | A generator return a tuple with three elements (root, dirs, files). |
def crypto_sign(msg, sk):
"""Return signature+message given message and secret key.
The signature is the first SIGNATUREBYTES bytes of the return value.
A copy of msg is in the remainder."""
if len(sk) != SECRETKEYBYTES:
raise ValueError("Bad signing key length %d" % len(sk))
vkbytes = sk[PU... | Return signature+message given message and secret key.
The signature is the first SIGNATUREBYTES bytes of the return value.
A copy of msg is in the remainder. |
def usages(self):
"""Instance depends on the API version:
* 2018-03-01-preview: :class:`UsagesOperations<azure.mgmt.storage.v2018_03_01_preview.operations.UsagesOperations>`
* 2018-07-01: :class:`UsagesOperations<azure.mgmt.storage.v2018_07_01.operations.UsagesOperations>`
"""
... | Instance depends on the API version:
* 2018-03-01-preview: :class:`UsagesOperations<azure.mgmt.storage.v2018_03_01_preview.operations.UsagesOperations>`
* 2018-07-01: :class:`UsagesOperations<azure.mgmt.storage.v2018_07_01.operations.UsagesOperations>` |
def SetUseSSL(self, use_ssl):
"""Sets the use of ssl.
Args:
use_ssl (bool): enforces use of ssl.
"""
self._use_ssl = use_ssl
logger.debug('Elasticsearch use_ssl: {0!s}'.format(use_ssl)) | Sets the use of ssl.
Args:
use_ssl (bool): enforces use of ssl. |
def cycle_gan_internal(inputs, targets, _, hparams):
"""Cycle GAN, main step used for training."""
with tf.variable_scope("cycle_gan"):
# Embed inputs and targets.
inputs_orig, targets_orig = tf.to_int32(inputs), tf.to_int32(targets)
inputs = common_layers.embedding(
inputs_orig, hparams.vocab_s... | Cycle GAN, main step used for training. |
def sigma_sq(self, sample):
"""returns the value of sigma square, given the weight's sample
Parameters
----------
sample: list
sample is a (1 * NUM_OF_FUNCTIONS) matrix, representing{w1, w2, ... wk}
Returns
-------
float
the value... | returns the value of sigma square, given the weight's sample
Parameters
----------
sample: list
sample is a (1 * NUM_OF_FUNCTIONS) matrix, representing{w1, w2, ... wk}
Returns
-------
float
the value of sigma square, given the weight's sa... |
def _extendrange(self, start, end):
"""Creates list of values in a range with output delimiters.
Arguments:
start - range start
end - range end
"""
range_positions = []
for i in range(start, end):
if i != 0:
range_pos... | Creates list of values in a range with output delimiters.
Arguments:
start - range start
end - range end |
def db_for_read(self, model, **hints):
"""
If the app has its own database, use it for reads
"""
if model._meta.app_label in self._apps:
return getattr(model, '_db_alias', model._meta.app_label)
return None | If the app has its own database, use it for reads |
def parse_bismark_report(self, report, regexes):
""" Search a bismark report with a set of regexes """
parsed_data = {}
for k, r in regexes.items():
r_search = re.search(r, report, re.MULTILINE)
if r_search:
try:
parsed_data[k] = float(... | Search a bismark report with a set of regexes |
def syslog_generate(str_processName, str_pid):
'''
Returns a string similar to:
Tue Oct 9 10:49:53 2012 pretoria message.py[26873]:
where 'pretoria' is the hostname, 'message.py' is the current process
name and 26873 is the current process id.
'''
localtime = time.asct... | Returns a string similar to:
Tue Oct 9 10:49:53 2012 pretoria message.py[26873]:
where 'pretoria' is the hostname, 'message.py' is the current process
name and 26873 is the current process id. |
def slug_field_data(field, **kwargs):
"""
Return random value for SlugField
>>> result = any_form_field(forms.SlugField())
>>> type(result)
<type 'str'>
>>> from django.core.validators import slug_re
>>> import re
>>> re.match(slug_re, result) is not None
True
"""
min_le... | Return random value for SlugField
>>> result = any_form_field(forms.SlugField())
>>> type(result)
<type 'str'>
>>> from django.core.validators import slug_re
>>> import re
>>> re.match(slug_re, result) is not None
True |
def get_connection(cls):
"""Return connection object.
:rtype: :class:`cloud_browser.cloud.base.CloudConnection`
"""
if cls.__connection_obj is None:
if cls.__connection_fn is None:
_, cls.__connection_fn = cls.from_settings()
cls.__connection_obj ... | Return connection object.
:rtype: :class:`cloud_browser.cloud.base.CloudConnection` |
def make_sh_output(value, output_script, witness=False):
'''
int, str -> TxOut
'''
return _make_output(
value=utils.i2le_padded(value, 8),
output_script=make_sh_output_script(output_script, witness)) | int, str -> TxOut |
def create_kubernetes_role(self, name, bound_service_account_names, bound_service_account_namespaces, ttl="",
max_ttl="", period="", policies=None, mount_point='kubernetes'):
"""POST /auth/<mount_point>/role/:name
:param name: Name of the role.
:type name: str.
... | POST /auth/<mount_point>/role/:name
:param name: Name of the role.
:type name: str.
:param bound_service_account_names: List of service account names able to access this role. If set to "*" all
names are allowed, both this and bound_service_account_namespaces can not be "*".
... |
def loop_template_list(loop_positions, instance, instance_type,
default_template, registry):
"""
Build a list of templates from a position within a loop
and a registry of templates.
"""
templates = []
local_loop_position = loop_positions[1]
global_loop_position = loop_... | Build a list of templates from a position within a loop
and a registry of templates. |
def needs_quotes( s ):
"""Checks whether a string is a dot language ID.
It will check whether the string is solely composed
by the characters allowed in an ID or not.
If the string is one of the reserved keywords it will
need quotes too but the user will need to add them
manually.
"""
... | Checks whether a string is a dot language ID.
It will check whether the string is solely composed
by the characters allowed in an ID or not.
If the string is one of the reserved keywords it will
need quotes too but the user will need to add them
manually. |
def astimezone(self, tz):
"""
Return a :py:class:`khayyam.JalaliDatetime` object with new :py:meth:`khayyam.JalaliDatetime.tzinfo` attribute
tz, adjusting the date and time data so the result is the same UTC time as self, but in *tz*‘s local time.
*tz* must be an instance of a :py:class... | Return a :py:class:`khayyam.JalaliDatetime` object with new :py:meth:`khayyam.JalaliDatetime.tzinfo` attribute
tz, adjusting the date and time data so the result is the same UTC time as self, but in *tz*‘s local time.
*tz* must be an instance of a :py:class:`datetime.tzinfo` subclass, and
its :... |
def p_duration_number_duration_unit(self, p):
'duration : NUMBER DURATION_UNIT'
logger.debug('duration = number %s, duration unit %s', p[1], p[2])
p[0] = Duration.from_quantity_unit(p[1], p[2]) | duration : NUMBER DURATION_UNIT |
def entitlement(self, token):
"""
Client applications can use a specific endpoint to obtain a special
security token called a requesting party token (RPT). This token
consists of all the entitlements (or permissions) for a user as a
result of the evaluation of the permissions and... | Client applications can use a specific endpoint to obtain a special
security token called a requesting party token (RPT). This token
consists of all the entitlements (or permissions) for a user as a
result of the evaluation of the permissions and authorization policies
associated with th... |
def ensure_unique_obs_ids_in_wide_data(obs_id_col, wide_data):
"""
Ensures that there is one observation per row in wide_data. Raises a
helpful ValueError if otherwise.
Parameters
----------
obs_id_col : str.
Denotes the column in `wide_data` that contains the observation ID
val... | Ensures that there is one observation per row in wide_data. Raises a
helpful ValueError if otherwise.
Parameters
----------
obs_id_col : str.
Denotes the column in `wide_data` that contains the observation ID
values for each row.
wide_data : pandas dataframe.
Contains one ro... |
def hacking_has_license(physical_line, filename, lines, line_number):
"""Check for Apache 2.0 license.
H102 license header not found
"""
# don't work about init files for now
# TODO(sdague): enforce license in init file if it's not empty of content
license_found = False
# skip files that a... | Check for Apache 2.0 license.
H102 license header not found |
def protein_sequences_generator_to_dataframe(variant_and_protein_sequences_generator):
"""
Given a generator which yields (Variant, [ProteinSequence]) elements,
returns a pandas.DataFrame
"""
return dataframe_from_generator(
element_class=ProteinSequence,
variant_and_elements_generat... | Given a generator which yields (Variant, [ProteinSequence]) elements,
returns a pandas.DataFrame |
def _read_http_window_update(self, size, kind, flag):
"""Read HTTP/2 WINDOW_UPDATE frames.
Structure of HTTP/2 WINDOW_UPDATE frame [RFC 7540]:
+-----------------------------------------------+
| Length (24) |
+---------------+-------... | Read HTTP/2 WINDOW_UPDATE frames.
Structure of HTTP/2 WINDOW_UPDATE frame [RFC 7540]:
+-----------------------------------------------+
| Length (24) |
+---------------+---------------+---------------+
| Type (8) | Flags (... |
def open(self):
"""
Called on new websocket connection.
"""
sess_id = self._get_sess_id()
if sess_id:
self.application.pc.websockets[self._get_sess_id()] = self
self.write_message(json.dumps({"cmd": "status", "status": "open"}))
else:
s... | Called on new websocket connection. |
def _classify_section(cls, section):
"""Attempt to find the canonical name of this section."""
name = section.lower()
if name in frozenset(['args', 'arguments', "params", "parameters"]):
return cls.ARGS_SECTION
if name in frozenset(['returns', 'return']):
retur... | Attempt to find the canonical name of this section. |
def show(self):
"""Show the structure of self.rules_list, only for debug."""
for rule in self.rules_list:
result = ", ".join([str(check) for check, deny in rule])
print(result) | Show the structure of self.rules_list, only for debug. |
def json(self):
"""
Return response body deserialized into JSON object.
"""
if six.PY3:
return json.loads(self.body.decode(self.charset))
else:
return json.loads(self.body) | Return response body deserialized into JSON object. |
def fetch_from_sdr(folder=data_folder, data='test'):
"""
Download MRS data from SDR
Parameters
----------
folder : str
Full path to a location in which to place the data. Per default this
will be a directory under the user's home `.mrs_data`.
data : str
Which data to downloa... | Download MRS data from SDR
Parameters
----------
folder : str
Full path to a location in which to place the data. Per default this
will be a directory under the user's home `.mrs_data`.
data : str
Which data to download. Either 'test', which is data required for
testing, or '... |
def _fetchone(self, query, vars):
"""
Return none or one row.
"""
cursor = self.get_db().cursor()
self._log(cursor, query, vars)
cursor.execute(query, vars)
return cursor.fetchone() | Return none or one row. |
def load_translations(directory: str, encoding: str = None) -> None:
"""Loads translations from CSV files in a directory.
Translations are strings with optional Python-style named placeholders
(e.g., ``My name is %(name)s``) and their associated translations.
The directory should have translation file... | Loads translations from CSV files in a directory.
Translations are strings with optional Python-style named placeholders
(e.g., ``My name is %(name)s``) and their associated translations.
The directory should have translation files of the form ``LOCALE.csv``,
e.g. ``es_GT.csv``. The CSV files should h... |
def EventsNotificationsPost(self, parameters):
"""
Create an event-notification in CommonSense.
If EvensNotificationsPost was successful the result, including the event_notification_id can be obtained from getResponse(), and should be a json string.
@param p... | Create an event-notification in CommonSense.
If EvensNotificationsPost was successful the result, including the event_notification_id can be obtained from getResponse(), and should be a json string.
@param parameters (dictionary) - Parameters according to which to create the even... |
def asscalar(a):
""" https://github.com/numpy/numpy/issues/4701 """
# Do we want to check that the value is numeric?
#if isinstance(value, (int, long, float)): return value
try:
return np.asscalar(a)
except AttributeError as e:
return np.asscalar(np.asarray(a)) | https://github.com/numpy/numpy/issues/4701 |
def _level_coords(self):
"""Return a mapping of all MultiIndex levels and their corresponding
coordinate name.
"""
level_coords = OrderedDict()
for cname, var in self._coords.items():
if var.ndim == 1 and isinstance(var, IndexVariable):
level_names = v... | Return a mapping of all MultiIndex levels and their corresponding
coordinate name. |
def emitRemoved( self ):
"""
Emits the removed signal, provided the dispatcher's signals \
are not currently blocked.
:return <bool> emitted
"""
# check the signals blocked
if ( self.signalsBlocked() ):
return False
# emit... | Emits the removed signal, provided the dispatcher's signals \
are not currently blocked.
:return <bool> emitted |
def getResetsIndices(networkDataFile):
"""Returns the indices at which the data sequences reset."""
try:
with open(networkDataFile) as f:
reader = csv.reader(f)
next(reader, None)
next(reader, None)
resetIdx = next(reader).index("R")
resets = []
for i, line... | Returns the indices at which the data sequences reset. |
def _set_set_overload_bit(self, v, load=False):
"""
Setter method for set_overload_bit, mapped from YANG variable /routing_system/router/isis/router_isis_cmds_holder/router_isis_attributes/set_overload_bit (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_set_o... | Setter method for set_overload_bit, mapped from YANG variable /routing_system/router/isis/router_isis_cmds_holder/router_isis_attributes/set_overload_bit (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_set_overload_bit is considered as a private
method. Backends ... |
def post_registration_redirect(self, request, user):
"""
After registration, redirect to the home page or supplied "next"
query string or hidden field value.
"""
next_url = "/registration/register/complete/"
if "next" in request.GET or "next" in request.POST:
... | After registration, redirect to the home page or supplied "next"
query string or hidden field value. |
def background(self):
"""Only a getter on purpose. See the tests."""
if self._background is None:
self._background = GSBackgroundLayer()
self._background._foreground = self
return self._background | Only a getter on purpose. See the tests. |
def _get_hashing_context(self, app: FlaskUnchained) -> CryptContext:
"""
Get the token hashing (and verifying) context.
"""
return CryptContext(schemes=app.config.SECURITY_HASHING_SCHEMES,
deprecated=app.config.SECURITY_DEPRECATED_HASHING_SCHEMES) | Get the token hashing (and verifying) context. |
def evalMetric(self, x, method=None):
'''Evaluates the density matching metric at a given design point.
:param iterable x: values of the design variables, this is passed as
the first argument to the function fqoi
:return: metric_value - value of the metric evaluated at the design
... | Evaluates the density matching metric at a given design point.
:param iterable x: values of the design variables, this is passed as
the first argument to the function fqoi
:return: metric_value - value of the metric evaluated at the design
point given by x
:rtype: floa... |
def Bernoulli(p, tag=None):
"""
A Bernoulli random variate
Parameters
----------
p : scalar
The probability of success
"""
assert (
0 < p < 1
), 'Bernoulli probability "p" must be between zero and one, non-inclusive'
return uv(ss.bernoulli(p), tag=tag) | A Bernoulli random variate
Parameters
----------
p : scalar
The probability of success |
def convert_to_codec_key(value):
"""
Normalize code key value (encoding codecs must be lower case and must
not contain any dashes).
:param value: value to convert.
"""
if not value:
# fallback to utf-8
value = 'UTF-8'
# UTF-8 -> utf_8
converted = value.replace('-', '_').... | Normalize code key value (encoding codecs must be lower case and must
not contain any dashes).
:param value: value to convert. |
def log_prob(self, response, predicted_linear_response, name=None):
"""Computes `D(param=mean(r)).log_prob(response)` for linear response, `r`.
Args:
response: `float`-like `Tensor` representing observed ("actual")
responses.
predicted_linear_response: `float`-like `Tensor` corresponding to... | Computes `D(param=mean(r)).log_prob(response)` for linear response, `r`.
Args:
response: `float`-like `Tensor` representing observed ("actual")
responses.
predicted_linear_response: `float`-like `Tensor` corresponding to
`tf.matmul(model_matrix, weights)`.
name: Python `str` used ... |
def getDescendant(Ancestor, RouteParts):
r"""Resolves a descendant, of the given Ancestor, as pointed by the RouteParts.
"""
if not RouteParts:
return Ancestor
Resolved = Ancestor.Members.get(RouteParts.pop(0))
if isinstance(Resolved, Group):
return getDescendant(Resolved, RouteParts)
... | r"""Resolves a descendant, of the given Ancestor, as pointed by the RouteParts. |
def gen_compliance_xdr(self):
"""Create an XDR object representing this builder's transaction to be
sent over via the Compliance protocol (notably, with a sequence number
of 0).
Intentionally, the XDR object is returned without any signatures on the
transaction.
See `St... | Create an XDR object representing this builder's transaction to be
sent over via the Compliance protocol (notably, with a sequence number
of 0).
Intentionally, the XDR object is returned without any signatures on the
transaction.
See `Stellar's documentation on its Compliance P... |
def slurp(path, encoding='UTF-8'):
"""
Reads file `path` and returns the entire contents as a unicode string
By default assumes the file is encoded as UTF-8
Parameters
----------
path : str
File path to file on disk
encoding : str, default `UTF-8`, optional
Encoding of the ... | Reads file `path` and returns the entire contents as a unicode string
By default assumes the file is encoded as UTF-8
Parameters
----------
path : str
File path to file on disk
encoding : str, default `UTF-8`, optional
Encoding of the file
Returns
-------
The txt read... |
def indices(self, names, axis=None):
"""get the row and col indices of names. If axis is None, two ndarrays
are returned, corresponding the indices of names for each axis
Parameters
----------
names : iterable
column and/or row names
axis : (int) (opt... | get the row and col indices of names. If axis is None, two ndarrays
are returned, corresponding the indices of names for each axis
Parameters
----------
names : iterable
column and/or row names
axis : (int) (optional)
the axis to search.
... |
def generate_rrab_lightcurve(
times,
mags=None,
errs=None,
paramdists={
'period':sps.uniform(loc=0.45,scale=0.35),
'fourierorder':[8,11],
'amplitude':sps.uniform(loc=0.4,scale=0.5),
'phioffset':np.pi,
},
magsarefluxes=False
... | This generates fake RRab light curves.
Parameters
----------
times : np.array
This is an array of time values that will be used as the time base.
mags,errs : np.array
These arrays will have the model added to them. If either is
None, `np.full_like(times, 0.0)` will used as a s... |
def GetService(self, service_name, version=sorted(_SERVICE_MAP.keys())[-1],
server=None):
"""Creates a service client for the given service.
Args:
service_name: A string identifying which Ad Manager service to create a
service client for.
[optional]
version: A strin... | Creates a service client for the given service.
Args:
service_name: A string identifying which Ad Manager service to create a
service client for.
[optional]
version: A string identifying the Ad Manager version to connect to. This
defaults to what is currently the latest versio... |
def _events_process(event_types=None, eager=False):
"""Process stats events."""
event_types = event_types or list(current_stats.enabled_events)
if eager:
process_events.apply((event_types,), throw=True)
click.secho('Events processed successfully.', fg='green')
else:
process_event... | Process stats events. |
def url_signature(url: str) -> Optional[Tuple]:
"""
Return an identify signature for url
:param url: item to get signature for
:return: tuple containing last modified, length and, if present, etag
"""
request = urllib.request.Request(url)
request.get_method = lambda: 'HEAD'
response = No... | Return an identify signature for url
:param url: item to get signature for
:return: tuple containing last modified, length and, if present, etag |
def list_motors(name_pattern=Motor.SYSTEM_DEVICE_NAME_CONVENTION, **kwargs):
"""
This is a generator function that enumerates all tacho motors that match
the provided arguments.
Parameters:
name_pattern: pattern that device name should match.
For example, 'motor*'. Default value: '*... | This is a generator function that enumerates all tacho motors that match
the provided arguments.
Parameters:
name_pattern: pattern that device name should match.
For example, 'motor*'. Default value: '*'.
keyword arguments: used for matching the corresponding device
attr... |
def _flatten_projection(cls, projection):
"""
Flatten a structured projection (structure projections support for
projections of (to be) dereferenced fields.
"""
# If `projection` is empty return a full projection based on `_fields`
if not projection:
return {... | Flatten a structured projection (structure projections support for
projections of (to be) dereferenced fields. |
def ExtractEvents(self, parser_mediator, registry_key, **kwargs):
"""Extracts events from a Terminal Server Client Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.Wi... | Extracts events from a Terminal Server Client Windows Registry key.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
registry_key (dfwinreg.WinRegistryKey): Windows Registry key. |
def signature(self, node, frame, extra_kwargs=None):
"""Writes a function call to the stream for the current node.
A leading comma is added automatically. The extra keyword
arguments may not include python keywords otherwise a syntax
error could occour. The extra keyword arguments shou... | Writes a function call to the stream for the current node.
A leading comma is added automatically. The extra keyword
arguments may not include python keywords otherwise a syntax
error could occour. The extra keyword arguments should be given
as python dict. |
def cleanupContainers(self):
"""
Cleans up all containers to the right of the current one.
"""
for i in range(self.count() - 1, self.currentIndex(), -1):
widget = self.widget(i)
widget.close()
widget.setParent(None)
widget.deleteLat... | Cleans up all containers to the right of the current one. |
def make_multi_entry(plist, pkg_pyvers, ver_dict):
    """Append one formatted entry per Python interpreter version.

    Each key in *pkg_pyvers* (e.g. ``'py27'``) is turned into a dotted
    version label (``'2.7'``) and combined with the word form of the
    corresponding version spec from *ver_dict*.
    """
    for key in pkg_pyvers:
        # key[2] is the major digit, key[3:] the minor part (e.g. 'py27' -> '2.7')
        version_label = "{0}.{1}".format(key[2], key[3:])
        plist.append(
            "Python {0}: {1}".format(version_label, ops_to_words(ver_dict[key]))
        )
def tag(self, version='bump', message=''):
    """Clone the repository from GitHub, then create a tag and commit it.

    :param version: tag name to create (default ``'bump'`` — presumably an
        auto-bump marker; confirm against the repo helper).
    :param message: tag/commit message, empty by default.
    """
    # Ensure a local clone exists before tagging.
    self.clone_from_github()
    repo = self.github_repo
    repo.tag(version, message=message)
def _update_with_like_args(ctx, _, value):
"""Update arguments with options taken from a currently running VS."""
if value is None:
return
env = ctx.ensure_object(environment.Environment)
vsi = SoftLayer.VSManager(env.client)
vs_id = helpers.resolve_id(vsi.resolve_ids, value, 'VS')
like... | Update arguments with options taken from a currently running VS. |
def transpose(self, *axes):
"""Permute the dimensions of a Timeseries."""
if self.ndim <= 1:
return self
ar = np.asarray(self).transpose(*axes)
if axes[0] != 0:
# then axis 0 is unaffected by the transposition
newlabels = [self.labels[ax] for ax in axe... | Permute the dimensions of a Timeseries. |
def findNestedEnums(self, lst):
'''
Recursive helper function for finding nested enums. If this node is a class or
struct it may have had an enum added to its child list. When this occurred, the
enum was removed from ``self.enums`` in the :class:`~exhale.graph.ExhaleRoot`
class... | Recursive helper function for finding nested enums. If this node is a class or
struct it may have had an enum added to its child list. When this occurred, the
enum was removed from ``self.enums`` in the :class:`~exhale.graph.ExhaleRoot`
class and needs to be rediscovered by calling this method... |
def _line_parse(line):
"""Removes line ending characters and returns a tuple (`stripped_line`,
`is_terminated`).
"""
if line[-2:] in ['\r\n', b'\r\n']:
return line[:-2], True
elif line[-1:] in ['\r', '\n', b'\r', b'\n']:
return line[:-1], True
return line, False | Removes line ending characters and returns a tuple (`stripped_line`,
`is_terminated`). |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.