code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def execute_remote(self, remote_target, cmd, **kwargs):
"""
Executes the given command (with the given arguments)
on the given remote target of the connected machine
"""
data = self._build_command(cmd, kwargs, self._contexts[-1],
remote_target)
... | Executes the given command (with the given arguments)
on the given remote target of the connected machine |
def _starfeatures_worker(task):
'''
This wraps starfeatures.
'''
try:
(lcfile, outdir, kdtree, objlist,
lcflist, neighbor_radius_arcsec,
deredden, custom_bandpasses, lcformat, lcformatdir) = task
return get_starfeatures(lcfile, outdir,
... | This wraps starfeatures. |
def visible(self):
    """
    Read/write. |True| if axis is visible, |False| otherwise.
    """
    delete_element = self._element.delete_
    if delete_element is None:
        # NOTE(review): absence of the delete element yields "not visible"
        # here — confirm this matches the schema default.
        return False
    return not delete_element.val
async def iterUnivRows(self, prop):
'''
Iterate (buid, valu) rows for the given universal prop
'''
penc = prop.encode()
pref = penc + b'\x00'
for _, pval in self.layrslab.scanByPref(pref, db=self.byuniv):
buid = s_msgpack.un(pval)[0]
byts = self.... | Iterate (buid, valu) rows for the given universal prop |
def get_subsites(self):
""" Returns a list of subsites defined for this site
:rtype: list[Site]
"""
url = self.build_url(
self._endpoints.get('get_subsites').format(id=self.object_id))
response = self.con.get(url)
if not response:
return []
... | Returns a list of subsites defined for this site
:rtype: list[Site] |
def get_reservations_for_booking_ids(self, booking_ids):
"""Gets booking information for a given list of booking ids.
:param booking_ids: a booking id or a list of room ids (comma separated).
:type booking_ids: string
"""
try:
resp = self._request("GET", "/1.1/space/... | Gets booking information for a given list of booking ids.
:param booking_ids: a booking id or a list of room ids (comma separated).
:type booking_ids: string |
def derivative(xdata, ydata):
"""
performs d(ydata)/d(xdata) with nearest-neighbor slopes
must be well-ordered, returns new arrays [xdata, dydx_data]
neighbors:
"""
D_ydata = []
D_xdata = []
for n in range(1, len(xdata)-1):
D_xdata.append(xdata[n])
D_ydata.append((ydata[... | performs d(ydata)/d(xdata) with nearest-neighbor slopes
must be well-ordered, returns new arrays [xdata, dydx_data]
neighbors: |
def element_count(self):
"""Retrieve the number of elements in this type.
Returns an int.
If the Type is not an array or vector, this raises.
"""
result = conf.lib.clang_getNumElements(self)
if result < 0:
raise Exception('Type does not have elements.')
... | Retrieve the number of elements in this type.
Returns an int.
If the Type is not an array or vector, this raises. |
def _model_abilities_two_components(self,beta):
""" Creates the structure of the model - store abilities
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
theta : np.array
... | Creates the structure of the model - store abilities
Parameters
----------
beta : np.array
Contains untransformed starting values for latent variables
Returns
----------
theta : np.array
Contains the predicted values for the time series
... |
def calc_directional_aop(self, report, parameter, parameter_dir):
"""
Will calcuate the directional AOP (only sub-surface rrs for now) if the direction is defined using @
e.g. rrs@32.0:45 where <zenith-theta>:<azimuth-phi>
:param report: The planarrad report dictionary. should include... | Will calcuate the directional AOP (only sub-surface rrs for now) if the direction is defined using @
e.g. rrs@32.0:45 where <zenith-theta>:<azimuth-phi>
:param report: The planarrad report dictionary. should include the quadtables and the directional info
:param parameter: parameter to calc. ... |
def check_native_jsonfield_postgres_engine(app_configs=None, **kwargs):
"""
Check that the DJSTRIPE_USE_NATIVE_JSONFIELD isn't set unless Postgres is in use.
"""
from . import settings as djstripe_settings
messages = []
error_msg = "DJSTRIPE_USE_NATIVE_JSONFIELD is not compatible with engine {engine} for databas... | Check that the DJSTRIPE_USE_NATIVE_JSONFIELD isn't set unless Postgres is in use. |
def _set_precision(self, precision):
'''
function that sets precision to an (hopfully) reasonable guess based
on the length of the sequence if not explicitly set
'''
# if precision is explicitly specified, use it.
if self.one_mutation:
        self.min_width = 10*sel... | function that sets precision to a (hopefully) reasonable guess based
on the length of the sequence if not explicitly set |
def error_handler(self, e, request, meth, em_format):
"""
Override this method to add handling of errors customized for your
needs
"""
if isinstance(e, FormValidationError):
return self.form_validation_response(e)
elif isinstance(e, TypeError):
r... | Override this method to add handling of errors customized for your
needs |
def create_package_file(root, master_package, subroot, py_files, opts, subs, is_namespace):
# type: (unicode, unicode, unicode, List[unicode], Any, List[unicode], bool) -> None
"""Build the text of the file and write the file."""
use_templates = False
fullname = makename(master_package, subroot)
i... | Build the text of the file and write the file. |
def executed_block_set(trace):
"""
Given an execution trace, returns a python set object containing the names of each block for which the user code
was executed. Block names can be set via set_debug_name().
"""
executed_set = set()
for entry in trace:
if entry[0] == 'execute':
... | Given an execution trace, returns a python set object containing the names of each block for which the user code
was executed. Block names can be set via set_debug_name(). |
def find_line_containing(strings: Sequence[str], contents: str) -> int:
    """
    Finds the index of the first line in ``strings`` that contains
    ``contents``, or ``-1`` if none is found.

    :param strings: lines to search
    :param contents: substring to look for (an empty string matches the
        first line, mirroring ``str.find`` semantics)
    :return: 0-based index of the first matching line, or -1
    """
    # enumerate + the ``in`` operator replace the range(len())/str.find
    # pattern; behaviour is identical (``x.find(y) != -1`` <=> ``y in x``).
    for index, line in enumerate(strings):
        if contents in line:
            return index
    return -1
def trim_ordered_range_list(ranges,start,finish):
"""A function to help with slicing a mapping
Start with a list of ranges and get another list of ranges constrained by start (0-indexed) and finish (1-indexed)
:param ranges: ordered non-overlapping ranges on the same chromosome
:param start: start 0-i... | A function to help with slicing a mapping
Start with a list of ranges and get another list of ranges constrained by start (0-indexed) and finish (1-indexed)
:param ranges: ordered non-overlapping ranges on the same chromosome
:param start: start 0-indexed
:param finish: ending 1-indexed
:type ... |
def run(self):
"""Wrap _run method."""
# Catch all possible exceptions raised by the running thread
# and let parent process know about it.
try:
self._run()
except Exception: # pylint: disable=broad-except
self.action.put(
ServiceCheckDied... | Wrap _run method. |
def fix_config(self, options):
"""
Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict
"""
options = super(Com... | Fixes the options, if necessary. I.e., it adds all required elements to the dictionary.
:param options: the options to fix
:type options: dict
:return: the (potentially) fixed options
:rtype: dict |
def external(name, value, dtype=tf.sg_floatx, summary=True, regularizer=None, trainable=True):
r"""Creates a tensor variable of which initial values are `value`.
For example,
```
external("external", [3,3,1,2])
=> [3. 3. 1. 2.]
```
Args:
name: The name of new variable.
... | r"""Creates a tensor variable of which initial values are `value`.
For example,
```
external("external", [3,3,1,2])
=> [3. 3. 1. 2.]
```
Args:
name: The name of new variable.
value: A constant value (or list) of output type `dtype`.
dtype: The type of the element... |
def write_points(self, data, time_precision='s', *args, **kwargs):
"""Write to multiple time series names.
An example data blob is:
data = [
{
"points": [
[
12
]
],
"name... | Write to multiple time series names.
An example data blob is:
data = [
{
"points": [
[
12
]
],
"name": "cpu_load_short",
"columns": [
"value"
... |
def _init_virtual_io(self, file):
"""Initialize callback functions for sf_open_virtual()."""
@_ffi.callback("sf_vio_get_filelen")
def vio_get_filelen(user_data):
curr = file.tell()
file.seek(0, SEEK_END)
size = file.tell()
file.seek(curr, SEEK_SET)... | Initialize callback functions for sf_open_virtual(). |
def fit_interval_censoring(
self,
df,
lower_bound_col,
upper_bound_col,
event_col=None,
ancillary_df=None,
show_progress=False,
timeline=None,
weights_col=None,
robust=False,
initial_point=None,
entry_col=None,
):
... | Fit the accelerated failure time model to a left-censored dataset.
Parameters
----------
df: DataFrame
a Pandas DataFrame with necessary columns ``lower_bound_col``, ``upper_bound_col`` (see below),
and any other covariates or weights.
lower_bound_col: string
... |
def divideHosts(self, hosts, qty):
"""Divide processes among hosts."""
maximumWorkers = sum(host[1] for host in hosts)
# If specified amount of workers is greater than sum of each specified.
if qty > maximumWorkers:
index = 0
while qty > maximumWorkers:
... | Divide processes among hosts. |
def collect_variables(self, selections) -> None:
"""Apply method |ExchangeItem.collect_variables| of the base class
|ExchangeItem| and determine the `ndim` attribute of the current
|ChangeItem| object afterwards.
The value of `ndim` depends on whether the values of the target
va... | Apply method |ExchangeItem.collect_variables| of the base class
|ExchangeItem| and determine the `ndim` attribute of the current
|ChangeItem| object afterwards.
The value of `ndim` depends on whether the values of the target
variable or its time series is of interest:
>>> from ... |
def gt(self, v, limit=None, offset=None):
"""Returns the list of the members of the set that have scores
greater than v.
"""
if limit is not None and offset is None:
offset = 0
return self.zrangebyscore("(%f" % v, self._max_score,
start=offset, num=lim... | Returns the list of the members of the set that have scores
greater than v. |
def makeReadPacket(ID, reg, values=None):
    """
    Build a READ instruction packet for servo ``ID`` targeting register
    ``reg``. Multi-byte (word size) values must already be little endian;
    use Packet.le() if necessary.
    """
    return makePacket(ID, xl320.XL320_READ, reg, values)
def get_column_def(self):
    """
    Build the column definition fragment used in a CQL table definition.

    Produces ``"<cql name> <db type> [static]"``; note that a trailing
    space is emitted when the column is not static (original behaviour
    preserved).
    """
    qualifier = "static" if self.static else ""
    column_type = self.db_type.format(self.value_type.db_type)
    return "{} {} {}".format(self.cql, column_type, qualifier)
def GET_account_balance(self, path_info, account_addr, token_type):
"""
Get the balance of a particular token
Returns {'balance': ...}
"""
if not check_account_address(account_addr):
return self._reply_json({'error': 'Invalid address'}, status_code=400)
if no... | Get the balance of a particular token
Returns {'balance': ...} |
def attributs(self):
"""
The user attributes, defined as the fields on the :attr:`user` object.
:return: a :class:`dict` with the :attr:`user` object fields. Attributes may be
If the user do not exists, the returned :class:`dict` is empty.
:rtype: dict
... | The user attributes, defined as the fields on the :attr:`user` object.
:return: a :class:`dict` with the :attr:`user` object fields. Attributes may be
If the user do not exists, the returned :class:`dict` is empty.
:rtype: dict |
def stop_app(self):
"""Overrides superclass."""
try:
if self._conn:
# Be polite; let the dest know we're shutting down.
try:
self.closeSl4aSession()
except:
self.log.exception('Failed to gracefully shut d... | Overrides superclass. |
def check_trademark_symbol(text):
    """Use the trademark symbol instead of (TM)."""
    err = "typography.symbols.trademark"
    msg = u"(TM) is a goofy alphabetic approximation, use the symbol ™."
    # Raw string: in a plain literal, "\(" is an invalid escape sequence
    # (SyntaxWarning on modern CPython). The resulting pattern bytes are
    # identical, so matching behaviour is unchanged.
    regex = r"\(TM\)"
    return existence_check(
        text, [regex], err, msg, max_errors=3, require_padding=False)
def qteKeyPress(self, msgObj):
"""
Record the key presses reported by the key handler.
"""
# Unpack the data structure.
(srcObj, keysequence, macroName) = msgObj.data
key = keysequence.toQKeyEventList()[-1]
# If the current key did not complete a macro ignore it.... | Record the key presses reported by the key handler. |
def instance():
"""Return an PyVabamorf instance.
It returns the previously initialized instance or creates a new
one if nothing exists. Also creates new instance in case the
process has been forked.
"""
if not hasattr(Vabamorf, 'pid') or Vabamorf.pid != os.getpid():
... | Return an PyVabamorf instance.
It returns the previously initialized instance or creates a new
one if nothing exists. Also creates new instance in case the
process has been forked. |
def img(url, alt='', classes='', style=''):
'''
Image tag helper.
'''
if not url.startswith('http://') and not url[:1] == '/':
#add media_url for relative paths
url = settings.STATIC_URL + url
attr = {
'class': classes,
'alt': alt,
'style': style,
's... | Image tag helper. |
def get_payload(self):
    """Return the payload bytes: the software version followed by the
    hardware version, product group and product type (one byte each)."""
    trailer = bytes(
        [self.hardware_version, self.product_group, self.product_type])
    return self._software_version + trailer
def deleteFile(self, CorpNum, MgtKeyType, MgtKey, FileID, UserID=None):
""" 첨부파일 삭제
args
CorpNum : 회원 사업자 번호
MgtKeyType : 관리번호 유형 one of ['SELL','BUY','TRUSTEE']
MgtKey : 파트너 관리번호
UserID : 팝빌 회원아이디
return
처리결... | 첨부파일 삭제
args
CorpNum : 회원 사업자 번호
MgtKeyType : 관리번호 유형 one of ['SELL','BUY','TRUSTEE']
MgtKey : 파트너 관리번호
UserID : 팝빌 회원아이디
return
처리결과. consist of code and message
raise
PopbillException |
def to_fastq_str(self):
    """
    :return: string representation of this NGS read as a four-line FastQ
             record (@name, sequence, +name, quality).
    """
    header = "@" + self.name
    separator = "+" + self.name
    return "\n".join([header, self.sequenceData, separator, self.seq_qual])
def analyze(self, input_directory, output_directory, **kwargs):
"""
Run all the analysis saved in self._analyses, sorted by test_id.
This is useful when Naarad() is used by other programs and multiple analyses are run
In naarad CLI mode, len(_analyses) == 1
:param: input_directory: location of log f... | Run all the analysis saved in self._analyses, sorted by test_id.
This is useful when Naarad() is used by other programs and multiple analyses are run
In naarad CLI mode, len(_analyses) == 1
:param: input_directory: location of log files
:param: output_directory: root directory for analysis output
:p... |
def _get_chain_parent_symbol(self, symbol, fullsymbol):
"""Gets the code element object for the parent of the specified
symbol in the fullsymbol chain."""
#We are only interested in the type of the variable immediately preceding our symbol
#in the chain so we can list its members.
... | Gets the code element object for the parent of the specified
symbol in the fullsymbol chain. |
def format_names(raw):
"""Format a string representing the names contained in the files.
"""
if raw:
raw = [
'{}:\n{}'.format(
header.lower(), ' '.join(func[0] for func in funcs)
)
for header, funcs in raw
]
return '\n'.join(raw)
... | Format a string representing the names contained in the files. |
def __reg_query_value(handle, value_name):
'''
Calls RegQueryValueEx
If PY2 ensure unicode string and expand REG_EXPAND_SZ before returning
Remember to catch not found exceptions when calling.
Args:
handle (object): open registry handle.
value_name (str)... | Calls RegQueryValueEx
If PY2 ensure unicode string and expand REG_EXPAND_SZ before returning
Remember to catch not found exceptions when calling.
Args:
handle (object): open registry handle.
value_name (str): Name of the value you wished returned
Returns:
... |
def _serialize_dict(cls, dict_):
"""
:type dict_ dict
:rtype: dict
"""
obj_serialized = {}
for key in dict_.keys():
item_serialized = cls.serialize(dict_[key])
if item_serialized is not None:
key = key.rstrip(cls._SUFFIX_KEY_OVE... | :type dict_ dict
:rtype: dict |
def create_model(schema, collection, class_name=None):
"""
Main entry point to creating a new mongothon model. Both
schema and Pymongo collection objects must be provided.
Returns a new class which can be used as a model class.
The class name of the model class by default is inferred
from the ... | Main entry point to creating a new mongothon model. Both
schema and Pymongo collection objects must be provided.
Returns a new class which can be used as a model class.
The class name of the model class by default is inferred
from the provided collection (converted to camel case).
Optionally, a cl... |
def snakecase(string):
"""Convert string into snake case.
Join punctuation with underscore
Args:
string: String to convert.
Returns:
string: Snake cased string.
"""
string = re.sub(r"[\-\.\s]", '_', str(string))
if not string:
return string
return lowercase(st... | Convert string into snake case.
Join punctuation with underscore
Args:
string: String to convert.
Returns:
string: Snake cased string. |
def createService(self, createServiceParameter,
description=None,
tags="Feature Service",
snippet=None):
"""
The Create Service operation allows users to create a hosted
feature service. You can use the API to create an empty host... | The Create Service operation allows users to create a hosted
feature service. You can use the API to create an empty hosted
feaure service from feature service metadata JSON.
Inputs:
createServiceParameter - create service object |
def MetricValueTypeFromPythonType(python_type):
"""Converts Python types to MetricMetadata.ValueType enum values."""
if python_type in (int, long):
return rdf_stats.MetricMetadata.ValueType.INT
elif python_type == float:
return rdf_stats.MetricMetadata.ValueType.FLOAT
else:
raise ValueError("Invalid... | Converts Python types to MetricMetadata.ValueType enum values. |
def import_string(import_name, silent=False):
"""Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax... | Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax.saxutils:escape``).
If `silent` is True the ret... |
def Print(self, x, data, message, **kwargs): # pylint: disable=invalid-name
"""Calls tf.Print.
Args:
x: LaidOutTensor.
data: list of LaidOutTensor.
message: str.
**kwargs: keyword arguments to tf.print.
Returns:
LaidOutTensor.
"""
del data, message, kwargs
tf.log... | Calls tf.Print.
Args:
x: LaidOutTensor.
data: list of LaidOutTensor.
message: str.
**kwargs: keyword arguments to tf.print.
Returns:
LaidOutTensor. |
def SendTextMessage(self, Text):
"""Sends a text message over channel.
:Parameters:
Text : unicode
Text to send.
"""
if self.Type == cctReliable:
self.Stream.Write(Text)
elif self.Type == cctDatagram:
self.Stream.SendDatagram(Text)
... | Sends a text message over channel.
:Parameters:
Text : unicode
Text to send. |
def refresh(self):
"""
Fetches all current container names from the client, along with their id.
"""
if not self._client:
return
current_containers = self._client.containers(all=True)
self.clear()
for container in current_containers:
contai... | Fetches all current container names from the client, along with their id. |
def render(self):
    """Run the render loop until the stop flag is set.

    Each iteration draws one frame, then sleeps for the configured
    interval (``_interval`` is in milliseconds; converted to seconds).

    Returns
    -------
    self
    """
    stop_flag = self._stop_spinner
    while not stop_flag.is_set():
        self._render_frame()
        time.sleep(0.001 * self._interval)
    return self
def estimate(init_values,
estimator,
method,
loss_tol,
gradient_tol,
maxiter,
print_results,
use_hessian=True,
just_point=False,
**kwargs):
"""
Estimate the given choice model that is defined by ... | Estimate the given choice model that is defined by `estimator`.
Parameters
----------
init_vals : 1D ndarray.
Should contain the initial values to start the optimization process
with.
estimator : an instance of the EstimationObj class.
method : str, optional.
Should be a val... |
def _clean_data(cls, *args, **kwargs):
"""
Convert raw data into a dictionary with plot-type specific methods.
The result of the cleaning operation should be a dictionary.
If the dictionary contains a 'data' field it will be passed directly
(ensuring appropriate formatting). Oth... | Convert raw data into a dictionary with plot-type specific methods.
The result of the cleaning operation should be a dictionary.
If the dictionary contains a 'data' field it will be passed directly
(ensuring appropriate formatting). Otherwise, it should be a
dictionary of data-type spec... |
def wait_for_relation(service_name, relation_name, timeout=120):
"""Wait `timeout` seconds for a given relation to come up."""
start_time = time.time()
while True:
relation = unit_info(service_name, 'relations').get(relation_name)
if relation is not None and relation['state'] == 'up':
... | Wait `timeout` seconds for a given relation to come up. |
def as_string(self):
"""Return the command as a single string for the docker file"""
if type(self.instruction) is str:
return self.instruction
if self.action == "FROM" and not isinstance(self.command, six.string_types):
extra = "" if self.extra is NotSpecified else " {0}... | Return the command as a single string for the docker file |
def __getLogger(cls):
    """ Return the cached class-level logger, creating it on first use.
    :returns: (Logger) A Logger object.
    """
    logger = cls.__logger
    if logger is None:
        # First access: build the logger once and cache it on the class.
        logger = opf_utils.initLogger(cls)
        cls.__logger = logger
    return logger
def add_resource(self, name, file_path, ind_obj):
    """Link a file resource to an individual and persist it.

    :param name: name of the resource
    :param file_path: path to the resource file
    :param ind_obj: individual the resource is linked to
    :return: the newly created Resource
    """
    resource = Resource(name=name, individual=ind_obj, path=file_path)
    self.session.add(resource)
    self.save()
    return resource
def path_qs(self):
    """Decoded URL path, with the query string appended when non-empty."""
    if self.query_string:
        return "{}?{}".format(self.path, self.query_string)
    return self.path
def is_ipfs_uri(value: str) -> bool:
"""
Return a bool indicating whether or not the value is a valid IPFS URI.
"""
parse_result = parse.urlparse(value)
if parse_result.scheme != "ipfs":
return False
if not parse_result.netloc and not parse_result.path:
return False
return T... | Return a bool indicating whether or not the value is a valid IPFS URI. |
def find_nearest_leaf(self, entry, search_node = None):
"""!
@brief Search nearest leaf to the specified clustering feature.
@param[in] entry (cfentry): Clustering feature.
@param[in] search_node (cfnode): Node from that searching should be started, if None then search proc... | !
@brief Search nearest leaf to the specified clustering feature.
@param[in] entry (cfentry): Clustering feature.
@param[in] search_node (cfnode): Node from that searching should be started, if None then search process will be started for the root.
@return (leaf_n... |
def createReference(self, fromnode, tonode, edge_data=None):
"""
Create a reference from fromnode to tonode
"""
if fromnode is None:
fromnode = self
fromident, toident = self.getIdent(fromnode), self.getIdent(tonode)
if fromident is None or toident is None:
... | Create a reference from fromnode to tonode |
def set_basic_params(self, msg_size=None, cheap=None, anti_loop_timeout=None):
"""
:param int msg_size: Set the max size of an alarm message in bytes. Default: 8192.
:param bool cheap: Use main alarm thread rather than create dedicated
threads for curl-based alarms
:param i... | :param int msg_size: Set the max size of an alarm message in bytes. Default: 8192.
:param bool cheap: Use main alarm thread rather than create dedicated
threads for curl-based alarms
:param int anti_loop_timeout: Tune the anti-loop alarm system. Default: 3 seconds. |
def do_stored_procedure_check(self, instance, proc):
"""
Fetch the metrics from the stored proc
"""
guardSql = instance.get('proc_only_if')
custom_tags = instance.get("tags", [])
if (guardSql and self.proc_check_guard(instance, guardSql)) or not guardSql:
se... | Fetch the metrics from the stored proc |
def quadratic_forms(h1, h2):
r"""
Quadrativ forms metric.
Notes
-----
UNDER DEVELOPMENT
This distance measure shows very strange behaviour. The expression
transpose(h1-h2) * A * (h1-h2) yields egative values that can not be processed by the
square root. Some examples::
... | r"""
Quadratic forms metric.
Notes
-----
UNDER DEVELOPMENT
This distance measure shows very strange behaviour. The expression
transpose(h1-h2) * A * (h1-h2) yields negative values that cannot be processed by the
square root. Some examples::
h1 h2 ... |
def page_index(request):
    """Render an index of all pages, grouped by the first letter of the
    (capitalized) page name and sorted alphabetically."""
    grouped = {}
    for page in Page.query.order_by(Page.name):
        initial = page.name.capitalize()[0]
        grouped.setdefault(initial, []).append(page)
    body = generate_template("page_index.html", letters=sorted(grouped.items()))
    return Response(body)
def create_apppool(name):
'''
Create an IIS application pool.
.. note::
This function only validates against the application pool name, and will
return True even if the application pool already exists with a different
configuration. It will not modify the configuration of an existi... | Create an IIS application pool.
.. note::
This function only validates against the application pool name, and will
return True even if the application pool already exists with a different
configuration. It will not modify the configuration of an existing
application pool.
Args... |
def get_removed_obs_importance(self,obslist_dict=None,
reset_zero_weight=False):
"""get a dataframe the posterior uncertainty
as a result of losing some observations
Parameters
----------
obslist_dict : dict
dictionary of groups of ... | get a dataframe the posterior uncertainty
as a result of losing some observations
Parameters
----------
obslist_dict : dict
dictionary of groups of observations
that are to be treated as lost. key values become
row labels in returned dataframe. If No... |
def generalized_negative_binomial(mu=1, alpha=1, shape=_Null, dtype=_Null, **kwargs):
"""Draw random samples from a generalized negative binomial distribution.
Samples are distributed according to a generalized negative binomial
distribution parametrized by *mu* (mean) and *alpha* (dispersion).
*alpha*... | Draw random samples from a generalized negative binomial distribution.
Samples are distributed according to a generalized negative binomial
distribution parametrized by *mu* (mean) and *alpha* (dispersion).
*alpha* is defined as *1/k* where *k* is the failure limit of the
number of unsuccessful experim... |
def ConsultarTributos(self, sep="||"):
"Retorna un listado de tributos con código, descripción y signo."
ret = self.client.consultarTributos(
auth={
'token': self.Token, 'sign': self.Sign,
'cuit': self.Cuit, },
... | Retorna un listado de tributos con código, descripción y signo. |
def insert(self, key, value):
"""Inserts a key and value in the map if the map does not already
contain the key.
:type key: :class: '~opencensus.tags.tag_key.TagKey'
:param key: a tag key to insert into the map
:type value: :class: '~opencensus.tags.tag_value.TagValue'
... | Inserts a key and value in the map if the map does not already
contain the key.
:type key: :class: '~opencensus.tags.tag_key.TagKey'
:param key: a tag key to insert into the map
:type value: :class: '~opencensus.tags.tag_value.TagValue'
:param value: a tag value that is associa... |
def copy_security(source,
target,
obj_type='file',
copy_owner=True,
copy_group=True,
copy_dacl=True,
copy_sacl=True):
r'''
Copy the security descriptor of the Source to the Target. You can specify a
s... | r'''
Copy the security descriptor of the Source to the Target. You can specify a
specific portion of the security descriptor to copy using one of the
`copy_*` parameters.
.. note::
At least one `copy_*` parameter must be ``True``
.. note::
The user account running this command must... |
def publish_event(event_t, data=None, extra_channels=None, wait=None):
"""
Publish an event ot any subscribers.
:param event_t: event type
:param data: event data
:param extra_channels:
:param wait:
:return:
"""
event = Event(event_t, data)
pubsub.publish("shoebot", event)
... | Publish an event to any subscribers.
:param event_t: event type
:param data: event data
:param extra_channels:
:param wait:
:return: |
def equirectangular_distance(self, other):
"""
Return the approximate equirectangular when the location is close to
the center of the cluster.
For small distances, Pythagoras’ theorem can be used on an
equirectangular projection.
Equirectangular formula::
x... | Return the approximate equirectangular when the location is close to
the center of the cluster.
For small distances, Pythagoras’ theorem can be used on an
equirectangular projection.
Equirectangular formula::
x = Δλ ⋅ cos φm
y = Δφ
d = R ⋅ √(x² + y)... |
def summarize(self, n_timescales_to_report=5):
"""Some summary information."""
nonzeros = np.sum(np.abs(self.eigenvectors_) > 0, axis=0)
active = '[%s]' % ', '.join(['%d/%d' % (n, self.n_features) for n in nonzeros[:n_timescales_to_report]])
return """K-sparse time-structure Independent... | Some summary information. |
def meaning(phrase, source_lang="en", dest_lang="en", format="json"):
"""
make calls to the glosbe API
:param phrase: word for which meaning is to be found
:param source_lang: Defaults to : "en"
:param dest_lang: Defaults to : "en" For eg: "fr" for french
:param format: ... | make calls to the glosbe API
:param phrase: word for which meaning is to be found
:param source_lang: Defaults to : "en"
:param dest_lang: Defaults to : "en" For eg: "fr" for french
:param format: response structure type. Defaults to: "json"
:returns: returns a json object as st... |
def clean_data(data):
""" Shift to lower case, replace unknowns with UNK, and listify """
new_data = []
VALID = 'abcdefghijklmnopqrstuvwxyz123456789"\'?!.,:; '
for sample in data:
new_sample = []
for char in sample[1].lower(): # Just grab the string, not the label
if char in... | Shift to lower case, replace unknowns with UNK, and listify |
def get_context(self, arr, expr, context):
"""
Returns a context dictionary for use in evaluating the expression.
:param arr: The input array.
:param expr: The input expression.
:param context: Evaluation context.
"""
expression_names = [x for x in self.get_expr... | Returns a context dictionary for use in evaluating the expression.
:param arr: The input array.
:param expr: The input expression.
:param context: Evaluation context. |
def _add_namespace(self, namespace):
    """Register an included and possibly renamed Namespace.

    Source names containing a wildcard are tracked in the regex map;
    plain names are delegated to ``_add_plain_namespace``.
    """
    source = namespace.source_name
    if "*" not in source:
        self._add_plain_namespace(namespace)
    else:
        self._regex_map.append((namespace_to_regex(source), namespace))
def timezone(zone):
r''' Return a datetime.tzinfo implementation for the given timezone
>>> from datetime import datetime, timedelta
>>> utc = timezone('UTC')
>>> eastern = timezone('US/Eastern')
>>> eastern.zone
'US/Eastern'
>>> timezone(unicode('US/Eastern')) is eastern
True
>>> u... | r''' Return a datetime.tzinfo implementation for the given timezone
>>> from datetime import datetime, timedelta
>>> utc = timezone('UTC')
>>> eastern = timezone('US/Eastern')
>>> eastern.zone
'US/Eastern'
>>> timezone(unicode('US/Eastern')) is eastern
True
>>> utc_dt = datetime(2002, 1... |
def import_complex_gateway_to_graph(diagram_graph, process_id, process_attributes, element):
"""
Adds to graph the new element that represents BPMN complex gateway.
In addition to attributes inherited from Gateway type, complex gateway
has additional attribute default flow (default value... | Adds to graph the new element that represents BPMN complex gateway.
In addition to attributes inherited from Gateway type, complex gateway
has additional attribute default flow (default value - none).
:param diagram_graph: NetworkX graph representing a BPMN process diagram,
:param proce... |
def apply_all(self, force=False, quiet=False):
""" Apply all patches in series file """
self._check()
top = self.db.top_patch()
if top:
patches = self.series.patches_after(top)
else:
patches = self.series.patches()
if not patches:
rais... | Apply all patches in series file |
def delete_resource(self, session, data, api_type, obj_id):
"""
Delete a resource.
:param session: SQLAlchemy session
:param data: JSON data provided with the request
:param api_type: Type of the resource
:param obj_id: ID of the resource
"""
resource = s... | Delete a resource.
:param session: SQLAlchemy session
:param data: JSON data provided with the request
:param api_type: Type of the resource
:param obj_id: ID of the resource |
def unpack_rsp(cls, rsp_pb):
    """Convert a PLS protobuf response into a (ret_code, msg, data) tuple."""
    succeeded = rsp_pb.retType == RET_OK
    if succeeded:
        # This response carries no payload; data is always None on success.
        return RET_OK, "", None
    return RET_ERROR, rsp_pb.retMsg, None
def process_block(self, current_block, previous_block, text):
"""
Processes a block and setup its folding info.
This method call ``detect_fold_level`` and handles most of the tricky
corner cases so that all you have to do is focus on getting the proper
fold level foreach meaning... | Processes a block and setup its folding info.
This method call ``detect_fold_level`` and handles most of the tricky
corner cases so that all you have to do is focus on getting the proper
fold level for each meaningful block, skipping the blank ones.
:param current_block: current block t... |
def make_fileitem_filepath(filepath, condition='contains', negate=False, preserve_case=False):
"""
Create a node for FileItem/FilePath
:return: A IndicatorItem represented as an Element node
"""
document = 'FileItem'
search = 'FileItem/FilePath'
content_type = 'string'
content = fil... | Create a node for FileItem/FilePath
:return: A IndicatorItem represented as an Element node |
def main():
    """
    Entry point for the GNS3 server.

    On non-Windows platforms, a ``--daemon`` command line flag causes
    ``daemonize()`` to run before the server starts.
    """
    # Daemonization is only attempted on POSIX-like platforms.
    if "--daemon" in sys.argv and not sys.platform.startswith("win"):
        daemonize()
    # Imported lazily so the (optional) daemonization above happens
    # before the server machinery is loaded.
    from gns3server.run import run
    run()
def sendFuture(self, future):
"""Send a Future to be executed remotely."""
future = copy.copy(future)
future.greenlet = None
future.children = {}
try:
if shared.getConst(hash(future.callable), timeout=0):
# Enforce name reference passing if already sh... | Send a Future to be executed remotely. |
def append_response(self, response):
"""Append the response to the stack of responses.
:param tornado.httpclient.HTTPResponse response: The HTTP response
"""
self._responses.append(response)
if 'Warning' in response.headers:
LOGGER.warning(
'HTTP %s ... | Append the response to the stack of responses.
:param tornado.httpclient.HTTPResponse response: The HTTP response |
def split_size(size):
'''Split the file size into several chunks.'''
rem = size % CHUNK_SIZE
if rem == 0:
cnt = size // CHUNK_SIZE
else:
cnt = size // CHUNK_SIZE + 1
chunks = []
for i in range(cnt):
pos = i * CHUNK_SIZE
if i == cnt - 1:
disp = size - ... | Split the file size into several chunks. |
def writeUTFBytes(self, value):
"""
Writes a UTF-8 string. Similar to L{writeUTF}, but does
not prefix the string with a 16-bit length word.
@type value: C{str}
@param value: The string value to be written.
"""
val = None
if isinstance(value, unicode):
... | Writes a UTF-8 string. Similar to L{writeUTF}, but does
not prefix the string with a 16-bit length word.
@type value: C{str}
@param value: The string value to be written. |
def iter_doc_objs(self, **kwargs):
"""Returns a pair: (doc_id, nexson_blob)
for each document in this repository.
Order is arbitrary.
"""
_LOG = get_logger('TypeAwareGitShard')
try:
for doc_id, fp in self.iter_doc_filepaths(**kwargs):
if not se... | Returns a pair: (doc_id, nexson_blob)
for each document in this repository.
Order is arbitrary. |
def sample_bitstrings(self, n_samples):
"""
Sample bitstrings from the distribution defined by the wavefunction.
:param n_samples: The number of bitstrings to sample
:return: An array of shape (n_samples, n_qubits)
"""
possible_bitstrings = np.array(list(itertools.produc... | Sample bitstrings from the distribution defined by the wavefunction.
:param n_samples: The number of bitstrings to sample
:return: An array of shape (n_samples, n_qubits) |
def call(self, event, *event_args):
"""Call the single registered listener for ``event``.
The listener will be called with any extra arguments passed to
:meth:`call` first, and then the extra arguments passed to :meth:`on`
Raises :exc:`AssertionError` if there is none or multiple liste... | Call the single registered listener for ``event``.
The listener will be called with any extra arguments passed to
:meth:`call` first, and then the extra arguments passed to :meth:`on`
Raises :exc:`AssertionError` if there is none or multiple listeners for
``event``. Returns the listene... |
def parse_config_files_and_bindings(config_files,
bindings,
finalize_config=True,
skip_unknown=False):
"""Parse a list of config files followed by extra Gin bindings.
This function is equivalent to:
f... | Parse a list of config files followed by extra Gin bindings.
This function is equivalent to:
for config_file in config_files:
gin.parse_config_file(config_file, skip_configurables)
gin.parse_config(bindings, skip_configurables)
if finalize_config:
gin.finalize()
Args:
config... |
def z2r(z):
"""
Function that calculates the inverse Fisher z-transformation
Parameters
----------
z : int or ndarray
Fishers z transformed correlation value
Returns
----------
result : int or ndarray
Correlation value
"""
with np.errstate(invalid='ignore', di... | Function that calculates the inverse Fisher z-transformation
Parameters
----------
z : int or ndarray
Fishers z transformed correlation value
Returns
----------
result : int or ndarray
Correlation value |
def send_dm_sos(self, message: str) -> None:
"""
Send DM to owner if something happens.
:param message: message to send to owner.
:returns: None.
"""
if self.owner_handle:
try:
# twitter changed the DM API and tweepy (as of 2019-03-08)
... | Send DM to owner if something happens.
:param message: message to send to owner.
:returns: None. |
def value(self):
    """
    Return the most recent known value.

    Reads ``self.lastValue``; if that access raises ``IndexError``
    (no value recorded yet) or ``ValueError`` (unparseable value),
    the string ``"NaN"`` is returned instead.

    :returns: the last known value, or ``"NaN"`` when unavailable.
    """
    try:
        return self.lastValue
    except (IndexError, ValueError):
        # Both failure modes fall back to the same sentinel, so a
        # single combined handler replaces two identical branches.
        return "NaN"
def module2md(self, module):
"""Takes an imported module object and create a Markdown string containing functions and classes.
"""
modname = module.__name__
path = self.get_src_path(module, append_base=False)
path = "[{}]({})".format(path, os.path.join(self.github_link, path))
... | Takes an imported module object and create a Markdown string containing functions and classes. |
def load(self):
""" Load each path in order. Remember paths already loaded and only load new ones. """
data = self.dict_class()
for path in self.paths:
if path in self.paths_loaded: continue
try:
with open(path, 'r') as file:
... | Load each path in order. Remember paths already loaded and only load new ones. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.