code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def PriceHourly(self):
"""Returns the total hourly price for the server.
Sums unit prices with unit volumes.
>>> clc.v2.Server("NY1BTDIPHYP0101").PriceHourly()
0.02857
"""
units = self.PriceUnits()
return(units['cpu']*self.cpu+units['memory']*self.memory+units['storage']*self.storage+units['managed_o... | Returns the total hourly price for the server.
Sums unit prices with unit volumes.
>>> clc.v2.Server("NY1BTDIPHYP0101").PriceHourly()
0.02857 |
def sequencetyper(self):
"""
Determines the sequence type of each strain based on comparisons to sequence type profiles
"""
for sample in self.metadata.samples:
if sample.general.bestassemblyfile != 'NA':
if type(sample[self.analysistype].allelenames) == list:... | Determines the sequence type of each strain based on comparisons to sequence type profiles |
def fields(self):
"""
return all the fields and their raw values for this Orm instance. This
property returns a dict with the field names and their current values
if you want to control the values for outputting to an api, use .jsonable()
"""
return {k:getattr(self, k, N... | return all the fields and their raw values for this Orm instance. This
property returns a dict with the field names and their current values
if you want to control the values for outputting to an api, use .jsonable() |
def meta_wrapped(f):
"""
Add a field label, errors, and a description (if it exists) to
a field.
"""
@wraps(f)
def wrapped(self, field, *args, **kwargs):
html = "{label}{errors}{original}<small>{description}</small>".format(
label=field.label(class_='control-label'),
... | Add a field label, errors, and a description (if it exists) to
a field. |
def validate_timestamp_and_nonce(self, client_key, timestamp, nonce,
request, request_token=None,
access_token=None):
"""Validate the timestamp and nonce is used or not."""
log.debug('Validate timestamp and nonce %r', client_key)
... | Validate the timestamp and nonce is used or not. |
def proximal_l2(space, lam=1, g=None):
r"""Proximal operator factory of the l2-norm/distance.
Function for the proximal operator of the functional ``F`` where ``F``
is the l2-norm (or distance to g, if given)::
``F(x) = lam ||x - g||_2``
Parameters
----------
space : `LinearSpace`
... | r"""Proximal operator factory of the l2-norm/distance.
Function for the proximal operator of the functional ``F`` where ``F``
is the l2-norm (or distance to g, if given)::
``F(x) = lam ||x - g||_2``
Parameters
----------
space : `LinearSpace`
Domain of F(x). Needs to be a Hilbert... |
def nla_get_u64(nla):
"""Return value of 64 bit integer attribute as an int().
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L649
Positional arguments:
nla -- 64 bit integer attribute (nlattr class instance).
Returns:
Payload as an int().
"""
tmp = c_uint64(0)
if nl... | Return value of 64 bit integer attribute as an int().
https://github.com/thom311/libnl/blob/libnl3_2_25/lib/attr.c#L649
Positional arguments:
nla -- 64 bit integer attribute (nlattr class instance).
Returns:
Payload as an int(). |
def parents(self):
"""~TermList: The direct parents of the `Term`.
"""
if self._parents is None:
bottomups = tuple(Relationship.bottomup())
self._parents = TermList()
self._parents.extend(
[ other
for rship,others in six.it... | ~TermList: The direct parents of the `Term`. |
def get_dos_from_id(self, task_id):
"""
Overrides the get_dos_from_id for the MIT gridfs format.
"""
args = {'task_id': task_id}
fields = ['calculations']
structure = self.get_structure_from_id(task_id)
dosid = None
for r in self.query(fields, args):
... | Overrides the get_dos_from_id for the MIT gridfs format. |
def grab_gpus(num_gpus=1, gpu_select=None, gpu_fraction=0.95, max_procs=-1):
"""
Checks for gpu availability and sets CUDA_VISIBLE_DEVICES as such.
Note that this function does not do anything to 'reserve' gpus, it only
limits what GPUS your program can see by altering the CUDA_VISIBLE_DEVICES
vari... | Checks for gpu availability and sets CUDA_VISIBLE_DEVICES as such.
Note that this function does not do anything to 'reserve' gpus, it only
limits what GPUS your program can see by altering the CUDA_VISIBLE_DEVICES
variable. Other programs can still come along and snatch your gpu. This
function is more ... |
def outputPoint(self):
"""
Returns a scene space point that the connection \
will draw to as its output source. If the connection \
has a node defined, then it will calculate the output \
point based on the position of the node, factoring in \
preference for output locat... | Returns a scene space point that the connection \
will draw to as its output source. If the connection \
has a node defined, then it will calculate the output \
point based on the position of the node, factoring in \
preference for output location and fixed positions. If \
the... |
def _compute_raw_moments(self, n_counter, k_counter):
r"""
Compute :math:`X_i`
Gamma type 1: :math:`X_i = \frac {\beta_i}{\beta_0}Y_0 + Y_i`
Gamma type 2: :math:`X_i = \sum_{k=0}^{i} \frac {\beta_i}{\beta_k}Y_k`
:param n_counter: a list of :class:`~means.core.descriptors.Moment... | r"""
Compute :math:`X_i`
Gamma type 1: :math:`X_i = \frac {\beta_i}{\beta_0}Y_0 + Y_i`
Gamma type 2: :math:`X_i = \sum_{k=0}^{i} \frac {\beta_i}{\beta_k}Y_k`
:param n_counter: a list of :class:`~means.core.descriptors.Moment`\s representing central moments
:type n_counter: list... |
def isMasterReqLatencyTooHigh(self):
"""
Return whether the request latency of the master instance is greater
than the acceptable threshold
"""
# TODO for now, view_change procedure can take more that 15 minutes
# (5 minutes for catchup and 10 minutes for primary's answer... | Return whether the request latency of the master instance is greater
than the acceptable threshold |
def _ancestors_or_self(
self, qname: Union[QualName, bool] = None) -> List[InstanceNode]:
"""XPath - return the list of receiver's ancestors including itself."""
res = [] if qname and self.qual_name != qname else [self]
return res + self.up()._ancestors(qname) | XPath - return the list of receiver's ancestors including itself. |
def _check_portname(name):
'''
Check if portname is valid and whether or not the directory exists in the
ports tree.
'''
if not isinstance(name, string_types) or '/' not in name:
raise SaltInvocationError(
'Invalid port name \'{0}\' (category required)'.format(name)
)
... | Check if portname is valid and whether or not the directory exists in the
ports tree. |
def register_calculator_view(request):
"""Register a calculator."""
if request.method == "POST":
form = CalculatorRegistrationForm(request.POST)
logger.debug(form)
if form.is_valid():
obj = form.save()
obj.user = request.user
obj.save()
mes... | Register a calculator. |
def ip_hide_as_path_holder_as_path_access_list_instance(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
ip = ET.SubElement(config, "ip", xmlns="urn:brocade.com:mgmt:brocade-common-def")
hide_as_path_holder = ET.SubElement(ip, "hide-as-path-holder", xmlns... | Auto Generated Code |
def run(self, *args):
"""List, add or delete organizations and domains from the registry.
By default, it prints the list of organizations available on
the registry.
"""
params = self.parser.parse_args(args)
organization = params.organization
domain = params.doma... | List, add or delete organizations and domains from the registry.
By default, it prints the list of organizations available on
the registry. |
def remove_old(self, max_log_time):
"""Remove all logs which are older than the specified time."""
files = glob.glob('{}/queue-*'.format(self.log_dir))
files = list(map(lambda x: os.path.basename(x), files))
for log_file in files:
# Get time stamp from filename
n... | Remove all logs which are older than the specified time. |
def ingest_data(self, data, cat_name, id_col, ra_col='_RAJ2000', dec_col='_DEJ2000', cat_loc='', append=False, count=-1):
"""
Ingest a data file and regroup sources
Parameters
----------
data: str, pandas.DataFrame, astropy.table.Table
The path to the exporte... | Ingest a data file and regroup sources
Parameters
----------
data: str, pandas.DataFrame, astropy.table.Table
The path to the exported VizieR data or the data table
cat_name: str
The name of the added catalog
id_col: str
The name of th... |
def save(self, filename="temp.pkl"):
    """
    Pickle the temporal memory (``self.tm``) to ``filename``.

    :param filename: path of the output pickle file (default "temp.pkl")
    """
    # Use a context manager so the file handle is flushed and closed
    # even if pickling raises (the original leaked the handle).
    with open(filename, 'wb') as output:
        cPickle.dump(self.tm, output, protocol=cPickle.HIGHEST_PROTOCOL)
def send_ether_over_wpa(self, pkt, **kwargs):
"""Send an Ethernet packet using the WPA channel
Extra arguments will be ignored, and are just left for compatibility
"""
payload = LLC() / SNAP() / pkt[Ether].payload
dest = pkt.dst
if dest == "ff:ff:ff:ff:ff:ff":
... | Send an Ethernet packet using the WPA channel
Extra arguments will be ignored, and are just left for compatibility |
def colorbar(self, cmap, position="right",
label="", clim=("", ""),
border_width=0.0, border_color="black",
**kwargs):
"""Show a ColorBar
Parameters
----------
cmap : str | vispy.color.ColorMap
Either the name of the ColorMa... | Show a ColorBar
Parameters
----------
cmap : str | vispy.color.ColorMap
Either the name of the ColorMap to be used from the standard
set of names (refer to `vispy.color.get_colormap`),
or a custom ColorMap object.
The ColorMap is used to apply a g... |
def exit_with_exc_info(code=1, message='', print_tb=False, exception=None):
'''Exits the program, printing information about the last exception (if
any) and an optional error message. Uses *exception* instead if provided.
:param code: Exit code.
:type code: integer (valid exit code, 0-255)
:param ... | Exits the program, printing information about the last exception (if
any) and an optional error message. Uses *exception* instead if provided.
:param code: Exit code.
:type code: integer (valid exit code, 0-255)
:param message: Message to be printed after the exception information.
:type message: ... |
def timestamp(num_params, p_levels, k_choices, N):
"""
Returns a uniform timestamp with parameter values for file identification
"""
string = "_v%s_l%s_gs%s_k%s_N%s_%s.txt" % (num_params,
p_levels,
k_choices,
... | Returns a uniform timestamp with parameter values for file identification |
def command_line_arguments(command_line_parameters):
"""Defines the command line parameters that are accepted."""
# create parser
parser = argparse.ArgumentParser(description='Execute baseline algorithms with default parameters', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
# add parameters
# - t... | Defines the command line parameters that are accepted. |
def send_login_signal(self, request, user, profile, client):
"""
Send a signal that a user logged in. This signal should be sent only if
the user was *not* logged into Django.
"""
signals.login.send(sender=profile.__class__, user=user,
profile=profile, client=client, ... | Send a signal that a user logged in. This signal should be sent only if
the user was *not* logged into Django. |
def update_utxoset(self, transaction):
"""Update the UTXO set given ``transaction``. That is, remove
the outputs that the given ``transaction`` spends, and add the
outputs that the given ``transaction`` creates.
Args:
transaction (:obj:`~bigchaindb.models.Transaction`): A ne... | Update the UTXO set given ``transaction``. That is, remove
the outputs that the given ``transaction`` spends, and add the
outputs that the given ``transaction`` creates.
Args:
transaction (:obj:`~bigchaindb.models.Transaction`): A new
transaction incoming into the sy... |
def list_from_env(key, default=""):
    """
    Split a comma-separated environment variable into a list:
    "a,b,c" -> ['a', 'b', 'c'].

    :param key: name of the environment variable to read
    :param default: value used when the variable is not set
    :return: list of comma-separated items, or [] when the value is
        empty or unset
    """
    val = os.environ.get(key, default)
    # "".split(',') would yield [''] -- treat an empty or missing
    # value as "no items" instead. (The original's
    # `except (KeyError, ValueError)` was dead code: os.environ.get
    # never raises KeyError and str.split never raises ValueError.)
    if not val:
        return []
    return val.split(',')
def run(self, steps=None, resume=False, redo=None):
"""
Run a Stimela recipe.
steps : recipe steps to run
resume : resume recipe from last run
redo : Re-run an old recipe from a .last file
"""
recipe = {
"name" : self.name,
... | Run a Stimela recipe.
steps : recipe steps to run
resume : resume recipe from last run
redo : Re-run an old recipe from a .last file |
def get_force_single(self, component_info=None, data=None, component_position=None):
"""Get a single force data channel."""
components = []
append_components = components.append
for _ in range(component_info.plate_count):
component_position, plate = QRTPacket._get_exact(
... | Get a single force data channel. |
def download(self, path, args=[], filepath=None, opts={},
compress=True, **kwargs):
"""Makes a request to the IPFS daemon to download a file.
Downloads a file or files from IPFS into the current working
directory, or the directory given by ``filepath``.
Raises
... | Makes a request to the IPFS daemon to download a file.
Downloads a file or files from IPFS into the current working
directory, or the directory given by ``filepath``.
Raises
------
~ipfsapi.exceptions.ErrorResponse
~ipfsapi.exceptions.ConnectionError
~ipfsapi.ex... |
def plugin_method(*plugin_names):
"""Plugin Method decorator.
Signs a web handler function with the plugins to be applied as attributes.
Args:
plugin_names (list): A list of plugin callable names
Returns:
A wrapped handler callable.
Examples:
>>> @plugin_method('json', 'bi... | Plugin Method decorator.
Signs a web handler function with the plugins to be applied as attributes.
Args:
plugin_names (list): A list of plugin callable names
Returns:
A wrapped handler callable.
Examples:
>>> @plugin_method('json', 'bill')
... def method():
..... |
def _get_attrs(self):
"""An internal helper for the representation methods"""
attrs = []
attrs.append(("N Blocks", self.n_blocks, "{}"))
bds = self.bounds
attrs.append(("X Bounds", (bds[0], bds[1]), "{:.3f}, {:.3f}"))
attrs.append(("Y Bounds", (bds[2], bds[3]), "{:.3f}, {... | An internal helper for the representation methods |
def set_source_ip_for_interface(source_ip_address, desired_source_ip_address, device_num=0):
"""Configures the source IP address for a Linux interface
:param source_ip_address: (str) Source IP address to change
:param desired_source_ip_address: (str) IP address to configure as the source in outgoing packet... | Configures the source IP address for a Linux interface
:param source_ip_address: (str) Source IP address to change
:param desired_source_ip_address: (str) IP address to configure as the source in outgoing packets
:param device_num: (int) Integer interface device number to configure
:return: None
:r... |
def temporal_participation_coeff(tnet, communities=None, decay=None, removeneg=False):
r'''
Temporal participation coefficient is a measure of diversity of connections across communities for individual nodes.
Parameters
----------
tnet : array, dict
graphlet or contact sequence input. Only ... | r'''
Temporal participation coefficient is a measure of diversity of connections across communities for individual nodes.
Parameters
----------
tnet : array, dict
graphlet or contact sequence input. Only positive matrices considered.
communities : array
community vector. Either 1D (... |
def compile_tag_re(self, tags):
    """
    Compile and return the regex used to find Mustache tags, bound to
    the given opening/closing delimiters and tag types.
    """
    pattern = self.raw_tag_re % tags
    return re.compile(pattern, self.re_flags)
def handle_404(request, exception):
'''Handle 404 Not Found
This handler should be used to handle error http 404 not found for all
endpoints or if resource not available.
'''
error = format_error(title='Resource not found', detail=str(exception))
return json(return_an_error(error), status=HTTPSt... | Handle 404 Not Found
This handler should be used to handle error http 404 not found for all
endpoints or if resource not available. |
def median_high(data):
"""Return the high median of data.
When the number of data points is odd, the middle value is returned.
When it is even, the larger of the two middle values is returned.
"""
data = sorted(data)
n = len(data)
if n == 0:
raise StatisticsError("no median for emp... | Return the high median of data.
When the number of data points is odd, the middle value is returned.
When it is even, the larger of the two middle values is returned. |
def is_text_file(file_path: str) -> bool:
"""Returns if a file contains only ASCII or UTF-8 encoded text.
:param file_path: path to the file being checked
:return: True if the file is a text file, False if it is binary.
"""
import codecs
expanded_path = os.path.abspath(os.path.expanduser(file_... | Returns if a file contains only ASCII or UTF-8 encoded text.
:param file_path: path to the file being checked
:return: True if the file is a text file, False if it is binary. |
def addFeatureSet(self, featureSet):
"""
Adds the specified featureSet to this dataset.
"""
id_ = featureSet.getId()
self._featureSetIdMap[id_] = featureSet
self._featureSetIds.append(id_)
name = featureSet.getLocalId()
self._featureSetNameMap[name] = feat... | Adds the specified featureSet to this dataset. |
def add_mountains(self):
"""
instead of the add_blocks function which was to produce
line shaped walls for blocking path finding agents, this
function creates more natural looking blocking areas like
mountains
"""
from noise import pnoise2
import random
... | instead of the add_blocks function which was to produce
line shaped walls for blocking path finding agents, this
function creates more natural looking blocking areas like
mountains |
def _make_eval_func(self, tensors, session, feed_dict, fetches,
callback=None):
"""Construct a function that evaluates a `Tensor` or list of `Tensor`s."""
if not isinstance(tensors, list):
tensors = [tensors]
num_tensors = len(tensors)
def eval_func(x):
"""Function to ... | Construct a function that evaluates a `Tensor` or list of `Tensor`s. |
def _load_options(self, container, **options):
"""
Select backend specific loading options.
"""
# Force set dict option if available in backend. For example,
# options["object_hook"] will be OrderedDict if 'container' was
# OrderedDict in JSON backend.
for opt in ... | Select backend specific loading options. |
def numberOfConnectedDistalSynapses(self, cells=None):
"""
Returns the number of connected distal synapses on these cells.
Parameters:
----------------------------
@param cells (iterable)
Indices of the cells. If None return count for all cells.
"""
if cells is None:
cell... | Returns the number of connected distal synapses on these cells.
Parameters:
----------------------------
@param cells (iterable)
Indices of the cells. If None return count for all cells. |
def parse_results_mol2(mol2_outpath):
"""Parse a DOCK6 mol2 output file, return a Pandas DataFrame of the results.
Args:
mol2_outpath (str): Path to mol2 output file
Returns:
DataFrame: Pandas DataFrame of the results
"""
docked_ligands = pd.DataFrame()
lines = [line.strip() ... | Parse a DOCK6 mol2 output file, return a Pandas DataFrame of the results.
Args:
mol2_outpath (str): Path to mol2 output file
Returns:
DataFrame: Pandas DataFrame of the results |
def set_server_admin_password(self, server_name, admin_password):
'''
Reset the administrator password for a server.
server_name:
Name of the server to change the password.
admin_password:
The new administrator password for the server.
'''
_valida... | Reset the administrator password for a server.
server_name:
Name of the server to change the password.
admin_password:
The new administrator password for the server. |
def decrypt(self, data, nounce=None):
"""Decrypt data with counter or specified nounce."""
if nounce is None:
nounce = self._in_counter.to_bytes(length=8, byteorder='little')
self._in_counter += 1
decrypted = self._enc_in.open(
b'\x00\x00\x00\x00' + nounce, d... | Decrypt data with counter or specified nounce. |
def set_connection_logging(self, loadbalancer, val):
"""
Sets the connection logging for the given load balancer.
"""
uri = "/loadbalancers/%s/connectionlogging" % utils.get_id(loadbalancer)
val = str(val).lower()
req_body = {"connectionLogging": {
"enable... | Sets the connection logging for the given load balancer. |
def mlem(op, x, data, niter, callback=None, **kwargs):
"""Maximum Likelihood Expectation Maximation algorithm.
Attempts to solve::
max_x L(x | data)
where ``L(x | data)`` is the Poisson likelihood of ``x`` given ``data``.
The likelihood depends on the forward operator ``op`` such that
(a... | Maximum Likelihood Expectation Maximation algorithm.
Attempts to solve::
max_x L(x | data)
where ``L(x | data)`` is the Poisson likelihood of ``x`` given ``data``.
The likelihood depends on the forward operator ``op`` such that
(approximately)::
op(x) = data
Parameters
----... |
def reset(self):
"""
Clear the values of all attributes of the transaction store.
"""
self.getsCounter = 0
# dictionary of processed requests for each client. Value for each
# client is a dictionary with request id as key and transaction id as
# value
sel... | Clear the values of all attributes of the transaction store. |
def chooseStep(self, divisors=None, binary=False):
"""Choose a nice, pretty size for the steps between axis labels.
Our main constraint is that the number of divisions must be taken
from the divisors list. We pick a number of divisions and a step
size that minimizes the amount of whites... | Choose a nice, pretty size for the steps between axis labels.
Our main constraint is that the number of divisions must be taken
from the divisors list. We pick a number of divisions and a step
size that minimizes the amount of whitespace ("slop") that would
need to be included outside o... |
def available_modes_with_ids(self):
"""Return list of objects containing available mode name and id."""
if not self._available_mode_ids:
all_modes = FIXED_MODES.copy()
self._available_mode_ids = all_modes
modes = self.get_available_modes()
try:
... | Return list of objects containing available mode name and id. |
def guess_media_type(filepath):
    """Return the media-type of the file at the given ``filepath``.

    Shells out to the ``file`` utility ('-L' follows symlinks, '-b'
    omits the filename from the output).

    :param filepath: path of the file to inspect
    :return: media-type string such as "text/plain"
    """
    out = subprocess.check_output(['file', '--mime-type', '-Lb', filepath])
    # On Python 3 check_output() returns bytes; decode so callers can
    # compare the result against str media-types. The isinstance guard
    # keeps Python 2 behavior unchanged.
    if isinstance(out, bytes):
        out = out.decode('utf-8', 'replace')
    return out.strip()
def volume_adjusted_moving_average(close_data, volume, period):
"""
Volume Adjusted Moving Average.
Formula:
VAMA = SUM(CLOSE * VolumeRatio) / period
"""
catch_errors.check_for_input_len_diff(close_data, volume)
catch_errors.check_for_period_error(close_data, period)
avg_vol = np.mean(... | Volume Adjusted Moving Average.
Formula:
VAMA = SUM(CLOSE * VolumeRatio) / period |
def exists(name, path=None):
'''
Returns whether the named container exists.
path
path to the container parent directory (default: /var/lib/lxc)
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt '*' lxc.exists name
'''
_exists = name in ls_(path... | Returns whether the named container exists.
path
path to the container parent directory (default: /var/lib/lxc)
.. versionadded:: 2015.8.0
CLI Example:
.. code-block:: bash
salt '*' lxc.exists name |
def get_pandasframe(self):
    """Load the data and return it as a pandas frame.

    When a single dataset is selected, its dimensions are loaded first
    and only that dataset is fetched; otherwise data is gathered
    across all datasets.
    """
    if not self.dataset:
        return self._get_pandasframe_across_datasets()
    self._load_dimensions()
    return self._get_pandasframe_one_dataset()
def get_site(self, site_id):
    """
    Fetch and return the data for a single site.

    http://dev.wheniwork.com/#get-existing-site
    """
    resource_url = "/2/sites/%s" % site_id
    payload = self._get_resource(resource_url)
    return self.site_from_json(payload["site"])
def add_trendline(self,date0,date1,on='close',text=None,**kwargs):
"""
Adds a trendline to the QuantFigure.
Given 2 dates, the trendline is connected on the data points
that correspond to those dates.
Parameters:
date0 : string
Trendline starting date
date1 : string
Trendline end date
on ... | Adds a trendline to the QuantFigure.
Given 2 dates, the trendline is connected on the data points
that correspond to those dates.
Parameters:
date0 : string
Trendline starting date
date1 : string
Trendline end date
on : string
Indicate the data series in which the
trendline should be... |
def endings(self):
"""The list of word endings.
Ambiguous cases are separated with pipe character by default.
Use :py:meth:`~estnltk.text.Text.get_analysis_element` to specify custom separator for ambiguous entries.
"""
if not self.is_tagged(ANALYSIS):
self.tag_analy... | The list of word endings.
Ambiguous cases are separated with pipe character by default.
Use :py:meth:`~estnltk.text.Text.get_analysis_element` to specify custom separator for ambiguous entries. |
def dumpdb(args):
"""
cldf dumpdb <DATASET> <SQLITE_DB_PATH> [<METADATA_PATH>]
"""
if len(args.args) < 2:
raise ParserError('not enough arguments') # pragma: no cover
ds = _get_dataset(args)
db = Database(ds, fname=args.args[1])
mdpath = Path(args.args[2]) if len(args.args) > 2 else... | cldf dumpdb <DATASET> <SQLITE_DB_PATH> [<METADATA_PATH>] |
def write_conll(self, fname):
"""
Serializes the dataset in CONLL format to fname
"""
if 'label' not in self.fields:
raise InvalidFieldsException("dataset is not in CONLL format: missing label field")
def instance_to_conll(inst):
tab = [v for k, v in inst... | Serializes the dataset in CONLL format to fname |
def fail(self, cmd, title=None, message=None):
"""Send back captured exceptions"""
if message is None:
message = self.handle_exc()
else:
message = escape(message)
self.db.send(
'Echo|%s' % dump({
'for': escape(title or '%s failed' % cmd... | Send back captured exceptions |
def _knit(fin, fout,
opts_knit='progress=FALSE, verbose=FALSE',
opts_chunk='eval=FALSE'):
"""Use knitr to convert r markdown (or anything knitr supports)
to markdown.
fin / fout - strings, input / output filenames.
opts_knit - string, options to pass to knit
... | Use knitr to convert r markdown (or anything knitr supports)
to markdown.
fin / fout - strings, input / output filenames.
opts_knit - string, options to pass to knit
opts_shunk - string, chunk options
options are passed verbatim to knitr:knit running in Rscript. |
def map_pixel_inv(row, col, cellx, celly, xmin, ymax):
'''
Usage:
map_pixel(xcoord, ycoord, x_cell_size, y_cell_size, xmin, ymax)
where:
xmin is leftmost X coordinate in system
ymax is topmost Y coordinate in system
Example:
raster = HMISea.tif
... | Usage:
map_pixel(xcoord, ycoord, x_cell_size, y_cell_size, xmin, ymax)
where:
xmin is leftmost X coordinate in system
ymax is topmost Y coordinate in system
Example:
raster = HMISea.tif
ndv, xsize, ysize, geot, projection, datatype = get_geo_info(rast... |
def close(self):
    """Close the handler's stream.

    Flushes any buffered output first, closes the wrapped stream, then
    runs the standard StreamHandler teardown (which deregisters the
    handler). The order of these three steps is deliberate.
    """
    self.flush()
    self.stream.close()
    logging.StreamHandler.close(self)
def _proxy(self):
"""
Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: CountryContext for this CountryInstance
:rtype: twilio.rest.voice.v1.dialing_permissions.country.... | Generate an instance context for the instance, the context is capable of
performing various actions. All instance actions are proxied to the context
:returns: CountryContext for this CountryInstance
:rtype: twilio.rest.voice.v1.dialing_permissions.country.CountryContext |
def datashape_type_to_numpy(type_):
"""
Given a datashape type, return the associated numpy type. Maps
datashape's DateTime type to numpy's `datetime64[ns]` dtype, since the
numpy datetime returned by datashape isn't supported by pipeline.
Parameters
----------
type_: datashape.coretypes.Ty... | Given a datashape type, return the associated numpy type. Maps
datashape's DateTime type to numpy's `datetime64[ns]` dtype, since the
numpy datetime returned by datashape isn't supported by pipeline.
Parameters
----------
type_: datashape.coretypes.Type
The datashape type.
Returns
... |
def process_response(self, request, response):
"""Sets the cache, if needed."""
if not hasattr(request, '_cache_update_cache') or not request._cache_update_cache:
# We don't need to update the cache, just return.
return response
if request.method != 'GET':
# T... | Sets the cache, if needed. |
def plot_final(self, ax):
'''
Plots the final de-trended light curve.
'''
# Plot the light curve
bnmask = np.array(
list(set(np.concatenate([self.badmask, self.nanmask]))), dtype=int)
def M(x): return np.delete(x, bnmask)
if (self.cadence == 'lc') o... | Plots the final de-trended light curve. |
async def handle_json_response(responses):
"""
get the json data response
:param responses: the json response
:return the json data without 'root' node
"""
json_data = {}
if responses.status != 200:
err_msg = HttpProcessingError(code=responses.status,
... | get the json data response
:param responses: the json response
:return the json data without 'root' node |
def hide_routemap_holder_route_map_content_set_origin_origin_igp(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
hide_routemap_holder = ET.SubElement(config, "hide-routemap-holder", xmlns="urn:brocade.com:mgmt:brocade-ip-policy")
route_map = ET.SubElemen... | Auto Generated Code |
def set_kill_on_exit_mode(bKillOnExit = False):
"""
Defines the behavior of the debugged processes when the debugging
thread dies. This method only affects the calling thread.
Works on the following platforms:
- Microsoft Windows XP and above.
- Wine (Windows Emulator... | Defines the behavior of the debugged processes when the debugging
thread dies. This method only affects the calling thread.
Works on the following platforms:
- Microsoft Windows XP and above.
- Wine (Windows Emulator).
Fails on the following platforms:
- Microsoft ... |
def prepare(self, config_file=None, user=None, password=None, **kwargs):
"""登录的统一接口
:param config_file 登录数据文件,若无则选择参数登录模式
:param user: 各家券商的账号或者雪球的用户名
:param password: 密码, 券商为加密后的密码,雪球为明文密码
:param account: [雪球登录需要]雪球手机号(邮箱手机二选一)
:param portfolio_code: [雪球登录需要]组合代码
... | 登录的统一接口
:param config_file 登录数据文件,若无则选择参数登录模式
:param user: 各家券商的账号或者雪球的用户名
:param password: 密码, 券商为加密后的密码,雪球为明文密码
:param account: [雪球登录需要]雪球手机号(邮箱手机二选一)
:param portfolio_code: [雪球登录需要]组合代码
:param portfolio_market: [雪球登录需要]交易市场,
可选['cn', 'us', 'hk'] 默认 'cn' |
def query_parent_objects(self, context, query=None):
"""Return the objects of the same type from the parent object
:param query: Catalog query to narrow down the objects
:type query: dict
:returns: Content objects of the same portal type in the parent
"""
# return the o... | Return the objects of the same type from the parent object
:param query: Catalog query to narrow down the objects
:type query: dict
:returns: Content objects of the same portal type in the parent |
def clear_sonos_playlist(self, sonos_playlist, update_id=0):
"""Clear all tracks from a Sonos playlist.
This is a convenience method for :py:meth:`reorder_sonos_playlist`.
Example::
device.clear_sonos_playlist(sonos_playlist)
Args:
sonos_playlist
... | Clear all tracks from a Sonos playlist.
This is a convenience method for :py:meth:`reorder_sonos_playlist`.
Example::
device.clear_sonos_playlist(sonos_playlist)
Args:
sonos_playlist
(:py:class:`~.soco.data_structures.DidlPlaylistContainer`):
... |
def insertOntology(self, ontology):
"""
Inserts the specified ontology into this repository.
"""
try:
models.Ontology.create(
id=ontology.getName(),
name=ontology.getName(),
dataurl=ontology.getDataUrl(),
... | Inserts the specified ontology into this repository. |
def rotate_in_plane(chi, phase):
    """Rotate the in-plane (x, y) spin components by ``phase``.

    Used for transforming spins between the coprecessing and coorbital
    frames; the z component is left untouched.
    """
    components = chi.T
    cos_p = np.cos(phase)
    sin_p = np.sin(phase)
    # Float copy of the components; rows 0 and 1 are overwritten below,
    # remaining rows pass through unchanged.
    rotated = 1. * components
    rotated[0] = components[0] * cos_p + components[1] * sin_p
    rotated[1] = components[1] * cos_p - components[0] * sin_p
    return rotated.T
def padRectEqually(rect, padding, bounds, clipExcess = True):
"""
Applies equal padding to all sides of a rectangle,
ensuring the padded rectangle falls within the specified bounds.
The input rectangle, bounds, and return value are all a tuple of (x,y,w,h).
"""
return padRect(rect, padding, padding, padding, pa... | Applies equal padding to all sides of a rectangle,
ensuring the padded rectangle falls within the specified bounds.
The input rectangle, bounds, and return value are all a tuple of (x,y,w,h). |
def _get_rh_methods(rh):
    """Yield ``(name, method)`` pairs for every HTTP method on ``rh``
    that was decorated with schema.validate (i.e. that carries an
    ``input_schema`` attribute)."""
    for name, attr in vars(rh).items():
        if name not in HTTP_METHODS:
            continue
        if is_method(attr) and hasattr(attr, "input_schema"):
            yield (name, attr)
with schema.validate |
def do__relative_load(self, args: argparse.Namespace) -> None:
"""Run commands in script file that is encoded as either ASCII or UTF-8 text"""
file_path = args.file_path
# NOTE: Relative path is an absolute path, it is just relative to the current script directory
relative_path = os.path... | Run commands in script file that is encoded as either ASCII or UTF-8 text |
def data_check(data,target):
""" Checks data type
Parameters
----------
data : pd.DataFrame or np.array
Field to specify the time series data that will be used.
target : int or str
Target column
Returns
----------
transformed_data : np.array
Raw dat... | Checks data type
Parameters
----------
data : pd.DataFrame or np.array
Field to specify the time series data that will be used.
target : int or str
Target column
Returns
----------
transformed_data : np.array
Raw data array for use in the model
... |
def emit(self, record):
    """Capture the formatted message, not only the raw record.

    A lightweight ``Record`` snapshot (level number, level name and the
    fully formatted message text) is appended to ``self.records`` before
    delegating to the parent handler's ``emit``.
    """
    snapshot = Record(
        levelno=record.levelno,
        levelname=record.levelname,
        message=self.format(record),
    )
    self.records.append(snapshot)
    return super(SetupLogChecker, self).emit(record)
def extract_common(self, keys):
"""
Return a new segmentlistdict containing only those
segmentlists associated with the keys in keys, with each
set to their mutual intersection. The offsets are
preserved.
"""
keys = set(keys)
new = self.__class__()
intersection = self.intersection(keys)
for key in ... | Return a new segmentlistdict containing only those
segmentlists associated with the keys in keys, with each
set to their mutual intersection. The offsets are
preserved. |
def get_num_nodes(properties=None, hadoop_conf_dir=None, offline=False):
    """
    Return how many task trackers the Hadoop cluster reports.

    Every argument is forwarded unchanged to :func:`get_task_trackers`;
    the node count is simply the length of the sequence it returns.
    """
    trackers = get_task_trackers(properties, hadoop_conf_dir, offline)
    return len(trackers)
All arguments are passed to :func:`get_task_trackers`. |
def make_parser(defaults=None):
"""
:param defaults: Default option values
"""
if defaults is None:
defaults = DEFAULTS
ctypes = API.list_types()
ctypes_s = ", ".join(ctypes)
type_help = "Select type of %s config files from " + \
ctypes_s + " [Automatically detected by file ... | :param defaults: Default option values |
def load_notebook_node(notebook_path):
"""Returns a notebook object with papermill metadata loaded from the specified path.
Args:
notebook_path (str): Path to the notebook file.
Returns:
nbformat.NotebookNode
"""
nb = nbformat.reads(papermill_io.read(notebook_path), as_version=4)
... | Returns a notebook object with papermill metadata loaded from the specified path.
Args:
notebook_path (str): Path to the notebook file.
Returns:
nbformat.NotebookNode |
def _splitit(self, line, isheader):
"""Split each element of line to fit the column width
Each element is turned into a list, result of the wrapping of the
string to the desired width
"""
line_wrapped = []
for cell, width in zip(line, self._width):
array = [... | Split each element of line to fit the column width
Each element is turned into a list, result of the wrapping of the
string to the desired width |
def elbow_method(data, k_min, k_max, distance='euclidean'):
"""
Calculates and plots the plot of variance explained - number of clusters
Implementation reference: https://github.com/sarguido/k-means-clustering.rst
:param data: The dataset
:param k_min: lowerbound of the cluster ... | Calculates and plots the plot of variance explained - number of clusters
Implementation reference: https://github.com/sarguido/k-means-clustering.rst
:param data: The dataset
:param k_min: lowerbound of the cluster range
:param k_max: upperbound of the cluster range
:param dista... |
def index_resolver(index, strict=False):
    """Return a callable mapping a value to ``index[value]``.

    In strict mode the callable raises ``KeyError`` for unknown values;
    otherwise lookups fall back to ``None`` (plain ``dict.get`` semantics).
    """
    if not strict:
        return index.get

    def resolve(id_):
        return index[id_]

    return resolve
def get_departures(self, station):
"""
Fetch the current departure times from this station
http://webservices.ns.nl/ns-api-avt?station=${Naam of afkorting Station}
@param station: station to lookup
"""
url = 'http://webservices.ns.nl/ns-api-avt?station=' + station
... | Fetch the current departure times from this station
http://webservices.ns.nl/ns-api-avt?station=${Naam of afkorting Station}
@param station: station to lookup |
def create(obj: PersistedObject, obj_type: Type[T], extensions_supported: Iterable[str]):
"""
Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
https://github.com/nose-devs/nose/issues/725
:param obj:
:param obj_type:
... | Helper method provided because we actually can't put that in the constructor, it creates a bug in Nose tests
https://github.com/nose-devs/nose/issues/725
:param obj:
:param obj_type:
:param extensions_supported:
:return: |
def get_subtask_fields(config_class):
"""Get all configurable subtask fields from a Config class.
Parameters
----------
config_class : ``lsst.pipe.base.Config``-type
The configuration class (not an instance) corresponding to a Task.
Returns
-------
subtask_fields : `dict`
M... | Get all configurable subtask fields from a Config class.
Parameters
----------
config_class : ``lsst.pipe.base.Config``-type
The configuration class (not an instance) corresponding to a Task.
Returns
-------
subtask_fields : `dict`
Mapping where keys are the config attribute na... |
def get_configuration_dict(self, secret_attrs=False):
"""Type-specific configuration for backward compatibility"""
cd = {'repo_nexml2json': self.repo_nexml2json,
'number_of_shards': len(self._shards),
'initialization': self._filepath_args,
'shards': [],
... | Type-specific configuration for backward compatibility |
def rows(self, offs):
    '''
    Yield raw ``(indx, bytes)`` tuples for every row at or after ``offs``.

    The integer offset is encoded with ``s_common.int64en`` into the
    slab's key format, and each key returned by the range scan is
    decoded back into its integer index.
    '''
    startkey = s_common.int64en(offs)
    for rowkey, byts in self.slab.scanByRange(startkey, db=self.db):
        yield s_common.int64un(rowkey), byts
def rank_dated_files(pattern, dir, descending=True):
"""Search a directory for files that match a pattern. Return an ordered list of these files by filename.
Args:
pattern: The glob pattern to search for.
dir: Path to directory where the files will be searched for.
descending: Default T... | Search a directory for files that match a pattern. Return an ordered list of these files by filename.
Args:
pattern: The glob pattern to search for.
dir: Path to directory where the files will be searched for.
descending: Default True, will sort alphabetically by descending order.
Retu... |
def amplify_ground_shaking(T, vs30, gmvs):
"""
:param T: period
:param vs30: velocity
:param gmvs: ground motion values for the current site in units of g
"""
gmvs[gmvs > MAX_GMV] = MAX_GMV # accelerations > 5g are absurd
interpolator = interpolate.interp1d(
[0, 0.1, 0.2, 0.3, 0.4, ... | :param T: period
:param vs30: velocity
:param gmvs: ground motion values for the current site in units of g |
def find_equips(
self,
name,
iexact,
environment,
equip_type,
group,
ip,
pagination):
"""
Find vlans by all search parameters
:param name: Filter by vlan name column
:param iexact: Filter by ... | Find vlans by all search parameters
:param name: Filter by vlan name column
:param iexact: Filter by name will be exact?
:param environment: Filter by environment ID related
:param equip_type: Filter by equipment_type ID related
:param group: Filter by equipment group ID related... |
def main():
'''
Parse command line options and launch the interpreter
'''
parser = optparse.OptionParser(usage="%prog [options] <model_path> [another_model_path..]",
version=xtuml.version.complete_string,
formatter=optparse.TitledHelp... | Parse command line options and launch the interpreter |
def focal(self):
"""
Get the focal length in pixels for the camera.
Returns
------------
focal : (2,) float
Focal length in pixels
"""
if self._focal is None:
# calculate focal length from FOV
focal = [(px / 2.0) / np.tan(np.radi... | Get the focal length in pixels for the camera.
Returns
------------
focal : (2,) float
Focal length in pixels |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.