code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def people_findByEmail(email):
    """Look up a Flickr user by email address.

    Calls the ``flickr.people.findByEmail`` API method and wraps the
    response in a ``User`` object.
    """
    data = _doget('flickr.people.findByEmail', find_email=email)
    rsp_user = data.rsp.user
    return User(rsp_user.id, username=rsp_user.username.text)
def _init_groups(self):
"""
初始化group数据
:return:
"""
for group_id, conf in self.group_conf.items():
self.parent_input_dict[group_id] = Queue(conf.get('input_max_size', 0))
self.parent_output_dict[group_id] = Queue(conf.get('output_max_size', 0)) | 初始化group数据
:return: |
def notch(self, frequency, type='iir', filtfilt=True, **kwargs):
"""Notch out a frequency in this `TimeSeries`.
Parameters
----------
frequency : `float`, `~astropy.units.Quantity`
frequency (default in Hertz) at which to apply the notch
type : `str`, optional
... | Notch out a frequency in this `TimeSeries`.
Parameters
----------
frequency : `float`, `~astropy.units.Quantity`
frequency (default in Hertz) at which to apply the notch
type : `str`, optional
type of filter to apply, currently only 'iir' is supported
*... |
def start(self, *_):
""" reading box configurations and starting timers to start/monitor/kill processes """
try:
box_configurations = self.bc_dao.run_query(QUERY_PROCESSES_FOR_BOX_ID(self.box_id))
for box_config in box_configurations:
handler = RepeatTimer(TRIGGE... | reading box configurations and starting timers to start/monitor/kill processes |
def isemptyfile(filepath):
"""Determine if the file both exists and isempty
Args:
filepath (str, path): file path
Returns:
bool
"""
exists = os.path.exists(safepath(filepath))
if exists:
filesize = os.path.getsize(safepath(filepath))
return filesize == 0
els... | Determine if the file both exists and isempty
Args:
filepath (str, path): file path
Returns:
bool |
def ensure(self, connection, func, *args, **kwargs):
"""Perform an operation until success
Repeats in the face of connection errors, pursuant to retry policy.
"""
channel = None
while 1:
try:
if channel is None:
channel = connectio... | Perform an operation until success
Repeats in the face of connection errors, pursuant to retry policy. |
def nn(self, x, k=1, eps=0, p=2, distance_upper_bound=np.inf):
"""
Query the tree for nearest neighbors
Parameters
----------
x : array_like, last dimension self.m
An array of points to query.
k : integer
The number of nearest neighbors to return.... | Query the tree for nearest neighbors
Parameters
----------
x : array_like, last dimension self.m
An array of points to query.
k : integer
The number of nearest neighbors to return.
eps : nonnegative float
Return approximate nearest neighbors; ... |
def compute_depth(self):
"""
Recursively computes true depth of the subtree. Should only
be needed for debugging. Unless something is wrong, the
depth field should reflect the correct depth of the subtree.
"""
left_depth = self.left_node.compute_depth() if self.left_node ... | Recursively computes true depth of the subtree. Should only
be needed for debugging. Unless something is wrong, the
depth field should reflect the correct depth of the subtree. |
def __geomToPointList(self, geom):
""" converts a geometry object to a common.Geometry object """
if arcpyFound and isinstance(geom, arcpy.Polyline):
feature_geom = []
fPart = []
wkt = None
wkid = None
for part in geom:
fPart = ... | converts a geometry object to a common.Geometry object |
def load_step_specifications(self, file_name, short=False,
dataset_number=None):
""" Load a table that contains step-type definitions.
This function loads a file containing a specification for each step or
for each (cycle_number, step_number) combinations if sho... | Load a table that contains step-type definitions.
This function loads a file containing a specification for each step or
for each (cycle_number, step_number) combinations if short==False. The
step_cycle specifications that are allowed are stored in the variable
cellreader.list_of_step_t... |
def configure(self, cnf={}, **kw):
"""
Configure resources of the widget.
To get the list of options for this widget, call the method :meth:`~TickScale.keys`.
See :meth:`~TickScale.__init__` for a description of the widget specific option.
"""
kw.update(cnf)
rein... | Configure resources of the widget.
To get the list of options for this widget, call the method :meth:`~TickScale.keys`.
See :meth:`~TickScale.__init__` for a description of the widget specific option. |
def last_valid_index(self):
"""Returns index of last non-NaN/NULL value.
Return:
Scalar of index name.
"""
def last_valid_index_builder(df):
df.index = pandas.RangeIndex(len(df.index))
return df.apply(lambda df: df.last_valid_index())
func =... | Returns index of last non-NaN/NULL value.
Return:
Scalar of index name. |
def open(self, verbose):
"""
open the serial port using the configuration data
returns a reference to this instance
"""
# open a serial port
if verbose:
print('\nOpening Arduino Serial port %s ' % self.port_id)
try:
# in case the port is ... | open the serial port using the configuration data
returns a reference to this instance |
def get_organisations(self, service_desk_id=None, start=0, limit=50):
"""
Returns a list of organizations in the Jira instance. If the user is not an agent,
the resource returns a list of organizations the user is a member of.
:param service_desk_id: OPTIONAL: str Get organizations from... | Returns a list of organizations in the Jira instance. If the user is not an agent,
the resource returns a list of organizations the user is a member of.
:param service_desk_id: OPTIONAL: str Get organizations from single Service Desk
:param start: OPTIONAL: int The starting index of the returne... |
def _build_likelihood(self):
"""
This function computes the optimal density for v, q*(v), up to a constant
"""
# get the (marginals of) q(f): exactly predicting!
fmean, fvar = self._build_predict(self.X, full_cov=False)
return tf.reduce_sum(self.likelihood.variational_exp... | This function computes the optimal density for v, q*(v), up to a constant |
def from_desmond(cls, path, **kwargs):
"""
Loads a topology from a Desmond DMS file located at `path`.
Arguments
---------
path : str
Path to a Desmond DMS file
"""
dms = DesmondDMSFile(path)
pos = kwargs.pop('positions', dms.getPositions())
... | Loads a topology from a Desmond DMS file located at `path`.
Arguments
---------
path : str
Path to a Desmond DMS file |
def from_packages(cls, parse_context, rev='', packages=None, **kwargs):
"""
:param list packages: The package import paths within the remote library; by default just the
root package will be available (equivalent to passing `packages=['']`).
:param string rev: Identifies which vers... | :param list packages: The package import paths within the remote library; by default just the
root package will be available (equivalent to passing `packages=['']`).
:param string rev: Identifies which version of the remote library to download. This could be a
commit... |
def _should_really_index(self, instance):
"""Return True if according to should_index the object should be indexed."""
if self._should_index_is_method:
is_method = inspect.ismethod(self.should_index)
try:
count_args = len(inspect.signature(self.should_index).param... | Return True if according to should_index the object should be indexed. |
def get_metrics(self, from_time=None, to_time=None, metrics=None,
ifs=[], storageIds=[], view=None):
"""
This endpoint is not supported as of v6. Use the timeseries API
instead. To get all metrics for a host with the timeseries API use
the query:
'select * where hostId = $HOST_ID'.
To ge... | This endpoint is not supported as of v6. Use the timeseries API
instead. To get all metrics for a host with the timeseries API use
the query:
'select * where hostId = $HOST_ID'.
To get specific metrics for a host use a comma-separated list of
the metric names as follows:
'select $METRIC_NAME1... |
def interleaved_filename(file_path):
"""Return filename used to represent a set of paired-end files. Assumes Illumina-style naming
conventions where each file has _R1_ or _R2_ in its name."""
if not isinstance(file_path, tuple):
raise OneCodexException("Cannot get the interleaved filename without a ... | Return filename used to represent a set of paired-end files. Assumes Illumina-style naming
conventions where each file has _R1_ or _R2_ in its name. |
def _which(executable, flags=os.X_OK, abspath_only=False, disallow_symlinks=False):
"""Borrowed from Twisted's :mod:twisted.python.proutils .
Search PATH for executable files with the given name.
On newer versions of MS-Windows, the PATHEXT environment variable will be
set to the list of file extensio... | Borrowed from Twisted's :mod:twisted.python.proutils .
Search PATH for executable files with the given name.
On newer versions of MS-Windows, the PATHEXT environment variable will be
set to the list of file extensions for files considered executable. This
will normally include things like ".EXE". This... |
def start_commit(self, repo_name, branch=None, parent=None, description=None):
"""
Begins the process of committing data to a Repo. Once started you can
write to the Commit with PutFile and when all the data has been
written you must finish the Commit with FinishCommit. NOTE, data is
... | Begins the process of committing data to a Repo. Once started you can
write to the Commit with PutFile and when all the data has been
written you must finish the Commit with FinishCommit. NOTE, data is
not persisted until FinishCommit is called. A Commit object is
returned.
Para... |
def get_windzone(conn, geometry):
'Find windzone from map.'
# TODO@Günni
if geometry.geom_type in ['Polygon', 'MultiPolygon']:
coords = geometry.centroid
else:
coords = geometry
sql = """
SELECT zone FROM oemof_test.windzones
WHERE st_contains(geom, ST_PointFromText('... | Find windzone from map. |
def res_block(nf, dense:bool=False, norm_type:Optional[NormType]=NormType.Batch, bottle:bool=False, **conv_kwargs):
"Resnet block of `nf` features. `conv_kwargs` are passed to `conv_layer`."
norm2 = norm_type
if not dense and (norm_type==NormType.Batch): norm2 = NormType.BatchZero
nf_inner = nf//2 if bo... | Resnet block of `nf` features. `conv_kwargs` are passed to `conv_layer`. |
def get_tour_list(self):
"""
Inquire all tour list
:rtype: list
"""
resp = json.loads(urlopen(self.tour_list_url.format(1)).read().decode('utf-8'))
total_count = resp['response']['body']['totalCount']
# Get total count
resp = json.loads(urlopen(self.tour... | Inquire all tour list
:rtype: list |
def fill_tree(self, tree, input_dict):
"""
fills a tree with nested parameters
Args:
tree: QtGui.QTreeView
parameters: dictionary or Parameter object
Returns:
"""
def add_element(item, key, value):
child_name = QtGui.QStandardItem(ke... | fills a tree with nested parameters
Args:
tree: QtGui.QTreeView
parameters: dictionary or Parameter object
Returns: |
def from_string(cls, cl_function, dependencies=()):
"""Parse the given CL function into a SimpleCLFunction object.
Args:
cl_function (str): the function we wish to turn into an object
dependencies (list or tuple of CLLibrary): The list of CL libraries this function depends on
... | Parse the given CL function into a SimpleCLFunction object.
Args:
cl_function (str): the function we wish to turn into an object
dependencies (list or tuple of CLLibrary): The list of CL libraries this function depends on
Returns:
SimpleCLFunction: the CL data type ... |
def setStimRisefall(self):
    """Apply the risefall value from this widget's spinbox to the
    StimulusModel's tone."""
    self.tone.setRisefall(self.ui.risefallSpnbx.value())
def log_level(self, subsystem, level, **kwargs):
r"""Changes the logging output of a running daemon.
.. code-block:: python
>>> c.log_level("path", "info")
{'Message': "Changed log level of 'path' to 'info'\n"}
Parameters
----------
subsystem : str
... | r"""Changes the logging output of a running daemon.
.. code-block:: python
>>> c.log_level("path", "info")
{'Message': "Changed log level of 'path' to 'info'\n"}
Parameters
----------
subsystem : str
The subsystem logging identifier (Use ``"all"`` f... |
def process_view(self, request, view_func, view_args, view_kwargs):
"""Run the profiler on _view_func_."""
profiler = getattr(request, 'profiler', None)
if profiler:
# Make sure profiler variables don't get passed into view_func
original_get = request.GET
requ... | Run the profiler on _view_func_. |
def _wrapped_method_with_watch_fn(self, f, *args, **kwargs):
"""A wrapped method with a watch function.
When this method is called, it will call the underlying method with
the same arguments, *except* that if the ``watch`` argument isn't
:data:`None`, it will be replaced with a wrapper ... | A wrapped method with a watch function.
When this method is called, it will call the underlying method with
the same arguments, *except* that if the ``watch`` argument isn't
:data:`None`, it will be replaced with a wrapper around that watch
function, so that the watch function will be c... |
def get_updates(self, offset=None, limit=None, timeout=20, allowed_updates=None):
"""
Use this method to receive incoming updates using long polling (wiki). An Array of Update objects is returned.
:param allowed_updates: Array of string. List the types of updates you want your bot to receive.
... | Use this method to receive incoming updates using long polling (wiki). An Array of Update objects is returned.
:param allowed_updates: Array of string. List the types of updates you want your bot to receive.
:param offset: Integer. Identifier of the first update to be returned.
:param limit: Int... |
def maybe_start_recording(tokens, index):
    """Return a new _RSTCommentBlockRecorder when it is time to record,
    otherwise None."""
    token = tokens[index]
    if token.type != TokenType.BeginRSTComment:
        return None
    return _RSTCommentBlockRecorder(index, token.line)
def _set_lim_and_transforms(self):
"""Setup the key transforms for the axes."""
# Most of the transforms are set up correctly by LambertAxes
LambertAxes._set_lim_and_transforms(self)
# Transform for latitude ticks. These are typically unused, but just
# in case we need them...
... | Setup the key transforms for the axes. |
def main(args=None):
"""Main command-line interface entrypoint.
Runs the given subcommand or argument that were specified. If not given a
``args`` parameter, assumes the arguments are passed on the command-line.
Args:
args (list): list of command-line arguments
Returns:
Zero on succe... | Main command-line interface entrypoint.
Runs the given subcommand or argument that were specified. If not given a
``args`` parameter, assumes the arguments are passed on the command-line.
Args:
args (list): list of command-line arguments
Returns:
Zero on success, non-zero otherwise. |
def disable_beacons(self):
'''
        Disable beacons
'''
self.opts['beacons']['enabled'] = False
# Fire the complete event back along with updated list of beacons
evt = salt.utils.event.get_event('minion', opts=self.opts)
        evt.fire_event({'complete': True, 'beacons': se... | Disable beacons |
def check(text):
"""Check the text."""
err = "uncomparables.misc"
msg = "Comparison of an uncomparable: '{}' is not comparable."
comparators = [
"most",
"more",
"less",
"least",
"very",
"quite",
"largely",
"extremely",
"increasingl... | Check the text. |
def _get_command(classes):
"""Associates each command class with command depending on setup.cfg
"""
commands = {}
setup_file = os.path.join(
os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')),
'setup.cfg')
for line in open(setup_file, 'r'):
for cl in classes:
... | Associates each command class with command depending on setup.cfg |
def groups_remove_owner(self, room_id, user_id, **kwargs):
    """Remove the owner role from a user in the current group."""
    payload = {'roomId': room_id, 'userId': user_id, 'kwargs': kwargs}
    return self.__call_api_post('groups.removeOwner', **payload)
def _create_input_transactions(self, addy):
# type: (Address) -> None
"""
Creates transactions for the specified input address.
"""
self._transactions.append(ProposedTransaction(
address=addy,
tag=self.tag,
# Spend the entire address balance; ... | Creates transactions for the specified input address. |
def update_one(self, filter_, document, **kwargs):
    """Validate ``document``, then perform a single-document update
    against the underlying collection."""
    self._valide_update_document(document)
    collection = self.__collect
    return collection.update_one(filter_, document, **kwargs)
def get_pret_embs(self, word_dims=None):
"""Read pre-trained embedding file
Parameters
----------
word_dims : int or None
vector size. Use `None` for auto-infer
Returns
-------
numpy.ndarray
T x C numpy NDArray
"""
assert (... | Read pre-trained embedding file
Parameters
----------
word_dims : int or None
vector size. Use `None` for auto-infer
Returns
-------
numpy.ndarray
T x C numpy NDArray |
def select_limit(self, table, cols='*', offset=0, limit=MAX_ROWS_PER_QUERY):
    """Run a SELECT query constrained by an offset and a row limit."""
    query = self._select_limit_statement(table, cols, offset, limit)
    return self.fetch(query)
def calc_uniform_lim_glorot(inmaps, outmaps, kernel=(1, 1)):
r"""Calculates the lower bound and the upper bound of the uniform distribution proposed by Glorot et al.
.. math::
b &= \sqrt{\frac{6}{NK + M}}\\
a &= -b
Args:
inmaps (int): Map size of an input Variable, :math:`N`.
... | r"""Calculates the lower bound and the upper bound of the uniform distribution proposed by Glorot et al.
.. math::
b &= \sqrt{\frac{6}{NK + M}}\\
a &= -b
Args:
inmaps (int): Map size of an input Variable, :math:`N`.
outmaps (int): Map size of an output Variable, :math:`M`.
... |
def get_types(self):
"""
Retrieve a set of all recognized content types for this
translator object.
"""
# Convert translators into a set of content types
content_types = set()
for name in self.translators:
content_types |= type_names[name]
re... | Retrieve a set of all recognized content types for this
translator object. |
def coerce_value(type, value):
# type: (Any, Any) -> Union[List, Dict, int, float, bool, str, None]
"""Given a type and any value, return a runtime value coerced to match the type."""
if isinstance(type, GraphQLNonNull):
# Note: we're not checking that the result of coerceValue is
# non-null... | Given a type and any value, return a runtime value coerced to match the type. |
def _converged(self):
"""Check convergence based on maximum absolute difference
Returns
-------
converged : boolean
Whether the parameter estimation converged.
max_diff : float
Maximum absolute difference between prior and posterior.
"""
... | Check convergence based on maximum absolute difference
Returns
-------
converged : boolean
Whether the parameter estimation converged.
max_diff : float
Maximum absolute difference between prior and posterior. |
def getKwCtrlConf(self, kw, fmt='dict'):
""" return keyword's control configuration, followed after '!epics' notation
:param kw: keyword name
:param fmt: return format, 'raw', 'dict', 'json', default is 'dict'
"""
try:
confd = self.ctrlconf_dict[kw]
if fmt... | return keyword's control configuration, followed after '!epics' notation
:param kw: keyword name
:param fmt: return format, 'raw', 'dict', 'json', default is 'dict' |
def set_delegate(address=None, pubkey=None, secret=None):
    """Set the delegate parameters. Calling with no arguments clears them."""
    c.DELEGATE.update({
        'ADDRESS': address,
        'PUBKEY': pubkey,
        'PASSPHRASE': secret,
    })
def save(self, data, xparent=None):
"""
Parses the element from XML to Python.
:param data | <variant>
xparent | <xml.etree.ElementTree.Element> || None
:return <xml.etree.ElementTree.Element>
"""
if xparent is not None:
... | Parses the element from XML to Python.
:param data | <variant>
xparent | <xml.etree.ElementTree.Element> || None
:return <xml.etree.ElementTree.Element> |
def add_data(self, address, data):
"""! @brief Add a chunk of data to be programmed.
The data may cross flash memory region boundaries, as long as the regions are contiguous.
@param self
@param address Integer address for where the first byte of _data_ should be written... | ! @brief Add a chunk of data to be programmed.
The data may cross flash memory region boundaries, as long as the regions are contiguous.
@param self
@param address Integer address for where the first byte of _data_ should be written.
@param data A list of byte values to... |
def add_graph(self, graph):
    """Serialize a `Graph` protocol buffer and append it to the event file."""
    serialized = graph.SerializeToString()
    self._add_event(event_pb2.Event(graph_def=serialized), None)
def tryload(self, cfgstr=None, on_error='raise'):
"""
Like load, but returns None if the load fails due to a cache miss.
Args:
on_error (str): How to handle non-io errors errors. Either raise,
which re-raises the exception, or clear which deletes the cache
... | Like load, but returns None if the load fails due to a cache miss.
Args:
on_error (str): How to handle non-io errors errors. Either raise,
which re-raises the exception, or clear which deletes the cache
and returns None. |
def delete_namespaced_endpoints(self, name, namespace, **kwargs): # noqa: E501
"""delete_namespaced_endpoints # noqa: E501
delete Endpoints # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>... | delete_namespaced_endpoints # noqa: E501
delete Endpoints # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_endpoints(name, namespace, async_req=True)
>>> re... |
def add_line(self, line, source, *lineno):
    """Append one line (with its source location) to the accumulated result."""
    args = (line, source) + lineno
    self.result.append(*args)
def skipgram_fasttext_batch(centers, contexts, num_tokens, subword_lookup,
dtype, index_dtype):
"""Create a batch for SG training objective with subwords."""
contexts = mx.nd.array(contexts[2], dtype=index_dtype)
data, row, col = subword_lookup(centers)
centers = mx.nd.array(... | Create a batch for SG training objective with subwords. |
def Nu_vertical_cylinder_Eigenson_Morgan(Pr, Gr, turbulent=None):
r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by [2]_,
presented in [3]_ and in more detail in [4]_.
.. math::
Nu_H = 0.48 Ra_H^{0.25},\; 10... | r'''Calculates Nusselt number for natural convection around a vertical
isothermal cylinder according to the results of [1]_ correlated by [2]_,
presented in [3]_ and in more detail in [4]_.
.. math::
Nu_H = 0.48 Ra_H^{0.25},\; 10^{9} < Ra
Nu_H = 51.5 + 0.0000726 Ra_H^{0.63},\; 10^{9} < Ra ... |
def from_table(table, engine, limit=None):
"""
Select data in a database table and put into prettytable.
Create a :class:`prettytable.PrettyTable` from :class:`sqlalchemy.Table`.
**中文文档**
将数据表中的数据放入prettytable中.
"""
sql = select([table])
if limit is not None:
sql = sql.limit(l... | Select data in a database table and put into prettytable.
Create a :class:`prettytable.PrettyTable` from :class:`sqlalchemy.Table`.
**中文文档**
将数据表中的数据放入prettytable中. |
def _get_converter(self, convert_to=None):
'''see convert and save. This is a helper function that returns
the proper conversion function, but doesn't call it. We do this
so that in the case of convert, we do the conversion and return
a string. In the case of save, we save the ... | see convert and save. This is a helper function that returns
the proper conversion function, but doesn't call it. We do this
so that in the case of convert, we do the conversion and return
a string. In the case of save, we save the recipe to file for the
user.
P... |
def get_object(self):
"""
Get the object for previewing.
Raises a http404 error if the object is not found.
"""
obj = super(DeleteView, self).get_object()
if not obj:
raise http.Http404
return obj | Get the object for previewing.
Raises a http404 error if the object is not found. |
def partition(self):
"""Partitions all tasks into groups of tasks. A group is
represented by a task_store object that indexes a sub-
set of tasks."""
step = int(math.ceil(self.num_tasks / float(self.partitions)))
if self.indices == None:
slice_ind = list(range(0... | Partitions all tasks into groups of tasks. A group is
represented by a task_store object that indexes a sub-
set of tasks. |
async def _connect_polling(self, url, headers, engineio_path):
"""Establish a long-polling connection to the Engine.IO server."""
if aiohttp is None: # pragma: no cover
self.logger.error('aiohttp not installed -- cannot make HTTP '
'requests!')
retu... | Establish a long-polling connection to the Engine.IO server. |
async def handle_client_ping(self, client_addr, _: Ping):
    """Handle a Ping message by replying to the client with a Pong."""
    reply = Pong()
    await ZMQUtils.send_with_addr(self._client_socket, client_addr, reply)
def get_help_datapacks(module_name, server_prefix):
"""
Get the help datapacks for a module
Args:
module_name (str): The module to get help data for
server_prefix (str): The command prefix for this server
Returns:
datapacks (list): The help datapacks for the module
"""
... | Get the help datapacks for a module
Args:
module_name (str): The module to get help data for
server_prefix (str): The command prefix for this server
Returns:
datapacks (list): The help datapacks for the module |
def walk(self, dispatcher, node):
"""
Walk through the node with a custom dispatcher for extraction of
details that are required.
"""
deferrable_handlers = {
Declare: self.declare,
Resolve: self.register_reference,
}
layout_handlers = {
... | Walk through the node with a custom dispatcher for extraction of
details that are required. |
def getLipdNames(D=None):
"""
Get a list of all LiPD names in the library
| Example
| names = lipd.getLipdNames(D)
:return list f_list: File list
"""
_names = []
try:
if not D:
print("Error: LiPD data not provided. Pass LiPD data into the function.")
else:
... | Get a list of all LiPD names in the library
| Example
| names = lipd.getLipdNames(D)
:return list f_list: File list |
def _get_distance_scaling(self, C, mag, rhypo):
"""
Returns the distance scalig term
"""
return (C["a3"] * np.log(rhypo)) + (C["a4"] + C["a5"] * mag) * rhypo | Returns the distance scalig term |
def from_rgb(r, g=None, b=None):
"""
Return the nearest xterm 256 color code from rgb input.
"""
c = r if isinstance(r, list) else [r, g, b]
best = {}
for index, item in enumerate(colors):
d = __distance(item, c)
if(not best or d <= best['distance']):
best = {'distan... | Return the nearest xterm 256 color code from rgb input. |
def draw_address(canvas):
""" Draws the business address """
business_details = (
u'COMPANY NAME LTD',
u'STREET',
u'TOWN',
U'COUNTY',
U'POSTCODE',
U'COUNTRY',
u'',
u'',
u'Phone: +00 (0) 000 000 000',
u'Email: example@example.com',
... | Draws the business address |
def list_objects(self, query=None, limit=-1, offset=-1):
"""List of all objects in the database. Optinal parameter limit and
offset for pagination. A dictionary of key,value-pairs can be given as
addictional query condition for document properties.
Parameters
----------
... | List of all objects in the database. Optinal parameter limit and
offset for pagination. A dictionary of key,value-pairs can be given as
addictional query condition for document properties.
Parameters
----------
query : Dictionary
Filter objects by property-value pair... |
def get_info(pyfile):
'''Retrieve dunder values from a pyfile'''
info = {}
info_re = re.compile(r"^__(\w+)__ = ['\"](.*)['\"]")
with open(pyfile, 'r') as f:
for line in f.readlines():
match = info_re.search(line)
if match:
info[match.group(1)] = match.grou... | Retrieve dunder values from a pyfile |
def render_html(root, options=0, extensions=None):
"""Render a given syntax tree as HTML.
Args:
root (Any): The reference to the root node of the syntax tree.
options (int): The cmark options.
extensions (Any): The reference to the syntax extensions, generally
from :f... | Render a given syntax tree as HTML.
Args:
root (Any): The reference to the root node of the syntax tree.
options (int): The cmark options.
extensions (Any): The reference to the syntax extensions, generally
from :func:`parser_get_syntax_extensions`
Returns:
... |
def view(self, rec):
'''
View the page.
'''
kwd = {
'pager': '',
}
self.render('wiki_page/page_view.html',
postinfo=rec,
kwd=kwd,
author=rec.user_name,
format_date=tools.format_da... | View the page. |
def set_env_info(self, env_state=None, env_id=None, episode_id=None, bump_past=None, fps=None):
"""Atomically set the environment state tracking variables.
"""
with self.cv:
if env_id is None:
env_id = self._env_id
if env_state is None:
env... | Atomically set the environment state tracking variables. |
def __build_config_block(self, config_block_node):
"""parse `config_block` in each section
Args:
config_block_node (TreeNode): Description
Returns:
[line_node1, line_node2, ...]
"""
node_lists = []
for line_node in config_block_node:
... | parse `config_block` in each section
Args:
config_block_node (TreeNode): Description
Returns:
[line_node1, line_node2, ...] |
def save(self, items):
'''
Save a series of items to a sequence.
Args:
items (tuple): The series of items to save into the sequence.
Returns:
The index of the first item
'''
rows = []
indx = self.indx
size = 0
tick = s_co... | Save a series of items to a sequence.
Args:
items (tuple): The series of items to save into the sequence.
Returns:
The index of the first item |
def get_mesh_dict(self):
"""Returns calculated mesh sampling phonons
Returns
-------
dict
keys: qpoints, weights, frequencies, eigenvectors, and
group_velocities
Each value for the corresponding key is explained as below.
qpoints: ... | Returns calculated mesh sampling phonons
Returns
-------
dict
keys: qpoints, weights, frequencies, eigenvectors, and
group_velocities
Each value for the corresponding key is explained as below.
qpoints: ndarray
q-points in ... |
def logsumexp(arr, axis=0):
"""Computes the sum of arr assuming arr is in the log domain.
Returns log(sum(exp(arr))) while minimizing the possibility of
over/underflow.
Examples
--------
>>> import numpy as np
>>> from sklearn.utils.extmath import logsumexp
>>> a = np.arange(10)
>>> ... | Computes the sum of arr assuming arr is in the log domain.
Returns log(sum(exp(arr))) while minimizing the possibility of
over/underflow.
Examples
--------
>>> import numpy as np
>>> from sklearn.utils.extmath import logsumexp
>>> a = np.arange(10)
>>> np.log(np.sum(np.exp(a)))
9.458... |
def execute(self, input_data):
''' This worker computes meta data for any file type. '''
raw_bytes = input_data['sample']['raw_bytes']
self.meta['md5'] = hashlib.md5(raw_bytes).hexdigest()
self.meta['tags'] = input_data['tags']['tags']
self.meta['type_tag'] = input_data['sample']... | This worker computes meta data for any file type. |
def start_session_if_none(self):
    """Start a YouTube session if one is not yet initialized."""
    if self._screen_id and self._session:
        return
    self.update_screen_id()
    self._session = YouTubeSession(screen_id=self._screen_id)
def s3_write(self, log, remote_log_location, append=True):
"""
Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote... | Writes the log to the remote_log_location. Fails silently if no hook
was created.
:param log: the log to write to the remote_log_location
:type log: str
:param remote_log_location: the log's location in remote storage
:type remote_log_location: str (path)
:param append: i... |
def recommend_delete(self, num_iid, session):
'''taobao.item.recommend.delete 取消橱窗推荐一个商品
取消当前用户指定商品的橱窗推荐状态 这个Item所属卖家从传入的session中获取,需要session绑定'''
request = TOPRequest('taobao.item.recommend.delete')
request['num_iid'] = num_iid
self.create(self.execute(request, session)... | taobao.item.recommend.delete 取消橱窗推荐一个商品
取消当前用户指定商品的橱窗推荐状态 这个Item所属卖家从传入的session中获取,需要session绑定 |
def config(name='EMAIL_URL', default='console://'):
    """Return a dict of EMAIL_* settings parsed from an email URL.

    Arguments:
        name: environment variable that holds the email URL.
        default: URL to fall back on when the variable is unset.

    Returns:
        dict: result of ``parse_email_url`` on the resolved URL, or an
        empty dict when the resolved URL is empty/falsy.
    """
    url = env(name, default)
    return parse_email_url(url) if url else {}
def from_char(
cls, char, name=None, width=None, fill_char=None,
bounce=False, reverse=False, back_char=None, wrapper=None):
""" Create progress bar frames from a "moving" character.
The frames simulate movement of the character, from left to
right through empty s... | Create progress bar frames from a "moving" character.
The frames simulate movement of the character, from left to
right through empty space (`fill_char`).
Arguments:
char : Character to move across the bar.
name : Name for the new ... |
def _wkt(eivals, timescales, normalization, normalized_laplacian):
"""
Computes wave kernel trace from given eigenvalues, timescales, and normalization.
For precise definition, please refer to "NetLSD: Hearing the Shape of a Graph" by A. Tsitsulin, D. Mottin, P. Karras, A. Bronstein, E. Müller. Published a... | Computes wave kernel trace from given eigenvalues, timescales, and normalization.
For precise definition, please refer to "NetLSD: Hearing the Shape of a Graph" by A. Tsitsulin, D. Mottin, P. Karras, A. Bronstein, E. Müller. Published at KDD'18.
Parameters
----------
eivals : numpy.ndarray
... |
def normalize_locale(loc):
'''
Format a locale specifier according to the format returned by `locale -a`.
'''
comps = split_locale(loc)
comps['territory'] = comps['territory'].upper()
comps['codeset'] = comps['codeset'].lower().replace('-', '')
comps['charmap'] = ''
return join_locale(co... | Format a locale specifier according to the format returned by `locale -a`. |
def _GetStatus(self):
"""Retrieves status information.
Returns:
dict[str, object]: status attributes, indexed by name.
"""
if self._parser_mediator:
number_of_produced_events = (
self._parser_mediator.number_of_produced_events)
number_of_produced_sources = (
self._... | Retrieves status information.
Returns:
dict[str, object]: status attributes, indexed by name. |
def _spectrogram(y=None, S=None, n_fft=2048, hop_length=512, power=1,
win_length=None, window='hann', center=True, pad_mode='reflect'):
'''Helper function to retrieve a magnitude spectrogram.
This is primarily used in feature extraction functions that can operate on
either audio time-serie... | Helper function to retrieve a magnitude spectrogram.
This is primarily used in feature extraction functions that can operate on
either audio time-series or spectrogram input.
Parameters
----------
y : None or np.ndarray [ndim=1]
If provided, an audio time series
S : None or np.ndarra... |
def checkKey(self, credentials):
"""
Retrieve the keys of the user specified by the credentials, and check
if one matches the blob in the credentials.
"""
filename = self._keyfile
if not os.path.exists(filename):
return 0
lines = open(filename).xreadli... | Retrieve the keys of the user specified by the credentials, and check
if one matches the blob in the credentials. |
def perp(weights):
r"""Calculate the normalized perplexity :math:`\mathcal{P}` of samples
with ``weights`` :math:`\omega_i`. :math:`\mathcal{P}=0` is
terrible and :math:`\mathcal{P}=1` is perfect.
.. math::
\mathcal{P} = exp(H) / N
where
.. math::
H = - \sum_{i=1}^N \bar{\om... | r"""Calculate the normalized perplexity :math:`\mathcal{P}` of samples
with ``weights`` :math:`\omega_i`. :math:`\mathcal{P}=0` is
terrible and :math:`\mathcal{P}=1` is perfect.
.. math::
\mathcal{P} = exp(H) / N
where
.. math::
H = - \sum_{i=1}^N \bar{\omega}_i log ~ \bar{\omeg... |
def print_splits(cliques, next_cliques):
"""Print shifts for new forks."""
splits = 0
for i, clique in enumerate(cliques):
parent, _ = clique
# If this fork continues
if parent in next_cliques:
# If there is a new fork, print a split
if len(next_cliques[paren... | Print shifts for new forks. |
def read_ipx(self, length):
"""Read Internetwork Packet Exchange.
Structure of IPX header [RFC 1132]:
Octets Bits Name Description
0 0 ipx.cksum Checksum
2 16 ipx.len Packet L... | Read Internetwork Packet Exchange.
Structure of IPX header [RFC 1132]:
Octets Bits Name Description
0 0 ipx.cksum Checksum
2 16 ipx.len Packet Length (header includes)
4... |
def maintenance_center(self, storage_disk_xml=None):
""" Collector for how many disk(s) are in NetApp maintenance center
For more information on maintenance center please see:
bit.ly/19G4ptr
"""
disk_in_maintenance = 0
for filer_disk in storage_disk_xml:
... | Collector for how many disk(s) are in NetApp maintenance center
For more information on maintenance center please see:
bit.ly/19G4ptr |
def build_template(
initial_template=None,
image_list=None,
iterations = 3,
gradient_step = 0.2,
**kwargs ):
"""
Estimate an optimal template from an input image_list
ANTsR function: N/A
Arguments
---------
initial_template : ANTsImage
initialization for the templat... | Estimate an optimal template from an input image_list
ANTsR function: N/A
Arguments
---------
initial_template : ANTsImage
initialization for the template building
image_list : ANTsImages
images from which to estimate template
iterations : integer
number of template b... |
def create_server_app(provider, password=None, cache=True, cache_timeout=3600,
debug=False):
"""
Create a DAAP server, based around a Flask application. The server requires
a content provider, server name and optionally, a password. The content
provider should return raw object dat... | Create a DAAP server, based around a Flask application. The server requires
a content provider, server name and optionally, a password. The content
provider should return raw object data.
Object responses can be cached. This may dramatically speed up connections
for multiple clients. However, this is o... |
def drive_rotational_speed_rpm(self):
"""Gets set of rotational speed of the disks"""
drv_rot_speed_rpm = set()
for member in self._drives_list():
if member.rotation_speed_rpm is not None:
drv_rot_speed_rpm.add(member.rotation_speed_rpm)
return drv_rot_speed_... | Gets set of rotational speed of the disks |
def start(self):
    """Start the background process."""
    loop = LoopingCall(self._download)
    self._lc = loop
    # Fire one download immediately, then repeat every 30 seconds.
    loop.start(30, now=True)
def sanity(request, sysmeta_pyxb):
"""Check that sysmeta_pyxb is suitable for creating a new object and matches the
uploaded sciobj bytes."""
_does_not_contain_replica_sections(sysmeta_pyxb)
_is_not_archived(sysmeta_pyxb)
_obsoleted_by_not_specified(sysmeta_pyxb)
if 'HTTP_VENDOR_GMN_REMOTE_URL' ... | Check that sysmeta_pyxb is suitable for creating a new object and matches the
uploaded sciobj bytes. |
def _specialKeyEvent(key, upDown):
""" Helper method for special keys.
Source: http://stackoverflow.com/questions/11045814/emulate-media-key-press-on-mac
"""
assert upDown in ('up', 'down'), "upDown argument must be 'up' or 'down'"
key_code = special_key_translate_table[key]
ev = AppKit.NSEve... | Helper method for special keys.
Source: http://stackoverflow.com/questions/11045814/emulate-media-key-press-on-mac |
def run(self):
""" Filter job callback.
"""
from pyrocore import config
try:
config.engine.open()
# TODO: select view into items
items = []
self.run_filter(items)
except (error.LoggableError, xmlrpc.ERRORS) as exc:
self... | Filter job callback. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.