code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def markdown(iterable, renderer=HTMLRenderer):
    """
    Output HTML with default settings.

    Enables inline and block-level HTML tags.

    :param iterable: lines of Markdown source text to parse.
    :param renderer: renderer class to instantiate; defaults to HTMLRenderer.
    :return: the rendered output produced by the renderer.
    """
    # Use a distinct name for the context-manager target so the
    # ``renderer`` parameter (a class) is not shadowed by the instance.
    with renderer() as active_renderer:
        return active_renderer.render(Document(iterable))
Enables inline and block-level HTML tags. |
def push_account_task(obj_id):
"""
Async: push_account_task.delay(Account.id)
"""
lock_id = "%s-push-account-%s" % (settings.ENV_PREFIX, obj_id)
acquire_lock = lambda: cache.add(lock_id, "true", LOCK_EXPIRE) # noqa: E731
release_lock = lambda: cache.delete(lock_id) # noqa: E731
if acquire_... | Async: push_account_task.delay(Account.id) |
def get_first():
    """
    Return the first droplet listed for the account.

    :return: the droplet object fetched by its id.
    :raises IndexError: if the account has no droplets.
    """
    client = po.connect()  # this depends on the DIGITALOCEAN_API_KEY envvar
    all_droplets = client.droplets.list()
    # Renamed from ``id`` to avoid shadowing the builtin; assumes at least
    # one droplet exists (the original noted it relied on a single droplet).
    droplet_id = all_droplets[0]['id']
    return client.droplets.get(droplet_id)
def _get_hosted_zone_limit(self, limit_type, hosted_zone_id):
"""
Return a hosted zone limit [recordsets|vpc_associations]
:rtype: dict
"""
result = self.conn.get_hosted_zone_limit(
Type=limit_type,
HostedZoneId=hosted_zone_id
)
return r... | Return a hosted zone limit [recordsets|vpc_associations]
:rtype: dict |
def fetch_token(self, client_secret, code, context, scope, redirect_uri,
token_url='https://login.bigcommerce.com/oauth2/token'):
"""
Fetches a token from given token_url, using given parameters, and sets up session headers for
future requests.
redirect_uri should be ... | Fetches a token from given token_url, using given parameters, and sets up session headers for
future requests.
redirect_uri should be the same as your callback URL.
code, context, and scope should be passed as parameters to your callback URL on app installation.
Raises HttpException on ... |
def search(self, fields=None, query=None, filters=None):
"""Search for entities.
At its simplest, this method searches for all entities of a given kind.
For example, to ask for all
:class:`nailgun.entities.LifecycleEnvironment` entities::
LifecycleEnvironment().search()
... | Search for entities.
At its simplest, this method searches for all entities of a given kind.
For example, to ask for all
:class:`nailgun.entities.LifecycleEnvironment` entities::
LifecycleEnvironment().search()
Values on an entity are used to generate a search query, and t... |
def get_columns(self, font):
    """Look up the number of columns configured for the given font."""
    resolved = self.get_font(font)
    key = six.text_type(resolved)
    return self.fonts[key]['columns']
def fit(self, X, y, step_size=0.1, init_weights=None, warm_start: bool=False):
"""Fit the weights on the given predictions.
Args:
X (array-like): Predictions of different models for the labels.
y (array-like): Labels.
step_size (float): Step size for optimizing the w... | Fit the weights on the given predictions.
Args:
X (array-like): Predictions of different models for the labels.
y (array-like): Labels.
step_size (float): Step size for optimizing the weights.
Smaller step sizes most likely improve resulting sc... |
def watch_files(self):
"""watch files for changes, if changed, rebuild blog. this thread
will quit if the main process ends"""
try:
while 1:
sleep(1) # check every 1s
try:
files_stat = self.get_files_stat()
except... | watch files for changes, if changed, rebuild blog. this thread
will quit if the main process ends |
def vertex_normals(self):
"""
The vertex normals of the mesh. If the normals were loaded
we check to make sure we have the same number of vertex
normals and vertices before returning them. If there are
no vertex normals defined or a shape mismatch we calculate
the vertex... | The vertex normals of the mesh. If the normals were loaded
we check to make sure we have the same number of vertex
normals and vertices before returning them. If there are
no vertex normals defined or a shape mismatch we calculate
the vertex normals from the mean normals of the faces th... |
def str_rstrip(x, to_strip=None):
"""Remove trailing characters from a string sample.
:param str to_strip: The string to be removed
:returns: an expression containing the modified string column.
Example:
>>> import vaex
>>> text = ['Something', 'very pretty', 'is coming', 'our', 'way.']
>... | Remove trailing characters from a string sample.
:param str to_strip: The string to be removed
:returns: an expression containing the modified string column.
Example:
>>> import vaex
>>> text = ['Something', 'very pretty', 'is coming', 'our', 'way.']
>>> df = vaex.from_arrays(text=text)
>... |
def create_cookie(self, delete=None):
"""
Creates the value for ``Set-Cookie`` HTTP header.
:param bool delete:
If ``True`` the cookie value will be ``deleted`` and the
Expires value will be ``Thu, 01-Jan-1970 00:00:01 GMT``.
"""
value = 'deleted' if del... | Creates the value for ``Set-Cookie`` HTTP header.
:param bool delete:
If ``True`` the cookie value will be ``deleted`` and the
Expires value will be ``Thu, 01-Jan-1970 00:00:01 GMT``. |
def get_shark_field(self, fields):
    """
    :fields: str[]
    """
    parsed = super(BACK, self).get_shark_field(fields)
    parsed.update({
        'acked_seqs': self.acked_seqs,
        'bitmap_str': self.bitmap_str,
    })
    return parsed
def get_config(self):
"""
serialize to a dict all attributes except model weights
Returns
-------
dict
"""
self.update_network_description()
result = dict(self.__dict__)
result['_network'] = None
result['network_weights'] = None
... | serialize to a dict all attributes except model weights
Returns
-------
dict |
def _get_channel(self):
    """Open a session channel on the underlying transport.

    The channel merges stderr into stdout and requests a pseudo-terminal
    before being handed back to the caller.
    """
    session = self._transport.open_session()
    session.set_combine_stderr(True)
    session.get_pty()
    return session
not. |
def build_trips(pfeed, routes, service_by_window):
"""
Given a ProtoFeed and its corresponding routes (DataFrame),
service-by-window (dictionary), return a DataFrame representing
``trips.txt``.
Trip IDs encode route, direction, and service window information
to make it easy to compute stop times... | Given a ProtoFeed and its corresponding routes (DataFrame),
service-by-window (dictionary), return a DataFrame representing
``trips.txt``.
Trip IDs encode route, direction, and service window information
to make it easy to compute stop times later. |
def _parse_launch_error(data):
"""
Parses a LAUNCH_ERROR message and returns a LaunchFailure object.
:type data: dict
:rtype: LaunchFailure
"""
return LaunchFailure(
data.get(ERROR_REASON, None),
data.get(APP_ID),
data.get(REQUEST_ID),... | Parses a LAUNCH_ERROR message and returns a LaunchFailure object.
:type data: dict
:rtype: LaunchFailure |
def add_sparse_covariance_matrix(self,x,y,names,iidx,jidx,data):
"""build a pyemu.SparseMatrix instance implied by Vario2d
Parameters
----------
x : (iterable of floats)
x-coordinate locations
y : (iterable of floats)
y-coordinate locations
names... | build a pyemu.SparseMatrix instance implied by Vario2d
Parameters
----------
x : (iterable of floats)
x-coordinate locations
y : (iterable of floats)
y-coordinate locations
names : (iterable of str)
names of locations. If None, cov must not be... |
def _readBlock(self):
"""Read a block of data from the remote reader."""
if self.interrupted or self.fp is None:
if self.debug:
log.msg('WorkerFileDownloadCommand._readBlock(): end')
return True
length = self.blocksize
if self.bytes_remaining is ... | Read a block of data from the remote reader. |
def datasetsBM(host=biomart_host):
"""
Lists BioMart datasets.
:param host: address of the host server, default='http://www.ensembl.org/biomart'
:returns: nothing
"""
stdout_ = sys.stdout #Keep track of the previous value.
stream = StringIO()
sys.stdout = stream
server = Biomar... | Lists BioMart datasets.
:param host: address of the host server, default='http://www.ensembl.org/biomart'
:returns: nothing |
def generate_unit_squares(image_width, image_height):
"""Generate coordinates for a tiling of unit squares."""
# Iterate over the required rows and cells. The for loops (x, y)
# give the coordinates of the top left-hand corner of each square:
#
# (x, y) +-----+ (x + 1, y)
# | ... | Generate coordinates for a tiling of unit squares. |
def _get_baremetal_connections(self, port,
only_active_switch=False,
from_segment=False):
"""Get switch ips and interfaces from baremetal transaction.
This method is used to extract switch/interface
information from transacti... | Get switch ips and interfaces from baremetal transaction.
This method is used to extract switch/interface
information from transactions where VNIC_TYPE is
baremetal.
:param port: Received port transaction
:param only_active_switch: Indicator for selecting
c... |
def vmomentsurfacemass(self,R,n,m,t=0.,nsigma=None,deg=False,
epsrel=1.e-02,epsabs=1.e-05,phi=0.,
grid=None,gridpoints=101,returnGrid=False,
hierarchgrid=False,nlevels=2,
print_progress=False,
... | NAME:
vmomentsurfacemass
PURPOSE:
calculate the an arbitrary moment of the velocity distribution at (R,phi) times the surfacmass
INPUT:
R - radius at which to calculate the moment (in natural units)
phi= azimuth (rad unless deg=True)
n - vR^n... |
def get_arguments(self):
"""
Extracts the specific arguments of this CLI
"""
MetricCommon.get_arguments(self)
if self.args.metricName is not None:
self.metricName = self.args.metricName
if self.args.displayName is not None:
se... | Extracts the specific arguments of this CLI |
def iter(self, order='', sort=True):
    """Build and return a :class:`tableiter` over this single column."""
    from casacore.tables import tableiter
    iterator = tableiter(self._table, [self._column], order, sort)
    return iterator
def content(self):
    """
    Lazily return the FileNode's content, decoding from UTF-8 where
    possible.
    """
    raw = self._get_content()
    # Payloads containing a NUL byte are treated as binary and returned
    # untouched; everything else goes through safe_unicode.
    if raw and '\0' in raw:
        return raw
    return safe_unicode(raw)
decode content from UTF-8. |
def subscribe(self, objectID, varIDs=(tc.VAR_ROAD_ID, tc.VAR_LANEPOSITION), begin=0, end=2**31 - 1):
    """subscribe(string, list(integer), int, int) -> None
    Subscribe to one or more object values for the given interval.
    """
    # Delegates to the shared Domain implementation. Defaults request the
    # road id and lane position variables over (practically) unbounded time:
    # end = 2**31 - 1 is the maximum signed 32-bit value.
    Domain.subscribe(self, objectID, varIDs, begin, end)
Subscribe to one or more object values for the given interval. |
def apply_trend_constraint(self, limit, dt, distribution_skip=False,
**kwargs):
"""
Constrains change in RV to be less than limit over time dt.
Only works if ``dRV`` and ``Plong`` attributes are defined
for population.
:param limit:
Ra... | Constrains change in RV to be less than limit over time dt.
Only works if ``dRV`` and ``Plong`` attributes are defined
for population.
:param limit:
Radial velocity limit on trend. Must be
:class:`astropy.units.Quantity` object, or
else interpreted as m/s.
... |
def _set_get_media_detail(self, v, load=False):
"""
Setter method for get_media_detail, mapped from YANG variable /brocade_interface_ext_rpc/get_media_detail (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_get_media_detail is considered as a private
method. Bac... | Setter method for get_media_detail, mapped from YANG variable /brocade_interface_ext_rpc/get_media_detail (rpc)
If this variable is read-only (config: false) in the
source YANG file, then _set_get_media_detail is considered as a private
method. Backends looking to populate this variable should
do so via... |
def read_file(file_path_name):
"""
Read the content of the specified file.
@param file_path_name: path and name of the file to read.
@return: content of the specified file.
"""
with io.open(os.path.join(os.path.dirname(__file__), file_path_name), mode='rt', encoding='utf-8') as fd:
r... | Read the content of the specified file.
@param file_path_name: path and name of the file to read.
@return: content of the specified file. |
def upload(client, source_dir):
"""Upload listing files in source_dir. folder herachy."""
print('')
print('upload store listings')
print('---------------------')
listings_folder = os.path.join(source_dir, 'listings')
langfolders = filter(os.path.isdir, list_dir_abspath(listings_folder))
for... | Upload listing files in source_dir. folder herachy. |
def _generateForTokenSecurity(self,
username, password,
tokenUrl,
expiration=None,
client='requestip'):
""" generates a token for a feature service """
query_dict = {'... | generates a token for a feature service |
def _fluent_params(self, fluents, ordering) -> FluentParamsList:
'''Returns the instantiated `fluents` for the given `ordering`.
For each fluent in `fluents`, it instantiates each parameter
type w.r.t. the contents of the object table.
Returns:
Sequence[Tuple[str, List[str]... | Returns the instantiated `fluents` for the given `ordering`.
For each fluent in `fluents`, it instantiates each parameter
type w.r.t. the contents of the object table.
Returns:
Sequence[Tuple[str, List[str]]]: A tuple of pairs of fluent name
and a list of instantiated f... |
def hide(self):
    """Withdraw the window from the screen and mark it as not visible."""
    self._visible = False
    self.tk.withdraw()
    # A modal window holds the input grab; release it when hiding.
    if self._modal:
        self.tk.grab_release()
def data(self, index, role=Qt.DisplayRole):
"""return data depending on index, Qt::ItemDataRole and data type of the column.
Args:
index (QtCore.QModelIndex): Index to define column and row you want to return
role (Qt::ItemDataRole): Define which data you want to return.
... | return data depending on index, Qt::ItemDataRole and data type of the column.
Args:
index (QtCore.QModelIndex): Index to define column and row you want to return
role (Qt::ItemDataRole): Define which data you want to return.
Returns:
None if index is invalid
... |
def orchestrate_high(data, test=None, queue=False, pillar=None, **kwargs):
'''
Execute a single state orchestration routine
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt-run state.orchestrate_high '{
stage_one:
{salt.state: [{tgt: "db*"}, {... | Execute a single state orchestration routine
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt-run state.orchestrate_high '{
stage_one:
{salt.state: [{tgt: "db*"}, {sls: postgres_setup}]},
stage_two:
{salt.state: [{tgt: "web... |
def to_dict(self, *, include_keys=None, exclude_keys=None, use_default_excludes=True):
"""Converts the class to a dictionary.
:include_keys: if not None, only the attrs given will be included.
:exclude_keys: if not None, all attrs except those listed will be included, with respect to
use_default_exclud... | Converts the class to a dictionary.
:include_keys: if not None, only the attrs given will be included.
:exclude_keys: if not None, all attrs except those listed will be included, with respect to
use_default_excludes.
:use_default_excludes: if True, then the class-level exclude_keys_serialize will be co... |
def compute(cls, observation, prediction, key=None):
"""Compute a ratio from an observation and a prediction."""
assert isinstance(observation, (dict, float, int, pq.Quantity))
assert isinstance(prediction, (dict, float, int, pq.Quantity))
obs, pred = cls.extract_means_or_values(observa... | Compute a ratio from an observation and a prediction. |
def _projection_to_paths(cls, root_key, projection):
"""
Expand a $sub/$sub. projection to a single projection of True (if
inclusive) or a map of full paths (e.g `employee.company.tel`).
"""
# Referenced projections are handled separately so just flag the
# reference fie... | Expand a $sub/$sub. projection to a single projection of True (if
inclusive) or a map of full paths (e.g `employee.company.tel`). |
def _process_priv_part(perms):
'''
Process part
'''
_tmp = {}
previous = None
for perm in perms:
if previous is None:
_tmp[_PRIVILEGES_MAP[perm]] = False
previous = _PRIVILEGES_MAP[perm]
else:
if perm == '*':
_tmp[previous] = Tr... | Process part |
def get_lib_volume_mounts(base_lib_name, assembled_specs):
""" Returns a list of the formatted volume specs for a lib"""
volumes = [_get_lib_repo_volume_mount(assembled_specs['libs'][base_lib_name])]
volumes.append(get_command_files_volume_mount(base_lib_name, test=True))
for lib_name in assembled_specs... | Returns a list of the formatted volume specs for a lib |
def getcomments(object):
"""Get lines of comments immediately preceding an object's source code."""
try: lines, lnum = findsource(object)
except IOError: return None
if ismodule(object):
# Look for a comment block at the top of the file.
start = 0
if lines and lines[0][:2] == '#... | Get lines of comments immediately preceding an object's source code. |
def on_train_begin(self, **kwargs):
"Create the optimizers for the generator and critic if necessary, initialize smootheners."
if not getattr(self,'opt_gen',None):
self.opt_gen = self.opt.new([nn.Sequential(*flatten_model(self.generator))])
else: self.opt_gen.lr,self.opt_gen.wd = sel... | Create the optimizers for the generator and critic if necessary, initialize smootheners. |
def is_downloaded(self, file_path):
    """
    Report whether the data file already exists on disk.
    """
    if not os.path.exists(file_path):
        return False
    self.chatbot.logger.info('File is already downloaded')
    return True
def genlet(generator_function=None, prime=True):
"""
Decorator to convert a generator function to a :py:class:`~chainlink.ChainLink`
:param generator_function: the generator function to convert
:type generator_function: generator
:param prime: advance the generator to the next/first yield
:type... | Decorator to convert a generator function to a :py:class:`~chainlink.ChainLink`
:param generator_function: the generator function to convert
:type generator_function: generator
:param prime: advance the generator to the next/first yield
:type prime: bool
When used as a decorator, this function can... |
def add(self, item, position=5):
"""Add an item to the list unless it is already present.
If the item is an expression, then a semicolon will be appended to it
in the final compiled code.
"""
if item in self.items:
return
self.items[item] = position
... | Add an item to the list unless it is already present.
If the item is an expression, then a semicolon will be appended to it
in the final compiled code. |
def set_jinja2_silent_none(config):  # pragma: no cover
    """Render ``None`` values as '' instead of the string 'None'."""
    config.commit()
    environment = config.get_jinja2_environment()
    environment.finalize = _silent_none
def _parse_ignores(self):
""" Parse the ignores setting from the pylintrc file if available. """
error_message = (
colorama.Fore.RED
+ "{} does not appear to be a valid pylintrc file".format(self.rcfile)
+ colorama.Fore.RESET
)
if not os.path.isfile(... | Parse the ignores setting from the pylintrc file if available. |
# PLY lexer rule: the raw string "docstring" below is the token pattern
# (a single closing brace), not documentation -- it must not be changed.
def t_stringdollar_rbrace(self, t):
    r'\}'
    # Track nesting depth of braces inside the ${...} interpolation state.
    t.lexer.braces -= 1
    if t.lexer.braces == 0:
        # End of the dollar brace, back to the rest of the string
        t.lexer.begin('string')
def perform_update(self, serializer):
"""creates a record in the `bulbs.promotion.PZoneHistory`
:param obj: the instance saved
:param created: boolean expressing if the object was newly created (`False` if updated)
"""
instance = serializer.save()
# create history object... | creates a record in the `bulbs.promotion.PZoneHistory`
:param obj: the instance saved
:param created: boolean expressing if the object was newly created (`False` if updated) |
def _compute_attenuation(self, rup, dists, imt, C):
"""
Compute the second term of the equation described on p. 1866:
" [(c4 + c5 * M) * min{ log10(R), log10(70.) }] +
[(c4 + c5 * M) * max{ min{ log10(R/70.), log10(140./70.) }, 0.}] +
[(c8 + c9 * M) * max{ log10(R/140.), 0}] "
... | Compute the second term of the equation described on p. 1866:
" [(c4 + c5 * M) * min{ log10(R), log10(70.) }] +
[(c4 + c5 * M) * max{ min{ log10(R/70.), log10(140./70.) }, 0.}] +
[(c8 + c9 * M) * max{ log10(R/140.), 0}] " |
def at(self, instant):
    """Yield, in chronological order, every event occurring during `instant`.

    Args:
        instant (Arrow object)
    """
    yield from (ev for ev in self if ev.begin <= instant <= ev.end)
Args:
instant (Arrow object) |
def show_hide(self, *args):
"""Toggles the main window visibility
"""
log.debug("Show_hide called")
if self.forceHide:
self.forceHide = False
return
if not HidePrevention(self.window).may_hide():
return
if not self.win_prepare():
... | Toggles the main window visibility |
def bank_account_number(self):
    """Return the IBAN's Bank Account Number."""
    # The BBAN portion starts after the 4-char prefix (country code +
    # check digits) plus the country-specific split position.
    offset = 4 + get_iban_spec(self.country_code).bban_split_pos
    return self._id[offset:]
def stream_file(self, url, folder=None, filename=None, overwrite=False):
# type: (str, Optional[str], Optional[str], bool) -> str
"""Stream file from url and store in provided folder or temporary folder if no folder supplied.
Must call setup method first.
Args:
url (str): UR... | Stream file from url and store in provided folder or temporary folder if no folder supplied.
Must call setup method first.
Args:
url (str): URL to download
filename (Optional[str]): Filename to use for downloaded file. Defaults to None (derive from the url).
folder (... |
def get_property(obj, name):
"""
Recursively gets value of object or its subobjects property specified by its name.
The object can be a user defined object, map or array.
The property name correspondently must be object property, map key or array index.
:param obj: an object to... | Recursively gets value of object or its subobjects property specified by its name.
The object can be a user defined object, map or array.
The property name correspondently must be object property, map key or array index.
:param obj: an object to read property from.
:param name: a name... |
def finish(
self,
width=1,
color=None,
fill=None,
roundCap=False,
dashes=None,
even_odd=False,
morph=None,
closePath=True
):
"""Finish the current drawing segment.
Notes:
Appl... | Finish the current drawing segment.
Notes:
Apply stroke and fill colors, dashes, line style and width, or
morphing. Also determines whether any open path should be closed
by a connecting line to its start point. |
def write_flows_to_gssha_time_series_xys(self,
path_to_output_file,
series_name,
series_id,
river_index=None,
... | Write out RAPID output to GSSHA WMS time series xys file.
Parameters
----------
path_to_output_file: str
Path to the output xys file.
series_name: str
The name for the series.
series_id: int
The ID to give the series.
river_index: :obj... |
def authorize(self, me, state=None, next_url=None, scope='read'):
"""Authorize a user via Micropub.
Args:
me (string): the authing user's URL. if it does not begin with
https?://, http:// will be prepended.
state (string, optional): passed through the whole auth process,... | Authorize a user via Micropub.
Args:
me (string): the authing user's URL. if it does not begin with
https?://, http:// will be prepended.
state (string, optional): passed through the whole auth process,
useful if you want to maintain some state, e.g. the starting pag... |
def get_notify_observers_kwargs(self):
""" Return the mapping between the metrics call and the iterated
variables.
Return
----------
notify_observers_kwargs: dict,
the mapping between the iterated variables.
"""
return {
'u_new': self._u_ne... | Return the mapping between the metrics call and the iterated
variables.
Return
----------
notify_observers_kwargs: dict,
the mapping between the iterated variables. |
def cmp(self,junc,tolerance=0):
""" output comparison and allow for tolerance if desired
* -1 if junc comes before self
* 1 if junc comes after self
* 0 if overlaps
* 2 if else
:param junc:
:param tolerance: optional search space (default=0, no tolerance)
:type junc: Junction
:type... | output comparison and allow for tolerance if desired
* -1 if junc comes before self
* 1 if junc comes after self
* 0 if overlaps
* 2 if else
:param junc:
:param tolerance: optional search space (default=0, no tolerance)
:type junc: Junction
:type tolerance: int
:return: value of co... |
def fuzzybreaks(scale, breaks=None, boundary=None,
binwidth=None, bins=30, right=True):
"""
Compute fuzzy breaks
For a continuous scale, fuzzybreaks "preserve" the range of
the scale. The fuzzing is close to numerical roundoff and
is visually imperceptible.
Parameters
-----... | Compute fuzzy breaks
For a continuous scale, fuzzybreaks "preserve" the range of
the scale. The fuzzing is close to numerical roundoff and
is visually imperceptible.
Parameters
----------
scale : scale
Scale
breaks : array_like
Sequence of break points. If provided and the ... |
def from_pypirc(pypi_repository):
""" Load configuration from .pypirc file, cached to only run once """
ret = {}
pypirc_locations = PYPIRC_LOCATIONS
for pypirc_path in pypirc_locations:
pypirc_path = os.path.expanduser(pypirc_path)
if os.path.isfile(pypirc_path):
parser = con... | Load configuration from .pypirc file, cached to only run once |
def set_gae_attributes(span):
"""Set the GAE environment common attributes."""
for env_var, attribute_key in GAE_ATTRIBUTES.items():
attribute_value = os.environ.get(env_var)
if attribute_value is not None:
pair = {attribute_key: attribute_value}
pair_attrs = Attributes(... | Set the GAE environment common attributes. |
def get_render(name, data, trans='en'):
"""
Render string based on template
:param name: -- full template name
:type name: str,unicode
:param data: -- dict of rendered vars
:type data: dict
:param trans: -- translation for render. Default 'en'.
:type trans: str,unicode
:return: -- r... | Render string based on template
:param name: -- full template name
:type name: str,unicode
:param data: -- dict of rendered vars
:type data: dict
:param trans: -- translation for render. Default 'en'.
:type trans: str,unicode
:return: -- rendered string
:rtype: str,unicode |
def findall(self, string):
""" Parse string, returning all outputs as parsed by functions
"""
output = []
for match in self.pattern.findall(string):
if hasattr(match, 'strip'):
match = [match]
self._list_add(output, self.run(match))
return ... | Parse string, returning all outputs as parsed by functions |
def _finishSphering(self):
"""
Compute normalization constants for each feature dimension
based on the collected training samples. Then normalize our
training samples using these constants (so that each input
dimension has mean and variance of zero and one, respectively.)
Then feed these "spher... | Compute normalization constants for each feature dimension
based on the collected training samples. Then normalize our
training samples using these constants (so that each input
dimension has mean and variance of zero and one, respectively.)
Then feed these "sphered" training samples into the underlyin... |
def layout(self, slide):
""" Return layout information for slide """
image = Image.new('RGB', (WIDTH, HEIGHT), 'black')
draw = ImageDraw.Draw(image)
draw.font = self.font
self.vertical_layout(draw, slide)
self.horizontal_layout(draw, slide)
ret... | Return layout information for slide |
def main():
"""Main part of the download script."""
# Read config file. This has to get updated via git
project_root = utils.get_project_root()
infofile = os.path.join(project_root, "raw-datasets/info.yml")
logging.info("Read '%s'...", infofile)
with open(infofile, 'r') as ymlfile:
datas... | Main part of the download script. |
def config():
'''
Shows the current configuration.
'''
config = get_config()
print('Client version: {0}'.format(click.style(__version__, bold=True)))
print('API endpoint: {0}'.format(click.style(str(config.endpoint), bold=True)))
print('API version: {0}'.format(click.style(config.version, bo... | Shows the current configuration. |
async def retrieve(self, url, **kwargs):
"""Issue API requests."""
try:
async with self.websession.request('GET', url, **kwargs) as res:
if res.status != 200:
raise Exception("Could not retrieve information from API")
if res.content_type ==... | Issue API requests. |
def parse_variable(self, variable):
"""Method to parse an input or output variable.
**Example Variable**::
#App:1234:output!String
Args:
variable (string): The variable name to parse.
Returns:
(dictionary): Result of parsed string.
"""
... | Method to parse an input or output variable.
**Example Variable**::
#App:1234:output!String
Args:
variable (string): The variable name to parse.
Returns:
(dictionary): Result of parsed string. |
def lowercase_to_camelcase(python_input, camelcase_input=None):
'''
a function to recursively convert data with lowercase key names into camelcase keys
:param camelcase_input: list or dictionary with lowercase keys
:param python_input: [optional] list or dictionary with default camel... | a function to recursively convert data with lowercase key names into camelcase keys
:param camelcase_input: list or dictionary with lowercase keys
:param python_input: [optional] list or dictionary with default camelcase keys in output
:return: dictionary with camelcase key names |
def _is_number_match_OO(numobj1_in, numobj2_in):
"""Takes two phone number objects and compares them for equality."""
# We only care about the fields that uniquely define a number, so we copy these across explicitly.
numobj1 = _copy_core_fields_only(numobj1_in)
numobj2 = _copy_core_fields_only(numobj2_i... | Takes two phone number objects and compares them for equality. |
def _get_temperature(self, data):
'''Return temperature in celsius'''
temp = (data[2] & ~(1 << 7)) + (data[3] / 100)
sign = (data[2] >> 7) & 1
if sign == 0:
return round(temp, 2)
return round(-1 * temp, 2) | Return temperature in celsius |
def _uptime_windows():
"""
Returns uptime in seconds or None, on Windows. Warning: may return
incorrect answers after 49.7 days on versions older than Vista.
"""
if hasattr(ctypes, 'windll') and hasattr(ctypes.windll, 'kernel32'):
lib = ctypes.windll.kernel32
else:
try:
... | Returns uptime in seconds or None, on Windows. Warning: may return
incorrect answers after 49.7 days on versions older than Vista. |
def validate_arguments(self, start_date, end_date, **kwargs):
"""Validate query arguments."""
if set(kwargs) < set(self.required_filters):
raise InvalidRequestInputError(
'Missing one of the required parameters {0} in '
'query {1}'.format(set(self.required_fil... | Validate query arguments. |
def decodeMessage(self, data):
    """Decode a protobuf message into a list of Tensor events"""
    msg = proto_pb2.Msg()
    msg.ParseFromString(data)
    return msg
def zone_schedules_restore(self, filename):
"""Restore all zones on control system from the given file."""
_LOGGER.info("Restoring schedules to ControlSystem %s (%s)...",
self.systemId, self.location)
_LOGGER.info("Reading from backup file: %s...", filename)
with op... | Restore all zones on control system from the given file. |
def signed_session(self, session=None):
"""Create requests session with any required auth headers applied.
If a session object is provided, configure it directly. Otherwise,
create a new session and return it.
:param session: The session to configure for authentication
:type se... | Create requests session with any required auth headers applied.
If a session object is provided, configure it directly. Otherwise,
create a new session and return it.
:param session: The session to configure for authentication
:type session: requests.Session
:rtype: requests.Se... |
def UNTL_to_encodedUNTL(subject):
    """Normalize a UNTL subject heading to be used in SOLR."""
    # Underscore the spaces first, then collapse the hyphen separator.
    normalized = normalize_UNTL(subject)
    return normalized.replace(' ', '_').replace('_-_', '/')
def _gcs_delete(args, _):
""" Delete one or more buckets or objects. """
objects = _expand_list(args['bucket'])
objects.extend(_expand_list(args['object']))
errs = []
for obj in objects:
try:
bucket, key = google.datalab.storage._bucket.parse_name(obj)
if bucket and key:
gcs_object = g... | Delete one or more buckets or objects. |
def serialize_on_parent(
self,
parent, # type: ET.Element
value, # type: Any
state # type: _ProcessorState
):
# type: (...) -> None
"""Serialize the value directory on the parent."""
xml_value = _hooks_apply_before_serialize(self._hooks, sta... | Serialize the value directory on the parent. |
def validate(self, pkt, messages=None):
"""Returns True if the given Packet is valid, False otherwise.
Validation error messages are appended to an optional messages
array.
"""
valid = True
for f in self.fields:
try:
value = getattr(pkt, f.nam... | Returns True if the given Packet is valid, False otherwise.
Validation error messages are appended to an optional messages
array. |
def schaffer(self, x):
    """Schaffer function, domain x_i in [-100..100].

    Sums a term over each pair of consecutive coordinates of ``x``.
    """
    pair_sq = x[:-1] ** 2 + x[1:] ** 2
    terms = pair_sq ** 0.25 * (np.sin(50 * pair_sq ** 0.1) ** 2 + 1)
    return sum(terms)
def _latex_circuit_drawer(circuit,
scale=0.7,
filename=None,
style=None,
plot_barriers=True,
reverse_bits=False,
justify=None):
"""Draw a quantum circuit based ... | Draw a quantum circuit based on latex (Qcircuit package)
Requires version >=2.6.0 of the qcircuit LaTeX package.
Args:
circuit (QuantumCircuit): a quantum circuit
scale (float): scaling factor
filename (str): file path to save image to
style (dict or str): dictionary of style o... |
def getCandScoresMapBruteForce(self, profile):
"""
Returns a dictonary that associates the integer representation of each candidate with the
bayesian losses that we calculate using brute force.
:ivar Profile profile: A Profile object that represents an election profile.
"""
... | Returns a dictonary that associates the integer representation of each candidate with the
bayesian losses that we calculate using brute force.
:ivar Profile profile: A Profile object that represents an election profile. |
def scan_module(self, modpath, node):
"""Scans a module, collecting all used origins, assuming that modules
are obtained only by dotted paths and no other kinds of expressions."""
used_origins = self.map.setdefault(modpath, set())
def get_origins(modpath, name):
"""Returns ... | Scans a module, collecting all used origins, assuming that modules
are obtained only by dotted paths and no other kinds of expressions. |
def Nu_Kitoh(Re, Pr, H=None, G=None, q=None):
r'''Calculates internal convection Nusselt number for turbulent vertical
upward flow in a pipe under supercritical conditions according to [1]_,
also shown in [2]_, [3]_ and [4]_. Depends on fluid enthalpy, mass flux,
and heat flux.
.. math::
... | r'''Calculates internal convection Nusselt number for turbulent vertical
upward flow in a pipe under supercritical conditions according to [1]_,
also shown in [2]_, [3]_ and [4]_. Depends on fluid enthalpy, mass flux,
and heat flux.
.. math::
Nu_b = 0.015Re_b^{0.85} Pr_b^m
... |
def raises(self, expected_exception):
    """Ensure the previously captured call (see :meth:`called_with()`)
    raises *expected_exception* when the subject is invoked.
    """
    return unittest_case.assertRaises(
        expected_exception,
        self._orig_subject,
        *self._args,
        **self._kwargs
    )
def run(cmd, data=None, checks=None, region=None, log_error=True,
log_stdout=False):
"""Run the provided command, logging details and checking for errors.
"""
try:
logger.debug(" ".join(str(x) for x in cmd) if not isinstance(cmd, basestring) else cmd)
_do_run(cmd, checks, log_stdout)... | Run the provided command, logging details and checking for errors. |
def install_cache(expire_after=12 * 3600, cache_post=False):
"""
Patches the requests library with requests_cache.
"""
allowable_methods = ['GET']
if cache_post:
allowable_methods.append('POST')
requests_cache.install_cache(
expire_after=expire_after,
allowable_methods=al... | Patches the requests library with requests_cache. |
def unsubscribe(self, connection, destination):
"""
Unsubscribes a connection from the specified topic destination.
@param connection: The client connection to unsubscribe.
@type connection: L{coilmq.server.StompConnection}
@param destination: The topic destination (e.g. '/top... | Unsubscribes a connection from the specified topic destination.
@param connection: The client connection to unsubscribe.
@type connection: L{coilmq.server.StompConnection}
@param destination: The topic destination (e.g. '/topic/foo')
@type destination: C{str} |
def translate_expression(expression):
"""
Check if the expression is valid, then check turn it into an expression that can be used for filtering.
:return list of lists: One or more matches. Each list has 3 strings.
"""
logger_ts.info("enter translate_expression")
m = re_filter_expr.findall(expre... | Check if the expression is valid, then check turn it into an expression that can be used for filtering.
:return list of lists: One or more matches. Each list has 3 strings. |
def date(self):
    """:return: naive UTC ``datetime`` of ``commit_time``, or the current
    time when ``commit_time`` is falsy.

    NOTE(review): the fallback ``datetime.now()`` is naive *local* time
    while the main path is naive UTC -- confirm callers expect the mix.
    """
    ts = self.commit_time
    if not ts:
        return datetime.now()
    return datetime.utcfromtimestamp(ts)
def And(*predicates, **kwargs):
    """`And` predicate. Returns ``False`` at the first sub-predicate that returns ``False``.
    """
    # Keyword filters are folded into one extra Query predicate.
    extra = (Query(**kwargs),) if kwargs else ()
    return _flatten(_And, *(predicates + extra))
def home_shift_summ(self):
"""
:returns: :py:class:`.ShiftSummary` by player for the home team
:rtype: dict ``{ player_num: shift_summary_obj }``
"""
if not self.__wrapped_home:
self.__wrapped_home = self.__wrap(self._home.by_player)
return self.__wra... | :returns: :py:class:`.ShiftSummary` by player for the home team
:rtype: dict ``{ player_num: shift_summary_obj }`` |
def find_files(self, ID=None, fileGrp=None, pageId=None, mimetype=None, url=None, local_only=False):
"""
Search ``mets:file`` in this METS document.
Args:
ID (string) : ID of the file
fileGrp (string) : USE of the fileGrp to list files of
pageId (string) : ID... | Search ``mets:file`` in this METS document.
Args:
ID (string) : ID of the file
fileGrp (string) : USE of the fileGrp to list files of
pageId (string) : ID of physical page manifested by matching files
url (string) : @xlink:href of mets:Flocat of mets:file
... |
def vstackm(matrices):
    """Vertically stack :class:`Matrix` objects; generalizes `numpy.vstack`."""
    stacked = np_vstack(tuple(m.matrix for m in matrices))
    return Matrix(stacked)
def noinfo(self, msg, oname):
    """Generic message when no information is found."""
    # Python 2 print statement: the trailing comma suppresses the newline
    # so the optional "for <oname>" clause continues on the same line.
    print 'No %s found' % msg,
    if oname:
        print 'for %s' % oname
    else:
        # Bare print emits only the newline that the comma above withheld.
        print
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.