code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def do_quality(self, quality):
"""Apply value of quality parameter.
For PIL docs see
<http://pillow.readthedocs.org/en/latest/reference/Image.html#PIL.Image.Image.convert>
"""
if (quality == 'grey' or quality == 'gray'):
# Checking for 1.1 gray or 20.0 grey elsewhere... | Apply value of quality parameter.
For PIL docs see
<http://pillow.readthedocs.org/en/latest/reference/Image.html#PIL.Image.Image.convert> |
def save_models(self, model_path):
"""
Save machine learning models to pickle files.
"""
for group, condition_model_set in self.condition_models.items():
for model_name, model_obj in condition_model_set.items():
out_filename = model_path + \
... | Save machine learning models to pickle files. |
def find(self, pair, default=None):
"""
Returns the value for the kerning pair.
**pair** is a ``tuple`` of two :ref:`type-string`\s, and the returned
values will either be :ref:`type-int-float` or ``None``
if no pair was found. ::
>>> font.kerning[("A", "V")]
... | Returns the value for the kerning pair.
**pair** is a ``tuple`` of two :ref:`type-string`\s, and the returned
values will either be :ref:`type-int-float` or ``None``
if no pair was found. ::
>>> font.kerning[("A", "V")]
-25 |
def get_bucket(self, hash_name, bucket_key):
"""
Returns bucket content as list of tuples (vector, data).
"""
if hash_name in self.buckets:
if bucket_key in self.buckets[hash_name]:
return self.buckets[hash_name][bucket_key]
return [] | Returns bucket content as list of tuples (vector, data). |
def convert_mapper(self, tomap):
"""
Converts our object from using one coordinate map to another.
NOTE: In some cases this only approximately preserves the
equivalent point values when transforming between coordinate
spaces.
"""
frommap = self.crdmap
if ... | Converts our object from using one coordinate map to another.
NOTE: In some cases this only approximately preserves the
equivalent point values when transforming between coordinate
spaces. |
def minimize_t0s(means, weights, combs):
"""Varies t0s to minimize the deviation of the gaussian means from zero.
Parameters
----------
means: numpy array of means of all PMT combinations
weights: numpy array of weights for the squared sum
combs: pmt combinations to use for minimization
Re... | Varies t0s to minimize the deviation of the gaussian means from zero.
Parameters
----------
means: numpy array of means of all PMT combinations
weights: numpy array of weights for the squared sum
combs: pmt combinations to use for minimization
Returns
-------
opt_t0s: optimal t0 values... |
def reduce_loss_dict(loss_dict):
"""
Reduce the loss dictionary from all processes so that process with rank
0 has the averaged results. Returns a dict with the same fields as
loss_dict, after reduction.
"""
world_size = get_world_size()
if world_size < 2:
return loss_dict
with t... | Reduce the loss dictionary from all processes so that process with rank
0 has the averaged results. Returns a dict with the same fields as
loss_dict, after reduction. |
def dispatch_command(function, *args, **kwargs):
"""
A wrapper for :func:`dispatch` that creates a one-command parser.
Uses :attr:`PARSER_FORMATTER`.
This::
dispatch_command(foo)
...is a shortcut for::
parser = ArgumentParser()
set_default_command(parser, foo)
dis... | A wrapper for :func:`dispatch` that creates a one-command parser.
Uses :attr:`PARSER_FORMATTER`.
This::
dispatch_command(foo)
...is a shortcut for::
parser = ArgumentParser()
set_default_command(parser, foo)
dispatch(parser)
This function can be also used as a decora... |
def updateHeader(self, wcsname=None, reusename=False):
""" Update header of image with shifts computed by *perform_fit()*.
"""
# Insure filehandle is open and available...
self.openFile()
verbose_level = 1
if not self.perform_update:
verbose_level = 0
... | Update header of image with shifts computed by *perform_fit()*. |
def rootChild_resetPassword(self, req, webViewer):
"""
Redirect authenticated users to their settings page (hopefully they
have one) when they try to reset their password.
This is the wrong way for this functionality to be implemented. See
#2524.
"""
from xmanti... | Redirect authenticated users to their settings page (hopefully they
have one) when they try to reset their password.
This is the wrong way for this functionality to be implemented. See
#2524. |
def _get_table_info(self):
"""Database-specific method to get field names"""
self.rowid = None
self.fields = []
self.field_info = {}
self.cursor.execute('DESCRIBE %s' %self.name)
for row in self.cursor.fetchall():
field,typ,null,key,default,extra = row
... | Database-specific method to get field names |
def get_cameras_schedule(self):
"""Return the schedule set for cameras."""
resource = "schedule"
schedule_event = self.publish_and_get_event(resource)
if schedule_event:
return schedule_event.get('properties')
return None | Return the schedule set for cameras. |
def add(self, *nodes):
""" Adds nodes as siblings
:param nodes: GraphNode(s)
"""
for node in nodes:
node.set_parent(self)
self.add_sibling(node) | Adds nodes as siblings
:param nodes: GraphNode(s) |
def check_webhook_secret(app_configs=None, **kwargs):
"""
Check that DJSTRIPE_WEBHOOK_SECRET looks correct
"""
from . import settings as djstripe_settings
messages = []
secret = djstripe_settings.WEBHOOK_SECRET
if secret and not secret.startswith("whsec_"):
messages.append(
checks.Warning(
"DJSTRIPE_W... | Check that DJSTRIPE_WEBHOOK_SECRET looks correct |
def iter_actions(self):
"""Yield the service's actions with their arguments.
Yields:
`Action`: the next action.
Each action is an Action namedtuple, consisting of action_name
(a string), in_args (a list of Argument namedtuples consisting of name
and argtype), and ou... | Yield the service's actions with their arguments.
Yields:
`Action`: the next action.
Each action is an Action namedtuple, consisting of action_name
(a string), in_args (a list of Argument namedtuples consisting of name
and argtype), and out_args (ditto), eg::
A... |
def login(self, access_token=""):
"""
Configure and save {prog} authentication credentials.
This command may open a browser window to ask for your
consent to use web service authentication credentials.
"""
if access_token:
credentials = argparse.Namespace(tok... | Configure and save {prog} authentication credentials.
This command may open a browser window to ask for your
consent to use web service authentication credentials. |
def find_children(self, pattern=r".*", flags=0, candidates=None):
"""
Finds the children matching the given patten.
Usage::
>>> node_a = AbstractCompositeNode("MyNodeA")
>>> node_b = AbstractCompositeNode("MyNodeB", node_a)
>>> node_c = AbstractCompositeNode... | Finds the children matching the given patten.
Usage::
>>> node_a = AbstractCompositeNode("MyNodeA")
>>> node_b = AbstractCompositeNode("MyNodeB", node_a)
>>> node_c = AbstractCompositeNode("MyNodeC", node_a)
>>> node_a.find_children("c", re.IGNORECASE)
... |
def coverage(fn):
"""Mark `fn` for line coverage analysis.
Results will be printed to sys.stdout on program termination.
Usage::
def fn(...):
...
fn = coverage(fn)
If you are using Python 2.4, you should be able to use the decorator
syntax::
@coverage
... | Mark `fn` for line coverage analysis.
Results will be printed to sys.stdout on program termination.
Usage::
def fn(...):
...
fn = coverage(fn)
If you are using Python 2.4, you should be able to use the decorator
syntax::
@coverage
def fn(...):
... |
def _split_audio_by_duration(self, audio_abs_path,
results_abs_path, duration_seconds):
"""
Calculates the length of each segment and passes it to
self._audio_segment_extractor
Parameters
----------
audio_abs_path : str
results_ab... | Calculates the length of each segment and passes it to
self._audio_segment_extractor
Parameters
----------
audio_abs_path : str
results_abs_path : str
A place for adding digits needs to be added prior the the format
decleration i.e. name%03.wav. Here, we'... |
def broken_faces(mesh, color=None):
"""
Return the index of faces in the mesh which break the
watertight status of the mesh.
Parameters
--------------
mesh: Trimesh object
color: (4,) uint8, will set broken faces to this color
None, will not alter mesh colors
Returns
... | Return the index of faces in the mesh which break the
watertight status of the mesh.
Parameters
--------------
mesh: Trimesh object
color: (4,) uint8, will set broken faces to this color
None, will not alter mesh colors
Returns
---------------
broken: (n, ) int, indexe... |
def write_long_at(self, n, pos, pack_into=Struct('>I').pack_into):
'''
Write an unsigned 32bit value at a specific position in the buffer.
Used for writing tables and frames.
'''
if 0 <= n <= 0xFFFFFFFF:
pack_into(self._output_buffer, pos, n)
else:
... | Write an unsigned 32bit value at a specific position in the buffer.
Used for writing tables and frames. |
def _multicall_callback(self, values, calls):
"""
Fires when we get information back from the XML-RPC server.
This is processes the raw results of system.multicall into a usable
iterator of values (and/or Faults).
:param values: list of data txkoji.Connection.call()
:pa... | Fires when we get information back from the XML-RPC server.
This is processes the raw results of system.multicall into a usable
iterator of values (and/or Faults).
:param values: list of data txkoji.Connection.call()
:param calls: list of calls we sent in this multicall RPC
:re... |
def get_social_login(self, *args, **kwargs):
"""
Set the social login process state to connect rather than login
Refer to the implementation of get_social_login in base class and to the
allauth.socialaccount.helpers module complete_social_login function.
"""
social_login ... | Set the social login process state to connect rather than login
Refer to the implementation of get_social_login in base class and to the
allauth.socialaccount.helpers module complete_social_login function. |
def auth_required(realm, auth_func):
'''Decorator that protect methods with HTTP authentication.'''
def auth_decorator(func):
def inner(self, *args, **kw):
if self.get_authenticated_user(auth_func, realm):
return func(self, *args, **kw)
return inner
return auth_de... | Decorator that protect methods with HTTP authentication. |
def as_new_format(self, format="ATR"):
""" Create a new disk image in the specified format
"""
first_data = len(self.header)
raw = self.rawdata[first_data:]
data = add_atr_header(raw)
newraw = SegmentData(data)
image = self.__class__(newraw)
return image | Create a new disk image in the specified format |
def isInRoom(self, _id):
""" Check a given user is in given room """
if SockJSRoomHandler._room.has_key(self._gcls() + _id):
if self in SockJSRoomHandler._room[self._gcls() + _id]:
return True
return False | Check a given user is in given room |
def create_snapshot(self, systemId, snapshotSpecificationObject):
"""
Create snapshot for list of volumes
:param systemID: Cluster ID
:param snapshotSpecificationObject: Of class SnapshotSpecification
:rtype: SnapshotGroupId
"""
self.conn.connection._check_login(... | Create snapshot for list of volumes
:param systemID: Cluster ID
:param snapshotSpecificationObject: Of class SnapshotSpecification
:rtype: SnapshotGroupId |
def update(self, read, write, manage):
"""
Update the SyncMapPermissionInstance
:param bool read: Read access.
:param bool write: Write access.
:param bool manage: Manage access.
:returns: Updated SyncMapPermissionInstance
:rtype: twilio.rest.sync.v1.service.syn... | Update the SyncMapPermissionInstance
:param bool read: Read access.
:param bool write: Write access.
:param bool manage: Manage access.
:returns: Updated SyncMapPermissionInstance
:rtype: twilio.rest.sync.v1.service.sync_map.sync_map_permission.SyncMapPermissionInstance |
def listTheExtras(self, deleteAlso):
""" Use ConfigObj's get_extra_values() call to find any extra/unknown
parameters we may have loaded. Return a string similar to findTheLost.
If deleteAlso is True, this will also delete any extra/unknown items.
"""
# get list of extras
... | Use ConfigObj's get_extra_values() call to find any extra/unknown
parameters we may have loaded. Return a string similar to findTheLost.
If deleteAlso is True, this will also delete any extra/unknown items. |
def get_is_group_member(self, grp_name, user):
"""
Check if the given user is a member of the named group.
Note that a group maintainer is not considered a member unless the
user is also explicitly added as a member.
Args:
name (string): Name of group.
u... | Check if the given user is a member of the named group.
Note that a group maintainer is not considered a member unless the
user is also explicitly added as a member.
Args:
name (string): Name of group.
user_name (string): User of interest.
Returns:
... |
def RemoveClass(self, class_name):
"""Removes an entry from the list of known classes.
Args:
class_name: A string with the class name that is to be removed.
Raises:
NonexistentMapping if there is no class with the specified class_name.
"""
if class_name not in self._class_mapping:
... | Removes an entry from the list of known classes.
Args:
class_name: A string with the class name that is to be removed.
Raises:
NonexistentMapping if there is no class with the specified class_name. |
def MeetsConditions(knowledge_base, source):
"""Check conditions on the source."""
source_conditions_met = True
os_conditions = ConvertSupportedOSToConditions(source)
if os_conditions:
source.conditions.append(os_conditions)
for condition in source.conditions:
source_conditions_met &= artifact_utils.C... | Check conditions on the source. |
def safe_listget(list_, index, default='?'):
""" depricate """
if index >= len(list_):
return default
ret = list_[index]
if ret is None:
return default
return ret | depricate |
def multi_to_dict(multi):
'''Transform a Werkzeug multidictionnary into a flat dictionnary'''
return dict(
(key, value[0] if len(value) == 1 else value)
for key, value in multi.to_dict(False).items()
) | Transform a Werkzeug multidictionnary into a flat dictionnary |
def make_pipeline(context):
"""
Create our pipeline.
"""
# Filter for primary share equities. IsPrimaryShare is a built-in filter.
primary_share = IsPrimaryShare()
# Not when-issued equities.
not_wi = ~IEXCompany.symbol.latest.endswith('.WI')
# Equities without LP in their name, .matc... | Create our pipeline. |
def send_request(user_session, method, request):
"""
Send request to SMC
:param Session user_session: session object
:param str method: method for request
:param SMCRequest request: request object
:raises SMCOperationFailure: failure with reason
:rtype: SMCResult
"""
if user_ses... | Send request to SMC
:param Session user_session: session object
:param str method: method for request
:param SMCRequest request: request object
:raises SMCOperationFailure: failure with reason
:rtype: SMCResult |
def jensen_shannon(logu, self_normalized=False, name=None):
"""The Jensen-Shannon Csiszar-function in log-space.
A Csiszar-function is a member of,
```none
F = { f:R_+ to R : f convex }.
```
When `self_normalized = True`, the Jensen-Shannon Csiszar-function is:
```none
f(u) = u log(u) - (1 + u) log(... | The Jensen-Shannon Csiszar-function in log-space.
A Csiszar-function is a member of,
```none
F = { f:R_+ to R : f convex }.
```
When `self_normalized = True`, the Jensen-Shannon Csiszar-function is:
```none
f(u) = u log(u) - (1 + u) log(1 + u) + (u + 1) log(2)
```
When `self_normalized = False` t... |
def _prefix_from_uri(self, uriname):
"""Given a fully qualified XML name, find a prefix
e.g. {http://ns.adobe.com/pdf/1.3/}Producer -> pdf:Producer
"""
uripart, tag = uriname.split('}', maxsplit=1)
uri = uripart.replace('{', '')
return self.REVERSE_NS[uri] + ':' + tag | Given a fully qualified XML name, find a prefix
e.g. {http://ns.adobe.com/pdf/1.3/}Producer -> pdf:Producer |
def status(
message: str = None,
progress: float = None,
section_message: str = None,
section_progress: float = None,
):
"""
Updates the status display, which is only visible while a step is running.
This is useful for providing feedback and information during long-running
... | Updates the status display, which is only visible while a step is running.
This is useful for providing feedback and information during long-running
steps.
A section progress is also available for cases where long running tasks
consist of multiple tasks and you want to display sub-progress messages
... |
def replace_find_selection(self, focus_replace_text=False):
"""Replace and find in the current selection"""
if self.editor is not None:
replace_text = to_text_string(self.replace_text.currentText())
search_text = to_text_string(self.search_text.currentText())
cas... | Replace and find in the current selection |
def get_permissions(self, token, resource_scopes_tuples=None,
submit_request=False, ticket=None):
"""
Request permissions for user from keycloak server.
https://www.keycloak.org/docs/latest/authorization_services/index
.html#_service_protection_permission_api_pap... | Request permissions for user from keycloak server.
https://www.keycloak.org/docs/latest/authorization_services/index
.html#_service_protection_permission_api_papi
:param str token: client access token
:param Iterable[Tuple[str, str]] resource_scopes_tuples:
list of tuples (... |
def add_child(self, child):
"""Add a child object to the current one.
Checks the contained_children list to make sure that the object
is allowable, and throws an exception if not.
"""
# Make sure the child exists before adding it.
if child:
# Add the child if... | Add a child object to the current one.
Checks the contained_children list to make sure that the object
is allowable, and throws an exception if not. |
def select_previous(self):
"""Move to the previous status in the timeline."""
self.footer.clear_message()
if self.selected == 0:
self.footer.draw_message("Cannot move beyond first toot.", Color.GREEN)
return
old_index = self.selected
new_index = self.sel... | Move to the previous status in the timeline. |
def get_orgas(self):
"""Return the list of pk for all orgas"""
r = self._request('orgas/')
if not r:
return None
retour = []
for data in r.json()['data']:
o = Orga()
o.__dict__.update(data)
o.pk = o.id
retour.append(... | Return the list of pk for all orgas |
def enable(self):
"""
Enables all settings
"""
nwin = self.nwin.value()
for label, xs, ys, nx, ny in \
zip(self.label[:nwin], self.xs[:nwin], self.ys[:nwin],
self.nx[:nwin], self.ny[:nwin]):
label.config(state='normal')
... | Enables all settings |
def _create_session(self):
"""
Creates a fresh session with the default header (random UA)
"""
self.driver = requests.Session(**self.driver_args)
# Set default headers
self.update_headers(self.current_headers)
self.update_cookies(self.current_cookies)
self... | Creates a fresh session with the default header (random UA) |
def yiq_to_rgb(y, i=None, q=None):
"""Convert the color from YIQ coordinates to RGB.
Parameters:
:y:
Tte Y component value [0...1]
:i:
The I component value [0...1]
:q:
The Q component value [0...1]
Returns:
The color as an (r, g, b) tuple in the range:
r[0...1],
g[0...... | Convert the color from YIQ coordinates to RGB.
Parameters:
:y:
Tte Y component value [0...1]
:i:
The I component value [0...1]
:q:
The Q component value [0...1]
Returns:
The color as an (r, g, b) tuple in the range:
r[0...1],
g[0...1],
b[0...1]
>>> '({}, {}, {})'.f... |
def write_and_return(
command, ack, serial_connection, timeout=DEFAULT_WRITE_TIMEOUT):
'''Write a command and return the response'''
clear_buffer(serial_connection)
with serial_with_temp_timeout(
serial_connection, timeout) as device_connection:
response = _write_to_device_and_re... | Write a command and return the response |
def _parse_args(self, args):
"""self.parser->self.parsed_data"""
# decode sys.argv to support unicode command-line options
enc = DEFAULT_ENCODING
uargs = [py3compat.cast_unicode(a, enc) for a in args]
self.parsed_data, self.extra_args = self.parser.parse_known_args(uargs) | self.parser->self.parsed_data |
def process_rule(edges: Edges, ast: Function, rule: Mapping[str, Any], spec: BELSpec):
"""Process computed edge rule
Recursively processes BELAst versus a single computed edge rule
Args:
edges (List[Tuple[Union[Function, str], str, Function]]): BEL Edge ASTs
ast (Function): BEL Function AS... | Process computed edge rule
Recursively processes BELAst versus a single computed edge rule
Args:
edges (List[Tuple[Union[Function, str], str, Function]]): BEL Edge ASTs
ast (Function): BEL Function AST
rule (Mapping[str, Any]: computed edge rule |
def get_polygon_pattern_rules(declarations, dirs):
""" Given a list of declarations, return a list of output.Rule objects.
Optionally provide an output directory for local copies of image files.
"""
property_map = {'polygon-pattern-file': 'file', 'polygon-pattern-width': 'width',
... | Given a list of declarations, return a list of output.Rule objects.
Optionally provide an output directory for local copies of image files. |
def _lines_only(shape):
"""
Extract the lines (LineString, MultiLineString) from any geometry. We
expect the input to be mostly lines, such as the result of an intersection
between a line and a polygon. The main idea is to remove points, and any
other geometry which might throw a wrench in the works... | Extract the lines (LineString, MultiLineString) from any geometry. We
expect the input to be mostly lines, such as the result of an intersection
between a line and a polygon. The main idea is to remove points, and any
other geometry which might throw a wrench in the works. |
def activate(self):
"""
Activates the logical volume.
*Raises:*
* HandleError
"""
self.open()
a = lvm_lv_activate(self.handle)
self.close()
if a != 0:
raise CommitError("Failed to activate LV.") | Activates the logical volume.
*Raises:*
* HandleError |
def get_emerg():
"""Get the cached FCPS emergency page, or check it again.
Timeout defined in settings.CACHE_AGE["emerg"]
"""
key = "emerg:{}".format(datetime.datetime.now().date())
cached = cache.get(key)
cached = None # Remove this for production
if cached:
logger.debug("Returni... | Get the cached FCPS emergency page, or check it again.
Timeout defined in settings.CACHE_AGE["emerg"] |
def delete(self, membershipId):
"""Delete a team membership, by ID.
Args:
membershipId(basestring): The team membership ID.
Raises:
TypeError: If the parameter types are incorrect.
ApiError: If the Webex Teams cloud returns an error.
"""
che... | Delete a team membership, by ID.
Args:
membershipId(basestring): The team membership ID.
Raises:
TypeError: If the parameter types are incorrect.
ApiError: If the Webex Teams cloud returns an error. |
def fetch(self):
"""Unfortunately, IEX's API can only retrieve data one day or one month
at a time. Rather than specifying a date range, we will have to run
the read function for each date provided.
:return: DataFrame
"""
tlen = self.end - self.start
dfs = []
... | Unfortunately, IEX's API can only retrieve data one day or one month
at a time. Rather than specifying a date range, we will have to run
the read function for each date provided.
:return: DataFrame |
def set_physical_page_for_file(self, pageId, ocrd_file, order=None, orderlabel=None):
"""
Create a new physical page
"""
# print(pageId, ocrd_file)
# delete any page mapping for this file.ID
for el_fptr in self._tree.getroot().findall(
'mets:structMap[@TY... | Create a new physical page |
def play(self):
"""Starts an animation playing."""
if self.state == PygAnimation.PLAYING:
pass # nothing to do
elif self.state == PygAnimation.STOPPED: # restart from beginning of animation
self.index = 0 # first image in list
self.elapsed = 0
... | Starts an animation playing. |
def get_dataset(self, dsid, info):
"""Load a dataset."""
dsid_name = dsid.name
if dsid_name in self.cache:
logger.debug('Get the data set from cache: %s.', dsid_name)
return self.cache[dsid_name]
if dsid_name in ['lon', 'lat'] and dsid_name not in self.nc:
... | Load a dataset. |
def _get_indices(num_results, sequence_indices, dtype, name=None):
"""Generates starting points for the Halton sequence procedure.
The k'th element of the sequence is generated starting from a positive integer
which must be distinct for each `k`. It is conventional to choose the starting
point as `k` itself (o... | Generates starting points for the Halton sequence procedure.
The k'th element of the sequence is generated starting from a positive integer
which must be distinct for each `k`. It is conventional to choose the starting
point as `k` itself (or `k+1` if k is zero based). This function generates
the starting inte... |
def add_variable(self, variable, card=0):
"""
Add a variable to the model.
Parameters:
-----------
variable: any hashable python object
card: int
Representing the cardinality of the variable to be added.
Examples:
---------
>>> from ... | Add a variable to the model.
Parameters:
-----------
variable: any hashable python object
card: int
Representing the cardinality of the variable to be added.
Examples:
---------
>>> from pgmpy.models import MarkovChain as MC
>>> model = MC()... |
def isdir(self):
"""Returns True if entry is a directory.
"""
if self.type == RAR_BLOCK_FILE:
return (self.flags & RAR_FILE_DIRECTORY) == RAR_FILE_DIRECTORY
return False | Returns True if entry is a directory. |
def _split_classes_by_kind(self, class_name_to_definition):
"""Assign each class to the vertex, edge or non-graph type sets based on its kind."""
for class_name in class_name_to_definition:
inheritance_set = self._inheritance_sets[class_name]
is_vertex = ORIENTDB_BASE_VERTEX_CLA... | Assign each class to the vertex, edge or non-graph type sets based on its kind. |
def rebuildDay( self ):
"""
Rebuilds the current item in day mode.
"""
scene = self.scene()
if ( not scene ):
return
# calculate the base information
start_date = self.dateStart()
end_date = self.dateEnd()
min_date... | Rebuilds the current item in day mode. |
def get_connection(self, command_name, *keys, **options):
"""Get a connection from the pool"""
self._checkpid()
try:
connection = self._available_connections[self._pattern_idx].pop()
except IndexError:
connection = self.make_connection()
self._in_use_conne... | Get a connection from the pool |
def enrich_backend(url, clean, backend_name, backend_params, cfg_section_name,
ocean_index=None,
ocean_index_enrich=None,
db_projects_map=None, json_projects_map=None,
db_sortinghat=None,
no_incremental=False, only_identities... | Enrich Ocean index |
def get_sources(self, skydir=None, distance=None, cuts=None,
minmax_ts=None, minmax_npred=None,
exclude=None, square=False, coordsys='CEL',
names=None):
"""Retrieve list of source objects satisfying the following
selections:
* Angular ... | Retrieve list of source objects satisfying the following
selections:
* Angular separation from ``skydir`` or ROI center (if
``skydir`` is None) less than ``distance``.
* Cuts on source properties defined in ``cuts`` list.
* TS and Npred in range specified by ``... |
def initialize(self, params, qubits):
"""Apply initialize to circuit."""
if isinstance(qubits, QuantumRegister):
qubits = qubits[:]
else:
qubits = _convert_to_bits([qubits], [qbit for qreg in self.qregs for qbit in qreg])[0]
return self.append(Initialize(params), qubits) | Apply initialize to circuit. |
def remove_root_bin(self, bin_id):
"""Removes a root bin.
arg: bin_id (osid.id.Id): the ``Id`` of a bin
raise: NotFound - ``bin_id`` not a root
raise: NullArgument - ``bin_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied... | Removes a root bin.
arg: bin_id (osid.id.Id): the ``Id`` of a bin
raise: NotFound - ``bin_id`` not a root
raise: NullArgument - ``bin_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: ... |
def _get_overlaps_tensor(self, L):
"""Transforms the input label matrix to a three-way overlaps tensor.
Args:
L: (np.array) An n x m array of LF output labels, in {0,...,k} if
self.abstains, else in {1,...,k}, generated by m conditionally
independent LFs on n... | Transforms the input label matrix to a three-way overlaps tensor.
Args:
L: (np.array) An n x m array of LF output labels, in {0,...,k} if
self.abstains, else in {1,...,k}, generated by m conditionally
independent LFs on n data points
Outputs:
O: ... |
def _members(self):
"""
Return a dict of non-private members.
"""
return {
key: value
for key, value in self.__dict__.items()
# NB: ignore internal SQLAlchemy state and nested relationships
if not key.startswith("_") and not isinstance(val... | Return a dict of non-private members. |
def execute_command(self, command):
"""Execute a command on the node
Args:
command (str)
"""
self.info_log("executing command: %s" % command)
try:
ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
... | Execute a command on the node
Args:
command (str) |
def spliced_offset(self, position):
"""
Convert from an absolute chromosomal position to the offset into
this transcript"s spliced mRNA.
Position must be inside some exon (otherwise raise exception).
"""
# this code is performance sensitive, so switching from
# t... | Convert from an absolute chromosomal position to the offset into
this transcript"s spliced mRNA.
Position must be inside some exon (otherwise raise exception). |
def _parse_ical_string(ical_string):
"""
SU,MO,TU,WE,TH,FR,SA
DTSTART;TZID=America/New_York:20180804T233251\nRRULE:FREQ=WEEKLY;BYDAY=SA
DTSTART;TZID=America/New_York:20180804T233251\nRRULE:FREQ=DAILY
DTSTART;TZID=America/New_York:20180804T233251\nRRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA
D... | SU,MO,TU,WE,TH,FR,SA
DTSTART;TZID=America/New_York:20180804T233251\nRRULE:FREQ=WEEKLY;BYDAY=SA
DTSTART;TZID=America/New_York:20180804T233251\nRRULE:FREQ=DAILY
DTSTART;TZID=America/New_York:20180804T233251\nRRULE:FREQ=WEEKLY;BYDAY=MO,TU,WE,TH,FR,SA
DTSTART;TZID=America/New_York:20180718T174500 |
def admm_linearized(x, f, g, L, tau, sigma, niter, **kwargs):
r"""Generic linearized ADMM method for convex problems.
ADMM stands for "Alternating Direction Method of Multipliers" and
is a popular convex optimization method. This variant solves problems
of the form ::
min_x [ f(x) + g(Lx) ]
... | r"""Generic linearized ADMM method for convex problems.
ADMM stands for "Alternating Direction Method of Multipliers" and
is a popular convex optimization method. This variant solves problems
of the form ::
min_x [ f(x) + g(Lx) ]
with convex ``f`` and ``g``, and a linear operator ``L``. See S... |
def com_google_fonts_check_production_glyphs_similarity(ttFont, api_gfonts_ttFont):
"""Glyphs are similiar to Google Fonts version?"""
def glyphs_surface_area(ttFont):
"""Calculate the surface area of a glyph's ink"""
from fontTools.pens.areaPen import AreaPen
glyphs = {}
glyph_set = ttFont.getGlyp... | Glyphs are similiar to Google Fonts version? |
def backpropagate_3d_tilted(uSin, angles, res, nm, lD=0,
tilted_axis=[0, 1, 0],
coords=None, weight_angles=True, onlyreal=False,
padding=(True, True), padfac=1.75, padval=None,
intp_order=2, dtype=None,
... | r"""3D backpropagation with a tilted axis of rotation
Three-dimensional diffraction tomography reconstruction
algorithm for scattering of a plane wave
:math:`u_0(\mathbf{r}) = u_0(x,y,z)`
by a dielectric object with refractive index
:math:`n(x,y,z)`.
This method implements the 3D backpropagati... |
def damping_kraus_map(p=0.10):
"""
Generate the Kraus operators corresponding to an amplitude damping
noise channel.
:param float p: The one-step damping probability.
:return: A list [k1, k2] of the Kraus operators that parametrize the map.
:rtype: list
"""
damping_op = np.sqrt(p) * np.... | Generate the Kraus operators corresponding to an amplitude damping
noise channel.
:param float p: The one-step damping probability.
:return: A list [k1, k2] of the Kraus operators that parametrize the map.
:rtype: list |
def trimSegments(self, minPermanence=None, minNumSyns=None):
"""
This method deletes all synapses whose permanence is less than
minPermanence and deletes any segments that have less than
minNumSyns synapses remaining.
:param minPermanence: (float) Any syn whose permanence is 0 or <
``mi... | This method deletes all synapses whose permanence is less than
minPermanence and deletes any segments that have less than
minNumSyns synapses remaining.
:param minPermanence: (float) Any syn whose permanence is 0 or <
``minPermanence`` will be deleted. If None is passed in, then
``... |
def to_holvi_dict(self):
"""Convert our Python object to JSON acceptable to Holvi API"""
self._jsondata["items"] = []
for item in self.items:
self._jsondata["items"].append(item.to_holvi_dict())
self._jsondata["issue_date"] = self.issue_date.isoformat()
self._jsondata... | Convert our Python object to JSON acceptable to Holvi API |
def build_ast(expression, debug = False):
"""build an AST from an Excel formula expression in reverse polish notation"""
#use a directed graph to store the tree
G = DiGraph()
stack = []
for n in expression:
# Since the graph does not maintain the order of adding nodes/edges
# add a... | build an AST from an Excel formula expression in reverse polish notation |
def fromhdf5sorted(source, where=None, name=None, sortby=None, checkCSI=False,
start=None, stop=None, step=None):
"""
Provides access to an HDF5 table, sorted by an indexed column, e.g.::
>>> import petl as etl
>>> import tables
>>> # set up a new hdf5 table to demons... | Provides access to an HDF5 table, sorted by an indexed column, e.g.::
>>> import petl as etl
>>> import tables
>>> # set up a new hdf5 table to demonstrate with
... h5file = tables.open_file('example.h5', mode='w', title='Test file')
>>> h5file.create_group('/', 'testgroup', 'Te... |
def argparser(self):
    """Extend the core argument parser with range-search options.

    Registers the ``-r``/``--range`` string option on ``self.core_parser``
    and returns that same parser instance.
    """
    parser = self.core_parser
    parser.add_argument('-r', '--range', type=str, help="The range to search for use")
    return parser
def update_service(name, service_map):
"""Get an update from the specified service.
Arguments:
name (:py:class:`str`): The name of the service.
service_map (:py:class:`dict`): A mapping of service names to
:py:class:`flash.service.core.Service` instances.
Returns:
:py:class:`dict... | Get an update from the specified service.
Arguments:
name (:py:class:`str`): The name of the service.
service_map (:py:class:`dict`): A mapping of service names to
:py:class:`flash.service.core.Service` instances.
Returns:
:py:class:`dict`: The updated data. |
def _set_general_compilers(self):
"""Adds compiler channels to the :attr:`processes` attribute.
This method will iterate over the pipeline's processes and check
if any process is feeding channels to a compiler process. If so, that
compiler process is added to the pipeline and those chan... | Adds compiler channels to the :attr:`processes` attribute.
This method will iterate over the pipeline's processes and check
if any process is feeding channels to a compiler process. If so, that
compiler process is added to the pipeline and those channels are
linked to the compiler via s... |
def github_repos(organization, github_url, github_token):
"""Return all github repositories in an organization."""
# Get github repos
headers = {"Authorization": "token {}".format(github_token)}
next_cursor = None
while next_cursor is not False:
params = {'query': query, 'variables': {
... | Return all github repositories in an organization. |
def cublasZgbmv(handle, trans, m, n, kl, ku, alpha, A, lda,
x, incx, beta, y, incy):
"""
Matrix-vector product for complex general banded matrix.
"""
status = _libcublas.cublasZgbmv_v2(handle,
trans, m, n, kl, ku,
... | Matrix-vector product for complex general banded matrix. |
def caesar(shift, data, shift_ranges=('az', 'AZ')):
"""
Apply a caesar cipher to a string.
The caesar cipher is a substition cipher where each letter in the given
alphabet is replaced by a letter some fixed number down the alphabet.
If ``shift`` is ``1``, *A* will become *B*, *B* will become *C*, ... | Apply a caesar cipher to a string.
The caesar cipher is a substition cipher where each letter in the given
alphabet is replaced by a letter some fixed number down the alphabet.
If ``shift`` is ``1``, *A* will become *B*, *B* will become *C*, etc...
You can define the alphabets that will be shift by s... |
def check_file_list_cache(opts, form, list_cache, w_lock):
'''
Checks the cache file to see if there is a new enough file list cache, and
returns the match (if found, along with booleans used by the fileserver
backend to determine if the cache needs to be refreshed/written).
'''
refresh_cache = ... | Checks the cache file to see if there is a new enough file list cache, and
returns the match (if found, along with booleans used by the fileserver
backend to determine if the cache needs to be refreshed/written). |
def get_random(self, size=10):
"""Returns (size, n_dim) array of random variates from the histogram.
Inside the bins, a uniform distribution is assumed
Note this assumes the histogram is an 'events per bin', not a pdf.
TODO: test more.
"""
# Sample random bin centers
... | Returns (size, n_dim) array of random variates from the histogram.
Inside the bins, a uniform distribution is assumed
Note this assumes the histogram is an 'events per bin', not a pdf.
TODO: test more. |
def put_attribute(self, id, key, value, **kwargs):
"""
Add attribute to the BuildRecord.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
... | Add attribute to the BuildRecord.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
... |
def UNIFAC(T, xs, chemgroups, cached=None, subgroup_data=None,
interaction_data=None, modified=False):
r'''Calculates activity coefficients using the UNIFAC model (optionally
modified), given a mixture's temperature, liquid mole fractions,
and optionally the subgroup data and interaction param... | r'''Calculates activity coefficients using the UNIFAC model (optionally
modified), given a mixture's temperature, liquid mole fractions,
and optionally the subgroup data and interaction parameter data of your
choice. The default is to use the original UNIFAC model, with the latest
parameters publish... |
def idmap_get_new(connection, old, tbl):
"""
From the old ID string, obtain a replacement ID string by either
grabbing it from the _idmap_ table if one has already been assigned
to the old ID, or by using the current value of the Table
instance's next_id class attribute. In the latter case, the new ID
is recorde... | From the old ID string, obtain a replacement ID string by either
grabbing it from the _idmap_ table if one has already been assigned
to the old ID, or by using the current value of the Table
instance's next_id class attribute. In the latter case, the new ID
is recorded in the _idmap_ table, and the class attribute... |
def _wait_for_ip(name, session):
'''
Wait for IP to be available during create()
'''
start_time = datetime.now()
status = None
while status is None:
status = get_vm_ip(name, session)
if status is not None:
# ignore APIPA address
if status.startswith('169'... | Wait for IP to be available during create() |
def get_value(self, key, default={}, nested=True, decrypt=True):
"""
Retrieve a value from the configuration based on its key. The key
may be nested.
:param str key: A path to the value, with nested levels joined by '.'
:param default: Value to return if the key does not exist (... | Retrieve a value from the configuration based on its key. The key
may be nested.
:param str key: A path to the value, with nested levels joined by '.'
:param default: Value to return if the key does not exist (defaults to :code:`dict()`)
:param bool decrypt: If :code:`True`, decrypt an ... |
def redirect_stdout(new_stdout):
    """Temporarily install *new_stdout* as ``sys.stdout``.

    Generator-based helper: yields once with the replacement stream
    installed, then restores the previous ``sys.stdout`` on exit --
    even if the body raises.

    Args:
        new_stdout (io.StringIO): New stdout to use instead
    """
    previous = sys.stdout
    sys.stdout = new_stdout
    try:
        yield None
    finally:
        sys.stdout = previous
Args:
new_stdout (io.StringIO): New stdout to use instead |
def get_datafeeds(self, datafeed_id=None, params=None):
"""
`<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html>`_
:arg datafeed_id: The ID of the datafeeds to fetch
:arg allow_no_datafeeds: Whether to ignore if a wildcard expression
matches... | `<http://www.elastic.co/guide/en/elasticsearch/reference/current/ml-get-datafeed.html>`_
:arg datafeed_id: The ID of the datafeeds to fetch
:arg allow_no_datafeeds: Whether to ignore if a wildcard expression
matches no datafeeds. (This includes `_all` string or when no
datafeeds... |
def tobinary(self):
"""Return self as a binary string."""
entrylen = struct.calcsize(self.ENTRYSTRUCT)
rslt = []
for (dpos, dlen, ulen, flag, typcd, nm) in self.data:
nmlen = len(nm) + 1 # add 1 for a '\0'
# version 4
# rslt.append(struct.pack(se... | Return self as a binary string. |
def rm_env(user, name):
'''
Remove cron environment variable for a specified user.
CLI Example:
.. code-block:: bash
salt '*' cron.rm_env root MAILTO
'''
lst = list_tab(user)
ret = 'absent'
rm_ = None
for ind in range(len(lst['env'])):
if name == lst['env'][ind]['n... | Remove cron environment variable for a specified user.
CLI Example:
.. code-block:: bash
salt '*' cron.rm_env root MAILTO |
def add_key(self, key):
    """Register *key* in this metric, backed by a fresh ReducedMetric.

    Existing keys are left untouched so accumulated values survive.
    """
    if key in self.value:
        return
    self.value[key] = ReducedMetric(self.reducer)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.