code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def _prepare_outputs(self, data_out, outputs):
""" Open a ROOT file with option 'RECREATE' to create a new file (the file will
be overwritten if it already exists), and using the ZLIB compression algorithm
(with compression level 1) for better compatibility with older ROOT versions
(see ... | Open a ROOT file with option 'RECREATE' to create a new file (the file will
be overwritten if it already exists), and using the ZLIB compression algorithm
(with compression level 1) for better compatibility with older ROOT versions
(see https://root.cern.ch/doc/v614/release-notes.html#important-... |
def _get_trendline(self,date0=None,date1=None,on=None,kind='trend',to_strfmt='%Y-%m-%d',from_strfmt='%d%b%y',**kwargs):
"""
Returns a trendline (line), support or resistance
Parameters:
date0 : string
Trendline starting date
date1 : string
Trendline end date
on : string
Indicate the data... | Returns a trendline (line), support or resistance
Parameters:
date0 : string
Trendline starting date
date1 : string
Trendline end date
on : string
Indicate the data series in which the
trendline should be based.
'close'
'high'
'low'
'open'
kind : string
Def... |
def remove_known_host(host, application_name, user=None):
"""Remove the entry in known_hosts for host.
:param host: hostname to lookup in file.
:type host: str
:param application_name: Name of application eg nova-compute-something
:type application_name: str
:param user: The user that the ssh a... | Remove the entry in known_hosts for host.
:param host: hostname to lookup in file.
:type host: str
:param application_name: Name of application eg nova-compute-something
:type application_name: str
:param user: The user that the ssh asserts are for.
:type user: str |
def iter_elements(element_function, parent_to_parse, **kwargs):
"""
Applies element_function to each of the sub-elements in parent_to_parse.
The passed in function must take at least one element, and an optional
list of kwargs which are relevant to each of the elements in the list:
def elem_func... | Applies element_function to each of the sub-elements in parent_to_parse.
The passed in function must take at least one element, and an optional
list of kwargs which are relevant to each of the elements in the list:
def elem_func(each_elem, **kwargs) |
def top(self):
"""
list of processes in a running container
:return: None or list of dicts
"""
# let's get resources from .stats()
ps_args = "-eo pid,ppid,wchan,args"
# returns {"Processes": [values], "Titles": [values]}
# it's easier to play with list of... | list of processes in a running container
:return: None or list of dicts |
def after_reinstate(analysis_request):
"""Method triggered after a 'reinstate' transition for the Analysis Request
passed in is performed. Sets its status to the last status before it was
cancelled. Reinstates the descendant partitions and all the analyses
associated to the analysis request as well.
... | Method triggered after a 'reinstate' transition for the Analysis Request
passed in is performed. Sets its status to the last status before it was
cancelled. Reinstates the descendant partitions and all the analyses
associated to the analysis request as well. |
def _additive_estimate(events, timeline, _additive_f, _additive_var, reverse):
"""
Called to compute the Kaplan Meier and Nelson-Aalen estimates.
"""
if reverse:
events = events.sort_index(ascending=False)
at_risk = events["entrance"].sum() - events["removed"].cumsum().shift(1).fillna(0... | Called to compute the Kaplan Meier and Nelson-Aalen estimates. |
def adjust(self):
""" If one of the transformations is not defined it is expected to
be the mirror image of the other.
"""
if self._fro is None and self._to is not None:
self._fro = dict(
[(value.lower(), key) for key, value in self._to.items()])
if s... | If one of the transformations is not defined it is expected to
be the mirror image of the other. |
def get(self, callback):
'''
Gets an item based on the path.
'''
derived_path = self.context.request.url
logger.debug('[{log_prefix}]: get.derived_path: {path}'.format(
log_prefix=LOG_PREFIX, path=derived_path))
callback(self.storage.get(self.result_key_for(de... | Gets an item based on the path. |
def get_ordering(self):
"""!
@brief Returns clustering ordering information about the input data set.
@details Clustering ordering of data-set contains the information about the internal clustering structure in line with connectivity radius.
@return (ordering_analyser) Anal... | !
@brief Returns clustering ordering information about the input data set.
@details Clustering ordering of data-set contains the information about the internal clustering structure in line with connectivity radius.
@return (ordering_analyser) Analyser of clustering ordering.
... |
def process_remove_action(processors, action, argument):
"""Process action removals."""
for processor in processors:
processor(action, argument)
db.session.commit() | Process action removals. |
def layer_uri(self, layer_name):
"""Get layer URI.
For a vector layer :
/path/to/the/geopackage.gpkg|layername=my_vector_layer
For a raster :
GPKG:/path/to/the/geopackage.gpkg:my_raster_layer
:param layer_name: The name of the layer to fetch.
:type layer_name: ... | Get layer URI.
For a vector layer :
/path/to/the/geopackage.gpkg|layername=my_vector_layer
For a raster :
GPKG:/path/to/the/geopackage.gpkg:my_raster_layer
:param layer_name: The name of the layer to fetch.
:type layer_name: str
:return: The URI to the layer.
... |
def _get_collection(self, collection_uri, request_headers=None):
"""Generator function that returns collection members."""
# get the collection
status, headers, thecollection = self._rest_get(collection_uri)
if status != 200:
msg = self._get_extended_error(thecollection)
... | Generator function that returns collection members. |
def execute_sql_statement(sql_statement, query, user_name, session, cursor):
"""Executes a single SQL statement"""
database = query.database
db_engine_spec = database.db_engine_spec
parsed_query = ParsedQuery(sql_statement)
sql = parsed_query.stripped()
SQL_MAX_ROWS = app.config.get('SQL_MAX_ROW... | Executes a single SQL statement |
def parse_library(lib_files):
"""
Analizuje pliki podane w liście lib_files
Zwraca instancję MusicLibrary
"""
tracks, playlists = lib_files
lib = MusicLibrary()
lib_length = len(tracks)
i = 0
writer = lib.ix.writer()
previous_procent_done_str = ""
for f in tracks:
tr... | Analizuje pliki podane w liście lib_files
Zwraca instancję MusicLibrary |
def retrieve(self, filter_expression=None,
order_expression=None, slice_key=None):
"""
Retrieve entities from this cache, possibly after filtering, ordering
and slicing.
"""
ents = iter(self.__entities)
if not filter_expression is None:
ents =... | Retrieve entities from this cache, possibly after filtering, ordering
and slicing. |
def setup(self,
hunt_id,
reason, grr_server_url, grr_username, grr_password, approvers=None,
verify=True):
"""Initializes a GRR Hunt file collector.
Args:
hunt_id: Hunt ID to download results from.
reason: justification for GRR access.
grr_server_url: GRR s... | Initializes a GRR Hunt file collector.
Args:
hunt_id: Hunt ID to download results from.
reason: justification for GRR access.
grr_server_url: GRR server URL.
grr_username: GRR username.
grr_password: GRR password.
approvers: comma-separated list of GRR approval recipients.
... |
def get_feature(self, croplayer_id, cropfeature_id):
"""
Gets a crop feature
:param int croplayer_id: ID of a cropping layer
:param int cropfeature_id: ID of a cropping feature
:rtype: CropFeature
"""
target_url = self.client.get_url('CROPFEATURE', 'GET', 'single... | Gets a crop feature
:param int croplayer_id: ID of a cropping layer
:param int cropfeature_id: ID of a cropping feature
:rtype: CropFeature |
def Network_emulateNetworkConditions(self, offline, latency,
downloadThroughput, uploadThroughput, **kwargs):
"""
Function path: Network.emulateNetworkConditions
Domain: Network
Method name: emulateNetworkConditions
Parameters:
Required arguments:
'offline' (type: boolean) -> True to emula... | Function path: Network.emulateNetworkConditions
Domain: Network
Method name: emulateNetworkConditions
Parameters:
Required arguments:
'offline' (type: boolean) -> True to emulate internet disconnection.
'latency' (type: number) -> Minimum latency from request sent to response headers received ... |
def set_relay_on(self):
"""Turn the relay on."""
if not self.get_relay_state():
try:
request = requests.get(
'{}/relay'.format(self.resource), params={'state': '1'},
timeout=self.timeout)
if request.status_code == 200:
... | Turn the relay on. |
def dispatch_shell(self, stream, msg):
"""dispatch shell requests"""
# flush control requests first
if self.control_stream:
self.control_stream.flush()
idents,msg = self.session.feed_identities(msg, copy=False)
try:
msg = self.session.unserialize(... | dispatch shell requests |
def analysis_question_extractor(impact_report, component_metadata):
"""Extracting analysis question from the impact layer.
:param impact_report: the impact report that acts as a proxy to fetch
all the data that extractor needed
:type impact_report: safe.report.impact_report.ImpactReport
:param... | Extracting analysis question from the impact layer.
:param impact_report: the impact report that acts as a proxy to fetch
all the data that extractor needed
:type impact_report: safe.report.impact_report.ImpactReport
:param component_metadata: the component metadata. Used to obtain
informa... |
def searchForMessages(self, query, offset=0, limit=5, thread_id=None):
"""
Find and get :class:`models.Message` objects by query
.. warning::
This method sends request for every found message ID.
:param query: Text to search for
:param offset: Number of messages to ... | Find and get :class:`models.Message` objects by query
.. warning::
This method sends request for every found message ID.
:param query: Text to search for
:param offset: Number of messages to skip
:param limit: Max. number of messages to retrieve
:param thread_id: Us... |
def ws050(self, value=None):
""" Corresponds to IDD Field `ws050`
Wind speed corresponding 5.0% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `ws050`
Unit: m/s
if `value` is None it will not be checked against the... | Corresponds to IDD Field `ws050`
Wind speed corresponding 5.0% annual cumulative frequency of occurrence
Args:
value (float): value for IDD Field `ws050`
Unit: m/s
if `value` is None it will not be checked against the
specification and is assu... |
def get_role_id(self, role_name, mount_point='approle'):
"""GET /auth/<mount_point>/role/<role name>/role-id
:param role_name:
:type role_name:
:param mount_point:
:type mount_point:
:return:
:rtype:
"""
url = '/v1/auth/{0}/role/{1}/role-id'.form... | GET /auth/<mount_point>/role/<role name>/role-id
:param role_name:
:type role_name:
:param mount_point:
:type mount_point:
:return:
:rtype: |
def interface_type(self, ift):
"""
Set the CoRE Link Format if attribute of the resource.
:param ift: the CoRE Link Format if attribute
"""
if not isinstance(ift, str):
ift = str(ift)
self._attributes["if"] = ift | Set the CoRE Link Format if attribute of the resource.
:param ift: the CoRE Link Format if attribute |
def remove_lines(lines, remove=('[[back to top]', '<a class="mk-toclify"')):
"""Removes existing [back to top] links and <a id> tags."""
if not remove:
return lines[:]
out = []
for l in lines:
if l.startswith(remove):
continue
out.append(l)
return out | Removes existing [back to top] links and <a id> tags. |
def _help():
""" Display both SQLAlchemy and Python help statements """
statement = '%s%s' % (shelp, phelp % ', '.join(cntx_.keys()))
print statement.strip() | Display both SQLAlchemy and Python help statements |
def define_property(obj, name, fget=None, fset=None, fdel=None, doc=None):
"""Defines a @property dynamically for an instance rather than a class."""
if hasattr(fget, '__get__'): # can pass a property declaration too
prop = fget
else:
prop = property(fget, fset, fdel, doc)
cls = obj.__c... | Defines a @property dynamically for an instance rather than a class. |
def sign(pkey, data, digest):
"""
Sign a data string using the given key and message digest.
:param pkey: PKey to sign with
:param data: data to be signed
:param digest: message digest to use
:return: signature
.. versionadded:: 0.11
"""
data = _text_to_bytes_and_warn("data", data)... | Sign a data string using the given key and message digest.
:param pkey: PKey to sign with
:param data: data to be signed
:param digest: message digest to use
:return: signature
.. versionadded:: 0.11 |
def analyse(self, traj, network, current_subrun, subrun_list, network_dict):
"""Calculates average Fano Factor of a network.
:param traj:
Trajectory container
Expects:
`results.monitors.spikes_e`: Data from SpikeMonitor for excitatory neurons
Adds:
... | Calculates average Fano Factor of a network.
:param traj:
Trajectory container
Expects:
`results.monitors.spikes_e`: Data from SpikeMonitor for excitatory neurons
Adds:
`results.statistics.mean_fano_factor`: Average Fano Factor
:param ne... |
def add_constraints(self):
"""
Set the base constraints on the relation query.
:rtype: None
"""
if self._constraints:
foreign_key = getattr(self._parent, self._foreign_key, None)
if foreign_key is None:
self._query = None
else:... | Set the base constraints on the relation query.
:rtype: None |
def multiplySeries(requestContext, *seriesLists):
"""
Takes two or more series and multiplies their points. A constant may not be
used. To multiply by a constant, use the scale() function.
Example::
&target=multiplySeries(Series.dividends,Series.divisors)
"""
if not seriesLists or no... | Takes two or more series and multiplies their points. A constant may not be
used. To multiply by a constant, use the scale() function.
Example::
&target=multiplySeries(Series.dividends,Series.divisors) |
def update_cluster(cluster_dict, datacenter=None, cluster=None,
service_instance=None):
'''
Updates a cluster.
config_dict
Dictionary with the config values of the new cluster.
datacenter
Name of datacenter containing the cluster.
Ignored if already contained... | Updates a cluster.
config_dict
Dictionary with the config values of the new cluster.
datacenter
Name of datacenter containing the cluster.
Ignored if already contained by proxy details.
Default value is None.
cluster
Name of cluster.
Ignored if already cont... |
def from_payload(self, payload):
"""Init frame from binary data."""
self.session_id = payload[0]*256 + payload[1]
self.status = CommandSendConfirmationStatus(payload[2]) | Init frame from binary data. |
def stop(self):
"""停止引擎"""
# 将引擎设为停止
self.__active = False
# 停止计时器
self.__timer.stop()
# 等待事件处理线程退出
self.__thread.join() | 停止引擎 |
def shell(cmd, output=None, mode='w', cwd=None, shell=False):
"""Execute a shell command.
You can add a shell command::
server.watch(
'style.less', shell('lessc style.less', output='style.css')
)
:param cmd: a shell command, string or list
:param output: output stdout to t... | Execute a shell command.
You can add a shell command::
server.watch(
'style.less', shell('lessc style.less', output='style.css')
)
:param cmd: a shell command, string or list
:param output: output stdout to the given file
:param mode: only works with output, mode ``w`` mea... |
def _do_setup_step(self, play):
''' get facts from the remote system '''
host_list = self._list_available_hosts(play.hosts)
if play.gather_facts is False:
return {}
elif play.gather_facts is None:
host_list = [h for h in host_list if h not in self.SETUP_CACHE or... | get facts from the remote system |
def bilateral2(data, fSize, sigma_p, sigma_x = 10.):
"""bilateral filter """
dtype = data.dtype.type
dtypes_kernels = {np.float32:"bilat2_float",
np.uint16:"bilat2_short"}
if not dtype in dtypes_kernels:
logger.info("data type %s not supported yet (%s), casting to f... | bilateral filter |
def tunnel(self, local_port, remote_port):
"""
Creates an SSH tunnel.
"""
r = self.local_renderer
r.env.tunnel_local_port = local_port
r.env.tunnel_remote_port = remote_port
r.local(' ssh -i {key_filename} -L {tunnel_local_port}:localhost:{tunnel_remote_port} {use... | Creates an SSH tunnel. |
def _encrypt_private(self, ret, dictkey, target):
'''
The server equivalent of ReqChannel.crypted_transfer_decode_dictentry
'''
# encrypt with a specific AES key
pubfn = os.path.join(self.opts['pki_dir'],
'minions',
target... | The server equivalent of ReqChannel.crypted_transfer_decode_dictentry |
def fit_two_gaussian(spectra, f_ppm, lb=3.6, ub=3.9):
"""
Fit a gaussian function to the difference spectra
This is useful for estimation of the Glx peak, which tends to have two
peaks.
Parameters
----------
spectra : array of shape (n_transients, n_points)
Typically the difference of the ... | Fit a gaussian function to the difference spectra
This is useful for estimation of the Glx peak, which tends to have two
peaks.
Parameters
----------
spectra : array of shape (n_transients, n_points)
Typically the difference of the on/off spectra in each transient.
f_ppm : array
lb, ub :... |
def set_keyspace(self, keyspace):
""" switch all connections to another keyspace """
self.keyspace = keyspace
dfrds = []
for p in self._protos:
dfrds.append(p.submitRequest(ManagedThriftRequest(
'set_keyspace', keyspace)))
return defer.gatherResults(df... | switch all connections to another keyspace |
def search_all_payments(payment_status=None, page_size=20, start_cursor=None, offset=0, use_cache=True,
cache_begin=True, relations=None):
"""
Returns a command to search all payments ordered by creation desc
@param payment_status: The payment status. If None is going to return resul... | Returns a command to search all payments ordered by creation desc
@param payment_status: The payment status. If None is going to return results independent from status
@param page_size: number of payments per page
@param start_cursor: cursor to continue the search
@param offset: offset number of payment... |
def return_ok(self, cookie, request):
"""
If you override .return_ok(), be sure to call this method. If it
returns false, so should your subclass (assuming your subclass wants to
be more strict about which cookies to return).
"""
# Path has already been checked by .path... | If you override .return_ok(), be sure to call this method. If it
returns false, so should your subclass (assuming your subclass wants to
be more strict about which cookies to return). |
def _R2deriv(self,R,z,phi=0.,t=0.):
"""
NAME:
R2deriv
PURPOSE:
evaluate R2 derivative
INPUT:
R - Cylindrical Galactocentric radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
-d K_R (R,z) d R
... | NAME:
R2deriv
PURPOSE:
evaluate R2 derivative
INPUT:
R - Cylindrical Galactocentric radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
-d K_R (R,z) d R
HISTORY:
2012-12-27 - Written - Bovy (IAS... |
def write(self, image, dest_fobj, quality=95, format=None):
"""
Wrapper for ``_write``
:param Image image: This is your engine's ``Image`` object. For
PIL it's PIL.Image.
:keyword int quality: A quality level as a percent. The lower, the
higher the compression, t... | Wrapper for ``_write``
:param Image image: This is your engine's ``Image`` object. For
PIL it's PIL.Image.
:keyword int quality: A quality level as a percent. The lower, the
higher the compression, the worse the artifacts.
:keyword str format: The format to save to. If o... |
def get_client(self):
"""Returns the Client
"""
context = self.context
parent = api.get_parent(context)
if context.portal_type == "Client":
return context
elif parent.portal_type == "Client":
return parent
elif context.portal_type == "Batch... | Returns the Client |
def bloquear_sat(retorno):
"""Constrói uma :class:`RespostaSAT` para o retorno (unicode) da função
:meth:`~satcfe.base.FuncoesSAT.bloquear_sat`.
"""
resposta = analisar_retorno(forcar_unicode(retorno),
funcao='BloquearSAT')
if resposta.EEEEE not in ('16000',):
... | Constrói uma :class:`RespostaSAT` para o retorno (unicode) da função
:meth:`~satcfe.base.FuncoesSAT.bloquear_sat`. |
def get_relative_to_remote(self):
"""Return the number of commits we are relative to the remote. Negative
is behind, positive in front, zero means we are matched to remote.
"""
s = self.git("status", "--short", "-b")[0]
r = re.compile("\[([^\]]+)\]")
toks = r.findall(s)
... | Return the number of commits we are relative to the remote. Negative
is behind, positive in front, zero means we are matched to remote. |
def potential_from_grid(self, grid):
"""
Calculate the potential at a given set of arc-second gridded coordinates.
Parameters
----------
grid : grids.RegularGrid
The grid of (y,x) arc-second coordinates the deflection angles are computed on.
"""
pote... | Calculate the potential at a given set of arc-second gridded coordinates.
Parameters
----------
grid : grids.RegularGrid
The grid of (y,x) arc-second coordinates the deflection angles are computed on. |
def load_atomic(self, ptr, ordering, align, name=''):
"""
Load value from pointer, with optional guaranteed alignment:
name = *ptr
"""
if not isinstance(ptr.type, types.PointerType):
raise TypeError("cannot load from value of type %s (%r): not a pointer"
... | Load value from pointer, with optional guaranteed alignment:
name = *ptr |
def add(self, varname, result, pointer=None):
"""Adds the specified python-typed result and an optional Ftype pointer
to use when cleaning up this object.
:arg result: a python-typed representation of the result.
:arg pointer: an instance of Ftype with pointer information for deallocati... | Adds the specified python-typed result and an optional Ftype pointer
to use when cleaning up this object.
:arg result: a python-typed representation of the result.
:arg pointer: an instance of Ftype with pointer information for deallocating
the c-pointer. |
def logout(self):
""" logout page
"""
sess = cherrypy.session
username = sess.get(SESSION_KEY, None)
sess[SESSION_KEY] = None
if username:
cherrypy.request.login = None
cherrypy.log.error(
msg="user '%(user)s' logout" % {'user': username},... | logout page |
def geom_reflect(g, nv):
""" Reflection symmetry operation.
nv is normal vector to reflection plane
g is assumed already translated to center of mass @ origin
.. todo:: Complete geom_reflect docstring
"""
# Imports
import numpy as np
# Force g to n-vector
g = make_nd_vec(g, nd=N... | Reflection symmetry operation.
nv is normal vector to reflection plane
g is assumed already translated to center of mass @ origin
.. todo:: Complete geom_reflect docstring |
def unpack_layer(plane):
"""Return a correctly shaped numpy array given the feature layer bytes."""
size = point.Point.build(plane.size)
if size == (0, 0):
# New layer that isn't implemented in this SC2 version.
return None
data = np.frombuffer(plane.data, dtype=Feature.dtypes[plane.bits_per... | Return a correctly shaped numpy array given the feature layer bytes. |
def do_cat(self, path):
"""display the contents of a file"""
path = path[0]
tmp_file_path = self.TMP_PATH + 'tmp'
if not os.path.exists(self.TMP_PATH):
os.makedirs(self.TMP_PATH)
f = self.n.downloadFile(self.current_path + path, tmp_file_path)
f = open(tmp_f... | display the contents of a file |
def cancel_job(self, job_resource_name: str):
"""Cancels the given job.
See also the cancel method on EngineJob.
Params:
job_resource_name: A string of the form
`projects/project_id/programs/program_id/jobs/job_id`.
"""
self.service.projects().progra... | Cancels the given job.
See also the cancel method on EngineJob.
Params:
job_resource_name: A string of the form
`projects/project_id/programs/program_id/jobs/job_id`. |
def compile_string(self, mof, ns, filename=None):
"""
Compile a string of MOF statements into a namespace of the associated
CIM repository.
Parameters:
mof (:term:`string`):
The string of MOF statements to be compiled.
ns (:term:`string`):
T... | Compile a string of MOF statements into a namespace of the associated
CIM repository.
Parameters:
mof (:term:`string`):
The string of MOF statements to be compiled.
ns (:term:`string`):
The name of the CIM namespace in the associated CIM repository
... |
def p_initial(self, p):
'initial : INITIAL initial_statement'
p[0] = Initial(p[2], lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1)) | initial : INITIAL initial_statement |
def create_exception_by_name(
name,
detailCode='0',
description='',
traceInformation=None,
identifier=None,
nodeId=None,
):
"""Create a DataONEException based object by name.
Args:
name: str
The type name of a DataONE Exception. E.g. NotFound.
If an unknown type n... | Create a DataONEException based object by name.
Args:
name: str
The type name of a DataONE Exception. E.g. NotFound.
If an unknown type name is used, it is automatically set to ServiceFailure. As
the XML Schema for DataONE Exceptions does not restrict the type names, this
may... |
def tremolo(self, freq, depth=40):
"""tremolo takes two parameters: frequency and depth (max 100)"""
self.command.append("tremolo")
self.command.append(freq)
self.command.append(depth)
return self | tremolo takes two parameters: frequency and depth (max 100) |
def apply_color_scheme(self, color_scheme):
"""
Apply a pygments color scheme to the console.
As there is not a 1 to 1 mapping between color scheme formats and
console formats, we decided to make the following mapping (it usually
looks good for most of the available pygments sty... | Apply a pygments color scheme to the console.
As there is not a 1 to 1 mapping between color scheme formats and
console formats, we decided to make the following mapping (it usually
looks good for most of the available pygments styles):
- stdout_color = normal color
- s... |
def orient_averaged_adaptive(tm):
"""Compute the T-matrix using variable orientation scatterers.
This method uses a very slow adaptive routine and should mainly be used
for reference purposes. Uses the set particle orientation PDF, ignoring
the alpha and beta attributes.
Args:
tm: TMat... | Compute the T-matrix using variable orientation scatterers.
This method uses a very slow adaptive routine and should mainly be used
for reference purposes. Uses the set particle orientation PDF, ignoring
the alpha and beta attributes.
Args:
tm: TMatrix (or descendant) instance
Returns... |
def datetime2literal_rnc(d: datetime.datetime, c: Optional[Dict]) -> str:
"""Format a DateTime object as something MySQL will actually accept."""
# dt = d.strftime("%Y-%m-%d %H:%M:%S")
# ... can fail with e.g.
# ValueError: year=1850 is before 1900; the datetime strftime() methods
# require year... | Format a DateTime object as something MySQL will actually accept. |
def _findlinestarts(code):
"""Find the offsets in a byte code which are start of lines in the source
Generate pairs offset,lineno as described in Python/compile.c
This is a modified version of dis.findlinestarts, which allows multiplelinestarts
with the same line number"""
lineno... | Find the offsets in a byte code which are start of lines in the source
Generate pairs offset,lineno as described in Python/compile.c
This is a modified version of dis.findlinestarts, which allows multiplelinestarts
with the same line number |
def fit(self, X, y=None):
"""Perform robust single linkage clustering from features or
distance matrix.
Parameters
----------
X : array or sparse (CSR) matrix of shape (n_samples, n_features), or \
array of shape (n_samples, n_samples)
A feature array... | Perform robust single linkage clustering from features or
distance matrix.
Parameters
----------
X : array or sparse (CSR) matrix of shape (n_samples, n_features), or \
array of shape (n_samples, n_samples)
A feature array, or array of distances between sampl... |
def bootstrap(
self, controller_name, region=None, agent_version=None,
auto_upgrade=False, bootstrap_constraints=None,
bootstrap_series=None, config=None, constraints=None,
credential=None, default_model=None, keep_broken=False,
metadata_source=None, no_gui=Fa... | Initialize a cloud environment.
:param str controller_name: Name of controller to create
:param str region: Cloud region in which to bootstrap
:param str agent_version: Version of tools to use for Juju agents
:param bool auto_upgrade: Upgrade to latest path release tools on first
... |
def add_variant(self, variant):
"""Add a variant to the variant collection
If the variant exists we update the count else we insert a new variant object.
Args:
variant (dict): A variant dictionary
"""
LOG.debug("Upserting variant... | Add a variant to the variant collection
If the variant exists we update the count else we insert a new variant object.
Args:
variant (dict): A variant dictionary |
def get(self, query_path=None, return_type=list, preceding_depth=None, throw_null_return_error=False):
""" Traverses the list of query paths to find the data requested
:param query_path: (list(str), str), list of query path branches or query string
Default b... | Traverses the list of query paths to find the data requested
:param query_path: (list(str), str), list of query path branches or query string
Default behavior: returns list(str) of possible config headers
:param return_type: (list, str, dict, OrderedDict), d... |
def is_not_empty(value, **kwargs):
"""Indicate whether ``value`` is empty.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is empty, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplica... | Indicate whether ``value`` is empty.
:param value: The value to evaluate.
:returns: ``True`` if ``value`` is empty, ``False`` if it is not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates
keyword parameters passed to the... |
def get_vault_form_for_create(self, vault_record_types):
"""Gets the vault form for creating new vaults.
A new form should be requested for each create transaction.
arg: vault_record_types (osid.type.Type[]): array of vault
record types
return: (osid.authorization.Va... | Gets the vault form for creating new vaults.
A new form should be requested for each create transaction.
arg: vault_record_types (osid.type.Type[]): array of vault
record types
return: (osid.authorization.VaultForm) - the vault form
raise: NullArgument - ``vault_rec... |
def mean_by_panel(self, length):
"""
Compute the mean across fixed sized panels of each record.
Splits each record into panels of size `length`,
and then computes the mean across panels.
Panel length must subdivide record exactly.
Parameters
----------
l... | Compute the mean across fixed sized panels of each record.
Splits each record into panels of size `length`,
and then computes the mean across panels.
Panel length must subdivide record exactly.
Parameters
----------
length : int
Fixed length with which to su... |
def __collect_fields(self):
""" Use field values from config.json and collect from request """
form = FormData()
form.add_field(self.__username_field, required=True,
error=self.__username_error)
form.add_field(self.__password_field, required=True,
... | Use field values from config.json and collect from request |
def delete(self, robj, rw=None, r=None, w=None, dw=None, pr=None, pw=None,
timeout=None):
"""
Delete an object.
"""
# We could detect quorum_controls here but HTTP ignores
# unknown flags/params.
params = {'rw': rw, 'r': r, 'w': w, 'dw': dw, 'pr': pr, 'pw':... | Delete an object. |
def _compute_childtab(self, lcptab):
"""Computes the child 'up' and 'down' arrays in O(n) based on the LCP table.
Abouelhoda et al. (2004).
"""
last_index = -1
stack = [0]
n = len(lcptab)
childtab_up = np.zeros(n, dtype=np.int) # Zeros / -1 ?
childtab_do... | Computes the child 'up' and 'down' arrays in O(n) based on the LCP table.
Abouelhoda et al. (2004). |
def index_bams(job, config):
"""
Convenience job for handling bam indexing to make the workflow declaration cleaner
:param JobFunctionWrappingJob job: passed automatically by Toil
:param Namespace config: Argparse Namespace object containing argument inputs
"""
job.fileStore.logToMaster('Indexe... | Convenience job for handling bam indexing to make the workflow declaration cleaner
:param JobFunctionWrappingJob job: passed automatically by Toil
:param Namespace config: Argparse Namespace object containing argument inputs |
def _waiting_expect(self):
'''``True`` when the client is waiting for 100 Continue.
'''
if self._expect_sent is None:
if self.environ.get('HTTP_EXPECT', '').lower() == '100-continue':
return True
self._expect_sent = ''
return False | ``True`` when the client is waiting for 100 Continue. |
def _stdlib_paths():
"""Return a set of paths from which Python imports the standard library.
"""
attr_candidates = [
'prefix',
'real_prefix', # virtualenv: only set inside a virtual environment.
'base_prefix', # venv: always set, equal to prefix if outside.
]
prefixes = (g... | Return a set of paths from which Python imports the standard library. |
def stack(args):
"""
%prog stack fastafile
Create landscape plots that show the amounts of genic sequences, and repetitive
sequences along the chromosomes.
"""
p = OptionParser(stack.__doc__)
p.add_option("--top", default=10, type="int",
help="Draw the first N chromosomes [... | %prog stack fastafile
Create landscape plots that show the amounts of genic sequences, and repetitive
sequences along the chromosomes. |
def show_item_v3(h):
"""Show any RAR3 record.
"""
st = rar3_type(h.type)
xprint("%s: hdrlen=%d datlen=%d", st, h.header_size, h.add_size)
if h.type in (rf.RAR_BLOCK_FILE, rf.RAR_BLOCK_SUB):
if h.host_os == rf.RAR_OS_UNIX:
s_mode = "0%o" % h.mode
else:
s_mode =... | Show any RAR3 record. |
def pluralize(self, measure, singular, plural):
""" Returns a string that contains the measure (amount) and its plural
or singular form depending on the amount.
Parameters:
:param measure: Amount, value, always a numerical value
:param singular: The singular form of the ... | Returns a string that contains the measure (amount) and its plural
or singular form depending on the amount.
Parameters:
:param measure: Amount, value, always a numerical value
:param singular: The singular form of the chosen word
:param plural: The plural form of th... |
def _contains_blinded_text(stats_xml):
""" Heuristic to determine whether the treebank has blinded texts or not """
tree = ET.parse(stats_xml)
root = tree.getroot()
total_tokens = int(root.find('size/total/tokens').text)
unique_lemmas = int(root.find('lemmas').get('unique'))
# assume the corpus... | Heuristic to determine whether the treebank has blinded texts or not |
def make_proxy_method(cls, name):
"""Creates a proxy function that can be used by Flasks routing. The
proxy instantiates the Mocha subclass and calls the appropriate
method.
:param name: the name of the method to create a proxy for
"""
i = cls()
view = getattr(i,... | Creates a proxy function that can be used by Flasks routing. The
proxy instantiates the Mocha subclass and calls the appropriate
method.
:param name: the name of the method to create a proxy for |
def delete_namespaced_service(self, name, namespace, **kwargs): # noqa: E501
"""delete_namespaced_service # noqa: E501
delete a Service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> t... | delete_namespaced_service # noqa: E501
delete a Service # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_service(name, namespace, async_req=True)
>>> result... |
def make_spindles(events, power_peaks, powers, dat_det, dat_orig, time,
s_freq):
"""Create dict for each spindle, based on events of time points.
Parameters
----------
events : ndarray (dtype='int')
N x 3 matrix with start, peak, end samples, and peak frequency
power_peaks... | Create dict for each spindle, based on events of time points.
Parameters
----------
events : ndarray (dtype='int')
N x 3 matrix with start, peak, end samples, and peak frequency
power_peaks : ndarray (dtype='float')
peak in power spectrum for each event
powers : ndarray (dtype='floa... |
def compact(db_spec, poll_interval=0):
"""
Compact a CouchDB database with optional synchronicity.
The ``compact`` function will compact a CouchDB database stored on an
running CouchDB server. By default, this process occurs *asynchronously*,
meaning that the compaction will occur in the b... | Compact a CouchDB database with optional synchronicity.
The ``compact`` function will compact a CouchDB database stored on an
running CouchDB server. By default, this process occurs *asynchronously*,
meaning that the compaction will occur in the background. Often, you'll want
to know when the proce... |
def triangulate(self):
"""
Triangulates the set of vertices and stores the triangles in faces and
the convex hull in convex_hull.
"""
npts = self._vertices.shape[0]
if np.any(self._vertices[0] != self._vertices[1]):
# start != end, so edges must wrap around to... | Triangulates the set of vertices and stores the triangles in faces and
the convex hull in convex_hull. |
def deep_del(data, fn):
"""Create dict copy with removed items.
Recursively remove items where fn(value) is True.
Returns:
dict: New dict with matching items removed.
"""
result = {}
for k, v in data.iteritems():
if not fn(v):
if isinstance(v, dict):
... | Create dict copy with removed items.
Recursively remove items where fn(value) is True.
Returns:
dict: New dict with matching items removed. |
def set_key(key, value, host=None, port=None, db=None, password=None):
    '''
    Set redis key value

    CLI Example:

    .. code-block:: bash

        salt '*' redis.set_key foo bar
    '''
    # Open a connection with the supplied parameters and delegate
    # the write to redis-py's SET command.
    conn = _connect(host, port, db, password)
    return conn.set(key, value)
def get(self, columns=None):
"""
Execute the query as a "select" statement.
:type columns: list
:rtype: orator.Collection
"""
if columns is None:
columns = ["*"]
if self._query.get_query().columns:
columns = []
select = self._ge... | Execute the query as a "select" statement.
:type columns: list
:rtype: orator.Collection |
def contour_to_geojson(contour, geojson_filepath=None, min_angle_deg=None,
ndigits=5, unit='', stroke_width=1, geojson_properties=None, strdump=False,
serialize=True):
"""Transform matplotlib.contour to geojson."""
collections = contour.collections
contour_index... | Transform matplotlib.contour to geojson. |
def k_ion(self, E):
    """
    Geometric focusing force due to ion column for given plasma density as a function of *E*
    """
    # Beam energy converted from GeV to joules for the denominator.
    energy_joule = _sltr.GeV2joule(E)
    # n_p * e^2 / (2 * E[J] * epsilon_0)
    return self.n_p * _spc.e ** 2 / (2 * energy_joule * _spc.epsilon_0)
def env_present(name,
value=None,
user='root'):
'''
Verifies that the specified environment variable is present in the crontab
for the specified user.
name
The name of the environment variable to set in the user crontab
user
The name of the user whos... | Verifies that the specified environment variable is present in the crontab
for the specified user.
name
The name of the environment variable to set in the user crontab
user
The name of the user whose crontab needs to be modified, defaults to
the root user
value
The val... |
def exclude(self, *fields):
"""
Projection columns which not included in the fields
:param fields: field names
:return: new collection
:rtype: :class:`odps.df.expr.expression.CollectionExpr`
"""
if len(fields) == 1 and isinstance(fields[0], list):
ex... | Projection columns which not included in the fields
:param fields: field names
:return: new collection
:rtype: :class:`odps.df.expr.expression.CollectionExpr` |
def sphinx(self):
"""Generate Sphinx-formatted documentation for the Property"""
try:
assert __IPYTHON__
classdoc = ''
except (NameError, AssertionError):
scls = self.sphinx_class()
classdoc = ' ({})'.format(scls) if scls else ''
prop_doc ... | Generate Sphinx-formatted documentation for the Property |
def getProvince(self, default=None):
    """Return the Province from the Physical or Postal Address
    """
    # Evaluate both addresses (matching the original's eager lookups),
    # preferring the physical address when it carries a truthy state.
    candidates = [
        self.getPhysicalAddress().get("state", default),
        self.getPostalAddress().get("state", default),
    ]
    return candidates[0] or candidates[1]
def create_review(self, commit=github.GithubObject.NotSet, body=None, event=github.GithubObject.NotSet, comments=github.GithubObject.NotSet):
"""
:calls: `POST /repos/:owner/:repo/pulls/:number/reviews <https://developer.github.com/v3/pulls/reviews/>`_
:param commit: github.Commit.Commit
... | :calls: `POST /repos/:owner/:repo/pulls/:number/reviews <https://developer.github.com/v3/pulls/reviews/>`_
:param commit: github.Commit.Commit
:param body: string
:param event: string
:param comments: list
:rtype: :class:`github.PullRequestReview.PullRequestReview` |
def get_header(self, name, default=None):
    """
    Retrieves the value of a header
    """
    # Delegate to the underlying handler's header mapping.
    headers = self._handler.headers
    return headers.get(name, default)
def infer_active_forms(stmts):
"""Return inferred ActiveForm from RegulateActivity + Modification.
This function looks for combinations of Activation/Inhibition
Statements and Modification Statements, and infers an ActiveForm
from them. For example, if we know that A activates B and
... | Return inferred ActiveForm from RegulateActivity + Modification.
This function looks for combinations of Activation/Inhibition
Statements and Modification Statements, and infers an ActiveForm
from them. For example, if we know that A activates B and
A phosphorylates B, then we can infer... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.