code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def modifiedaminoacids(df, kind='pie'):
"""
Generate a plot of relative numbers of modified amino acids in source DataFrame.
Plot a pie or bar chart showing the number and percentage of modified amino
acids in the supplied data frame. The amino acids displayed will be
determined from the supplied d... | Generate a plot of relative numbers of modified amino acids in source DataFrame.
Plot a pie or bar chart showing the number and percentage of modified amino
acids in the supplied data frame. The amino acids displayed will be
determined from the supplied data/modification type.
:param df: processed Dat... |
def assert_angles_allclose(x, y, **kwargs):
"""
Like numpy's assert_allclose, but for angles (in radians).
"""
c2 = (np.sin(x)-np.sin(y))**2 + (np.cos(x)-np.cos(y))**2
diff = np.arccos((2.0 - c2)/2.0) # a = b = 1
assert np.allclose(diff, 0.0, **kwargs) | Like numpy's assert_allclose, but for angles (in radians). |
def twoDimensionalScatter(title, title_x, title_y,
x, y,
lim_x = None, lim_y = None,
color = 'b', size = 20, alpha=None):
"""
Create a two-dimensional scatter plot.
INPUTS
"""
plt.figure()
plt.scatter(x, y, c=color, ... | Create a two-dimensional scatter plot.
INPUTS |
def _compute_projection_filters(G, sf, estimated_source):
"""Least-squares projection of estimated source on the subspace spanned by
delayed versions of reference sources, with delays between 0 and
filters_len-1
"""
# epsilon
eps = np.finfo(np.float).eps
# shapes
(nsampl, nchan) = estim... | Least-squares projection of estimated source on the subspace spanned by
delayed versions of reference sources, with delays between 0 and
filters_len-1 |
def field(ctx, text, index, delimiter=' '):
"""
Reference a field in string separated by a delimiter
"""
splits = text.split(delimiter)
# remove our delimiters and whitespace
splits = [f for f in splits if f != delimiter and len(f.strip()) > 0]
index = conversions.to_integer(index, ctx)
... | Reference a field in string separated by a delimiter |
def work_get(self, wallet, account):
"""
Retrieves work for **account** in **wallet**
.. enable_control required
.. version 8.0 required
:param wallet: Wallet to get account work for
:type wallet: str
:param account: Account to get work for
:type accoun... | Retrieves work for **account** in **wallet**
.. enable_control required
.. version 8.0 required
:param wallet: Wallet to get account work for
:type wallet: str
:param account: Account to get work for
:type account: str
:raises: :py:exc:`nano.rpc.RPCException`
... |
def chunks(arr, size):
"""Splits a list into chunks
:param arr: list to split
:type arr: :class:`list`
:param size: number of elements in each chunk
:type size: :class:`int`
:return: generator object
:rtype: :class:`generator`
"""
for i in _range(0, len(arr), size):
yield ar... | Splits a list into chunks
:param arr: list to split
:type arr: :class:`list`
:param size: number of elements in each chunk
:type size: :class:`int`
:return: generator object
:rtype: :class:`generator` |
def post(self, request, *args, **kwargs):
"""
Handle the datas for posting a quick entry,
and redirect to the admin in case of error or
to the entry's page in case of success.
"""
now = timezone.now()
data = {
'title': request.POST.get('title'),
... | Handle the datas for posting a quick entry,
and redirect to the admin in case of error or
to the entry's page in case of success. |
def getCmd(snmpEngine, authData, transportTarget, contextData,
*varBinds, **options):
"""Creates a generator to perform one or more SNMP GET queries.
On each iteration, new SNMP GET request is send (:RFC:`1905#section-4.2.1`).
The iterator blocks waiting for response to arrive or error to occur.... | Creates a generator to perform one or more SNMP GET queries.
On each iteration, new SNMP GET request is send (:RFC:`1905#section-4.2.1`).
The iterator blocks waiting for response to arrive or error to occur.
Parameters
----------
snmpEngine : :py:class:`~pysnmp.hlapi.SnmpEngine`
Class inst... |
def wrap_requests(requests_func):
"""Wrap the requests function to trace it."""
def call(url, *args, **kwargs):
blacklist_hostnames = execution_context.get_opencensus_attr(
'blacklist_hostnames')
parsed_url = urlparse(url)
if parsed_url.port is None:
dest_url = pa... | Wrap the requests function to trace it. |
def getcols(sheetMatch=None,colMatch="Decay"):
"""find every column in every sheet and put it in a new sheet or book."""
book=BOOK()
if sheetMatch is None:
matchingSheets=book.sheetNames
print('all %d sheets selected '%(len(matchingSheets)))
else:
matchingSheets=[x for x in book.... | find every column in every sheet and put it in a new sheet or book. |
def asxc(cls, obj):
"""Convert object into Xcfunc."""
if isinstance(obj, cls): return obj
if is_string(obj): return cls.from_name(obj)
raise TypeError("Don't know how to convert <%s:%s> to Xcfunc" % (type(obj), str(obj))) | Convert object into Xcfunc. |
def assertTimeZoneIsNotNone(self, dt, msg=None):
'''Fail unless ``dt`` has a non-null ``tzinfo`` attribute.
Parameters
----------
dt : datetime
msg : str
If not provided, the :mod:`marbles.mixins` or
:mod:`unittest` standard message will be used.
... | Fail unless ``dt`` has a non-null ``tzinfo`` attribute.
Parameters
----------
dt : datetime
msg : str
If not provided, the :mod:`marbles.mixins` or
:mod:`unittest` standard message will be used.
Raises
------
TypeError
If ``dt... |
def remove_pardir_symbols(path, sep=os.sep, pardir=os.pardir):
"""
Remove relative path symobls such as '..'
Args:
path (str): A target path string
sep (str): A strint to refer path delimiter (Default: `os.sep`)
pardir (str): A string to refer parent directory (Default: `os.pardir`)... | Remove relative path symobls such as '..'
Args:
path (str): A target path string
sep (str): A strint to refer path delimiter (Default: `os.sep`)
pardir (str): A string to refer parent directory (Default: `os.pardir`)
Returns:
str |
def shell(self, name='default', site=None, use_root=0, **kwargs):
"""
Opens a SQL shell to the given database, assuming the configured database
and user supports this feature.
"""
r = self.database_renderer(name=name, site=site)
if int(use_root):
kwargs = dic... | Opens a SQL shell to the given database, assuming the configured database
and user supports this feature. |
def explode(self, obj):
""" Determine if the object should be exploded. """
if obj in self._done:
return False
result = False
for item in self._explode:
if hasattr(item, '_moId'):
# If it has a _moId it is an instance
if obj._moId =... | Determine if the object should be exploded. |
def extract_command(outputdir, domain_methods, text_domain, keywords,
comment_tags, base_dir, project, version,
msgid_bugs_address):
"""Extracts strings into .pot files
:arg domain: domains to generate strings for or 'all' for all domains
:arg outputdir: output dir f... | Extracts strings into .pot files
:arg domain: domains to generate strings for or 'all' for all domains
:arg outputdir: output dir for .pot files; usually
locale/templates/LC_MESSAGES/
:arg domain_methods: DOMAIN_METHODS setting
:arg text_domain: TEXT_DOMAIN settings
:arg keywords: KEYWORDS ... |
def create_concept_scheme(rdf, ns, lname=''):
"""Create a skos:ConceptScheme in the model and return it."""
ont = None
if not ns:
# see if there's an owl:Ontology and use that to determine namespace
onts = list(rdf.subjects(RDF.type, OWL.Ontology))
if len(onts) > 1:
onts... | Create a skos:ConceptScheme in the model and return it. |
def restore(self, bAsync = True):
"""
Unmaximize and unminimize the window.
@see: L{maximize}, L{minimize}
@type bAsync: bool
@param bAsync: Perform the request asynchronously.
@raise WindowsError: An error occured while processing this request.
"""
if... | Unmaximize and unminimize the window.
@see: L{maximize}, L{minimize}
@type bAsync: bool
@param bAsync: Perform the request asynchronously.
@raise WindowsError: An error occured while processing this request. |
def data(self, data, part=False, dataset=''):
"""Parse data and update links.
Parameters
----------
data
Data to parse.
part : `bool`, optional
True if data is partial (default: `False`).
dataset : `str`, optional
Dataset key prefix (d... | Parse data and update links.
Parameters
----------
data
Data to parse.
part : `bool`, optional
True if data is partial (default: `False`).
dataset : `str`, optional
Dataset key prefix (default: ''). |
def get_handler(self, *args, **options):
"""
Returns the static files serving handler wrapping the default handler,
if static files should be served. Otherwise just returns the default
handler.
"""
handler = super(Command, self).get_handler(*args, **options)
inse... | Returns the static files serving handler wrapping the default handler,
if static files should be served. Otherwise just returns the default
handler. |
def delete(self):
"""
If a dynamic version, delete it the standard way and remove it from the
inventory, else delete all dynamic versions.
"""
if self.dynamic_version_of is None:
self._delete_dynamic_versions()
else:
super(DynamicFieldMixin, self).... | If a dynamic version, delete it the standard way and remove it from the
inventory, else delete all dynamic versions. |
def normalize_feature_objects(feature_objs):
"""Takes an iterable of GeoJSON-like Feature mappings or
an iterable of objects with a geo interface and
normalizes it to the former."""
for obj in feature_objs:
if hasattr(obj, "__geo_interface__") and \
'type' in obj.__geo_interface__.key... | Takes an iterable of GeoJSON-like Feature mappings or
an iterable of objects with a geo interface and
normalizes it to the former. |
def calendar(type='holiday', direction='next', last=1, startDate=None, token='', version=''):
'''This call allows you to fetch a number of trade dates or holidays from a given date. For example, if you want the next trading day, you would call /ref-data/us/dates/trade/next/1.
https://iexcloud.io/docs/api/#u-s-... | This call allows you to fetch a number of trade dates or holidays from a given date. For example, if you want the next trading day, you would call /ref-data/us/dates/trade/next/1.
https://iexcloud.io/docs/api/#u-s-exchanges
8am, 9am, 12pm, 1pm UTC daily
Args:
type (string); "holiday" or "trade"
... |
def install_binary_dist(self, members, virtualenv_compatible=True, prefix=None,
python=None, track_installed_files=False):
"""
Install a binary distribution into the given prefix.
:param members: An iterable of tuples with two values each:
1.... | Install a binary distribution into the given prefix.
:param members: An iterable of tuples with two values each:
1. A :class:`tarfile.TarInfo` object.
2. A file-like object.
:param prefix: The "prefix" under which the requirements should be
... |
def as_dict(self):
"""
As in :Class: `pymatgen.core.Molecule` except
with using `to_dict_of_dicts` from NetworkX
to store graph information.
"""
d = {"@module": self.__class__.__module__,
"@class": self.__class__.__name__,
"molecule": self.molec... | As in :Class: `pymatgen.core.Molecule` except
with using `to_dict_of_dicts` from NetworkX
to store graph information. |
def _set_defined_policy(self, v, load=False):
"""
Setter method for defined_policy, mapped from YANG variable /rbridge_id/secpolicy/defined_policy (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_defined_policy is considered as a private
method. Backends l... | Setter method for defined_policy, mapped from YANG variable /rbridge_id/secpolicy/defined_policy (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_defined_policy is considered as a private
method. Backends looking to populate this variable should
do so via call... |
def load_datafile(name, search_path, codecs=get_codecs(), **kwargs):
"""
find datafile and load them from codec
TODO only does the first one
kwargs:
default = if passed will return that on failure instead of throwing
"""
return munge.load_datafile(name, search_path, codecs, **kwargs) | find datafile and load them from codec
TODO only does the first one
kwargs:
default = if passed will return that on failure instead of throwing |
def stats(self, request, *args, **kwargs):
"""
To get count of alerts per severities - run **GET** request against */api/alerts/stats/*.
This endpoint supports all filters that are available for alerts list (*/api/alerts/*).
Response example:
.. code-block:: javascript
... | To get count of alerts per severities - run **GET** request against */api/alerts/stats/*.
This endpoint supports all filters that are available for alerts list (*/api/alerts/*).
Response example:
.. code-block:: javascript
{
"debug": 2,
"error": 1,
... |
def _get_snmpv3(self, oid):
"""
Try to send an SNMP GET operation using SNMPv3 for the specified OID.
Parameters
----------
oid : str
The SNMP OID that you want to get.
Returns
-------
string : str
The string as part of the value ... | Try to send an SNMP GET operation using SNMPv3 for the specified OID.
Parameters
----------
oid : str
The SNMP OID that you want to get.
Returns
-------
string : str
The string as part of the value from the OID you are trying to retrieve. |
def to_frame(self, data, state):
"""
Extract a single frame from the data buffer. The consumed
data should be removed from the buffer. If no complete frame
can be read, must raise a ``NoFrames`` exception.
:param data: A ``bytearray`` instance containing the data so
... | Extract a single frame from the data buffer. The consumed
data should be removed from the buffer. If no complete frame
can be read, must raise a ``NoFrames`` exception.
:param data: A ``bytearray`` instance containing the data so
far read.
:param state: An instanc... |
def arch(self):
"""
Return an architecture for this task.
:returns: an arch string (eg "noarch", or "ppc64le"), or None this task
has no architecture associated with it.
"""
if self.method in ('buildArch', 'createdistrepo', 'livecd'):
return self.pa... | Return an architecture for this task.
:returns: an arch string (eg "noarch", or "ppc64le"), or None this task
has no architecture associated with it. |
def filter_params(self, value):
""" return filtering params """
if value is None:
return {}
val_min = value.get('min', None)
val_max = value.get('max', None)
params = {}
if val_min == val_max:
return { self.target: val_min }
key = self.t... | return filtering params |
def grant_usage_install_privileges(cls, cur, schema_name, roles):
"""
Sets search path
"""
cur.execute('GRANT SELECT, INSERT, UPDATE, DELETE ON ALL TABLES IN SCHEMA {0} TO {1};'
'GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA {0} TO {1};'
'GRANT USAGE, S... | Sets search path |
def parent(self, parent):
"""Setter for the parent state of the state element
:param rafcon.core.states.state.State parent: Parent state or None
"""
if parent is None:
self._parent = None
else:
from rafcon.core.states.state import State
assert... | Setter for the parent state of the state element
:param rafcon.core.states.state.State parent: Parent state or None |
def create_columns(self):
"""For each column in file create a TransactionCsvImportColumn"""
reader = self._get_csv_reader()
headings = six.next(reader)
try:
examples = six.next(reader)
except StopIteration:
examples = []
found_fields = set()
... | For each column in file create a TransactionCsvImportColumn |
def cmdline(argv=sys.argv[1:]):
"""
Script for rebasing a text file
"""
parser = ArgumentParser(
description='Rebase a text from his stop words')
parser.add_argument('language', help='The language used to rebase')
parser.add_argument('source', help='Text file to rebase')
options = pa... | Script for rebasing a text file |
def extract_datetime_hour(cls, datetime_str):
"""
Tries to extract a `datetime` object from the given string, including only hours.
Raises `DateTimeFormatterException` if the extraction fails.
"""
if not datetime_str:
raise DateTimeFormatterException('datetime_str mu... | Tries to extract a `datetime` object from the given string, including only hours.
Raises `DateTimeFormatterException` if the extraction fails. |
def fromML(vec):
"""
Convert a vector from the new mllib-local representation.
This does NOT copy the data; it copies references.
:param vec: a :py:class:`pyspark.ml.linalg.Vector`
:return: a :py:class:`pyspark.mllib.linalg.Vector`
.. versionadded:: 2.0.0
"""
... | Convert a vector from the new mllib-local representation.
This does NOT copy the data; it copies references.
:param vec: a :py:class:`pyspark.ml.linalg.Vector`
:return: a :py:class:`pyspark.mllib.linalg.Vector`
.. versionadded:: 2.0.0 |
def get_resource_by_agent(self, agent_id):
"""Gets the ``Resource`` associated with the given agent.
arg: agent_id (osid.id.Id): ``Id`` of the ``Agent``
return: (osid.resource.Resource) - associated resource
raise: NotFound - ``agent_id`` is not found
raise: NullArgument - ... | Gets the ``Resource`` associated with the given agent.
arg: agent_id (osid.id.Id): ``Id`` of the ``Agent``
return: (osid.resource.Resource) - associated resource
raise: NotFound - ``agent_id`` is not found
raise: NullArgument - ``agent_id`` is ``null``
raise: OperationFail... |
def clear_max_string_length(self):
"""stub"""
if (self.get_max_string_length_metadata().is_read_only() or
self.get_max_string_length_metadata().is_required()):
raise NoAccess()
self.my_osid_object_form._my_map['maxStringLength'] = \
self.get_max_string_len... | stub |
def update(context, id, export_control, active):
"""update(context, id, export_control, active)
Update a component
>>> dcictl component-update [OPTIONS]
:param string id: ID of the component [required]
:param boolean export-control: Set the component visible for users
:param boolean active: S... | update(context, id, export_control, active)
Update a component
>>> dcictl component-update [OPTIONS]
:param string id: ID of the component [required]
:param boolean export-control: Set the component visible for users
:param boolean active: Set the component in the active state |
def all_state_variables_read(self):
""" recursive version of variables_read
"""
if self._all_state_variables_read is None:
self._all_state_variables_read = self._explore_functions(
lambda x: x.state_variables_read)
return self._all_state_variables_read | recursive version of variables_read |
def sendConnect(self, data):
"""Send a CONNECT command to the broker
:param data: List of other broker main socket URL"""
# Imported dynamically - Not used if only one broker
if self.backend == 'ZMQ':
import zmq
self.context = zmq.Context()
self.so... | Send a CONNECT command to the broker
:param data: List of other broker main socket URL |
def update_peer(self,
current_name,
new_name, new_url, username, password, peer_type="REPLICATION"):
"""
Update a replication peer.
@param current_name: The name of the peer to updated.
@param new_name: The new name for the peer.
@param new_url: The new url for the peer.
@param user... | Update a replication peer.
@param current_name: The name of the peer to updated.
@param new_name: The new name for the peer.
@param new_url: The new url for the peer.
@param username: The admin username to use to setup the remote side of the peer connection.
@param password: The password of the adm... |
def bake_content(request):
"""Invoke the baking process - trigger post-publication"""
ident_hash = request.matchdict['ident_hash']
try:
id, version = split_ident_hash(ident_hash)
except IdentHashError:
raise httpexceptions.HTTPNotFound()
if not version:
raise httpexceptions.... | Invoke the baking process - trigger post-publication |
def add_tcp_callback(port, callback, threaded_callback=False):
"""
Adds a unix socket server callback, which will be invoked when values
arrive from a connected socket client. The callback must accept two
parameters, eg. ``def callback(socket, msg)``.
"""
_rpio.add_tcp_callback(port, callback, t... | Adds a unix socket server callback, which will be invoked when values
arrive from a connected socket client. The callback must accept two
parameters, eg. ``def callback(socket, msg)``. |
def put(self, locator = None, component = None):
"""
Puts a new reference into this reference map.
:param locator: a component reference to be added.
:param component: a locator to find the reference by.
"""
if component == None:
raise Exception("Component c... | Puts a new reference into this reference map.
:param locator: a component reference to be added.
:param component: a locator to find the reference by. |
def shift_and_scale(matrix, shift, scale):
""" Shift and scale matrix so its minimum value is placed at `shift` and
its maximum value is scaled to `scale` """
zeroed = matrix - matrix.min()
scaled = (scale - shift) * (zeroed / zeroed.max())
return scaled + shift | Shift and scale matrix so its minimum value is placed at `shift` and
its maximum value is scaled to `scale` |
def strip_rate(self, idx):
"""strip(1 byte) radiotap.datarate
note that, unit of this field is originally 0.5 Mbps
:idx: int
:return: int
idx
:return: double
rate in terms of Mbps
"""
val, = struct.unpack_from('<B', self._rtap, idx)
... | strip(1 byte) radiotap.datarate
note that, unit of this field is originally 0.5 Mbps
:idx: int
:return: int
idx
:return: double
rate in terms of Mbps |
def nullspace(A, atol=1e-13, rtol=0):
"""Compute an approximate basis for the nullspace of A.
The algorithm used by this function is based on the singular value
decomposition of `A`.
Parameters
----------
A : numpy.ndarray
A should be at most 2-D. A 1-D array with length k will be trea... | Compute an approximate basis for the nullspace of A.
The algorithm used by this function is based on the singular value
decomposition of `A`.
Parameters
----------
A : numpy.ndarray
A should be at most 2-D. A 1-D array with length k will be treated
as a 2-D with shape (1, k)
at... |
def get_wulff_shape(self, material_id):
"""
Constructs a Wulff shape for a material.
Args:
material_id (str): Materials Project material_id, e.g. 'mp-123'.
Returns:
pymatgen.analysis.wulff.WulffShape
"""
from pymatgen.symmetry.analyzer import Spac... | Constructs a Wulff shape for a material.
Args:
material_id (str): Materials Project material_id, e.g. 'mp-123'.
Returns:
pymatgen.analysis.wulff.WulffShape |
def mimetype_icon(path, fallback=None):
"""
Tries to create an icon from theme using the file mimetype.
E.g.::
return self.mimetype_icon(
path, fallback=':/icons/text-x-python.png')
:param path: file path for which the icon must be created
:param fa... | Tries to create an icon from theme using the file mimetype.
E.g.::
return self.mimetype_icon(
path, fallback=':/icons/text-x-python.png')
:param path: file path for which the icon must be created
:param fallback: fallback icon path (qrc or file system)
:ret... |
def unique_filename(**kwargs):
"""Create new filename guaranteed not to exist previously
Use mkstemp to create the file, then remove it and return the name
If dir is specified, the tempfile will be created in the path specified
otherwise the file will be created in a directory following this scheme:
... | Create new filename guaranteed not to exist previously
Use mkstemp to create the file, then remove it and return the name
If dir is specified, the tempfile will be created in the path specified
otherwise the file will be created in a directory following this scheme:
:file:'/tmp/inasafe/<dd-mm-yyyy>/<... |
def naiveWordAlignment(tg, utteranceTierName, wordTierName, isleDict,
phoneHelperTierName=None,
removeOverlappingSegments=False):
'''
Performs naive alignment for utterances in a textgrid
Naive alignment gives each segment equal duration. Word duration is
... | Performs naive alignment for utterances in a textgrid
Naive alignment gives each segment equal duration. Word duration is
determined by the duration of an utterance and the number of phones in
the word.
By 'utterance' I mean a string of words separated by a space bounded
in time eg (0.5, ... |
def configure_logging(logger_name, filename=None):
""" Configure logging and return the named logger and the location of the logging configuration file loaded.
This function expects a Splunk app directory structure::
<app-root>
bin
...
default
..... | Configure logging and return the named logger and the location of the logging configuration file loaded.
This function expects a Splunk app directory structure::
<app-root>
bin
...
default
...
local
...
This function ... |
def reporter(self, analysistype='genesippr'):
"""
Creates a report of the genesippr results
:param analysistype: The variable to use when accessing attributes in the metadata object
"""
logging.info('Creating {} report'.format(analysistype))
# Create a dictionary to link ... | Creates a report of the genesippr results
:param analysistype: The variable to use when accessing attributes in the metadata object |
def parse_pv(header):
"""
Parses the PV array from an astropy FITS header.
Args:
header: astropy.io.fits.header.Header
The header containing the PV values.
Returns:
cd: 2d array (list(list(float))
[[PV1_0, PV1_1, ... PV1_N], [PV2_0, PV2_1, ... PV2_N]]
Note that N de... | Parses the PV array from an astropy FITS header.
Args:
header: astropy.io.fits.header.Header
The header containing the PV values.
Returns:
cd: 2d array (list(list(float))
[[PV1_0, PV1_1, ... PV1_N], [PV2_0, PV2_1, ... PV2_N]]
Note that N depends on the order of the fit. Fo... |
def download(url):
"""Uses requests to download an URL, maybe from a file"""
session = requests.Session()
session.mount('file://', FileAdapter())
try:
res = session.get(url)
except requests.exceptions.ConnectionError as e:
raise e
res.raise_for_status()
return res | Uses requests to download an URL, maybe from a file |
def get_nets_radb(self, response, is_http=False):
"""
The function for parsing network blocks from ASN origin data.
Args:
response (:obj:`str`): The response from the RADB whois/http
server.
is_http (:obj:`bool`): If the query is RADB HTTP instead of whoi... | The function for parsing network blocks from ASN origin data.
Args:
response (:obj:`str`): The response from the RADB whois/http
server.
is_http (:obj:`bool`): If the query is RADB HTTP instead of whois,
set to True. Defaults to False.
Returns:
... |
def _ReadTab(Year):
'''
Reads OMNI data tab with Tsyganenko parameters.
Input:
Year: Integer year to read
'''
dtype_in = [('Year','int32'),('DayNo','int32'),('Hr','int32'),('Mn','int32'),
('Bx','float32'),('By','float32'),('Bz','float32'),
('Vx','float32'),('Vy','float32'),('Vz','float32'),
('Den','... | Reads OMNI data tab with Tsyganenko parameters.
Input:
Year: Integer year to read |
def set_firewall_settings(profile,
inbound=None,
outbound=None,
store='local'):
'''
Set the firewall inbound/outbound settings for the specified profile and
store
Args:
profile (str):
The firewall profile... | Set the firewall inbound/outbound settings for the specified profile and
store
Args:
profile (str):
The firewall profile to configure. Valid options are:
- domain
- public
- private
inbound (str):
The inbound setting. If ``None`` is... |
def init_app(self, app):
"""Flask application initialization."""
super(InvenioIIIFAPI, self).init_app(app)
api = Api(app=app)
self.iiif_ext.init_restful(api, prefix=app.config['IIIF_API_PREFIX']) | Flask application initialization. |
def trace_memory_start(self):
""" Starts measuring memory consumption """
self.trace_memory_clean_caches()
objgraph.show_growth(limit=30)
gc.collect()
self._memory_start = self.worker.get_memory()["total"] | Starts measuring memory consumption |
def on_fork():
"""
Should be called by any program integrating Mitogen each time the process
is forked, in the context of the new child.
"""
reset_logging_framework() # Must be first!
fixup_prngs()
mitogen.core.Latch._on_fork()
mitogen.core.Side._on_fork()
mitogen.core.ExternalConte... | Should be called by any program integrating Mitogen each time the process
is forked, in the context of the new child. |
def _JzStaeckelIntegrandSquared(v,E,Lz,I3V,delta,u0,cosh2u0,sinh2u0,
potu0pi2,pot):
#potu0pi2= potentialStaeckel(u0,nu.pi/2.,pot,delta)
"""The J_z integrand: p_v(v)/2/delta^2"""
sin2v= nu.sin(v)**2.
dV= cosh2u0*potu0pi2\
-(sinh2u0+sin2v)*potentialStaeckel(u0,v,pot... | The J_z integrand: p_v(v)/2/delta^2 |
def swipe_right(self, steps=10, *args, **selectors):
"""
Swipe the UI object with *selectors* from center to right
See `Swipe Left` for more details.
"""
self.device(**selectors).swipe.right(steps=steps) | Swipe the UI object with *selectors* from center to right
See `Swipe Left` for more details. |
def add_table(self, table, row=None, col=0, row_spaces=1):
"""
Adds a table to the worksheet at (row, col).
Return the (row, col) where the table has been put.
:param xltable.Table table: Table to add to the worksheet.
:param int row: Row to start the table at (defaults to the n... | Adds a table to the worksheet at (row, col).
Return the (row, col) where the table has been put.
:param xltable.Table table: Table to add to the worksheet.
:param int row: Row to start the table at (defaults to the next free row).
:param int col: Column to start the table at.
:p... |
def query(self):
"""
Request object passed to datasource.query function:
{
'timezone': 'browser',
'panelId': 38,
'range': {
'from': '2018-08-29T02:38:09.633Z',
'to': '2018-08-29T03:38:09.633Z',
'raw': {'from': '... | Request object passed to datasource.query function:
{
'timezone': 'browser',
'panelId': 38,
'range': {
'from': '2018-08-29T02:38:09.633Z',
'to': '2018-08-29T03:38:09.633Z',
'raw': {'from': 'now-1h', 'to': 'now'}
},
... |
def start(self, host, nornir):
"""
Run the task for the given host.
Arguments:
host (:obj:`nornir.core.inventory.Host`): Host we are operating with. Populated right
before calling the ``task``
nornir(:obj:`nornir.core.Nornir`): Populated right before callin... | Run the task for the given host.
Arguments:
host (:obj:`nornir.core.inventory.Host`): Host we are operating with. Populated right
before calling the ``task``
nornir(:obj:`nornir.core.Nornir`): Populated right before calling
the ``task``
Returns:
... |
def _format_arg_list(args, variadic=False):
"""Format a list of arguments for pretty printing.
:param a: list of arguments.
:type a: list
:param v: tell if the function accepts variadic arguments
:type v: bool
"""
def sugar(s):
"""Shorten strings that are too long for decency."""
... | Format a list of arguments for pretty printing.
:param a: list of arguments.
:type a: list
:param v: tell if the function accepts variadic arguments
:type v: bool |
def info(self):
""" retreive metadata and currenct price data """
url = "{}/v7/finance/quote?symbols={}".format(
self._base_url, self.ticker)
r = _requests.get(url=url).json()["quoteResponse"]["result"]
if len(r) > 0:
return r[0]
return {} | retreive metadata and currenct price data |
def fonts(self):
"""Generator yielding all fonts of this typeface
Yields:
Font: the next font in this typeface
"""
for width in (w for w in FontWidth if w in self):
for slant in (s for s in FontSlant if s in self[width]):
for weight in (w for w i... | Generator yielding all fonts of this typeface
Yields:
Font: the next font in this typeface |
def parts_to_url(parts=None, scheme=None, netloc=None, path=None, query=None, fragment=None):
""" Build url urlunsplit style, but optionally handle path as a list and/or query as a dict """
if isinstance(parts, _urllib_parse.SplitResult):
scheme, netloc, path, query, fragment = parts
elif parts and... | Build url urlunsplit style, but optionally handle path as a list and/or query as a dict |
def _processDocstring(self, node, tail='', **kwargs):
"""
Handles a docstring for functions, classes, and modules.
Basically just figures out the bounds of the docstring and sends it
off to the parser to do the actual work.
"""
typeName = type(node).__name__
# Mo... | Handles a docstring for functions, classes, and modules.
Basically just figures out the bounds of the docstring and sends it
off to the parser to do the actual work. |
def backup_key(self, name, mount_point=DEFAULT_MOUNT_POINT):
"""Return a plaintext backup of a named key.
The backup contains all the configuration data and keys of all the versions along with the HMAC key. The
response from this endpoint can be used with the /restore endpoint to restore the ke... | Return a plaintext backup of a named key.
The backup contains all the configuration data and keys of all the versions along with the HMAC key. The
response from this endpoint can be used with the /restore endpoint to restore the key.
Supported methods:
GET: /{mount_point}/backup/{n... |
def _set_overlay_policy_map(self, v, load=False):
"""
Setter method for overlay_policy_map, mapped from YANG variable /overlay_policy_map (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_overlay_policy_map is considered as a private
method. Backends looking to ... | Setter method for overlay_policy_map, mapped from YANG variable /overlay_policy_map (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_overlay_policy_map is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._s... |
def remove_listener(self, callback):
    """Remove every registered listener that uses the given callback.

    :param callback: the callback whose listener entries should be
        removed from ``self.listeners``.
    """
    # Materialize the matches BEFORE mutating the list: in Python 3,
    # filter() is a lazy iterator over self.listeners, and calling
    # self.listeners.remove() while iterating it shifts the remaining
    # items left, silently skipping consecutive matching entries.
    matches = [entry for entry in self.listeners
               if entry['callback'] == callback]
    for entry in matches:
        self.listeners.remove(entry)
def _operation_status_message(self):
"""Returns the most relevant status string and failed action.
This string is meant for display only.
Returns:
A printable status string and name of failed action (if any).
"""
msg = None
action = None
if not google_v2_operations.is_done(self._op):... | Returns the most relevant status string and failed action.
This string is meant for display only.
Returns:
A printable status string and name of failed action (if any). |
def get_scoped_package_version_metadata_from_recycle_bin(self, feed_id, package_scope, unscoped_package_name, package_version):
"""GetScopedPackageVersionMetadataFromRecycleBin.
[Preview API] Get information about a scoped package version in the recycle bin.
:param str feed_id: Name or ID of the... | GetScopedPackageVersionMetadataFromRecycleBin.
[Preview API] Get information about a scoped package version in the recycle bin.
:param str feed_id: Name or ID of the feed.
:param str package_scope: Scope of the package (the 'scope' part of @scope/name)
:param str unscoped_package_name: N... |
def removeUserGroups(self, users=None):
"""Removes users' groups.
Args:
users (str): A comma delimited list of user names.
Defaults to ``None``.
Warning:
When ``users`` is not provided (``None``), all users
in the organization... | Removes users' groups.
Args:
users (str): A comma delimited list of user names.
Defaults to ``None``.
Warning:
When ``users`` is not provided (``None``), all users
in the organization will have their groups deleted! |
def initialize(self, timeouts):
""" Bind or connect the nanomsg socket to some address """
# Bind or connect to address
if self.bind is True:
self.socket.bind(self.address)
else:
self.socket.connect(self.address)
# Set send and recv timeouts
self... | Bind or connect the nanomsg socket to some address |
def fromjson(source, *args, **kwargs):
"""
Extract data from a JSON file. The file must contain a JSON array as
the top level object, and each member of the array will be treated as a
row of data. E.g.::
>>> import petl as etl
>>> data = '''
... [{"foo": "a", "bar": 1},
... | Extract data from a JSON file. The file must contain a JSON array as
the top level object, and each member of the array will be treated as a
row of data. E.g.::
>>> import petl as etl
>>> data = '''
... [{"foo": "a", "bar": 1},
... {"foo": "b", "bar": 2},
... {"foo": "c"... |
def _convert_to_indexer(self, obj, axis=None, is_setter=False,
raise_missing=False):
"""
Convert indexing key into something we can use to do actual fancy
indexing on an ndarray
Examples
ix[:5] -> slice(0, 5)
ix[[1,2,3]] -> [1,2,3]
ix[... | Convert indexing key into something we can use to do actual fancy
indexing on an ndarray
Examples
ix[:5] -> slice(0, 5)
ix[[1,2,3]] -> [1,2,3]
ix[['foo', 'bar', 'baz']] -> [i, j, k] (indices of foo, bar, baz)
Going by Zen of Python?
'In the face of ambiguity, re... |
def load_dynamic_config(config_file=DEFAULT_DYNAMIC_CONFIG_FILE):
"""Load and parse dynamic config"""
dynamic_configurations = {}
# Insert config path so we can import it
sys.path.insert(0, path.dirname(path.abspath(config_file)))
try:
config_module = __import__('config')
dynamic_c... | Load and parse dynamic config |
def pull_commits(self, pr_number):
    """Fetch the commits that belong to a pull request.

    :param pr_number: number of the pull request to query.
    :returns: the items produced by ``fetch_items`` for the
        ``pulls/<pr_number>/commits`` resource.
    """
    resource = urijoin("pulls", str(pr_number), "commits")
    # Page size is capped by the module-level PER_PAGE setting.
    return self.fetch_items(resource, {'per_page': PER_PAGE})
def get_free_diskbytes(dir_):
r"""
Args:
dir_ (str):
Returns:
int: bytes_ folder/drive free space (in bytes)
References::
http://stackoverflow.com/questions/51658/cross-platform-space-remaining-on-volume-using-python
http://linux.die.net/man/2/statvfs
CommandLine:
... | r"""
Args:
dir_ (str):
Returns:
int: bytes_ folder/drive free space (in bytes)
References::
http://stackoverflow.com/questions/51658/cross-platform-space-remaining-on-volume-using-python
http://linux.die.net/man/2/statvfs
CommandLine:
python -m utool.util_cplat... |
def paginate_queryset(self, queryset, page_size):
"""
Returns tuple containing paginator instance, page instance,
object list, and whether there are other pages.
:param queryset: the queryset instance to paginate.
:param page_size: the number of instances per page.
:rtyp... | Returns tuple containing paginator instance, page instance,
object list, and whether there are other pages.
:param queryset: the queryset instance to paginate.
:param page_size: the number of instances per page.
:rtype: tuple. |
def get_daemon_stats(self, details=False):
"""Increase the stats provided by the Daemon base class
:return: stats dictionary
:rtype: dict
"""
# call the daemon one
res = super(BaseSatellite, self).get_daemon_stats(details=details)
counters = res['counters']
... | Increase the stats provided by the Daemon base class
:return: stats dictionary
:rtype: dict |
def extend_instance(instance, *bases, **kwargs):
"""
Apply subclass (mixin) to a class object or its instance
By default, the mixin is placed at the start of bases
to ensure its called first as per MRO. If you wish to
have it injected last, which is useful for monkeypatching,
then you can speci... | Apply subclass (mixin) to a class object or its instance
By default, the mixin is placed at the start of bases
to ensure its called first as per MRO. If you wish to
have it injected last, which is useful for monkeypatching,
then you can specify 'last=True'. See here:
http://stackoverflow.com/a/1001... |
def copy_figure(self):
    """Copy figure from figviewer to clipboard."""
    viewer = self.figviewer
    if not viewer:
        return
    # Only copy when the canvas actually holds a figure.
    if viewer.figcanvas.fig:
        viewer.figcanvas.copy_figure()
def publishing(self, service):
"""
the purpose of this tasks is to get the data from the cache
then publish them
:param service: service object where we will publish
:type service: object
"""
# flag to know if we have to update
to_update = ... | the purpose of this tasks is to get the data from the cache
then publish them
:param service: service object where we will publish
:type service: object |
def galactic_to_equatorial(gl, gb):
'''This converts from galactic coords to equatorial coordinates.
Parameters
----------
gl : float or array-like
Galactic longitude values(s) in decimal degrees.
gb : float or array-like
Galactic latitude value(s) in decimal degrees.
Returns... | This converts from galactic coords to equatorial coordinates.
Parameters
----------
gl : float or array-like
Galactic longitude values(s) in decimal degrees.
gb : float or array-like
Galactic latitude value(s) in decimal degrees.
Returns
-------
tuple of (float, float) o... |
def get_resources(self):
"""Gets all ``Resources``.
In plenary mode, the returned list contains all known resources
or an error results. Otherwise, the returned list may contain
only those resources that are accessible through this session.
return: (osid.resource.ResourceList) ... | Gets all ``Resources``.
In plenary mode, the returned list contains all known resources
or an error results. Otherwise, the returned list may contain
only those resources that are accessible through this session.
return: (osid.resource.ResourceList) - a list of ``Resources``
ra... |
def get_analyzable_segments(workflow, sci_segs, cat_files, out_dir, tags=None):
"""
Get the analyzable segments after applying ini specified vetoes and any
other restrictions on the science segs, e.g. a minimum segment length, or
demanding that only coincident segments are analysed.
Parameters
... | Get the analyzable segments after applying ini specified vetoes and any
other restrictions on the science segs, e.g. a minimum segment length, or
demanding that only coincident segments are analysed.
Parameters
-----------
workflow : Workflow object
Instance of the workflow object
sci_s... |
def _start_workflow_stages(pb: ProcessingBlock, pb_id: str,
workflow_stage_dict: dict,
workflow_stage: WorkflowStage,
docker: DockerSwarmClient):
"""Start a workflow stage by starting a number of docker services.
This function fir... | Start a workflow stage by starting a number of docker services.
This function first assesses if the specified workflow stage can be
started based on its dependencies. If this is found to be the case,
the workflow stage is stared by first resolving and template arguments
in the workflow stage configurat... |
def open_pickle(path: str):
"""Open a pickle and return loaded pickle object.
:type path: str
:param : path: File path to pickle file to be opened.
:rtype : object
"""
try:
with open(path, 'rb') as opened_pickle:
try:
return pickle.load(opened_pickle)
... | Open a pickle and return loaded pickle object.
:type path: str
:param : path: File path to pickle file to be opened.
:rtype : object |
def get_members(self, role=github.GithubObject.NotSet):
"""
:calls: `GET /teams/:id/members <https://developer.github.com/v3/teams/members/#list-team-members>`_
:param role: string
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
... | :calls: `GET /teams/:id/members <https://developer.github.com/v3/teams/members/#list-team-members>`_
:param role: string
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser` |
def decrypt(s, base64=False):
    """Symmetric decryption helper.

    :param s: the ciphertext to decrypt; base64-encoded when
        ``base64`` is True.
    :param base64: whether ``s`` must be base64-decoded before
        decryption. Defaults to False.
    :returns: the plaintext produced by the shared cipher.
    """
    # Use a real conditional instead of the `cond and a or b` idiom:
    # the old form fell back to the raw input whenever b64decode(s)
    # was falsy (e.g. empty ciphertext), silently skipping the decode.
    payload = b64decode(s) if base64 else s
    return _cipher().decrypt(payload)
def _query(self): # pylint: disable=E0202
"""
Query WMI using WMI Query Language (WQL) & parse the results.
Returns: List of WMI objects or `TimeoutException`.
"""
formated_property_names = ",".join(self.property_names)
wql = "Select {property_names} from {class_name}{f... | Query WMI using WMI Query Language (WQL) & parse the results.
Returns: List of WMI objects or `TimeoutException`. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.