code stringlengths 75 104k | docstring stringlengths 1 46.9k |
|---|---|
def add_system(self, system):
'''
Add system to the world.
All systems will be processed on World.process()
system is of type System
'''
if system not in self._systems:
system.set_world(self)
self._systems.append(system)
else:
r... | Add system to the world.
All systems will be processed on World.process()
system is of type System |
def alias_repository(self, repository_id=None, alias_id=None):
"""Adds an ``Id`` to a ``Repository`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Repository`` is determined by the
provider. The new ``Id`` is an alias to the primary ``Id``. If
the alias is a poi... | Adds an ``Id`` to a ``Repository`` for the purpose of creating compatibility.
The primary ``Id`` of the ``Repository`` is determined by the
provider. The new ``Id`` is an alias to the primary ``Id``. If
the alias is a pointer to another repository, it is reassigned
to the given reposito... |
def createRandomObjectDescriptions(numObjects,
numLocationsPerObject,
featurePool=("A", "B", "C")):
"""
Returns {"Object 1": [(0, "C"), (1, "B"), (2, "C"), ...],
"Object 2": [(0, "C"), (1, "A"), (2, "B"), ...]}
"""
return dict(("Ob... | Returns {"Object 1": [(0, "C"), (1, "B"), (2, "C"), ...],
"Object 2": [(0, "C"), (1, "A"), (2, "B"), ...]} |
def simBirth(self,which_agents):
'''
Makes new consumers for the given indices. Initialized variables include aNrm and pLvl, as
well as time variables t_age and t_cycle. Normalized assets and permanent income levels
are drawn from lognormal distributions given by aNrmInitMean and aNrmI... | Makes new consumers for the given indices. Initialized variables include aNrm and pLvl, as
well as time variables t_age and t_cycle. Normalized assets and permanent income levels
are drawn from lognormal distributions given by aNrmInitMean and aNrmInitStd (etc).
Parameters
----------
... |
def generate_openmp_enabled_py(packagename, srcdir='.', disable_openmp=None):
"""
Generate ``package.openmp_enabled.is_openmp_enabled``, which can then be used
to determine, post build, whether the package was built with or without
OpenMP support.
"""
if packagename.lower() == 'astropy':
... | Generate ``package.openmp_enabled.is_openmp_enabled``, which can then be used
to determine, post build, whether the package was built with or without
OpenMP support. |
def interpolate_to_isosurface(level_var, interp_var, level, **kwargs):
r"""Linear interpolation of a variable to a given vertical level from given values.
This function assumes that highest vertical level (lowest pressure) is zeroth index.
A classic use of this function would be to compute the potential te... | r"""Linear interpolation of a variable to a given vertical level from given values.
This function assumes that highest vertical level (lowest pressure) is zeroth index.
A classic use of this function would be to compute the potential temperature on the
dynamic tropopause (2 PVU surface).
Parameters
... |
def getlist(self, key):
"""Returns a Storage value as a list.
If the value is a list it will be returned as-is.
If object is None, an empty list will be returned.
Otherwise, `[value]` will be returned.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>>... | Returns a Storage value as a list.
If the value is a list it will be returned as-is.
If object is None, an empty list will be returned.
Otherwise, `[value]` will be returned.
Example output for a query string of `?x=abc&y=abc&y=def`::
>>> request = Storage()
>>... |
def start(self):
"""Launch this postgres server. If it's already running, do nothing.
If the backing storage directory isn't configured, raise
NotInitializedError.
This method is optional. If you're running in an environment
where the DBMS is provided as part of the basic inf... | Launch this postgres server. If it's already running, do nothing.
If the backing storage directory isn't configured, raise
NotInitializedError.
This method is optional. If you're running in an environment
where the DBMS is provided as part of the basic infrastructure,
you pro... |
def get_process_curses_data(self, p, first, args):
"""Get curses data to display for a process.
- p is the process to display
- first is a tag=True if the process is the first on the list
"""
ret = [self.curse_new_line()]
# CPU
if 'cpu_percent' in p and p['cpu_pe... | Get curses data to display for a process.
- p is the process to display
- first is a tag=True if the process is the first on the list |
def get(self, key, failobj=None, exact=0):
"""Raises exception if key is ambiguous"""
if not exact:
key = self.getfullkey(key,new=1)
return self.data.get(key,failobj) | Raises exception if key is ambiguous |
def parse(self, limit=None):
"""
Override Source.parse()
Parses version and interaction information from CTD
Args:
:param limit (int, optional) limit the number of rows processed
Returns:
:return None
"""
if limit is not None:
LOG.info(... | Override Source.parse()
Parses version and interaction information from CTD
Args:
:param limit (int, optional) limit the number of rows processed
Returns:
:return None |
def hydrate_target(hydrated_struct):
target_adaptor = hydrated_struct.value
"""Construct a HydratedTarget from a TargetAdaptor and hydrated versions of its adapted fields."""
# Hydrate the fields of the adaptor and re-construct it.
hydrated_fields = yield [Get(HydratedField, HydrateableField, fa)
... | Construct a HydratedTarget from a TargetAdaptor and hydrated versions of its adapted fields. |
def eject_media(self):
"""Ejects Virtual Media.
:raises: SushyError, on an error from iLO.
"""
try:
super(VirtualMedia, self).eject_media()
except sushy_exceptions.SushyError:
target_uri = self._get_action_element('eject').target_uri
self._con... | Ejects Virtual Media.
:raises: SushyError, on an error from iLO. |
def RepackTemplate(self,
template_path,
output_dir,
upload=False,
token=None,
sign=False,
context=None,
signed_template=False):
"""Repack binaries based on the configura... | Repack binaries based on the configuration.
We repack all templates in the templates directory. We expect to find only
functioning templates, all other files should be removed. Each template
contains a build.yaml that specifies how it was built and how it should be
repacked.
Args:
template_p... |
def is_choked_turbulent_l(dP, P1, Psat, FF, FL=None, FLP=None, FP=None):
r'''Calculates if a liquid flow in IEC 60534 calculations is critical or
not, for use in IEC 60534 liquid valve sizing calculations.
Either FL may be provided or FLP and FP, depending on the calculation
process.
.. math::
... | r'''Calculates if a liquid flow in IEC 60534 calculations is critical or
not, for use in IEC 60534 liquid valve sizing calculations.
Either FL may be provided or FLP and FP, depending on the calculation
process.
.. math::
\Delta P > F_L^2(P_1 - F_F P_{sat})
.. math::
\Delta P >= \l... |
def colored_pygments_excepthook(type_, value, tb):
"""
References:
https://stackoverflow.com/questions/14775916/color-exceptions-python
CommandLine:
python -m utool.util_inject --test-colored_pygments_excepthook
"""
tbtext = ''.join(traceback.format_exception(type_, value, tb))
... | References:
https://stackoverflow.com/questions/14775916/color-exceptions-python
CommandLine:
python -m utool.util_inject --test-colored_pygments_excepthook |
def read_rels(archive):
"""Read relationships for a workbook"""
xml_source = archive.read(ARC_WORKBOOK_RELS)
tree = fromstring(xml_source)
for element in safe_iterator(tree, '{%s}Relationship' % PKG_REL_NS):
rId = element.get('Id')
pth = element.get("Target")
typ = element.get('T... | Read relationships for a workbook |
def removeApplicationManifest(self, pchApplicationManifestFullPath):
"""Removes an application manifest from the list to load when building the list of installed applications."""
fn = self.function_table.removeApplicationManifest
result = fn(pchApplicationManifestFullPath)
return result | Removes an application manifest from the list to load when building the list of installed applications. |
def messages(request, year=None, month=None, day=None,
template="gnotty/messages.html"):
"""
Show messages for the given query or day.
"""
query = request.REQUEST.get("q")
prev_url, next_url = None, None
messages = IRCMessage.objects.all()
if hide_joins_and_leaves(request):
... | Show messages for the given query or day. |
def ChangeUserStatus(self, Status):
"""Changes the online status for the current user.
:Parameters:
Status : `enums`.cus*
New online status for the user.
:note: This function waits until the online status changes. Alternatively, use the
`CurrentUserStatus` ... | Changes the online status for the current user.
:Parameters:
Status : `enums`.cus*
New online status for the user.
:note: This function waits until the online status changes. Alternatively, use the
`CurrentUserStatus` property to perform an immediate change of stat... |
def unlock_key(key_name,
stash,
passphrase,
backend):
"""Unlock a key to allow it to be modified, deleted or purged
`KEY_NAME` is the name of the key to unlock
"""
stash = _get_stash(backend, stash, passphrase)
try:
click.echo('Unlocking key...'... | Unlock a key to allow it to be modified, deleted or purged
`KEY_NAME` is the name of the key to unlock |
def encodeSplines(x, n_bases=10, spline_order=3, start=None, end=None, warn=True):
"""**Deprecated**. Function version of the transformer class `EncodeSplines`.
Get B-spline base-function expansion
# Details
First, the knots for B-spline basis functions are placed
equidistantly on the [star... | **Deprecated**. Function version of the transformer class `EncodeSplines`.
Get B-spline base-function expansion
# Details
First, the knots for B-spline basis functions are placed
equidistantly on the [start, end] range.
(inferred from the data if None). Next, b_n(x) value is
is ... |
def dimap_V(D, I):
"""
FUNCTION TO MAP DECLINATION, INCLINATIONS INTO EQUAL AREA PROJECTION, X,Y
Usage: dimap_V(D, I)
D and I are both numpy arrays
"""
# GET CARTESIAN COMPONENTS OF INPUT DIRECTION
DI = np.array([D, I]).transpose()
X = dir2cart(DI).transpose()
# CALCULATE THE X,Y C... | FUNCTION TO MAP DECLINATION, INCLINATIONS INTO EQUAL AREA PROJECTION, X,Y
Usage: dimap_V(D, I)
D and I are both numpy arrays |
def load_model(path):
"""
Load a saved H2O model from disk. (Note that ensemble binary models can now be loaded using this method.)
:param path: the full path of the H2O Model to be imported.
:returns: an :class:`H2OEstimator` object
:examples:
>>> path = h2o.save_model(my_model, dir=my_p... | Load a saved H2O model from disk. (Note that ensemble binary models can now be loaded using this method.)
:param path: the full path of the H2O Model to be imported.
:returns: an :class:`H2OEstimator` object
:examples:
>>> path = h2o.save_model(my_model, dir=my_path)
>>> h2o.load_model(pa... |
def _escalation_rules_to_string(escalation_rules):
'convert escalation_rules dict to a string for comparison'
result = ''
for rule in escalation_rules:
result += 'escalation_delay_in_minutes: {0} '.format(rule['escalation_delay_in_minutes'])
for target in rule['targets']:
result ... | convert escalation_rules dict to a string for comparison |
def merge(self, workdir, ddb_files, out_ddb, description, delete_source_ddbs=True):
"""Merge DDB file, return the absolute path of the new database in workdir."""
# We work with absolute paths.
ddb_files = [os.path.abspath(s) for s in list_strings(ddb_files)]
if not os.path.isabs(out_ddb... | Merge DDB file, return the absolute path of the new database in workdir. |
def initialize_zones(self):
"""initialize receiver zones"""
zone_list = self.location_info.get('zone_list', {'main': True})
for zone_id in zone_list:
if zone_list[zone_id]: # Location setup is valid
self.zones[zone_id] = Zone(self, zone_id=zone_id)
else:... | initialize receiver zones |
def get_root():
"""Get the project root directory.
We require that all commands are run from the project root, i.e. the
directory that contains setup.py, setup.cfg, and versioneer.py .
"""
root = os.path.realpath(os.path.abspath(os.getcwd()))
setup_py = os.path.join(root, "setup.py")
versio... | Get the project root directory.
We require that all commands are run from the project root, i.e. the
directory that contains setup.py, setup.cfg, and versioneer.py . |
def valueReadPreprocessor(valueString, replaceParamsFile=None):
"""
Apply global pre-processing to values during reading throughout the project.
Args:
valueString (str): String representing the value to be preprocessed.
replaceParamsFile (gsshapy.orm.ReplaceParamFile, optional): Instance of... | Apply global pre-processing to values during reading throughout the project.
Args:
valueString (str): String representing the value to be preprocessed.
replaceParamsFile (gsshapy.orm.ReplaceParamFile, optional): Instance of the replace param file. Required if
replacement variables are i... |
def deletescript(self, name):
"""Delete a script from the server
See MANAGESIEVE specifications, section 2.10
:param name: script's name
:rtype: boolean
"""
code, data = self.__send_command(
"DELETESCRIPT", [name.encode("utf-8")])
if code == "OK":
... | Delete a script from the server
See MANAGESIEVE specifications, section 2.10
:param name: script's name
:rtype: boolean |
def parse_serialdiff(sd_dict):
"helper for translate_check"
if isinstance(sd_dict,list):
if len(sd_dict)!=2 or sd_dict[0]!='checkstale': raise NotImplementedError(sd_dict[0],len(sd_dict))
return CheckStale(sd_dict[1])
if isinstance(sd_dict['deltas'],list): # i.e. for VHString the whole deltas field is a s... | helper for translate_check |
def get_log_entry_ids_by_log(self, log_id):
"""Gets the list of ``LogEntry`` ``Ids`` associated with a ``Log``.
arg: log_id (osid.id.Id): ``Id`` of a ``Log``
return: (osid.id.IdList) - list of related logEntry ``Ids``
raise: NotFound - ``log_id`` is not found
raise: NullAr... | Gets the list of ``LogEntry`` ``Ids`` associated with a ``Log``.
arg: log_id (osid.id.Id): ``Id`` of a ``Log``
return: (osid.id.IdList) - list of related logEntry ``Ids``
raise: NotFound - ``log_id`` is not found
raise: NullArgument - ``log_id`` is ``null``
raise: Operati... |
def raw(node):
"""
Add some raw html (possibly as a block)
"""
o = nodes.raw(node.literal, node.literal, format='html')
if node.sourcepos is not None:
o.line = node.sourcepos[0][0]
for n in MarkDown(node):
o += n
return o | Add some raw html (possibly as a block) |
def _read_header(filename):
"""Read the text header for each file
Parameters
----------
channel_file : Path
path to single filename with the header
Returns
-------
dict
header
"""
with filename.open('rb') as f:
h = f.read(HDR_LENGTH).decode()
header... | Read the text header for each file
Parameters
----------
channel_file : Path
path to single filename with the header
Returns
-------
dict
header |
def custom_config(request):
"""
Save user-specific configuration property.
POST parameters (JSON keys):
app_name: application name for which the configuration property is
valid (e.g., proso_models)
key: name of the property (e.g., predictive_model.class)
value: value of ... | Save user-specific configuration property.
POST parameters (JSON keys):
app_name: application name for which the configuration property is
valid (e.g., proso_models)
key: name of the property (e.g., predictive_model.class)
value: value of the property (number, string, boolean, .... |
def reg_load(self, reg, value):
"""
Load a value into a register. The value can be a string or binary (in
which case the value is passed to :meth:`alloc_data`), another
:class:`Register`, an :class:`Offset` or :class:`Buffer`, an integer
immediate, a ``list`` or ``tuple`` or a sy... | Load a value into a register. The value can be a string or binary (in
which case the value is passed to :meth:`alloc_data`), another
:class:`Register`, an :class:`Offset` or :class:`Buffer`, an integer
immediate, a ``list`` or ``tuple`` or a syscall invocation.
Arguments:
re... |
def get_waveform_filter_norm(approximant, psd, length, delta_f, f_lower):
""" Return the normalization vector for the approximant
"""
if approximant in _filter_norms:
return _filter_norms[approximant](psd, length, delta_f, f_lower)
else:
return None | Return the normalization vector for the approximant |
def handle_message(self, ch, method, properties, body):
"""
this is a pika.basic_consumer callback
handles client inputs, runs appropriate workflows and views
Args:
ch: amqp channel
method: amqp method
properties:
body: message body
... | this is a pika.basic_consumer callback
handles client inputs, runs appropriate workflows and views
Args:
ch: amqp channel
method: amqp method
properties:
body: message body |
def make_accessors(self, columns):
""" Accessors can be numeric keys for sequence row data, string keys
for mapping row data, or a callable function. For numeric and string
accessors they can be inside a 2 element tuple where the 2nd value is
the default value; Similar to dict.get(look... | Accessors can be numeric keys for sequence row data, string keys
for mapping row data, or a callable function. For numeric and string
accessors they can be inside a 2 element tuple where the 2nd value is
the default value; Similar to dict.get(lookup, default). |
def get_id(self, natural_key, enhancement=None):
"""
Returns the technical ID for a natural key or None if the given natural key is not valid.
:param T natural_key: The natural key.
:param T enhancement: Enhancement data of the dimension row.
:rtype: int|None
"""
... | Returns the technical ID for a natural key or None if the given natural key is not valid.
:param T natural_key: The natural key.
:param T enhancement: Enhancement data of the dimension row.
:rtype: int|None |
def write(pkg_file, pkg_rels, parts):
"""
Write a physical package (.pptx file) to *pkg_file* containing
*pkg_rels* and *parts* and a content types stream based on the
content types of the parts.
"""
phys_writer = PhysPkgWriter(pkg_file)
PackageWriter._write_conte... | Write a physical package (.pptx file) to *pkg_file* containing
*pkg_rels* and *parts* and a content types stream based on the
content types of the parts. |
def spine_to_terminal_wedge(mol):
"""Arrange stereo wedge direction from spine to terminal atom
"""
for i, a in mol.atoms_iter():
if mol.neighbor_count(i) == 1:
ni, nb = list(mol.neighbors(i).items())[0]
if nb.order == 1 and nb.type in (1, 2) \
and ni > i ... | Arrange stereo wedge direction from spine to terminal atom |
def create(self, sid=values.unset, phone_number=values.unset,
is_reserved=values.unset):
"""
Create a new PhoneNumberInstance
:param unicode sid: The SID of a Twilio IncomingPhoneNumber resource
:param unicode phone_number: The phone number in E.164 format
:param ... | Create a new PhoneNumberInstance
:param unicode sid: The SID of a Twilio IncomingPhoneNumber resource
:param unicode phone_number: The phone number in E.164 format
:param bool is_reserved: Whether the new phone number should be reserved
:returns: Newly created PhoneNumberInstance
... |
def compare(self, otherdigest, ishex=False):
"""Compute difference in bits between own digest and another.
returns -127 to 128; 128 is the same, -127 is different"""
bits = 0
myd = self.digest()
if ishex:
# convert to 32-tuple of unsighed two-byte INTs
... | Compute difference in bits between own digest and another.
returns -127 to 128; 128 is the same, -127 is different |
def api_version_elb_backend(*args, **kwargs):
"""
ELB and ELBV2 (Classic and Application load balancers) use the same
hostname and url space. To differentiate them we must read the
`Version` parameter out of the url-encoded request body. TODO: There
has _got_ to be a better way to do this. Please he... | ELB and ELBV2 (Classic and Application load balancers) use the same
hostname and url space. To differentiate them we must read the
`Version` parameter out of the url-encoded request body. TODO: There
has _got_ to be a better way to do this. Please help us think of
one. |
def delete_resource(self, resource_id):
"""Deletes a ``Resource``.
arg: resource_id (osid.id.Id): the ``Id`` of the ``Resource``
to remove
raise: NotFound - ``resource_id`` not found
raise: NullArgument - ``resource_id`` is ``null``
raise: OperationFailed -... | Deletes a ``Resource``.
arg: resource_id (osid.id.Id): the ``Id`` of the ``Resource``
to remove
raise: NotFound - ``resource_id`` not found
raise: NullArgument - ``resource_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: Permiss... |
def correct(self, images,
bgImages=None,
exposure_time=None,
light_spectrum=None,
threshold=0.1,
keep_size=True,
date=None,
deblur=False,
denoise=False):
'''
exposure... | exposure_time [s]
date -> string e.g. '30. Nov 15' to get a calibration on from date
-> {'dark current':'30. Nov 15',
'flat field':'15. Nov 15',
'lens':'14. Nov 15',
'noise':'01. Nov 15'} |
def MessageToJson(message, include_fields=None):
"""Convert the given message to JSON."""
result = _ProtoJsonApiTools.Get().encode_message(message)
return _IncludeFields(result, message, include_fields) | Convert the given message to JSON. |
def from_filename(cls, filename, sync_from_start=True):
"""
Create a `Lexer` from a filename.
"""
# Inline imports: the Pygments dependency is optional!
from pygments.util import ClassNotFound
from pygments.lexers import get_lexer_for_filename
try:
py... | Create a `Lexer` from a filename. |
def get_default_config_help(self):
"""
Returns the help text for the configuration options for this handler
"""
config = super(StatsdHandler, self).get_default_config_help()
config.update({
'host': '',
'port': '',
'batch': '',
})
... | Returns the help text for the configuration options for this handler |
def update(self):
"""Update value based on :math:`HV=BBV/BNV`.
Required Parameters:
|BBV|
|BNV|
Examples:
>>> from hydpy.models.lstream import *
>>> parameterstep('1d')
>>> bbv(left=10., right=40.)
>>> bnv(left=10., right=... | Update value based on :math:`HV=BBV/BNV`.
Required Parameters:
|BBV|
|BNV|
Examples:
>>> from hydpy.models.lstream import *
>>> parameterstep('1d')
>>> bbv(left=10., right=40.)
>>> bnv(left=10., right=20.)
>>> derived.... |
def get_web_auth_session_key(self, url, token=""):
"""
Retrieves the session key of a web authorization process by its URL.
"""
session_key, _username = self.get_web_auth_session_key_username(url, token)
return session_key | Retrieves the session key of a web authorization process by its URL. |
def make_grid(xx, yy):
"""
Returns two n-by-n matrices. The first one contains all the x values
and the second all the y values of a cartesian product between `xx` and `yy`.
"""
n = len(xx)
xx, yy = np.meshgrid(xx, yy)
grid = np.array([xx.ravel(), yy.ravel()]).T
x = grid[:, 0].reshape(n... | Returns two n-by-n matrices. The first one contains all the x values
and the second all the y values of a cartesian product between `xx` and `yy`. |
def get_login_information(self, code=None):
"""Return Clef user info after exchanging code for OAuth token."""
# do the handshake to get token
access_token = self._get_access_token(code)
# make request with token to get user details
return self._get_user_info(access_token) | Return Clef user info after exchanging code for OAuth token. |
def make_osm_query(query):
"""
Make a request to OSM and return the parsed JSON.
Parameters
----------
query : str
A string in the Overpass QL format.
Returns
-------
data : dict
"""
osm_url = 'http://www.overpass-api.de/api/interpreter'
req = requests.get(osm_url,... | Make a request to OSM and return the parsed JSON.
Parameters
----------
query : str
A string in the Overpass QL format.
Returns
-------
data : dict |
def read_stream (stream):
"""Python 3 compat note: we're assuming `stream` gives bytes not unicode."""
section = None
key = None
data = None
for fullline in stream:
line = fullline.split ('#', 1)[0]
m = sectionre.match (line)
if m is not None:
# New section
... | Python 3 compat note: we're assuming `stream` gives bytes not unicode. |
def interpolate2dStructuredPointSpreadIDW(grid, mask, kernel=15, power=2,
maxIter=1e5, copy=True):
'''
same as interpolate2dStructuredIDW but using the point spread method
this is faster if there are bigger connected masked areas and the border
length is sm... | same as interpolate2dStructuredIDW but using the point spread method
this is faster if there are bigger connected masked areas and the border
length is smaller
replace all values in [grid] indicated by [mask]
with the inverse distance weighted interpolation of all values within
px+-kernel
... |
def psq2(d1, d2):
"""Compute the PSQ2 measure.
Args:
d1 (np.ndarray): The first distribution.
d2 (np.ndarray): The second distribution.
"""
d1, d2 = flatten(d1), flatten(d2)
def f(p):
return sum((p ** 2) * np.nan_to_num(np.log(p * len(p))))
return abs(f(d1) - f(d2)) | Compute the PSQ2 measure.
Args:
d1 (np.ndarray): The first distribution.
d2 (np.ndarray): The second distribution. |
def add_proxy_auth(possible_proxy_url, proxy_auth):
"""
Add a username and password to a proxy URL, if the input value is a proxy URL.
:param str possible_proxy_url: Proxy URL or ``DIRECT``.
:param requests.auth.HTTPProxyAuth proxy_auth: Proxy authentication info.
:returns: Proxy URL with auth info... | Add a username and password to a proxy URL, if the input value is a proxy URL.
:param str possible_proxy_url: Proxy URL or ``DIRECT``.
:param requests.auth.HTTPProxyAuth proxy_auth: Proxy authentication info.
:returns: Proxy URL with auth info added, or ``DIRECT``.
:rtype: str |
def owner(self):
r""" Return the name of the owner of this file or directory.
This follows symbolic links.
On Windows, this returns a name of the form ur'DOMAIN\User Name'.
On Windows, a group can own a file or directory.
"""
if os.name == 'nt':
if win32secu... | r""" Return the name of the owner of this file or directory.
This follows symbolic links.
On Windows, this returns a name of the form ur'DOMAIN\User Name'.
On Windows, a group can own a file or directory. |
def netconf_session_end_termination_reason(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
netconf_session_end = ET.SubElement(config, "netconf-session-end", xmlns="urn:ietf:params:xml:ns:yang:ietf-netconf-notifications")
termination_reason = ET.SubEleme... | Auto Generated Code |
def main():
'''Main entry point for the bioinfo CLI.'''
args = docopt(__doc__, version=__version__)
if 'bam_coverage' in args:
bam_coverage(args['<reference>'],
args['<alignments>'],
int(args['<minmatch>']),
min_mapq=int(args['--mapq'])... | Main entry point for the bioinfo CLI. |
def makeLUTfromCTF(sclist, N=None):
"""
Use a Color Transfer Function to generate colors in a vtk lookup table.
See `here <http://www.vtk.org/doc/nightly/html/classvtkColorTransferFunction.html>`_.
:param list sclist: a list in the form ``[(scalar1, [r,g,b]), (scalar2, 'blue'), ...]``.
:return: the... | Use a Color Transfer Function to generate colors in a vtk lookup table.
See `here <http://www.vtk.org/doc/nightly/html/classvtkColorTransferFunction.html>`_.
:param list sclist: a list in the form ``[(scalar1, [r,g,b]), (scalar2, 'blue'), ...]``.
:return: the lookup table object ``vtkLookupTable``. This ca... |
def blast_seqs_to_pdb(self, seq_ident_cutoff=0, evalue=0.0001, all_genes=False, display_link=False,
outdir=None, force_rerun=False):
"""BLAST each representative protein sequence to the PDB. Saves raw BLAST results (XML files).
Args:
seq_ident_cutoff (float, option... | BLAST each representative protein sequence to the PDB. Saves raw BLAST results (XML files).
Args:
seq_ident_cutoff (float, optional): Cutoff results based on percent coverage (in decimal form)
evalue (float, optional): Cutoff for the E-value - filters for significant hits. 0.001 is libe... |
def get_mean_and_stddevs(self, sctx, rctx, dctx, imt, stddev_types):
"""
See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values.
"""
# Get original mean and standard deviations
mean, stddevs = sup... | See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. |
def reset_generation(self, trigger):
"""Re-arms the analog output according to current settings
:param trigger: name of the trigger terminal. ``None`` value means generation begins immediately on run
:type trigger: str
"""
self.tone_lock.acquire()
npts = self.stim.size... | Re-arms the analog output according to current settings
:param trigger: name of the trigger terminal. ``None`` value means generation begins immediately on run
:type trigger: str |
def visit_BinOp(self, node: AST, dfltChaining: bool = True) -> str:
"""Return `node`s operator and operands as inlined expression."""
op = node.op
with self.op_man(op):
if isinstance(op, ast.Pow):
# Pow chains right-to-left
src = self.visit(op).join((s... | Return `node`s operator and operands as inlined expression. |
def create(self, fail_on_found=False, force_on_exists=False, **kwargs):
"""Create a notification template.
All required configuration-related fields (required according to
notification_type) must be provided.
There are two types of notification template creation: isolatedly
cre... | Create a notification template.
All required configuration-related fields (required according to
notification_type) must be provided.
There are two types of notification template creation: isolatedly
creating a new notification template and creating a new notification
template ... |
def _get_link_status_code(link, allow_redirects=False, timeout=5):
""" Get the status code of a link.
If the timeout is exceeded, will return a 404.
For a list of available status codes, see:
https://en.wikipedia.org/wiki/List_of_HTTP_status_codes
"""
status_code = None
try:
... | Get the status code of a link.
If the timeout is exceeded, will return a 404.
For a list of available status codes, see:
https://en.wikipedia.org/wiki/List_of_HTTP_status_codes |
def toDict(self):
"""
Get information about a title alignment as a dictionary.
@return: A C{dict} representation of the title aligment.
"""
return {
'hsps': [hsp.toDict() for hsp in self.hsps],
'read': self.read.toDict(),
} | Get information about a title alignment as a dictionary.
@return: A C{dict} representation of the title aligment. |
def _compute(self):
"""
The main method of the class, which computes an MCS given its
over-approximation. The over-approximation is defined by a model
for the hard part of the formula obtained in :func:`compute`.
The method is essentially a simple loop going over... | The main method of the class, which computes an MCS given its
over-approximation. The over-approximation is defined by a model
for the hard part of the formula obtained in :func:`compute`.
The method is essentially a simple loop going over all literals
unsatisfied by the... |
def delete_user(self, user_email):
'''**Description**
Deletes a user from Sysdig Monitor.
**Arguments**
- **user_email**: the email address of the user that will be deleted from Sysdig Monitor
**Example**
`examples/user_team_mgmt.py <https://github.com/draio... | **Description**
Deletes a user from Sysdig Monitor.
**Arguments**
- **user_email**: the email address of the user that will be deleted from Sysdig Monitor
**Example**
`examples/user_team_mgmt.py <https://github.com/draios/python-sdc-client/blob/master/examples/user_... |
def get_content_metadata(self, enterprise_customer):
"""
Return all content metadata contained in the catalogs associated with the EnterpriseCustomer.
Arguments:
enterprise_customer (EnterpriseCustomer): The EnterpriseCustomer to return content metadata for.
Returns:
... | Return all content metadata contained in the catalogs associated with the EnterpriseCustomer.
Arguments:
enterprise_customer (EnterpriseCustomer): The EnterpriseCustomer to return content metadata for.
Returns:
list: List of dicts containing content metadata. |
def get_failing_line(xml_string, exc_msg):
"""
Extract the failing line from the XML string, as indicated by the
line/column information in the exception message.
Returns a tuple (lineno, colno, new_pos, line), where lineno and colno
and marker_pos may be None.
"""
max_before = 500 # max c... | Extract the failing line from the XML string, as indicated by the
line/column information in the exception message.
Returns a tuple (lineno, colno, new_pos, line), where lineno and colno
and marker_pos may be None. |
def get_evcodes(self, inc_set=None, exc_set=None):
    """Return all evidence codes except 'ND' (No biological data)."""
    evidence_codes = self.get_evcodes_all(inc_set, exc_set)
    # 'ND' means "No biological data" and is always filtered out here;
    # discard() is a no-op when 'ND' is absent.
    evidence_codes.discard('ND')
    return evidence_codes
def get_unit_property_names(self, unit_id=None):
'''Get a list of property names for a given unit, or for all units if unit_id is None
Parameters
----------
unit_id: int
The unit id for which the property names will be returned
If None (default), will return prop... | Get a list of property names for a given unit, or for all units if unit_id is None
Parameters
----------
unit_id: int
The unit id for which the property names will be returned
If None (default), will return property names for all units
Returns
----------
... |
def _convert_to_bytes(type_name, value):
"""Convert a typed value to a binary array"""
int_types = {'uint8_t': 'B', 'int8_t': 'b', 'uint16_t': 'H', 'int16_t': 'h', 'uint32_t': 'L', 'int32_t': 'l'}
type_name = type_name.lower()
if type_name not in int_types and type_name not in ['string', 'binary']:
... | Convert a typed value to a binary array |
def grep(expression, file, flags=0, invert=False):
"""
Search a file and return a list of all lines that match a regular expression.
:param str expression: The regex to search for.
:param file: The file to search in.
:type file: str, file
:param int flags: The regex flags to use when searching.
:param bool inve... | Search a file and return a list of all lines that match a regular expression.
:param str expression: The regex to search for.
:param file: The file to search in.
:type file: str, file
:param int flags: The regex flags to use when searching.
:param bool invert: Select non matching lines instead.
:return: All the ... |
def _escape_sequence(self, char):
"""
Handle characters seen when in an escape sequence. Most non-vt52
commands start with a left-bracket after the escape and then a
stream of parameters and a command.
"""
num = ord(char)
if char == "[":
self.state =... | Handle characters seen when in an escape sequence. Most non-vt52
commands start with a left-bracket after the escape and then a
stream of parameters and a command. |
def plot_campaign_outline(self, campaign=0, facecolor="#666666", text=None):
"""Plot the outline of a campaign as a contiguous gray patch.
Parameters
----------
campaign : int
K2 Campaign number.
facecolor : str
Color of the patch.
"""
# ... | Plot the outline of a campaign as a contiguous gray patch.
Parameters
----------
campaign : int
K2 Campaign number.
facecolor : str
Color of the patch. |
def asbaseline(self, pos):
"""Convert a position measure into a baseline measure. No actual
baseline is calculated, since operations can be done on positions,
with subtractions to obtain baselines at a later stage.
:param pos: a position measure
:returns: a baseline measure
... | Convert a position measure into a baseline measure. No actual
baseline is calculated, since operations can be done on positions,
with subtractions to obtain baselines at a later stage.
:param pos: a position measure
:returns: a baseline measure |
def get_permissions(self):
    """
    :returns: list of dicts, or an empty list if there are no permissions.
    """
    # Look up the endpoint for all permissions and issue a single GET.
    return self._call(Client.urls['all_permissions'], 'GET')
def get_memory_usage(pid=None,timeout=1):
'''get_memory_usage returns a dictionary of resident set size (rss) and virtual
memory size (vms) for a process of interest, for as long as the process is running
:param pid: the pid to use:
:param timeout: the timeout
:: notes
example:
... | get_memory_usage returns a dictionary of resident set size (rss) and virtual
memory size (vms) for a process of interest, for as long as the process is running
:param pid: the pid to use:
:param timeout: the timeout
:: notes
example:
sleep 3 & exec python -m memory "$!" |
def _add_entry(self, formdata=None, data=unset_value, index=None):
'''
Fill the form with previous data if necessary to handle partial update
'''
if formdata:
prefix = '-'.join((self.name, str(index)))
basekey = '-'.join((prefix, '{0}'))
idkey = baseke... | Fill the form with previous data if necessary to handle partial update |
def bunzip2(filename):
"""Uncompress `filename` in place"""
log.debug("Uncompressing %s", filename)
tmpfile = "%s.tmp" % filename
os.rename(filename, tmpfile)
b = bz2.BZ2File(tmpfile)
f = open(filename, "wb")
while True:
block = b.read(512 * 1024)
if not block:
br... | Uncompress `filename` in place |
def commit(self, **kwargs):
r"""Store changes of the current record instance in the database.
#. Send a signal :data:`invenio_records.signals.before_record_update`
with the current record to be committed as parameter.
#. Validate the current record data.
#. Commit the curre... | r"""Store changes of the current record instance in the database.
#. Send a signal :data:`invenio_records.signals.before_record_update`
with the current record to be committed as parameter.
#. Validate the current record data.
#. Commit the current record in the database.
... |
def open_new_window(self, switch_to=True):
    """Open a new browser tab/window and switch to it by default."""
    self.driver.execute_script("window.open('');")
    # Brief pause so the browser has time to register the new handle.
    time.sleep(0.01)
    if not switch_to:
        return
    # The newest handle is always last in window_handles.
    self.switch_to_window(len(self.driver.window_handles) - 1)
def _add_app_menu(self):
"""
Create a default Cocoa menu that shows 'Services', 'Hide',
'Hide Others', 'Show All', and 'Quit'. Will append the application name
to some menu items if it's available.
"""
# Set the main menu for the application
mainMenu = AppKit.NSMe... | Create a default Cocoa menu that shows 'Services', 'Hide',
'Hide Others', 'Show All', and 'Quit'. Will append the application name
to some menu items if it's available. |
def ingest(self):
"""*Import the IFS catalogue into the sherlock-catalogues database*
The method first generates a list of python dictionaries from the IFS datafile, imports this list of dictionaries into a database table and then generates the HTMIDs for that table.
**Usage:**
S... | *Import the IFS catalogue into the sherlock-catalogues database*
The method first generates a list of python dictionaries from the IFS datafile, imports this list of dictionaries into a database table and then generates the HTMIDs for that table.
**Usage:**
See class docstring for usage |
def _get_minutes_from_last_update(self, time):
""" How much minutes passed from last update to given time """
time_from_last_update = time - self.last_update_time
return int(time_from_last_update.total_seconds() / 60) | How much minutes passed from last update to given time |
def course_modal(context, course=None):
"""
Django template tag that returns course information to display in a modal.
You may pass in a particular course if you like. Otherwise, the modal will look for course context
within the parent context.
Usage:
{% course_modal %}
{% course_m... | Django template tag that returns course information to display in a modal.
You may pass in a particular course if you like. Otherwise, the modal will look for course context
within the parent context.
Usage:
{% course_modal %}
{% course_modal course %} |
def display(self, data, x=None, y=None, xlabel=None, ylabel=None,
style=None, nlevels=None, levels=None, contour_labels=None,
store_data=True, col=0, unzoom=True, auto_contrast=False,
contrast_level=0, **kws):
"""
generic display, using imshow (default) or... | generic display, using imshow (default) or contour |
def highlight(self, **kwargs):
"""
kwargs:
style: css
highlight_time: int; default: .3
"""
self.debug_log("Highlighting element")
style = kwargs.get('style')
highlight_time = kwargs.get('highlight_time', .3)
driver = self._el... | kwargs:
style: css
highlight_time: int; default: .3 |
def persist(self, storageLevel=StorageLevel.MEMORY_AND_DISK):
"""Sets the storage level to persist the contents of the :class:`DataFrame` across
operations after the first time it is computed. This can only be used to assign
a new storage level if the :class:`DataFrame` does not have a storage l... | Sets the storage level to persist the contents of the :class:`DataFrame` across
operations after the first time it is computed. This can only be used to assign
a new storage level if the :class:`DataFrame` does not have a storage level set yet.
If no storage level is specified defaults to (C{MEM... |
def preview(src_path):
    """Generate a preview of src_path as PNG.

    :returns: A list of preview paths, one for each page.
    """
    exported = []
    for page in list_artboards(src_path):
        # Export the page itself first, then every artboard it contains,
        # preserving the page-before-artboards ordering.
        exported.append(page.export())
        exported.extend(artboard.export() for artboard in page.artboards)
    return exported
:returns: A list of preview paths, one for each page. |
def barmatch2(data, tups, cutters, longbar, matchdict, fnum):
"""
cleaner barmatch func...
"""
## how many reads to store before writing to disk
waitchunk = int(1e6)
## pid name for this engine
epid = os.getpid()
## counters for total reads, those with cutsite, and those that matched
... | cleaner barmatch func... |
def write(self, country_code, frames, scaling_factors=None):
"""Write the OHLCV data for one country to the HDF5 file.
Parameters
----------
country_code : str
The ISO 3166 alpha-2 country code for this country.
frames : dict[str, pd.DataFrame]
A dict map... | Write the OHLCV data for one country to the HDF5 file.
Parameters
----------
country_code : str
The ISO 3166 alpha-2 country code for this country.
frames : dict[str, pd.DataFrame]
A dict mapping each OHLCV field to a dataframe with a row
for each dat... |
def create(self, target, configuration_url=values.unset,
configuration_method=values.unset,
configuration_filters=values.unset,
configuration_triggers=values.unset,
configuration_flow_sid=values.unset,
configuration_retry_count=values.unset,
... | Create a new WebhookInstance
:param WebhookInstance.Target target: The target of this webhook.
:param unicode configuration_url: The absolute url the webhook request should be sent to.
:param WebhookInstance.Method configuration_method: The HTTP method to be used when sending a webhook request.... |
def process_tick(self, tup):
"""Called every slide_interval
"""
curtime = int(time.time())
window_info = WindowContext(curtime - self.window_duration, curtime)
tuple_batch = []
for (tup, tm) in self.current_tuples:
tuple_batch.append(tup)
self.processWindow(window_info, tuple_batch)
... | Called every slide_interval |
def inverse(self):
"""
Return the inverse of the graph.
@rtype: graph
@return: Complement graph for the graph.
"""
inv = self.__class__()
inv.add_nodes(self.nodes())
inv.complete()
for each in self.edges():
if (inv.has_edge(ea... | Return the inverse of the graph.
@rtype: graph
@return: Complement graph for the graph. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.